focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override
public byte[] array() {
    // Expose the wrapped backing array as-is; callers get null when the
    // wrapper was constructed around a null array.
    return data;
}
@Test
public void array() {
    // Wrapping null exposes null.
    AbstractByteBuf byteBuf = new ByteArrayWrapperByteBuf(null);
    Assert.assertNull(byteBuf.array());

    // Wrapping a real array exposes it directly, with matching readable bytes.
    byte[] backing = new byte[] { 1, 2, 3 };
    byteBuf = new ByteArrayWrapperByteBuf(backing);
    Assert.assertNotNull(byteBuf.array());
    Assert.assertTrue(byteBuf.array().length == 3);
    Assert.assertTrue(byteBuf.readableBytes() == 3);
    Assert.assertTrue(byteBuf.release());
}
@Override
public void process(Tuple input) {
    // Filters the incoming tuple by checking whether its key exists in Redis
    // under the configured data type. Matching tuples are re-emitted anchored
    // to the input; every tuple is acked on success, or failed (with the error
    // reported) if any Redis/processing exception occurs.
    String key = filterMapper.getKeyFromTuple(input);
    boolean found;
    JedisCommandsContainer jedisCommand = null;
    try {
        jedisCommand = getInstance();
        switch (dataType) {
            case STRING:
                found = jedisCommand.exists(key);
                break;
            case SET:
                // additionalKey names the set; the tuple key is the candidate member.
                found = jedisCommand.sismember(additionalKey, key);
                break;
            case HASH:
                found = jedisCommand.hexists(additionalKey, key);
                break;
            case SORTED_SET:
                // zrank returns null when the member is absent.
                found = jedisCommand.zrank(additionalKey, key) != null;
                break;
            case HYPER_LOG_LOG:
                found = jedisCommand.pfcount(key) > 0;
                break;
            case GEO:
                List<GeoCoordinate> geopos = jedisCommand.geopos(additionalKey, key);
                if (geopos == null || geopos.isEmpty()) {
                    found = false;
                } else {
                    // If any entry is NOT null, then we have a match.
                    found = geopos.stream()
                        .anyMatch(Objects::nonNull);
                }
                break;
            default:
                throw new IllegalArgumentException("Cannot process such data type: " + dataType);
        }
        if (found) {
            collector.emit(input, input.getValues());
        }
        collector.ack(input);
    } catch (Exception e) {
        this.collector.reportError(e);
        this.collector.fail(input);
    }
}
@Test
void smokeTest_geopos_isMember() {
    // Seeds a GEO entry in Redis and verifies that a tuple carrying the same
    // key passes through the filter bolt.
    // Define input key
    final String geoKey = "ThisIsMyGeoKey";
    final String inputKey = "ThisIsMyKey";

    // Ensure key does exist in redis
    jedisHelper.geoadd(geoKey, 139.731992, 35.709026, inputKey);

    // Create an input tuple
    final Map<String, Object> values = new HashMap<>();
    values.put("key", inputKey);
    values.put("value", "ThisIsMyValue");
    final Tuple tuple = new StubTuple(values);

    final JedisPoolConfig config = configBuilder.build();
    final TestMapper mapper = new TestMapper(GEO, geoKey);
    final RedisFilterBolt bolt = new RedisFilterBolt(config, mapper);
    bolt.prepare(new HashMap<>(), topologyContext, new OutputCollector(outputCollector));
    bolt.process(tuple);

    // Verify Tuple passed through the bolt
    verifyTuplePassed(tuple);
}
public AggregateAnalysisResult analyze(
    final ImmutableAnalysis analysis,
    final List<SelectExpression> finalProjection
) {
  // Aggregate analysis is only meaningful for queries with a GROUP BY clause.
  if (!analysis.getGroupBy().isPresent()) {
    throw new IllegalArgumentException("Not an aggregate query");
  }

  final AggAnalyzer delegate = new AggAnalyzer(analysis, functionRegistry);
  delegate.process(finalProjection);
  return delegate.result();
}
@Test public void shouldThrowOnNonAggColumnsNotInGroupBy() { // Given: givenSelectExpression(COL2); givenSelectExpression(COL3); // When: final KsqlException e = assertThrows( KsqlException.class, () -> analyzer.analyze(analysis, selects) ); // Then: assertThat(e.getMessage(), containsString("Non-aggregate SELECT expression(s) not part of GROUP BY: COL2, COL3")); }
public static AwsClientFactory from(Map<String, String> properties) {
  // Resolve the factory implementation class name from the properties,
  // defaulting to DefaultAwsClientFactory when none is configured.
  final String implClassName =
      PropertyUtil.propertyAsString(
          properties,
          AwsProperties.CLIENT_FACTORY,
          DefaultAwsClientFactory.class.getName());
  return loadClientFactory(implClassName, properties);
}
@Test
public void testLoadCustom() {
  // Configuring a custom factory class name must yield an instance of it.
  Map<String, String> props = Maps.newHashMap();
  props.put(AwsProperties.CLIENT_FACTORY, CustomFactory.class.getName());

  assertThat(AwsClientFactories.from(props))
      .as("should load custom class")
      .isInstanceOf(CustomFactory.class);
}
public Range<PartitionKey> handleNewSinglePartitionDesc(Map<ColumnId, Column> schema,
        SingleRangePartitionDesc desc, long partitionId, boolean isTemp) throws DdlException {
    // Validates and registers the range of a new single partition, then records
    // its per-partition properties (data property, replication, in-memory flag,
    // cache info) keyed by partitionId. Returns the created range.
    Range<PartitionKey> range;
    try {
        range = checkAndCreateRange(schema, desc, isTemp);
        setRangeInternal(partitionId, isTemp, range);
    } catch (IllegalArgumentException e) {
        // Range.closedOpen may throw this if (lower > upper)
        throw new DdlException("Invalid key range: " + e.getMessage());
    }
    // Property maps are only touched after the range was accepted, so a failed
    // range never leaves partial per-partition state behind.
    idToDataProperty.put(partitionId, desc.getPartitionDataProperty());
    idToReplicationNum.put(partitionId, desc.getReplicationNum());
    idToInMemory.put(partitionId, desc.isInMemory());
    idToStorageCacheInfo.put(partitionId, desc.getDataCacheInfo());
    return range;
}
@Test(expected = DdlException.class)
public void testTinyInt() throws DdlException, AnalysisException {
    // A single-value partition at TINYINT's minimum (-128) is expected to be
    // rejected with a DdlException — presumably because the resulting key range
    // is invalid/empty at the type's lower bound; confirm against
    // checkAndCreateRange before relying on the exact cause.
    Column k1 = new Column("k1", new ScalarType(PrimitiveType.TINYINT), true, null, "", "");
    partitionColumns.add(k1);
    singleRangePartitionDescs.add(new SingleRangePartitionDesc(false, "p1",
            new PartitionKeyDesc(Lists.newArrayList(new PartitionValue("-128"))), null));
    partitionInfo = new RangePartitionInfo(partitionColumns);
    for (SingleRangePartitionDesc singleRangePartitionDesc : singleRangePartitionDescs) {
        singleRangePartitionDesc.analyze(1, null);
        partitionInfo.handleNewSinglePartitionDesc(MetaUtils.buildIdToColumn(partitionColumns),
                singleRangePartitionDesc, 20000L, false);
    }
}
public static Integer getBodySizeIfKnown(ZuulMessage msg) {
    // Prefer an explicitly declared Content-Length header when present.
    final Integer declaredLength = getContentLengthIfPresent(msg);
    if (declaredLength != null) {
        return declaredLength;
    }
    // Otherwise fall back to the buffered body, but only once it is complete;
    // an incomplete body means the size is still unknown.
    if (msg.hasCompleteBody()) {
        return Integer.valueOf(msg.getBodyLength());
    }
    return null;
}
@Test
void getBodySizeIfKnown_returnsContentLengthValue() {
    SessionContext ctx = new SessionContext();
    Headers requestHeaders = new Headers();
    requestHeaders.add(com.netflix.zuul.message.http.HttpHeaderNames.CONTENT_LENGTH, "23450");
    ZuulMessage message = new ZuulMessageImpl(ctx, requestHeaders);

    // The declared Content-Length wins, without requiring a buffered body.
    assertThat(HttpUtils.getBodySizeIfKnown(message)).isEqualTo(Integer.valueOf(23450));
}
@Override
public void put(K key, V value) {
    // This state view is read-only: every mutation attempt fails with the
    // shared, pre-built error.
    throw MODIFICATION_ATTEMPT_ERROR;
}
@Test
void testPut() throws Exception {
    // Reads still work against the pre-populated state...
    assertThat(mapState.contains(1L)).isTrue();
    long firstValue = mapState.get(1L);
    assertThat(firstValue).isEqualTo(5L);

    assertThat(mapState.contains(2L)).isTrue();
    long secondValue = mapState.get(2L);
    assertThat(secondValue).isEqualTo(5L);

    // ...but any write must be rejected by the read-only view.
    assertThatThrownBy(() -> mapState.put(2L, 54L))
        .isInstanceOf(UnsupportedOperationException.class);
}
@Override
public ByteBuf retainedDuplicate() {
    // A duplicate that additionally holds its own reference, so the caller
    // must release it independently of this buffer.
    ByteBuf dup = duplicate();
    return dup.retain();
}
@Test
public void testRetainedDuplicateAfterReleaseRetainedDuplicate() {
    // A retained duplicate must become unusable once released.
    ByteBuf original = newBuffer(1);
    ByteBuf retainedDup = original.retainedDuplicate();
    assertRetainedDuplicateFailAfterRelease(original, retainedDup);
}
public int[] findMatchingLines(List<String> left, List<String> right) {
    // For each line of `right` (the report), records the 1-based line number of
    // the matching line in `left` (the database), or 0 when there is no match.
    // Works by walking the Myers diff path backwards from the end of both files.
    int[] index = new int[right.size()];
    int dbLine = left.size();
    int reportLine = right.size();
    try {
        PathNode node = new MyersDiff<String>().buildPath(left, right);
        while (node.prev != null) {
            PathNode prevNode = node.prev;
            if (!node.isSnake()) {
                // Non-snake segment: lines were added/removed; skip over them
                // without recording matches.
                // additions
                reportLine -= (node.j - prevNode.j);
                // removals
                dbLine -= (node.i - prevNode.i);
            } else {
                // matches: each diagonal step pairs one db line with one report line.
                for (int i = node.i; i > prevNode.i; i--) {
                    index[reportLine - 1] = dbLine;
                    reportLine--;
                    dbLine--;
                }
            }
            node = prevNode;
        }
    } catch (DifferentiationFailedException e) {
        // Best effort: log and return whatever was matched so far (possibly all zeros).
        LOG.error("Error finding matching lines", e);
        return index;
    }
    return index;
}
@Test
public void shouldFindNothingWhenContentAreIdentical2() {
    // Database version: a minimal class with just one method.
    List<String> database = new ArrayList<>();
    database.add("package sample;\n");
    database.add("\n");
    database.add("public class Sample {\n");
    database.add("\n");
    database.add("  private String myMethod() {\n");
    database.add("  }\n");
    database.add("}\n");

    // Report version: same class with a field and a constructor inserted
    // before the existing method.
    List<String> report = new ArrayList<>();
    report.add("package sample;\n");
    report.add("\n");
    report.add("public class Sample {\n");
    report.add("\n");
    report.add("  private String attr;\n");
    report.add("\n");
    report.add("  public Sample(String attr) {\n");
    report.add("    this.attr = attr;\n");
    report.add("  }\n");
    report.add("\n");
    report.add("  private String myMethod() {\n");
    report.add("  }\n");
    report.add("}\n");

    int[] diff = new SourceLinesDiffFinder().findMatchingLines(database, report);

    // Unchanged leading lines map 1:1; the inserted block maps to 0 (no match);
    // the trailing method maps back to its original db lines.
    assertThat(diff).containsExactly(1, 2, 3, 4, 0, 0, 0, 0, 0, 0, 5, 6, 7);
}
public void setValue(final T value) {
    // Serialise through the converter registered for this property's type,
    // then persist under this property's key, honouring the encryption flag.
    final String serialised = TO_STRING.get(getConverterClass()).apply(value, this);
    JiveGlobals.setProperty(key, serialised, isEncrypted());
}
@Test
public void shouldEncryptAProperty() {
    // Building a property with setEncrypted(true) must mark the stored value
    // as encrypted once it is written.
    final SystemProperty<Long> encryptedProperty = SystemProperty.Builder.ofType(Long.class)
        .setKey("an-encrypted-property")
        .setDefaultValue(42L)
        .setDynamic(false)
        .setEncrypted(true)
        .build();

    encryptedProperty.setValue(84L);

    assertThat(JiveGlobals.isPropertyEncrypted("an-encrypted-property"), is(true));
}
private void checkConfig() throws MQClientException {
    // Validates the full push-consumer configuration before start-up. Every
    // violation is reported as an MQClientException carrying an FAQ link.
    Validators.checkGroup(this.defaultMQPushConsumer.getConsumerGroup());

    // NOTE(review): likely unreachable if Validators.checkGroup above already
    // rejects a null group — confirm against Validators before removing.
    if (null == this.defaultMQPushConsumer.getConsumerGroup()) {
        throw new MQClientException(
            "consumerGroup is null"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // The built-in default group is reserved and may not be used directly.
    if (this.defaultMQPushConsumer.getConsumerGroup().equals(MixAll.DEFAULT_CONSUMER_GROUP)) {
        throw new MQClientException(
            "consumerGroup can not equal "
                + MixAll.DEFAULT_CONSUMER_GROUP
                + ", please specify another one."
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    if (null == this.defaultMQPushConsumer.getMessageModel()) {
        throw new MQClientException(
            "messageModel is null"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    if (null == this.defaultMQPushConsumer.getConsumeFromWhere()) {
        throw new MQClientException(
            "consumeFromWhere is null"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // consumeTimestamp must parse under the yyyyMMddHHmmss pattern.
    Date dt = UtilAll.parseDate(this.defaultMQPushConsumer.getConsumeTimestamp(), UtilAll.YYYYMMDDHHMMSS);
    if (null == dt) {
        throw new MQClientException(
            "consumeTimestamp is invalid, the valid format is yyyyMMddHHmmss,but received "
                + this.defaultMQPushConsumer.getConsumeTimestamp()
                + " " + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL), null);
    }

    // allocateMessageQueueStrategy
    if (null == this.defaultMQPushConsumer.getAllocateMessageQueueStrategy()) {
        throw new MQClientException(
            "allocateMessageQueueStrategy is null"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // subscription
    if (null == this.defaultMQPushConsumer.getSubscription()) {
        throw new MQClientException(
            "subscription is null"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // messageListener
    if (null == this.defaultMQPushConsumer.getMessageListener()) {
        throw new MQClientException(
            "messageListener is null"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // The listener must be exactly one of the two supported flavours.
    boolean orderly = this.defaultMQPushConsumer.getMessageListener() instanceof MessageListenerOrderly;
    boolean concurrently = this.defaultMQPushConsumer.getMessageListener() instanceof MessageListenerConcurrently;
    if (!orderly && !concurrently) {
        throw new MQClientException(
            "messageListener must be instanceof MessageListenerOrderly or MessageListenerConcurrently"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // consumeThreadMin
    if (this.defaultMQPushConsumer.getConsumeThreadMin() < 1
        || this.defaultMQPushConsumer.getConsumeThreadMin() > 1000) {
        throw new MQClientException(
            "consumeThreadMin Out of range [1, 1000]"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // consumeThreadMax
    if (this.defaultMQPushConsumer.getConsumeThreadMax() < 1
        || this.defaultMQPushConsumer.getConsumeThreadMax() > 1000) {
        throw new MQClientException(
            "consumeThreadMax Out of range [1, 1000]"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // consumeThreadMin can't be larger than consumeThreadMax
    if (this.defaultMQPushConsumer.getConsumeThreadMin() > this.defaultMQPushConsumer.getConsumeThreadMax()) {
        throw new MQClientException(
            "consumeThreadMin (" + this.defaultMQPushConsumer.getConsumeThreadMin() + ") "
                + "is larger than consumeThreadMax (" + this.defaultMQPushConsumer.getConsumeThreadMax() + ")",
            null);
    }

    // consumeConcurrentlyMaxSpan
    if (this.defaultMQPushConsumer.getConsumeConcurrentlyMaxSpan() < 1
        || this.defaultMQPushConsumer.getConsumeConcurrentlyMaxSpan() > 65535) {
        throw new MQClientException(
            "consumeConcurrentlyMaxSpan Out of range [1, 65535]"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // pullThresholdForQueue
    if (this.defaultMQPushConsumer.getPullThresholdForQueue() < 1
        || this.defaultMQPushConsumer.getPullThresholdForQueue() > 65535) {
        throw new MQClientException(
            "pullThresholdForQueue Out of range [1, 65535]"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // pullThresholdForTopic (-1 means "unset", which bypasses the range check)
    if (this.defaultMQPushConsumer.getPullThresholdForTopic() != -1) {
        if (this.defaultMQPushConsumer.getPullThresholdForTopic() < 1
            || this.defaultMQPushConsumer.getPullThresholdForTopic() > 6553500) {
            throw new MQClientException(
                "pullThresholdForTopic Out of range [1, 6553500]"
                    + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
                null);
        }
    }

    // pullThresholdSizeForQueue
    if (this.defaultMQPushConsumer.getPullThresholdSizeForQueue() < 1
        || this.defaultMQPushConsumer.getPullThresholdSizeForQueue() > 1024) {
        throw new MQClientException(
            "pullThresholdSizeForQueue Out of range [1, 1024]"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    if (this.defaultMQPushConsumer.getPullThresholdSizeForTopic() != -1) {
        // pullThresholdSizeForTopic (-1 means "unset")
        if (this.defaultMQPushConsumer.getPullThresholdSizeForTopic() < 1
            || this.defaultMQPushConsumer.getPullThresholdSizeForTopic() > 102400) {
            throw new MQClientException(
                "pullThresholdSizeForTopic Out of range [1, 102400]"
                    + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
                null);
        }
    }

    // pullInterval
    if (this.defaultMQPushConsumer.getPullInterval() < 0
        || this.defaultMQPushConsumer.getPullInterval() > 65535) {
        throw new MQClientException(
            "pullInterval Out of range [0, 65535]"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // consumeMessageBatchMaxSize
    if (this.defaultMQPushConsumer.getConsumeMessageBatchMaxSize() < 1
        || this.defaultMQPushConsumer.getConsumeMessageBatchMaxSize() > 1024) {
        throw new MQClientException(
            "consumeMessageBatchMaxSize Out of range [1, 1024]"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // pullBatchSize
    if (this.defaultMQPushConsumer.getPullBatchSize() < 1
        || this.defaultMQPushConsumer.getPullBatchSize() > 1024) {
        throw new MQClientException(
            "pullBatchSize Out of range [1, 1024]"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // popInvisibleTime
    if (this.defaultMQPushConsumer.getPopInvisibleTime() < MIN_POP_INVISIBLE_TIME
        || this.defaultMQPushConsumer.getPopInvisibleTime() > MAX_POP_INVISIBLE_TIME) {
        throw new MQClientException(
            "popInvisibleTime Out of range [" + MIN_POP_INVISIBLE_TIME + ", " + MAX_POP_INVISIBLE_TIME + "]"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }

    // popBatchNums
    if (this.defaultMQPushConsumer.getPopBatchNums() <= 0 || this.defaultMQPushConsumer.getPopBatchNums() > 32) {
        throw new MQClientException(
            "popBatchNums Out of range [1, 32]"
                + FAQUrl.suggestTodo(FAQUrl.CLIENT_PARAMETER_CHECK_URL),
            null);
    }
}
@Test public void checkConfigTest() throws MQClientException { //test type thrown.expect(MQClientException.class); //test message thrown.expectMessage("consumeThreadMin (10) is larger than consumeThreadMax (9)"); DefaultMQPushConsumer consumer = new DefaultMQPushConsumer("test_consumer_group"); consumer.setConsumeThreadMin(10); consumer.setConsumeThreadMax(9); consumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> ConsumeConcurrentlyStatus.CONSUME_SUCCESS); DefaultMQPushConsumerImpl defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(consumer, null); defaultMQPushConsumerImpl.start(); }
@PublicAPI(usage = ACCESS)
public JavaPackage getPackage(String packageName) {
    // Delegates to tryGetPackage and fails with a descriptive message when the
    // requested sub package does not exist.
    return getValue(
        tryGetPackage(packageName),
        "This package does not contain any sub package '%s'",
        packageName);
}
@Test
public void function_GET_RELATIVE_NAME() {
    JavaPackage rootPackage = importDefaultPackage(Object.class);

    // The relative name of "java.lang" is just its last segment.
    String relativeName = GET_RELATIVE_NAME.apply(rootPackage.getPackage("java.lang"));

    assertThat(relativeName).isEqualTo("lang");
}
@Override
public HttpResponse send(HttpRequest httpRequest) throws IOException {
    // Convenience overload: delegates with no associated network service.
    return send(httpRequest, null);
}
@Test
public void send_whenInvalidCertificatesAreNotIgnored_throws()
    throws GeneralSecurityException, IOException {
  // Serves TLS from a server whose certificate does not match "host.com" and
  // verifies that the client rejects the handshake when certificate trust is
  // NOT relaxed (trustAllCertificates = false on both config surfaces).
  InetAddress loopbackAddress = InetAddress.getLoopbackAddress();
  String host = "host.com";
  MockWebServer mockWebServer = startMockWebServerWithSsl(loopbackAddress);
  int port = mockWebServer.url("/").port();
  NetworkService networkService =
      NetworkService.newBuilder()
          .setNetworkEndpoint(
              NetworkEndpointUtils.forIpHostnameAndPort(
                  loopbackAddress.getHostAddress(), host, port))
          .build();
  HttpClientCliOptions cliOptions = new HttpClientCliOptions();
  HttpClientConfigProperties configProperties = new HttpClientConfigProperties();
  // Disable the trust-everything escape hatch everywhere it could come from.
  cliOptions.trustAllCertificates = configProperties.trustAllCertificates = false;
  // Build the client through Guice so the bound options actually take effect.
  HttpClient httpClient =
      Guice.createInjector(
              new AbstractModule() {
                @Override
                protected void configure() {
                  install(new HttpClientModule.Builder().build());
                  bind(HttpClientCliOptions.class).toInstance(cliOptions);
                  bind(HttpClientConfigProperties.class).toInstance(configProperties);
                }
              })
          .getInstance(HttpClient.class);
  assertThrows(
      SSLException.class,
      () ->
          httpClient.send(
              get(String.format("https://%s:%d", host, port)).withEmptyHeaders().build(),
              networkService));
  mockWebServer.shutdown();
}
boolean memberInternalAddressAsDefinedInClientConfig(Collection<Member> members) {
    // Resolve every configured client address to a concrete host address,
    // silently dropping entries that cannot be resolved.
    List<String> resolvedConfiguredHosts = config.getAddresses().stream()
        .map(configured -> {
            try {
                return InetAddress.getByName(AddressUtil.getAddressHolder(configured, -1).getAddress())
                    .getHostAddress();
            } catch (UnknownHostException e) {
                return null;
            }
        })
        .filter(Objects::nonNull)
        .collect(Collectors.toList());

    // True when at least one member's internal address is targeted directly by
    // the client configuration; unresolvable members simply never match.
    return members.stream()
        .map(member -> {
            try {
                return member.getAddress().getInetAddress().getHostAddress();
            } catch (UnknownHostException e) {
                return null;
            }
        })
        .anyMatch(resolvedConfiguredHosts::contains);
}
@Test public void memberInternalAddressAsDefinedInClientConfig() { // given config.getNetworkConfig().addAddress("127.0.0.1"); TranslateToPublicAddressProvider translateProvider = createTranslateProvider(); // when translateProvider.init(new InitialMembershipEvent(mock(Cluster.class), new HashSet<>(asList(member("192.168.0.1"), member("127.0.0.1"))))); boolean result = translateProvider.getAsBoolean(); // then assertFalse(result); }
public IsJson(Matcher<? super ReadContext> jsonMatcher) {
    // Stores the wrapped matcher; actual JSON parsing happens at match time.
    this.jsonMatcher = jsonMatcher;
}
@Test
public void shouldMatchJsonFileEvaluatedToTrue() {
    // The books fixture must satisfy a JSON-path expression that evaluates to true.
    assertThat(BOOKS_JSON_FILE, isJson(withPathEvaluatedTo(true)));
}
/**
 * Realizes each element of {@code objs} into its corresponding declared type.
 *
 * @param objs  raw argument values; must be index-aligned with {@code types}
 * @param types declared parameter types, one per argument
 * @return a new array where each element has been realized to its declared type
 * @throws IllegalArgumentException if the two arrays differ in length
 */
public static Object[] realize(Object[] objs, Class<?>[] types) {
    if (objs.length != types.length) {
        // Include the actual lengths so a mismatch is diagnosable from the message alone.
        throw new IllegalArgumentException(
                "args.length != types.length: " + objs.length + " != " + types.length);
    }
    Object[] dests = new Object[objs.length];
    for (int i = 0; i < objs.length; i++) {
        dests[i] = realize(objs[i], types[i]);
    }
    return dests;
}
@Test
void testMapToInterface() throws Exception {
    // A plain map should realize into a Message with matching bean properties.
    Map payload = new HashMap();
    payload.put("content", "greeting");
    payload.put("from", "dubbo");
    payload.put("urgent", true);

    Object realized = PojoUtils.realize(payload, Message.class);
    Message message = (Message) realized;

    assertThat(message.getContent(), equalTo("greeting"));
    assertThat(message.getFrom(), equalTo("dubbo"));
    assertTrue(message.isUrgent());
}
public static Description getDescriptionForSimulation(Optional<String> fullFileName,
                                                      List<ScenarioWithIndex> scenarios) {
    // Use the .scesim file name when one is provided, otherwise fall back to
    // the runner class's simple name as the suite title.
    final String testSuiteName = fullFileName
            .map(fileName -> getScesimFileName(fileName))
            .orElse(AbstractScenarioRunner.class.getSimpleName());

    final Description suiteDescription = Description.createSuiteDescription(testSuiteName);
    // One child description per scenario, keyed by index and scenario description.
    for (ScenarioWithIndex scenarioWithIndex : scenarios) {
        suiteDescription.addChild(getDescriptionForScenario(
                fullFileName,
                scenarioWithIndex.getIndex(),
                scenarioWithIndex.getScesimData().getDescription()));
    }
    return suiteDescription;
}
@Test
public void getDescriptionForSimulationByClassNameAndSimulation() {
    // Without a file name the suite is titled after the runner class.
    Description description = AbstractScenarioRunner.getDescriptionForSimulation(
            Optional.empty(), scenarioRunnerDTOLocal.getScenarioWithIndices());
    commonVerifyDescriptionForSimulation(description, AbstractScenarioRunner.class.getSimpleName());

    // With a file name the suite is titled after the .scesim file's base name.
    description = AbstractScenarioRunner.getDescriptionForSimulation(
            Optional.of("src/test/Test.scesim"), scenarioRunnerDTOLocal.getScenarioWithIndices());
    commonVerifyDescriptionForSimulation(description, "Test");
}
public static ParamType getVarArgsSchemaFromType(final Type type) {
    // Same resolution as getSchemaFromType, but against the varargs
    // Java-to-argument-type conversion table.
    return getSchemaFromType(type, VARARGS_JAVA_TO_ARG_TYPE);
}
@Test
public void shouldGetStringSchemaFromStringClassVariadic() {
    // String.class resolves to the STRING param type in the varargs table.
    assertThat(
        UdfUtil.getVarArgsSchemaFromType(String.class),
        equalTo(ParamTypes.STRING));
}
@Override
@Deprecated
public <VR> KStream<K, VR> transformValues(
        final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, ? extends VR> valueTransformerSupplier,
        final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    // Adapt the value-only transformer to the with-key variant so both overloads
    // share the same internal transform path; no explicit name is attached.
    return doTransformValues(
            toValueTransformerWithKeySupplier(valueTransformerSupplier),
            NamedInternal.empty(),
            stateStoreNames);
}
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowNullNamedOnTransformValuesWithValueTransformerWithKeySupplier() {
    // Passing a null Named must be rejected eagerly with a clear message.
    final NullPointerException thrown = assertThrows(
        NullPointerException.class,
        () -> testStream.transformValues(valueTransformerWithKeySupplier, (Named) null));

    assertThat(thrown.getMessage(), equalTo("named can't be null"));
}
public void retrieveDocuments() throws DocumentRetrieverException {
    // Fetches each requested document synchronously over message bus and prints
    // every reply, optionally wrapping the whole output in a JSON array.
    boolean first = true;
    // An explicit cluster selection overrides any directly configured route.
    String route = params.cluster.isEmpty() ? params.route : resolveClusterRoute(params.cluster);
    MessageBusParams messageBusParams = createMessageBusParams(params.configId, params.timeout, route);
    documentAccess = documentAccessFactory.createDocumentAccess(messageBusParams);
    session = documentAccess.createSyncSession(new SyncParameters.Builder().build());
    int trace = params.traceLevel;
    if (trace > 0) {
        session.setTraceLevel(trace);
    }
    Iterator<String> iter = params.documentIds;
    // Opening bracket of the JSON array (suppressed in ids-only mode).
    if (params.jsonOutput && !params.printIdsOnly) {
        System.out.println('[');
    }
    while (iter.hasNext()) {
        // Emit a comma separator between array elements, but not before the first.
        if (params.jsonOutput && !params.printIdsOnly) {
            if (!first) {
                System.out.println(',');
            } else {
                first = false;
            }
        }
        String docid = iter.next();
        Message msg = createDocumentRequest(docid);
        Reply reply = session.syncSend(msg);
        printReply(reply);
    }
    // Closing bracket matching the opening one above.
    if (params.jsonOutput && !params.printIdsOnly) {
        System.out.println(']');
    }
}
@Test
@SuppressWarnings("removal")
void testSendSingleMessage() throws DocumentRetrieverException {
    // A single document id should produce exactly one syncSend carrying the
    // configured priority with retries disabled, and its reply must be printed.
    ClientParameters params = createParameters()
            .setDocumentIds(asIterator(DOC_ID_1))
            .setPriority(DocumentProtocol.Priority.HIGH_1)
            .setNoRetry(true)
            .build();
    when(mockedSession.syncSend(any())).thenReturn(createDocumentReply(DOC_ID_1));
    DocumentRetriever documentRetriever = new DocumentRetriever(
            new ClusterList(),
            mockedFactory,
            params);
    documentRetriever.retrieveDocuments();
    verify(mockedSession, times(1)).syncSend(argThat((ArgumentMatcher<GetDocumentMessage>) o ->
            o.getPriority().equals(DocumentProtocol.Priority.HIGH_1) && // TODO remove on Vespa 9
            !o.getRetryEnabled()));
    assertContainsDocument(DOC_ID_1);
}
public boolean containsProblemOfType(String type) {
    // True when at least one recorded problem carries exactly this type string.
    return this.stream()
        .map(problem -> problem.type)
        .anyMatch(candidate -> type.equals(candidate));
}
@Test
void testContainsProblemOfType() {
    problems.addProblem(new CpuAllocationIrregularityProblem(new ArrayList<>()));

    // Present type matches; any other type does not.
    assertThat(problems.containsProblemOfType(CpuAllocationIrregularityProblem.PROBLEM_TYPE)).isTrue();
    assertThat(problems.containsProblemOfType(PollIntervalInSecondsTimeBoxIsTooSmallProblem.PROBLEM_TYPE)).isFalse();
}
@SuppressWarnings("unchecked")
public static <T extends FEELFunction> T getFunction(Class<T> functionClazz) {
    // Linear scan over the registered functions; the first assignable match wins.
    return Stream.of(FUNCTIONS)
            .filter(candidate -> functionClazz.isAssignableFrom(candidate.getClass()))
            .findFirst()
            .map(candidate -> (T) candidate)
            .orElseThrow(() -> new IllegalArgumentException(
                    "Cannot find function by class " + functionClazz.getCanonicalName() + "!"));
}
@Test
void getFunctionsByNameFails() {
    // Looking up an unregistered function must fail fast.
    assertThrows(
        IllegalArgumentException.class,
        () -> BuiltInFunctions.getFunction(FakeFunction.FAKE_NAME));
}
public String toBaseMessageIdString(Object messageId) {
    // Converts an AMQP message-id value into its string form, prefixing
    // non-string types with a type marker so the original type can be
    // recovered later. Returns null for a null input.
    if (messageId == null) {
        return null;
    } else if (messageId instanceof String) {
        String stringId = (String) messageId;

        // If the given string has a type encoding prefix,
        // we need to escape it as an encoded string (even if
        // the existing encoding prefix was also for string)
        if (hasTypeEncodingPrefix(stringId)) {
            return AMQP_STRING_PREFIX + stringId;
        } else {
            return stringId;
        }
    } else if (messageId instanceof UUID) {
        return AMQP_UUID_PREFIX + messageId.toString();
    } else if (messageId instanceof UnsignedLong) {
        return AMQP_ULONG_PREFIX + messageId.toString();
    } else if (messageId instanceof Binary) {
        // Copy only the readable bytes without disturbing the source buffer.
        ByteBuffer dup = ((Binary) messageId).asByteBuffer();

        byte[] bytes = new byte[dup.remaining()];
        dup.get(bytes);

        String hex = convertBinaryToHexString(bytes);

        return AMQP_BINARY_PREFIX + hex;
    } else {
        throw new IllegalArgumentException("Unsupported type provided: " + messageId.getClass());
    }
}
@Test
public void testToBaseMessageIdStringWithStringBeginningWithEncodingPrefixForLong() {
    // A plain string that happens to start with the ULONG prefix must be
    // escaped with the STRING prefix so its type survives a round trip.
    String longStringMessageId = AMQPMessageIdHelper.AMQP_ULONG_PREFIX + 123456789L;
    String expected = AMQPMessageIdHelper.AMQP_STRING_PREFIX + longStringMessageId;

    String converted = messageIdHelper.toBaseMessageIdString(longStringMessageId);

    assertNotNull("null string should not have been returned", converted);
    assertEquals("expected base id string was not returned", expected, converted);
}
public WorkflowActionResponse deactivate(String workflowId, User caller) {
    // A caller identity is mandatory for audit purposes.
    Checks.notNull(caller, "caller cannot be null to deactivate workflow [%s]", workflowId);

    final String timeline = workflowDao.deactivate(workflowId, caller);
    LOG.info(timeline);

    // Record the deactivation on the timeline attributed to the caller.
    return WorkflowActionResponse.from(
        workflowId,
        TimelineActionEvent.builder()
            .action(Actions.WorkflowAction.DEACTIVATE)
            .author(caller)
            .message(timeline)
            .build());
}
@Test
public void testDeactivateError() {
    // A null caller must be rejected with the exact precondition message.
    AssertHelper.assertThrows(
        "caller cannot be null to deactivate workflow",
        NullPointerException.class,
        "caller cannot be null to deactivate workflow [sample-minimal-wf]",
        () -> actionHandler.deactivate("sample-minimal-wf", null));
}
@Override
public void updateService(Service service, AbstractSelector selector) throws NacosException {
    NAMING_LOGGER.info("[UPDATE-SERVICE] {} updating service : {}", namespaceId, service);

    // Assemble the form parameters describing the service update.
    final Map<String, String> requestParams = new HashMap<>(16);
    requestParams.put(CommonParams.NAMESPACE_ID, namespaceId);
    requestParams.put(CommonParams.SERVICE_NAME, service.getName());
    requestParams.put(CommonParams.GROUP_NAME, service.getGroupName());
    requestParams.put(PROTECT_THRESHOLD_PARAM, String.valueOf(service.getProtectThreshold()));
    requestParams.put(META_PARAM, JacksonUtils.toJson(service.getMetadata()));
    requestParams.put(SELECTOR_PARAM, JacksonUtils.toJson(selector));

    // Updates are issued as PUT against the service endpoint.
    reqApi(UtilAndComs.nacosUrlService, requestParams, HttpMethod.PUT);
}
@Test
void testUpdateService() throws Exception {
    //given
    // Swap a mocked rest template into the proxy via reflection so no real
    // HTTP traffic occurs.
    NacosRestTemplate nacosRestTemplate = mock(NacosRestTemplate.class);
    HttpRestResult<Object> a = new HttpRestResult<Object>();
    a.setData("");
    a.setCode(200);
    when(nacosRestTemplate.exchangeForm(any(), any(), any(), any(), any(), any())).thenReturn(a);
    final Field nacosRestTemplateField = NamingHttpClientProxy.class.getDeclaredField("nacosRestTemplate");
    nacosRestTemplateField.setAccessible(true);
    nacosRestTemplateField.set(clientProxy, nacosRestTemplate);
    // NOTE(review): serviceName/groupName are never used below — candidates for removal.
    String serviceName = "service1";
    String groupName = "group1";
    //when
    clientProxy.updateService(new Service(), new NoneSelector());
    //then
    // updateService must issue exactly one PUT against the service endpoint.
    verify(nacosRestTemplate, times(1)).exchangeForm(endsWith(UtilAndComs.nacosUrlService), any(), any(), any(),
            eq(HttpMethod.PUT), any());
}
@Override
public long connectionDelay(Node node, long now) {
    // Delegate to the per-node connection state tracker, keyed by node id.
    return connectionStates.connectionDelay(node.idString(), now);
}
@Test
public void testConnectionDelayWithNoExponentialBackoff() {
    long currentTimeMs = time.milliseconds();

    // With exponential backoff disabled, a fresh node has no connection delay.
    long actualDelay = clientWithNoExponentialBackoff.connectionDelay(node, currentTimeMs);

    assertEquals(0, actualDelay);
}
@Override
public <T> void execute(URI uri, String httpMethod, RequestHttpEntity requestHttpEntity,
        final ResponseHandler<T> responseHandler, final Callback<T> callback) throws Exception {
    // Builds the request and hands it to the async client. The outcome is
    // delivered exclusively through the supplied callback: onReceive for a
    // handled response, onError for handler/transport failures, onCancel on
    // cancellation.
    HttpRequestBase httpRequestBase = DefaultHttpClientRequest.build(uri, httpMethod, requestHttpEntity, defaultConfig);
    try {
        asyncClient.execute(httpRequestBase, new FutureCallback<HttpResponse>() {
            @Override
            public void completed(HttpResponse result) {
                DefaultClientHttpResponse response = new DefaultClientHttpResponse(result);
                try {
                    HttpRestResult<T> httpRestResult = responseHandler.handle(response);
                    callback.onReceive(httpRestResult);
                } catch (Exception e) {
                    callback.onError(e);
                } finally {
                    // Always release the underlying connection resources.
                    HttpClientUtils.closeQuietly(result);
                }
            }

            @Override
            public void failed(Exception ex) {
                callback.onError(ex);
            }

            @Override
            public void cancelled() {
                callback.onCancel();
            }
        });
    } catch (IllegalStateException e) {
        // The I/O reactor may refuse work after an internal error; dump its
        // audit log before rethrowing so the root cause is visible in the logs.
        final List<ExceptionEvent> events = ioreactor.getAuditLog();
        if (events != null) {
            for (ExceptionEvent event : events) {
                if (event != null) {
                    LOGGER.error("[DefaultAsyncHttpClientRequest] IllegalStateException! I/O Reactor error time: {}",
                            event.getTimestamp(), event.getCause());
                }
            }
        }
        throw e;
    }
}
@Test
void testExecuteOnComplete() throws Exception {
    // Arranges a fake async client that completes the request immediately,
    // then verifies the callback receives the handled result.
    Header header = Header.newInstance();
    Map<String, String> body = new HashMap<>();
    body.put("test", "test");
    RequestHttpEntity httpEntity = new RequestHttpEntity(header, Query.EMPTY, body);
    HttpResponse response = mock(HttpResponse.class);
    HttpRestResult restResult = new HttpRestResult();
    when(responseHandler.handle(any())).thenReturn(restResult);
    when(client.execute(any(), any())).thenAnswer(invocationOnMock -> {
        // Drive the FutureCallback completion path synchronously.
        ((FutureCallback) invocationOnMock.getArgument(1)).completed(response);
        return null;
    });
    httpClientRequest.execute(uri, "PUT", httpEntity, responseHandler, callback);
    verify(callback).onReceive(restResult);
}
/**
 * Resolves the base image manifest(s) and returns the images together with the registry
 * client that pulled them ({@code null} when no registry interaction happened).
 *
 * <p>Resolution order, as implemented below: scratch image (synthesized per platform),
 * offline cache, digest-pinned cache, registry mirrors, base registry with no credentials,
 * then authenticated pull (WWW-Authenticate bearer flow with basic/bearer fallbacks).
 */
@Override
public ImagesAndRegistryClient call()
    throws IOException, RegistryException, LayerPropertyNotFoundException,
        LayerCountMismatchException, BadContainerConfigurationFormatException,
        CacheCorruptedException, CredentialRetrievalException {
  EventHandlers eventHandlers = buildContext.getEventHandlers();
  try (ProgressEventDispatcher progressDispatcher =
          progressDispatcherFactory.create("pulling base image manifest", 4);
      TimerEventDispatcher ignored1 = new TimerEventDispatcher(eventHandlers, DESCRIPTION)) {
    // Skip this step if this is a scratch image
    ImageReference imageReference = buildContext.getBaseImageConfiguration().getImage();
    if (imageReference.isScratch()) {
      Set<Platform> platforms = buildContext.getContainerConfiguration().getPlatforms();
      Verify.verify(!platforms.isEmpty());
      eventHandlers.dispatch(LogEvent.progress("Getting scratch base image..."));
      ImmutableList.Builder<Image> images = ImmutableList.builder();
      // One empty image per requested platform, carrying only architecture/os.
      for (Platform platform : platforms) {
        Image.Builder imageBuilder = Image.builder(buildContext.getTargetFormat());
        imageBuilder.setArchitecture(platform.getArchitecture()).setOs(platform.getOs());
        images.add(imageBuilder.build());
      }
      return new ImagesAndRegistryClient(images.build(), null);
    }
    eventHandlers.dispatch(
        LogEvent.progress("Getting manifest for base image " + imageReference + "..."));
    if (buildContext.isOffline()) {
      List<Image> images = getCachedBaseImages();
      if (!images.isEmpty()) {
        return new ImagesAndRegistryClient(images, null);
      }
      throw new IOException(
          "Cannot run Jib in offline mode; " + imageReference + " not found in local Jib cache");
    } else if (imageReference.getDigest().isPresent()) {
      // Digest-pinned references are immutable, so a cache hit is authoritative.
      List<Image> images = getCachedBaseImages();
      if (!images.isEmpty()) {
        RegistryClient noAuthRegistryClient =
            buildContext.newBaseImageRegistryClientFactory().newRegistryClient();
        // TODO: passing noAuthRegistryClient may be problematic. It may return 401 unauthorized
        // if layers have to be downloaded.
        // https://github.com/GoogleContainerTools/jib/issues/2220
        return new ImagesAndRegistryClient(images, noAuthRegistryClient);
      }
    }
    Optional<ImagesAndRegistryClient> mirrorPull =
        tryMirrors(buildContext, progressDispatcher.newChildProducer());
    if (mirrorPull.isPresent()) {
      return mirrorPull.get();
    }
    try {
      // First, try with no credentials. This works with public GCR images (but not Docker Hub).
      // TODO: investigate if we should just pass credentials up front. However, this involves
      // some risk. https://github.com/GoogleContainerTools/jib/pull/2200#discussion_r359069026
      // contains some related discussions.
      RegistryClient noAuthRegistryClient =
          buildContext.newBaseImageRegistryClientFactory().newRegistryClient();
      return new ImagesAndRegistryClient(
          pullBaseImages(noAuthRegistryClient, progressDispatcher.newChildProducer()),
          noAuthRegistryClient);
    } catch (RegistryUnauthorizedException ex) {
      eventHandlers.dispatch(
          LogEvent.lifecycle(
              "The base image requires auth. Trying again for " + imageReference + "..."));
      Credential credential =
          RegistryCredentialRetriever.getBaseImageCredential(buildContext).orElse(null);
      RegistryClient registryClient =
          buildContext
              .newBaseImageRegistryClientFactory()
              .setCredential(credential)
              .newRegistryClient();
      String wwwAuthenticate = ex.getHttpResponseException().getHeaders().getAuthenticate();
      if (wwwAuthenticate != null) {
        eventHandlers.dispatch(
            LogEvent.debug("WWW-Authenticate for " + imageReference + ": " + wwwAuthenticate));
        registryClient.authPullByWwwAuthenticate(wwwAuthenticate);
        return new ImagesAndRegistryClient(
            pullBaseImages(registryClient, progressDispatcher.newChildProducer()),
            registryClient);
      } else {
        // Not getting WWW-Authenticate is unexpected in practice, and we may just blame the
        // server and fail. However, to keep some old behavior, try a few things as a last resort.
        // TODO: consider removing this fallback branch.
        if (credential != null && !credential.isOAuth2RefreshToken()) {
          eventHandlers.dispatch(
              LogEvent.debug("Trying basic auth as fallback for " + imageReference + "..."));
          registryClient.configureBasicAuth();
          try {
            return new ImagesAndRegistryClient(
                pullBaseImages(registryClient, progressDispatcher.newChildProducer()),
                registryClient);
          } catch (RegistryUnauthorizedException ignored) {
            // Fall back to try bearer auth.
          }
        }
        eventHandlers.dispatch(
            LogEvent.debug("Trying bearer auth as fallback for " + imageReference + "..."));
        registryClient.doPullBearerAuth();
        return new ImagesAndRegistryClient(
            pullBaseImages(registryClient, progressDispatcher.newChildProducer()),
            registryClient);
      }
    }
  }
}
// Verifies that a scratch base image with multiple platforms yields one synthesized image
// per platform (correct architecture/os) and no registry client.
@Test public void testCall_scratch_multiplePlatforms() throws LayerPropertyNotFoundException, IOException, RegistryException, LayerCountMismatchException, BadContainerConfigurationFormatException, CacheCorruptedException, CredentialRetrievalException { Mockito.when(imageConfiguration.getImage()).thenReturn(ImageReference.scratch()); Mockito.when(containerConfig.getPlatforms()) .thenReturn( ImmutableSet.of( new Platform("architecture1", "os1"), new Platform("architecture2", "os2"))); ImagesAndRegistryClient result = pullBaseImageStep.call(); Assert.assertEquals(2, result.images.size()); Assert.assertEquals("architecture1", result.images.get(0).getArchitecture()); Assert.assertEquals("os1", result.images.get(0).getOs()); Assert.assertEquals("architecture2", result.images.get(1).getArchitecture()); Assert.assertEquals("os2", result.images.get(1).getOs()); Assert.assertNull(result.registryClient); }
/** Returns an iterable view of this float array whose elements are compared using the given absolute tolerance. */
public FloatArrayAsIterable usingTolerance(double tolerance) { return new FloatArrayAsIterable(tolerance(tolerance), iterableSubject()); }
// Verifies that a tolerance-based contains(NaN) check fails (tolerance comparison never
// matches NaN) and produces the expected failure keys and values.
@Test public void usingTolerance_contains_failureWithNaN() { expectFailureWhenTestingThat(array(1.1f, NaN, 3.3f)) .usingTolerance(DEFAULT_TOLERANCE) .contains(NaN); assertFailureKeys("value of", "expected to contain", "testing whether", "but was"); assertFailureValue("expected to contain", "NaN"); assertFailureValue("but was", "[" + 1.1f + ", NaN, " + 3.3f + "]"); }
/**
 * Parses ls flags into the corresponding fields and defaults the path argument to the
 * current directory when none is given. Flag interactions visible below: -R (recursive)
 * is suppressed by -d (directory mode), and -S (size ordering) is ignored when -t (mtime
 * ordering) is also set. Finishes by building the ordering comparator from the flags.
 */
@Override protected void processOptions(LinkedList<String> args) throws IOException { CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, OPTION_PATHONLY, OPTION_DIRECTORY, OPTION_HUMAN, OPTION_HIDENONPRINTABLE, OPTION_RECURSIVE, OPTION_REVERSE, OPTION_MTIME, OPTION_SIZE, OPTION_ATIME, OPTION_ECPOLICY); cf.parse(args); pathOnly = cf.getOpt(OPTION_PATHONLY); dirRecurse = !cf.getOpt(OPTION_DIRECTORY); setRecursive(cf.getOpt(OPTION_RECURSIVE) && dirRecurse); humanReadable = cf.getOpt(OPTION_HUMAN); hideNonPrintable = cf.getOpt(OPTION_HIDENONPRINTABLE); orderReverse = cf.getOpt(OPTION_REVERSE); orderTime = cf.getOpt(OPTION_MTIME); orderSize = !orderTime && cf.getOpt(OPTION_SIZE); useAtime = cf.getOpt(OPTION_ATIME); displayECPolicy = cf.getOpt(OPTION_ECPOLICY); if (args.isEmpty()) args.add(Path.CUR_DIR); initialiseOrderComparator(); }
// Verifies that ls -S prints directory entries in descending file-length order,
// independent of their name order.
@Test public void processPathDirOrderLength() throws IOException { TestFile testfile01 = new TestFile("testDirectory", "testFile01"); TestFile testfile02 = new TestFile("testDirectory", "testFile02"); TestFile testfile03 = new TestFile("testDirectory", "testFile03"); TestFile testfile04 = new TestFile("testDirectory", "testFile04"); TestFile testfile05 = new TestFile("testDirectory", "testFile05"); TestFile testfile06 = new TestFile("testDirectory", "testFile06");
// set file length in different order to file names
long length = 1234567890; testfile01.setLength(length + 10); testfile02.setLength(length + 30); testfile03.setLength(length + 20); testfile04.setLength(length + 60); testfile05.setLength(length + 50); testfile06.setLength(length + 40); TestFile testDir = new TestFile("", "testDirectory"); testDir.setIsDir(true); testDir.addContents(testfile01, testfile02, testfile03, testfile04, testfile05, testfile06); LinkedList<PathData> pathData = new LinkedList<PathData>(); pathData.add(testDir.getPathData()); PrintStream out = mock(PrintStream.class); Ls ls = new Ls(); ls.out = out; LinkedList<String> options = new LinkedList<String>(); options.add("-S"); ls.processOptions(options); String lineFormat = TestFile.computeLineFormat(pathData); ls.processArguments(pathData); InOrder inOrder = inOrder(out); inOrder.verify(out).println("Found 6 items"); inOrder.verify(out).println(testfile04.formatLineMtime(lineFormat)); inOrder.verify(out).println(testfile05.formatLineMtime(lineFormat)); inOrder.verify(out).println(testfile06.formatLineMtime(lineFormat)); inOrder.verify(out).println(testfile02.formatLineMtime(lineFormat)); inOrder.verify(out).println(testfile03.formatLineMtime(lineFormat)); inOrder.verify(out).println(testfile01.formatLineMtime(lineFormat)); verifyNoMoreInteractions(out); }
/** Returns a server stream tracer factory whose tracers record per-call metrics into the given registry. */
public ServerStreamTracer.Factory getMetricsServerTracerFactory(MeterRegistry registry) { return new MetricsServerTracerFactory(registry); }
// Exercises a full server-side call lifecycle (start, wire sizes, elapsed time, close) and
// verifies the started-call counter plus sent/received size and call-duration histograms.
@Test void serverBasicMetrics() { MetricsServerStreamTracers localServerStreamTracers = new MetricsServerStreamTracers(fakeClock.getStopwatchSupplier()); ServerStreamTracer.Factory tracerFactory = localServerStreamTracers.getMetricsServerTracerFactory(meterRegistry); ServerStreamTracer tracer = tracerFactory.newServerStreamTracer(method.getFullMethodName(), new Metadata()); tracer.serverCallStarted( new CallInfo<>(method, Attributes.EMPTY, null)); assertThat(meterRegistry.get(SERVER_CALL_STARTED) .tag(GRPC_METHOD_TAG_KEY, FULL_METHOD_NAME) .tag(INSTRUMENTATION_SOURCE_TAG_KEY, INSTRUMENTATION_SOURCE_TAG_VALUE) .tag(INSTRUMENTATION_VERSION_TAG_KEY, INSTRUMENTATION_VERSION_TAG_VALUE) .counter() .count()).isEqualTo(1); tracer.inboundWireSize(34); fakeClock.forwardTime(26, MILLISECONDS); tracer.outboundWireSize(1028); tracer.inboundWireSize(154); tracer.outboundWireSize(99); fakeClock.forwardTime(14, MILLISECONDS); tracer.streamClosed(Status.CANCELLED); HistogramSnapshot sentMessageSizeSnapShot = meterRegistry.get(SERVER_SENT_COMPRESSED_MESSAGE_SIZE) .tag(GRPC_METHOD_TAG_KEY, FULL_METHOD_NAME) .tag(GRPC_STATUS_TAG_KEY, Status.Code.CANCELLED.toString()) .tag(INSTRUMENTATION_SOURCE_TAG_KEY, INSTRUMENTATION_SOURCE_TAG_VALUE) .tag(INSTRUMENTATION_VERSION_TAG_KEY, INSTRUMENTATION_VERSION_TAG_VALUE) .summary() .takeSnapshot(); HistogramSnapshot expectedSentMessageSizeHistogram = HistogramSnapshot.empty(1L, 1127L, 1127L); assertThat(sentMessageSizeSnapShot.count()).isEqualTo(expectedSentMessageSizeHistogram.count()); assertThat(sentMessageSizeSnapShot.total()).isEqualTo(expectedSentMessageSizeHistogram.total()); assertThat(sentMessageSizeSnapShot.histogramCounts()).contains(new CountAtBucket(2048.0, 1)); HistogramSnapshot receivedMessageSizeSnapShot = meterRegistry.get(SERVER_RECEIVED_COMPRESSED_MESSAGE_SIZE) .tag(GRPC_METHOD_TAG_KEY, FULL_METHOD_NAME) .tag(GRPC_STATUS_TAG_KEY, Status.Code.CANCELLED.toString()) .tag(INSTRUMENTATION_SOURCE_TAG_KEY,
INSTRUMENTATION_SOURCE_TAG_VALUE) .tag(INSTRUMENTATION_VERSION_TAG_KEY, INSTRUMENTATION_VERSION_TAG_VALUE) .summary() .takeSnapshot(); HistogramSnapshot expectedReceivedMessageSizeHistogram = HistogramSnapshot.empty(1L, 188L, 188L); assertThat(receivedMessageSizeSnapShot.count()).isEqualTo(expectedReceivedMessageSizeHistogram.count()); assertThat(receivedMessageSizeSnapShot.total()).isEqualTo(expectedReceivedMessageSizeHistogram.total()); assertThat(receivedMessageSizeSnapShot.histogramCounts()).contains(new CountAtBucket(1024.0, 1));
// TODO(dnvindhya) : Figure out a way to generate normal histogram instead of cumulative histogram
// with fixed buckets
/* * assertThat(receivedMessageSizeSnapShot.histogramCounts()).contains(new CountAtBucket(1024.0, 1), new * CountAtBucket(2048.0, 0)); */
HistogramSnapshot callDurationSnapshot = meterRegistry.get(SERVER_CALL_DURATION) .tag(GRPC_METHOD_TAG_KEY, FULL_METHOD_NAME) .tag(GRPC_STATUS_TAG_KEY, Status.Code.CANCELLED.toString()) .tag(INSTRUMENTATION_SOURCE_TAG_KEY, INSTRUMENTATION_SOURCE_TAG_VALUE) .tag(INSTRUMENTATION_VERSION_TAG_KEY, INSTRUMENTATION_VERSION_TAG_VALUE) .timer() .takeSnapshot(); HistogramSnapshot expectedCallDurationHistogram = HistogramSnapshot.empty(1L, 40L, 40); assertThat(callDurationSnapshot.count()).isEqualTo(expectedCallDurationHistogram.count()); assertThat(callDurationSnapshot.total(MILLISECONDS)).isEqualTo(expectedCallDurationHistogram.total()); assertThat(callDurationSnapshot.histogramCounts()).contains(new CountAtBucket(4.0E7, 1)); }
/**
 * Executes an ASSERT TOPIC statement: delegates to AssertExecutor with the configured
 * default timeout, an action that checks the topic (name, config, existence flag) via the
 * topic client, and a factory that builds the AssertTopicEntity response.
 */
public static StatementExecutorResponse execute( final ConfiguredStatement<AssertTopic> statement, final SessionProperties sessionProperties, final KsqlExecutionContext executionContext, final ServiceContext serviceContext ) { return AssertExecutor.execute(statement.getMaskedStatementText(), statement.getStatement(), executionContext.getKsqlConfig().getInt(KSQL_ASSERT_TOPIC_DEFAULT_TIMEOUT_MS), serviceContext, (stmt, sc) -> assertTopic( sc.getTopicClient(), ((AssertTopic) stmt).getTopic(), ((AssertTopic) stmt).getConfig(), stmt.checkExists()), (str, stmt) -> new AssertTopicEntity( str, ((AssertTopic) stmt).getTopic(), stmt.checkExists()) ); }
// Verifies that asserting a topic with mismatched partitions/replicas and an unknown
// config property fails with HTTP 417 and an aggregated mismatch message.
@Test public void shouldFailToAssertWrongConfigs() {
// Given:
final Map<String, Literal> configs = ImmutableMap.of( "partitions", new IntegerLiteral(10), "replicas", new IntegerLiteral(10), "abc", new IntegerLiteral(23)); final AssertTopic assertTopic = new AssertTopic(Optional.empty(), "topicName", configs, Optional.empty(), true); final ConfiguredStatement<AssertTopic> statement = ConfiguredStatement .of(KsqlParser.PreparedStatement.of("", assertTopic), SessionConfig.of(ksqlConfig, ImmutableMap.of()));
// When:
final KsqlRestException e = assertThrows(KsqlRestException.class, () -> AssertTopicExecutor.execute(statement, mock(SessionProperties.class), engine, serviceContext));
// Then:
assertThat(e.getResponse().getStatus(), is(417)); assertThat(((KsqlErrorMessage) e.getResponse().getEntity()).getMessage(), is("Mismatched configuration for topic topicName: For config partitions, expected 10 got 1\n" + "Mismatched configuration for topic topicName: For config replicas, expected 10 got 0\n" + "Cannot assert unknown topic property: abc")); }
public void loadXML( Node transnode, Repository rep, boolean setInternalVariables ) throws KettleXMLException, KettleMissingPluginsException {
  // Convenience overload: delegate to the full variant with no parent variable space.
  this.loadXML( transnode, rep, setInternalVariables, null );
}
// Verifies that loading transformation XML resolves the repository directory from the
// <info>/<directory> node and exposes it via the internal repository-directory variable.
@Test public void testLoadXml() throws KettleException { String directory = "/home/admin"; Node jobNode = Mockito.mock( Node.class ); NodeList nodeList = new NodeList() { final ArrayList<Node> nodes = new ArrayList<>(); { Node nodeInfo = Mockito.mock( Node.class ); Mockito.when( nodeInfo.getNodeName() ).thenReturn( TransMeta.XML_TAG_INFO ); Mockito.when( nodeInfo.getChildNodes() ).thenReturn( this ); Node nodeDirectory = Mockito.mock( Node.class ); Mockito.when( nodeDirectory.getNodeName() ).thenReturn( "directory" ); Node child = Mockito.mock( Node.class ); Mockito.when( nodeDirectory.getFirstChild() ).thenReturn( child ); Mockito.when( child.getNodeValue() ).thenReturn( directory ); nodes.add( nodeDirectory ); nodes.add( nodeInfo ); } @Override public Node item( int index ) { return nodes.get( index ); } @Override public int getLength() { return nodes.size(); } }; Mockito.when( jobNode.getChildNodes() ).thenReturn( nodeList ); Repository rep = Mockito.mock( Repository.class ); RepositoryDirectory repDirectory = new RepositoryDirectory( new RepositoryDirectory( new RepositoryDirectory(), "home" ), "admin" ); Mockito.when( rep.findDirectory( Mockito.eq( directory ) ) ).thenReturn( repDirectory ); TransMeta meta = new TransMeta(); VariableSpace variableSpace = Mockito.mock( VariableSpace.class ); Mockito.when( variableSpace.listVariables() ).thenReturn( new String[ 0 ] ); meta.loadXML( jobNode, null, Mockito.mock( IMetaStore.class ), rep, false, variableSpace, Mockito.mock( OverwritePrompter.class ) ); meta.setInternalKettleVariables( null ); assertEquals( repDirectory.getPath(), meta.getVariable( Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY ) ); }
public static String getUnresolvedSchemaName(final Schema schema) { if (!isUnresolvedSchema(schema)) { throw new IllegalArgumentException("Not a unresolved schema: " + schema); } return schema.getProp(UR_SCHEMA_ATTR); }
// Verifies that asking for the unresolved name of a schema without the marker property throws.
@Test(expected = IllegalArgumentException.class) public void testIsUnresolvedSchemaError1() {
// No "org.apache.avro.idl.unresolved.name" property
Schema s = SchemaBuilder.record("R").fields().endRecord(); SchemaResolver.getUnresolvedSchemaName(s); }
/**
 * Reads an integer factory id from the given system property.
 *
 * @param prop      system property name to read
 * @param defaultId value returned when the property is unset or not a valid integer
 * @return the parsed property value, or {@code defaultId} when absent or unparsable
 */
public static int getFactoryId(String prop, int defaultId) {
    final String value = System.getProperty(prop);
    if (value != null) {
        try {
            return Integer.parseInt(value);
        } catch (NumberFormatException e) {
            // Fix: interpolate the actual property name; the message previously logged the
            // literal word "prop" instead of the property being parsed.
            Logger.getLogger(FactoryIdHelper.class)
                    .finest("Parameter for property " + prop + " could not be parsed", e);
        }
    }
    return defaultId;
}
// Verifies that a non-numeric property value falls back to the supplied default id.
@Test public void testPropWithInValidNumber() throws Exception { String key = "hazelcast.test.prop"; System.setProperty(key, "NaN"); int factoryId = FactoryIdHelper.getFactoryId(key, 10); assertEquals(10, factoryId); }
/**
 * Propagates the completed future's outcome (success value, cancellation, or failure cause)
 * to every aggregated promise. When {@code logNotifyFailure} is set, failures to notify a
 * promise are logged via {@code logger}; otherwise they are silently ignored (null logger).
 */
@Override public void operationComplete(F future) throws Exception { InternalLogger internalLogger = logNotifyFailure ? logger : null; if (future.isSuccess()) { V result = future.get(); for (Promise<? super V> p: promises) { PromiseNotificationUtil.trySuccess(p, result, internalLogger); } } else if (future.isCancelled()) { for (Promise<? super V> p: promises) { PromiseNotificationUtil.tryCancel(p, internalLogger); } } else { Throwable cause = future.cause(); for (Promise<? super V> p: promises) { PromiseNotificationUtil.tryFailure(p, cause, internalLogger); } } }
// Verifies that a failed future's cause is propagated to all aggregated promises via tryFailure.
@Test public void testListenerFailure() throws Exception { @SuppressWarnings("unchecked") Promise<Void> p1 = mock(Promise.class); @SuppressWarnings("unchecked") Promise<Void> p2 = mock(Promise.class); @SuppressWarnings("unchecked") PromiseNotifier<Void, Future<Void>> notifier = new PromiseNotifier<Void, Future<Void>>(p1, p2); @SuppressWarnings("unchecked") Future<Void> future = mock(Future.class); Throwable t = mock(Throwable.class); when(future.isSuccess()).thenReturn(false); when(future.isCancelled()).thenReturn(false); when(future.cause()).thenReturn(t); when(p1.tryFailure(t)).thenReturn(true); when(p2.tryFailure(t)).thenReturn(true); notifier.operationComplete(future); verify(p1).tryFailure(t); verify(p2).tryFailure(t); }
/**
 * Applies De Morgan's law: the negation of this disjunction is the conjunction of the
 * negated inner predicates. Inner predicates that support native negation are negated
 * directly; all others are wrapped in a {@code NotPredicate}.
 */
@Override
public Predicate negate() {
    Predicate[] negatedInners = new Predicate[predicates.length];
    for (int i = 0; i < negatedInners.length; i++) {
        Predicate inner = predicates[i];
        negatedInners[i] = inner instanceof NegatablePredicate negatable
                ? negatable.negate()
                : new NotPredicate(inner);
    }
    return new AndPredicate(negatedInners);
}
// Verifies that negating an OR over a non-negatable predicate wraps it in NotPredicate.
@Test public void negate_whenContainsNonNegatablePredicate_thenReturnAndPredicateWithNotInside() {
// ~(foo or bar) --> (~foo and ~bar)
// this is testing the case where the inner predicate does NOT implement {@link Negatable}
Predicate nonNegatable = mock(Predicate.class); OrPredicate or = (OrPredicate) or(nonNegatable); AndPredicate result = (AndPredicate) or.negate(); Predicate[] inners = result.predicates; assertThat(inners).hasSize(1); NotPredicate notPredicate = (NotPredicate) inners[0]; assertThat(nonNegatable).isSameAs(notPredicate.predicate); }
/** Builds a ProducerListeners view over a snapshot of the currently registered event listeners. */
ProducerListeners listeners() {
    HollowProducerEventListener[] snapshot =
            eventListeners.toArray(new HollowProducerEventListener[0]);
    return new ProducerListeners(snapshot);
}
// Verifies that a listener throwing during validation-start does not stop notification of
// the remaining listeners.
@Test public void testFireValidationStartDontStopWhenOneFails2() { long version = 31337; HollowProducer.ReadState readState = Mockito.mock(HollowProducer.ReadState.class); Mockito.when(readState.getVersion()).thenReturn(version); Mockito.doThrow(RuntimeException.class).when(validationStatusListener).onValidationStatusStart(version); listenerSupport.listeners().fireValidationStart(readState); Mockito.verify(listener).onValidationStart(version); Mockito.verify(validationStatusListener).onValidationStatusStart(version); Mockito.verify(producerAndValidationStatusListener).onValidationStart(version); }
/**
 * Left-pads {@code input} with {@code padding} up to {@code targetLen} characters.
 * Returns null for a null input, a null/empty padding, or a null/negative target length.
 * When the input is longer than the target length, the result is the input truncated
 * (from the right) to exactly {@code targetLen} characters.
 */
@Udf
public String lpad(
    @UdfParameter(description = "String to be padded") final String input,
    @UdfParameter(description = "Target length") final Integer targetLen,
    @UdfParameter(description = "Padding string") final String padding) {
  if (input == null || padding == null || padding.isEmpty()
      || targetLen == null || targetLen < 0) {
    return null;
  }
  // Number of pad characters required in front of the input (zero when input is long enough).
  final int padLength = Math.max(targetLen - input.length(), 0);
  final StringBuilder result = new StringBuilder(targetLen + padding.length());
  while (result.length() < padLength) {
    result.append(padding);
  }
  result.setLength(padLength);   // trim overshoot from the final repeat of the padding
  result.append(input);
  result.setLength(targetLen);   // right-truncate when the input exceeds the target length
  return result.toString();
}
// Verifies that the bytes overload of lpad returns null for a negative target length.
@Test public void shouldReturnNullForNegativeLengthBytes() { final ByteBuffer result = udf.lpad(BYTES_123, -1, BYTES_45); assertThat(result, is(nullValue())); }
/**
 * Validates a context path against the illegal-pattern matcher.
 * A null context path is accepted as-is (nothing to validate).
 *
 * @throws IllegalArgumentException if the path matches the illegal url pattern
 */
public static void checkContextPath(String contextPath) {
    if (contextPath == null) {
        return;
    }
    if (CONTEXT_PATH_MATCH.matcher(contextPath).find()) {
        throw new IllegalArgumentException("Illegal url path expression");
    }
}
// Verifies that a context path of consecutive slashes ("///") is rejected.
@Test void testContextPathIllegal3() { assertThrows(IllegalArgumentException.class, () -> { String contextPath3 = "///"; ValidatorUtils.checkContextPath(contextPath3); }); }
/** Returns true when the provider group is null or contains no providers. */
public static boolean isEmpty(ProviderGroup group) {
    if (group == null) {
        return true;
    }
    return group.isEmpty();
}
// Verifies isEmpty for: null group, group with null provider list, empty list, and one provider.
@Test public void isEmpty() throws Exception { ProviderGroup pg = null; Assert.assertTrue(ProviderHelper.isEmpty(pg)); pg = new ProviderGroup("xxx", null); Assert.assertTrue(ProviderHelper.isEmpty(pg)); pg = new ProviderGroup("xxx", new ArrayList<ProviderInfo>()); Assert.assertTrue(ProviderHelper.isEmpty(pg)); pg.add(ProviderHelper.toProviderInfo("127.0.0.1:12200")); Assert.assertFalse(ProviderHelper.isEmpty(pg)); }
/** Returns the canonical name of the metrics plugin. */
@Override
public String named() {
    final String pluginName = PluginEnum.METRICS.getName();
    return pluginName;
}
// Verifies that the plugin reports the metrics plugin name.
@Test public void testNamed() { Assertions.assertEquals(metricsPlugin.named(), PluginEnum.METRICS.getName()); }
/**
 * Resolves the output schema for an execution step by dispatching to the handler registered
 * for the step's concrete class in {@code HANDLERS}.
 *
 * @throws IllegalStateException if no handler is registered for the step's class
 */
public LogicalSchema resolve(final ExecutionStep<?> step, final LogicalSchema schema) { return Optional.ofNullable(HANDLERS.get(step.getClass())) .map(h -> h.handle(this, schema, step)) .orElseThrow(() -> new IllegalStateException("Unhandled step class: " + step.getClass())); }
// Verifies that resolving a StreamSelectKey step yields a schema whose key columns come
// from the key expressions (in order) and whose value columns are unchanged.
@Test public void shouldResolveSchemaForStreamSelectKeyV2() {
// Given:
final UnqualifiedColumnReferenceExp keyExpression1 = new UnqualifiedColumnReferenceExp(ColumnName.of("ORANGE")); final UnqualifiedColumnReferenceExp keyExpression2 = new UnqualifiedColumnReferenceExp(ColumnName.of("APPLE")); final StreamSelectKey<GenericKey> step = new StreamSelectKey<>( PROPERTIES, streamSource, ImmutableList.of(keyExpression1, keyExpression2) );
// When:
final LogicalSchema result = resolver.resolve(step, SCHEMA);
// Then:
assertThat(result, is(LogicalSchema.builder() .keyColumn(keyExpression1.getColumnName(), SqlTypes.INTEGER) .keyColumn(keyExpression2.getColumnName(), SqlTypes.BIGINT) .valueColumns(SCHEMA.value()) .build() )); }
/**
 * Handles a multiple-sessions request: rejects unauthenticated app sessions and inactive
 * authenticators with a NokResponse; otherwise runs the eIDAS UIT check and, when that
 * produces no response of its own, returns a detail-enriched WebSessionInformationResponse.
 */
@Override public AppResponse process(Flow flow, MultipleSessionsRequest request) throws FlowNotDefinedException, IOException, NoSuchAlgorithmException { var authAppSession = appSessionService.getSession(request.getAuthSessionId()); if (!isAppSessionAuthenticated(authAppSession)) return new NokResponse(); appAuthenticator = appAuthenticatorService.findByUserAppId(authAppSession.getUserAppId()); if (!isAppAuthenticatorActivated(appAuthenticator)) return new NokResponse(); var response = checkEidasUIT(); return response.orElseGet(() -> addDetailsToResponse(new WebSessionInformationResponse())); }
// Verifies that a NOK bsnkActivate response results in a NokResponse with the
// pip_request_failed_helpdesk error.
@Test public void processSessionInformationReceivedActivateResponseNotOk() throws FlowNotDefinedException, IOException, NoSuchAlgorithmException {
//given
Map<String, String> activateResponse = Map.of("status", "NOK", "faultReason", "NotUnique"); when(appAuthenticatorService.findByUserAppId(authAppSession.getUserAppId())).thenReturn(mockedAppAuthenticator); when(dwsClient.bsnkActivate(response.get("bsn"))).thenReturn(activateResponse);
//when
AppResponse appResponse = sessionInformationReceived.process(mockedFlow, multipleSessionsRequest);
//then
assertTrue(appResponse instanceof NokResponse); assertEquals("pip_request_failed_helpdesk", ((NokResponse) appResponse).getError() ); }
/** Reads a PostRequest body, saves it as a draft, and replies 200 OK with the saved post. */
Mono<ServerResponse> draftPost(ServerRequest request) {
    return request.bodyToMono(PostRequest.class)
            .flatMap(body -> postService.draftPost(body))
            .flatMap(saved -> ServerResponse.ok().bodyValue(saved));
}
// Verifies that POST /posts returns 200 OK with the drafted post in the body.
@Test void draftPost() { when(postService.draftPost(any())).thenReturn(Mono.just(TestPost.postV1())); webTestClient.post() .uri("/posts") .bodyValue(postRequest(TestPost.postV1())) .exchange() .expectStatus() .isOk() .expectBody(Post.class) .value(post -> assertThat(post).isEqualTo(TestPost.postV1())); }
/** Returns the wrapped integer value as a long. */
@Override
public long longValue() {
    final long result = value;
    return result;
}
// Verifies longValue() round-trips a sampled range of positive and negative integers.
@Override @Test void testLongValue() { for (int i = -1000; i < 3000; i += 200) { assertEquals((long) i, COSInteger.get(i).longValue()); } }
/**
 * Validates that {@code n} is non-negative and returns it unchanged.
 *
 * @param n    the value to validate
 * @param name the parameter name used in the error message
 * @return {@code n} when it is zero or positive
 * @throws IllegalArgumentException if {@code n} is negative
 */
public static int checkPositiveOrZero(int n, String name) {
    if (n >= 0) {
        return n;
    }
    throw new IllegalArgumentException(name + ": " + n + " (expected: >= 0)");
}
// Verifies that zero is accepted and returned unchanged by checkPositiveOrZero.
@Test public void checkPositiveOrZeroMustPassIfArgumentIsZero() { final int n = 0; final int actual = RangeUtil.checkPositiveOrZero(n, "var"); assertThat(actual, is(equalTo(n))); }
/**
 * Selects the next slots to allocate in two phases: first, pending "small" slots are
 * admitted while the small-slot budget (opts.v2().getTotalSmallSlots()) allows, and are
 * marked as small allocations; then remaining queued slots are admitted in queue order
 * against the global budget (computed excluding already-allocated small slots), stopping
 * at the first slot that does not fit. Options are refreshed before selection.
 */
@Override public List<LogicalSlot> peakSlotsToAllocate(SlotTracker slotTracker) { updateOptionsPeriodically(); List<LogicalSlot> slotsToAllocate = Lists.newArrayList(); int curNumAllocatedSmallSlots = numAllocatedSmallSlots; for (SlotContext slotContext : requiringSmallSlots.values()) { LogicalSlot slot = slotContext.getSlot(); if (curNumAllocatedSmallSlots + slot.getNumPhysicalSlots() > opts.v2().getTotalSmallSlots()) { break; } requiringQueue.remove(slotContext); slotsToAllocate.add(slot); slotContext.setAllocateAsSmallSlot(); curNumAllocatedSmallSlots += slot.getNumPhysicalSlots(); } int numAllocatedSlots = slotTracker.getNumAllocatedSlots() - numAllocatedSmallSlots; while (!requiringQueue.isEmpty()) { SlotContext slotContext = requiringQueue.peak(); if (!isGlobalSlotAvailable(numAllocatedSlots, slotContext.getSlot())) { break; } requiringQueue.poll(); slotsToAllocate.add(slotContext.getSlot()); numAllocatedSlots += slotContext.getSlot().getNumPhysicalSlots(); } return slotsToAllocate; }
// Verifies that slot selection picks up refreshed queue options (after the BE core count
// changes) so a previously oversized slot becomes allocatable on a later peak.
@Test public void testUpdateOptionsPeriodicallyAtAllocating() throws InterruptedException { QueryQueueOptions opts = QueryQueueOptions.createFromEnv(); SlotSelectionStrategyV2 strategy = new SlotSelectionStrategyV2(); SlotTracker slotTracker = new SlotTracker(ImmutableList.of(strategy)); LogicalSlot slot1 = generateSlot(opts.v2().getTotalSlots() / 2 + 1); LogicalSlot slot2 = generateSlot(opts.v2().getTotalSlots() / 2 - 1); LogicalSlot slot3 = generateSlot(2);
// 1. Require slot1, slot2, slot3.
assertThat(slotTracker.requireSlot(slot1)).isTrue(); assertThat(slotTracker.requireSlot(slot2)).isTrue(); assertThat(slotTracker.requireSlot(slot3)).isTrue();
// 2. Peak slot2 and slot3.
assertThat(strategy.peakSlotsToAllocate(slotTracker)).containsExactly(slot3, slot2);
// Make options changed.
BackendResourceStat.getInstance().setNumHardwareCoresOfBe(1, NUM_CORES * 2); Thread.sleep(1200);
// 3. Allocate slot2 and slot3.
slotTracker.allocateSlot(slot2); slotTracker.allocateSlot(slot3); assertThat(slot2.getState()).isEqualTo(LogicalSlot.State.ALLOCATED); assertThat(slot3.getState()).isEqualTo(LogicalSlot.State.ALLOCATED); assertThat(slotTracker.getNumAllocatedSlots()).isEqualTo(slot2.getNumPhysicalSlots() + slot3.getNumPhysicalSlots());
// 4. Peak slot1.
assertThat(strategy.peakSlotsToAllocate(slotTracker)).containsExactly(slot1); slotTracker.allocateSlot(slot1); assertThat(slot1.getState()).isEqualTo(LogicalSlot.State.ALLOCATED); assertThat(slotTracker.getNumAllocatedSlots()).isEqualTo( slot2.getNumPhysicalSlots() + slot3.getNumPhysicalSlots() + slot1.getNumPhysicalSlots()); }
/**
 * Maps each user uuid to whether it is managed, delegating to the single active
 * managed-instance service when one exists; otherwise every user is reported non-managed.
 */
@Override
public Map<String, Boolean> getUserUuidToManaged(DbSession dbSession, Set<String> userUuids) {
  return findManagedInstanceService()
      .map(managedInstanceService -> managedInstanceService.getUserUuidToManaged(dbSession, userUuids))
      // orElseGet defers building the all-non-managed fallback map until it is actually
      // needed; orElse would construct it eagerly even when a delegate is present.
      .orElseGet(() -> returnNonManagedForAll(userUuids));
}
// Verifies that activating two managed-instance services makes delegation fail fast.
@Test public void getUserUuidToManaged_ifMoreThanOneDelegatesActivated_throws() { Set<ManagedInstanceService> managedInstanceServices = Set.of(new AlwaysManagedInstanceService(), new AlwaysManagedInstanceService()); DelegatingManagedServices delegatingManagedServices = new DelegatingManagedServices(managedInstanceServices); assertThatIllegalStateException() .isThrownBy(() -> delegatingManagedServices.getUserUuidToManaged(dbSession, Set.of("a"))) .withMessage("The instance can't be managed by more than one identity provider and 2 were found."); }
/** Removes the cached RPC status entry for the given service key, if present. */
public static void removeStatus(String service) { SERVICE_STATUS_MAP.remove(service); }
// Verifies that removing a service status causes a fresh status instance on the next lookup.
@Test public void removeStatus() { RpcStatus old = RpcStatus.getStatus(SERVICE); RpcStatus.removeStatus(SERVICE); Assertions.assertNotEquals(RpcStatus.getStatus(SERVICE), old); }
/**
 * Recursively converts a YAML node tree to its org.json equivalent:
 * null -> JSONObject.NULL, mapping -> JSONObject, sequence -> JSONArray,
 * scalar -> its raw value.
 *
 * @throws IllegalArgumentException for any other YamlNode implementation
 */
public static Object convert(YamlNode yamlNode) {
    if (yamlNode == null) {
        return JSONObject.NULL;
    }
    if (yamlNode instanceof YamlMapping yamlMapping) {
        JSONObject jsonObject = new JSONObject();
        for (YamlNameNodePair pair : yamlMapping.childrenPairs()) {
            jsonObject.put(pair.nodeName(), convert(pair.childNode()));
        }
        return jsonObject;
    }
    if (yamlNode instanceof YamlSequence yamlSequence) {
        JSONArray jsonArray = new JSONArray();
        for (YamlNode child : yamlSequence.children()) {
            jsonArray.put(convert(child));
        }
        return jsonArray;
    }
    if (yamlNode instanceof YamlScalar scalar) {
        return scalar.nodeValue();
    }
    throw new IllegalArgumentException("Unknown type " + yamlNode.getClass().getName());
}
// Verifies that a nested YAML mapping converts to the structurally-equivalent JSON object.
@Test public void convertSuccess() { String expectedJson = "{\n" + " \"hazelcast\":{\n" + " \"network\":{\n" + " \"port\":{\n" + " \"auto-increment\":true,\n" + " \"port-count\":100,\n" + " \"port\":5701,\n" + " \"outbound-ports\":[\n" + " \"33000-35000\",\n" + " \"37000,37001,37002,37003\",\n" + " \"38000,38500-38600\"\n" + " ]\n" + " },\n" + " \"public-address\":\"dummy\"\n" + " }\n" + " }\n" + "}"; JSONObject expectedJsonObject = new JSONObject(expectedJson); YamlMappingImpl parentNode = createYamlMapping(); Object converted = YamlToJsonConverter.convert(parentNode); assertTrue(expectedJsonObject.similar(converted)); }
/**
 * Extracts candidate mod/package keywords from a Minecraft crash report's stack trace:
 * package segments from each stack frame (dropping the last two segments — presumably
 * class and method; verify against STACK_TRACE_LINE_PATTERN) plus "xf"-prefixed module
 * tokens from frame module annotations, skipping anything in the package blacklist.
 */
public static Set<String> findKeywordsFromCrashReport(String crashReport) { Matcher matcher = CRASH_REPORT_STACK_TRACE_PATTERN.matcher(crashReport); Set<String> result = new HashSet<>(); if (matcher.find()) { for (String line : matcher.group("stacktrace").split("\\n")) { Matcher lineMatcher = STACK_TRACE_LINE_PATTERN.matcher(line); if (lineMatcher.find()) { String[] method = lineMatcher.group("method").split("\\."); for (int i = 0; i < method.length - 2; i++) { if (PACKAGE_KEYWORD_BLACK_LIST.contains(method[i])) { continue; } result.add(method[i]); } Matcher moduleMatcher = STACK_TRACE_LINE_MODULE_PATTERN.matcher(line); if (moduleMatcher.find()) { for (String module : moduleMatcher.group("tokens").split(",")) { String[] split = module.split(":"); if (split.length >= 2 && "xf".equals(split[0])) { if (PACKAGE_KEYWORD_BLACK_LIST.contains(split[1])) { continue; } result.add(split[1]); } } } } } } return result; }
// Verifies keyword extraction from a CreativeMD crash-report fixture.
@Test public void creativemd() throws IOException { assertEquals( new HashSet<>(Arrays.asList("creativemd", "itemphysic")), CrashReportAnalyzer.findKeywordsFromCrashReport(loadLog("/crash-report/mod/creativemd.txt"))); }
public DirectoryEntry lookUp( File workingDirectory, JimfsPath path, Set<? super LinkOption> options) throws IOException { checkNotNull(path); checkNotNull(options); DirectoryEntry result = lookUp(workingDirectory, path, options, 0); if (result == null) { // an intermediate file in the path did not exist or was not a directory throw new NoSuchFileException(path.toString()); } return result; }
// Verifies that relative lookups whose final component is a symlink resolve to the targets.
@Test public void testLookup_relative_finalSymlink() throws IOException { assertExists(lookup("four/five"), "/", "foo"); assertExists(lookup("four/six"), "work", "one"); }
/**
 * Returns all grants with the given capability whose grantee is one of the supplied GRNs
 * or the special global-user GRN (i.e. grants that apply to everyone).
 */
public ImmutableSet<GrantDTO> getForGranteesOrGlobalWithCapability(Set<GRN> grantees, Capability capability) { return streamQuery(DBQuery.and( DBQuery.or( DBQuery.in(GrantDTO.FIELD_GRANTEE, grantees), DBQuery.is(GrantDTO.FIELD_GRANTEE, GRNRegistry.GLOBAL_USER_GRN.toString()) ), DBQuery.is(GrantDTO.FIELD_CAPABILITY, capability) )).collect(ImmutableSet.toImmutableSet()); }
// Verifies grantee/global grant counts per capability against the grants.json fixture.
@Test @MongoDBFixtures("grants.json") public void getForGranteesOrGlobalWithCapability() { final GRN jane = grnRegistry.newGRN("user", "jane"); final GRN john = grnRegistry.newGRN("user", "john"); assertThat(dbService.getForGranteesOrGlobalWithCapability(ImmutableSet.of(jane), Capability.MANAGE)).hasSize(1); assertThat(dbService.getForGranteesOrGlobalWithCapability(ImmutableSet.of(jane), Capability.OWN)).hasSize(1); assertThat(dbService.getForGranteesOrGlobalWithCapability(ImmutableSet.of(john), Capability.VIEW)).hasSize(2); assertThat(dbService.getForGranteesOrGlobalWithCapability(ImmutableSet.of(jane, john), Capability.VIEW)).hasSize(3); assertThat(dbService.getForGranteesOrGlobalWithCapability(ImmutableSet.of(jane, john), Capability.MANAGE)).hasSize(1); assertThat(dbService.getForGranteesOrGlobalWithCapability(ImmutableSet.of(jane, john), Capability.OWN)).hasSize(2); }
/** Returns the shared Gson instance held by SingletonHolder. */
public static Gson instance() { return SingletonHolder.INSTANCE; }
// Verifies that the shared Gson serializes a ConfigurationProperty (decrypted value exposed,
// nested GoCipher excluded) without throwing.
@SuppressWarnings("unchecked") @Test void successfullySerializesConfigurationPropertyBecauseGoCipherIsHiddenFromSerialization() { assertDoesNotThrow(() -> { final String json = Serialization.instance().toJson(new ConfigurationProperty(dumbCipher()) .withKey("hello") .withEncryptedValue("dlrow")); Map<String, String> actual = new Gson().fromJson(json, Map.class); assertEquals(2, actual.size()); assertEquals("hello", actual.get("key")); assertEquals("world", actual.get("value")); }, "ConfigurationProperty should serialize without error because its type adapter hides the nested GoCipher from Gson"); }
/**
 * No-op registration: validates the metric is non-null and returns it unchanged,
 * without recording it anywhere or notifying listeners.
 *
 * @throws NullPointerException if {@code metric} is null
 */
@Override
public <T extends Metric> T register(String name, T metric) throws IllegalArgumentException {
    if (metric != null) {
        return metric;
    }
    throw new NullPointerException("metric == null");
}
// Confirms the no-op registry hands the meter back unchanged and fires no listener callback.
@Test public void registeringAMeterTriggersNoNotification() { assertThat(registry.register("thing", meter)).isEqualTo(meter); verify(listener, never()).onMeterAdded("thing", meter); }
/**
 * Decides whether a general (non-OAuth) sign-up is allowed for the given phone number.
 *
 * <p>Outcomes: no prior record → sign-up allowed; already a general user → abort with the
 * existing account's id/username; OAuth-only user → allowed, flagged as having an existing account.
 *
 * @param phone phone number to check
 * @return sign-up decision DTO
 */
@Transactional(readOnly = true)
public UserSyncDto isSignUpAllowed(String phone) {
    Optional<User> found = userService.readUserByPhone(phone);
    if (!isExistUser(found)) {
        log.info("회원가입 이력이 없는 사용자입니다. phone: {}", phone);
        return UserSyncDto.signUpAllowed();
    }
    User existing = found.get();
    if (isGeneralSignUpUser(existing)) {
        // A fully registered general user already owns this phone number.
        log.warn("이미 회원가입된 사용자입니다. user: {}", existing);
        return UserSyncDto.abort(existing.getId(), existing.getUsername());
    }
    // OAuth-only account: sign-up may proceed, linked to the existing account.
    log.info("소셜 회원가입 사용자입니다. user: {}", existing);
    return UserSyncDto.of(true, true, existing.getId(), existing.getUsername());
}
// With only an OAuth record on file, sign-up must be allowed while reporting the existing account.
@DisplayName("일반 회원가입 시, oauth 회원 정보만 있으면 {회원 가입 가능, 기존 계정 있음, 기존 계정 아이디} 응답을 반환한다.") @Test void isSignedUserWhenGeneralReturnTrue() { // given given(userService.readUserByPhone(phone)).willReturn(Optional.of(UserFixture.OAUTH_USER.toUser())); // when UserSyncDto userSync = userGeneralSignService.isSignUpAllowed(phone); // then assertTrue(userSync.isSignUpAllowed()); assertTrue(userSync.isExistAccount()); assertEquals(UserFixture.OAUTH_USER.getUsername(), userSync.username()); }
/**
 * Fetches committed offsets for the consumer groups described by {@code groupSpecs}.
 *
 * @param groupSpecs per-group specification of which offsets to list
 * @param options    request options (stability requirement, timeout)
 * @return result exposing a future per group
 */
@Override
public ListConsumerGroupOffsetsResult listConsumerGroupOffsets(Map<String, ListConsumerGroupOffsetsSpec> groupSpecs,
                                                               ListConsumerGroupOffsetsOptions options) {
    // One coordinator-keyed future per group id.
    final SimpleAdminApiFuture<CoordinatorKey, Map<TopicPartition, OffsetAndMetadata>> groupFutures =
            ListConsumerGroupOffsetsHandler.newFuture(groupSpecs.keySet());
    final ListConsumerGroupOffsetsHandler handler =
            new ListConsumerGroupOffsetsHandler(groupSpecs, options.requireStable(), logContext);
    invokeDriver(handler, groupFutures, options.timeoutMs);
    return new ListConsumerGroupOffsetsResult(groupFutures.all());
}
// Exercises the batched listConsumerGroupOffsets path: one FindCoordinator response covers all
// groups, then a batched OffsetFetch response is verified per group.
@Test public void testBatchedListConsumerGroupOffsets() throws Exception { Cluster cluster = mockCluster(1, 0); Time time = new MockTime(); Map<String, ListConsumerGroupOffsetsSpec> groupSpecs = batchedListConsumerGroupOffsetsSpec(); try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(time, cluster, AdminClientConfig.RETRIES_CONFIG, "0")) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); env.kafkaClient().prepareResponse(prepareBatchedFindCoordinatorResponse(Errors.NONE, env.cluster().controller(), groupSpecs.keySet())); ListConsumerGroupOffsetsResult result = env.adminClient().listConsumerGroupOffsets(groupSpecs, new ListConsumerGroupOffsetsOptions()); sendOffsetFetchResponse(env.kafkaClient(), groupSpecs, true, Errors.NONE); verifyListOffsetsForMultipleGroups(groupSpecs, result); } }
/**
 * Sends the given request through the underlying Telegram Bot API client.
 *
 * @param request request to execute
 * @return the API response for this request type
 */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    final R response = api.send(request);
    return response;
}
// Live-API smoke test: submitting a passport data-field error must yield an OK response.
@Test public void setPassportDataErrors() { BaseResponse response = bot.execute(new SetPassportDataErrors(chatId, new PassportElementErrorDataField("personal_details", "first_name", "TueU2/SswOD5wgQ6uXQ62mJrr0Jdf30r/QQ/jyETHFM=", "error in page 1") )); System.out.println(response); assertTrue(response.isOk()); }
/**
 * Cumulative distribution function of the Student's t-distribution.
 *
 * <p>Computes the one-sided tail probability for |x| via the regularized incomplete
 * beta function, then exploits the distribution's symmetry about zero.
 *
 * @param x evaluation point
 * @return P(X <= x)
 */
@Override
public double cdf(double x) {
    // Tail probability for |x|; by symmetry this equals the CDF at -|x|.
    final double tail = 0.5 * Beta.regularizedIncompleteBetaFunction(0.5 * nu, 0.5, nu / (nu + x * x));
    return x >= 0 ? 1.0 - tail : tail;
}
// Checks the t(20) CDF against reference values (R's pt) at symmetric points around zero.
@Test public void testCdf() { System.out.println("cdf"); TDistribution instance = new TDistribution(20); instance.rand(); assertEquals(1.581891e-09, instance.cdf(-10.0), 1E-15); assertEquals(0.02963277, instance.cdf(-2.0), 1E-7); assertEquals(0.1646283, instance.cdf(-1.0), 1E-7); assertEquals(0.5, instance.cdf(0.0), 1E-7); assertEquals(0.8353717, instance.cdf(1.0), 1E-7); assertEquals(0.9703672, instance.cdf(2.0), 1E-7); assertEquals(1.0, instance.cdf(10.0), 1E-7); }
/** Returns the wire-protocol name of the MD5 password authentication method. */
@Override
public String getAuthenticationMethodName() {
    final PostgreSQLAuthenticationMethod method = PostgreSQLAuthenticationMethod.MD5;
    return method.getMethodName();
}
// The OpenGauss MD5 authenticator must report the method name "md5".
@Test void assertAuthenticationMethodName() { assertThat(new OpenGaussMD5PasswordAuthenticator().getAuthenticationMethodName(), is("md5")); }
/**
 * Records a watermark value observed on the given input queue.
 *
 * @param queueIndex index of the queue the watermark arrived on
 * @param wmValue    the observed watermark value
 * @return the watermark value to forward downstream; based on the tests, presumably
 *         {@code Long.MIN_VALUE} when no watermark can be forwarded yet — TODO confirm
 *         against the implementing class
 */
public abstract long observeWm(int queueIndex, long wmValue);
// When one input reports idle, the other input's watermark must be forwarded immediately.
@Test public void when_i1Idle_i2HasWm_then_forwardedImmediately() { assertEquals(Long.MIN_VALUE, wc.observeWm(0, IDLE_MESSAGE.timestamp())); assertEquals(100, wc.observeWm(1, 100)); }
@Override public List<RemoteFileInfo> getRemoteFiles(Table table, GetRemoteFilesParams params) { List<Partition> partitions = buildGetRemoteFilesPartitions(table, params); boolean useCache = true; if (table instanceof HiveTable) { useCache = ((HiveTable) table).isUseMetadataCache(); } // if we disable cache explicitly if (!params.isUseCache()) { useCache = false; } GetRemoteFilesParams updatedParams = params.copy(); updatedParams.setUseCache(useCache); return fileOps.getRemoteFiles(table, partitions, updatedParams); }
// A recursive path key must surface the file inside the subdirectory with its relative name.
@Test public void testGetFileWithSubdir() throws StarRocksConnectorException { RemotePathKey pathKey = new RemotePathKey("hdfs://127.0.0.1:10000/hive.db", true); Map<RemotePathKey, List<RemoteFileDesc>> files = hiveRemoteFileIO.getRemoteFiles(pathKey); List<RemoteFileDesc> remoteFileDescs = files.get(pathKey); Assert.assertEquals(1, remoteFileDescs.size()); Assert.assertEquals("hive_tbl/000000_0", remoteFileDescs.get(0).getFileName()); }
/**
 * Executes the scenario: runs beforeRun() once (steps == null marks the first entry), then drives
 * each step via nextStepIndex(), accumulating step results. Any crash is converted into a fake
 * failed step result so the runner never loses the failure. The finally block is critical:
 * afterRun() must run even on crashes so parallel-runner countdown latches are released, the
 * suite is aborted when configured to stop on failure, and the log appender is closed to
 * reclaim memory for "none" callers.
 */
@Override public void run() { try { // make sure we call afterRun() even on crashes // and operate countdown latches, else we may hang the parallel runner if (steps == null) { beforeRun(); } if (skipped) { return; } int count = steps.size(); int index = 0; while ((index = nextStepIndex()) < count) { currentStep = steps.get(index); execute(currentStep); if (currentStepResult != null) { // can be null if debug step-back or hook skip result.addStepResult(currentStepResult); } } } catch (Exception e) { if (currentStepResult != null) { result.addStepResult(currentStepResult); } logError("scenario [run] failed\n" + StringUtils.throwableToString(e)); currentStepResult = result.addFakeStepResult("scenario [run] failed", e); } finally { if (!skipped) { afterRun(); if (isFailed() && engine.getConfig().isAbortSuiteOnFailure()) { featureRuntime.suite.abort(); } } if (caller.isNone()) { logAppender.close(); // reclaim memory } } }
// Exercises karate.match() in both forms (value+expected and inline expression) and verifies
// the pass flag and failure message shape of each result.
@Test void testMatch() { run( "def foo = { a: 1 }", "def mat1 = karate.match(foo, {a: 2})", "def mat2 = karate.match('foo == { a: 1 }')", "def bar = []", "def mat3 = karate.match(bar, [])" ); matchVar("mat1", "{ pass: false, message: '#notnull' }"); matchVar("mat2", "{ pass: true, message: '#null' }"); matchVar("mat3", "{ pass: true, message: '#null' }"); }
/**
 * Builds a "virtual" Dependency for the given Maven project.
 *
 * <p>Prefers the conventional {@code pom.xml} in the project base directory; falls back to
 * the project's own POM file (which may have a non-standard name, e.g. a {@code *.pom}
 * declared as a module); otherwise returns a file-less virtual dependency.
 *
 * @param prj the Maven project
 * @return a virtual Dependency representing the project
 */
Dependency newDependency(MavenProject prj) {
    final File pom = new File(prj.getBasedir(), "pom.xml");
    if (pom.isFile()) {
        getLog().debug("Adding virtual dependency from pom.xml");
        return new Dependency(pom, true);
    }
    if (prj.getFile().isFile()) {
        getLog().debug("Adding virtual dependency from file");
        return new Dependency(prj.getFile(), true);
    }
    return new Dependency(true);
}
// When no pom.xml exists in the base dir, newDependency must fall back to the project's own
// POM file (here a *.pom declared as a module in another pom).
@Test public void should_newDependency_get_pom_declared_as_module() { // Given BaseDependencyCheckMojo instance = new BaseDependencyCheckMojoImpl(); new MockUp<MavenProject>() { @Mock public File getBasedir() { return new File("src/test/resources/dir_containing_maven_poms_declared_as_modules_in_another_pom"); } @Mock public File getFile() { return new File("src/test/resources/dir_containing_maven_poms_declared_as_modules_in_another_pom/serverlibs.pom"); } }; String expectOutput = "serverlibs.pom"; // When String output = instance.newDependency(project).getFileName(); // Then assertEquals(expectOutput, output); }
/**
 * Computes the HMAC-MD5 of the given message with the given key, returned as a hex string.
 *
 * @param data message to authenticate
 * @param key  secret key
 * @return hex-encoded HMAC-MD5 digest
 */
public static String hmacMd5(String data, String key) {
    // Use an explicit charset: the no-arg String.getBytes() depends on the JVM's
    // platform default encoding, which would make the MAC value platform-dependent
    // for non-ASCII input. UTF-8 keeps results stable across platforms (identical
    // to the old behavior for ASCII input).
    return hmacMd5(data.getBytes(java.nio.charset.StandardCharsets.UTF_8),
            key.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}
// Checks the String, byte[], and raw-byte HMAC-MD5 variants all agree with a known digest.
@Test public void testHmacMD5() throws Exception { String biezhiHmacMD5 = "c3b11ef266e3eab92d7870b43483640c"; Assert.assertEquals( biezhiHmacMD5, EncryptKit.hmacMd5("biezhi", biezhiHmackey) ); Assert.assertEquals( biezhiHmacMD5, EncryptKit.hmacMd5("biezhi".getBytes(), biezhiHmackey.getBytes()) ); TestCase.assertTrue( Arrays.equals( ConvertKit.hexString2Bytes(biezhiHmacMD5), EncryptKit.hmacMd5ToByte("biezhi".getBytes(), biezhiHmackey.getBytes()) ) ); }
/**
 * Returns the index of the given ACE within this object's ACE list.
 *
 * @param ace the ACE to locate (compared via {@code ace.equals})
 * @return the zero-based index, or -1 when the ACE is not present
 */
public int getAceIndex( ObjectAce ace ) {
  final List<ObjectAce> aces = obj.getAces();
  int index = 0;
  for ( ObjectAce candidate : aces ) {
    if ( ace.equals( candidate ) ) {
      return index;
    }
    index++;
  }
  return -1;
}
// After adding three ACLs, the index returned for the middle ACE must map back to the same ACL.
@Test public void testGetAceIndex() { List<UIRepositoryObjectAcl> originalUIAcls = Arrays.asList( new UIRepositoryObjectAcl[] { objectAcl1, objectAcl2, objectAcl3 } ); repositoryObjectAcls.addAcls( originalUIAcls ); int i = repositoryObjectAcls.getAceIndex( objectAcl2.getAce() ); assertTrue( objectAcl2.equals( repositoryObjectAcls.getAcls().get( i ) ) ); }
/**
 * KSQL UDF: parses a formatted date/time string into epoch milliseconds using a cached
 * per-pattern parser. Deliberately does no null check — a null input should raise rather
 * than be mapped to a sentinel, since no "null" Date exists. Both checked cache failures
 * (ExecutionException) and parser RuntimeExceptions are rewrapped as KsqlFunctionException
 * with the offending input and pattern in the message.
 */
@Udf(description = "Converts a string representation of a date in the given format" + " into the number of milliseconds since 1970-01-01 00:00:00 UTC/GMT." + " Single quotes in the timestamp format can be escaped with ''," + " for example: 'yyyy-MM-dd''T''HH:mm:ssX'." + " The system default time zone is used when no time zone is explicitly provided.") public long stringToTimestamp( @UdfParameter( description = "The string representation of a date.") final String formattedTimestamp, @UdfParameter( description = "The format pattern should be in the format expected by" + " java.time.format.DateTimeFormatter.") final String formatPattern) { // NB: We do not perform a null here preferring to throw an exception as // there is no sentinel value for a "null" Date. try { final StringToTimestampParser timestampParser = parsers.get(formatPattern); return timestampParser.parse(formattedTimestamp); } catch (final ExecutionException | RuntimeException e) { throw new KsqlFunctionException("Failed to parse timestamp '" + formattedTimestamp + "' with formatter '" + formatPattern + "': " + e.getMessage(), e); } }
// Literal text embedded in the pattern (quoted 'Fred') must parse the same as SimpleDateFormat.
@Test public void shouldSupportEmbeddedChars() throws ParseException { // When: final Object result = udf.stringToTimestamp("2021-12-01T12:10:11.123Fred", "yyyy-MM-dd'T'HH:mm:ss.SSS'Fred'"); // Then: final long expectedResult = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Fred'") .parse("2021-12-01T12:10:11.123Fred").getTime(); assertThat(result, is(expectedResult)); }
/**
 * Post-advice hook: bumps the transaction counter metric for this advice's
 * transaction type in the collector registered for the given plugin type.
 */
@Override
public void afterMethod(final TargetAdviceObject target, final TargetAdviceMethod method, final Object[] args, final Object result, final String pluginType) {
    final CounterMetricsCollector counter = MetricsCollectorRegistry.get(config, pluginType);
    counter.inc(getTransactionType());
}
// After the advice fires once, the fixture collector must report a single rollback.
@Test void assertWithRollback() { RollbackTransactionsCountAdvice advice = new RollbackTransactionsCountAdvice(); advice.afterMethod(new TargetAdviceObjectFixture(), mock(TargetAdviceMethod.class), new Object[]{}, null, "FIXTURE"); assertThat(MetricsCollectorRegistry.get(config, "FIXTURE").toString(), is("rollback=1")); }
/**
 * Validates that the given property exists and its value contains at least one character.
 *
 * @param props     algorithm properties
 * @param propKey   property key to validate
 * @param algorithm algorithm reported in the failure message
 * @throws AlgorithmInitializationException when the property is missing or empty
 */
public static void checkAtLeastOneChar(final Properties props, final String propKey, final MaskAlgorithm<?, ?> algorithm) {
    // Presence first, then non-emptiness.
    checkRequired(props, propKey, algorithm);
    final String value = props.getProperty(propKey);
    ShardingSpherePreconditions.checkNotEmpty(value,
            () -> new AlgorithmInitializationException(algorithm, "%s's length must be at least one", propKey));
}
// An empty property value must be rejected with AlgorithmInitializationException.
@Test void assertCheckAtLeastOneCharFailedWithEmptyChar() { Properties props = PropertiesBuilder.build(new Property("key", "")); assertThrows(AlgorithmInitializationException.class, () -> MaskAlgorithmPropertiesChecker.checkAtLeastOneChar(props, "key", mock(MaskAlgorithm.class))); }
/**
 * Equality is defined over the full EntryView contract: key, value, and all metadata fields
 * (version, cost, creation/expiration times, hits, access/store/update times, TTL) must match.
 * Any EntryView implementation compares equal when all accessors agree, so lazily-deserialized
 * and eager views of the same entry are interchangeable. NOTE(review): a matching hashCode()
 * must exist elsewhere in this class to honor the equals/hashCode contract — confirm.
 */
@Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof EntryView that)) { return false; } return getKey().equals(that.getKey()) && getValue().equals(that.getValue()) && getVersion() == that.getVersion() && getCost() == that.getCost() && getCreationTime() == that.getCreationTime() && getExpirationTime() == that.getExpirationTime() && getHits() == that.getHits() && getLastAccessTime() == that.getLastAccessTime() && getLastStoredTime() == that.getLastStoredTime() && getLastUpdateTime() == that.getLastUpdateTime() && getTtl() == that.getTtl(); }
// Symmetry check: a lazy entry view and the reference view must be mutually equal.
@Test public void test_equals() { EntryView entryView = createLazyEvictableEntryView(); assertTrue(view.equals(entryView) && entryView.equals(view)); }
/**
 * Builds a single OffsetCommitRequest covering all offsets held by this handler,
 * grouping the per-partition data by topic.
 *
 * @param coordinatorId target coordinator (unused in the request body)
 * @param groupIds      group keys, validated to match this handler's group
 * @return the request builder
 */
@Override
public OffsetCommitRequest.Builder buildBatchedRequest(
    int coordinatorId,
    Set<CoordinatorKey> groupIds
) {
    validateKeys(groupIds);
    // Group partition-level commit data under one entry per topic.
    Map<String, OffsetCommitRequestTopic> topicsByName = new HashMap<>();
    for (Map.Entry<TopicPartition, OffsetAndMetadata> entry : offsets.entrySet()) {
        TopicPartition tp = entry.getKey();
        OffsetAndMetadata oam = entry.getValue();
        OffsetCommitRequestTopic topic = topicsByName.computeIfAbsent(
            tp.topic(),
            name -> new OffsetCommitRequestTopic().setName(tp.topic())
        );
        topic.partitions().add(new OffsetCommitRequestPartition()
            .setCommittedOffset(oam.offset())
            .setCommittedLeaderEpoch(oam.leaderEpoch().orElse(-1))
            .setCommittedMetadata(oam.metadata())
            .setPartitionIndex(tp.partition()));
    }
    OffsetCommitRequestData data = new OffsetCommitRequestData()
        .setGroupId(groupId.idValue)
        .setTopics(new ArrayList<>(topicsByName.values()));
    return new OffsetCommitRequest.Builder(data);
}
// The built OffsetCommitRequest must carry the group id, two topics, and the expected
// committed offset on the first partition of the first topic.
@Test public void testBuildRequest() { AlterConsumerGroupOffsetsHandler handler = new AlterConsumerGroupOffsetsHandler(groupId, partitions, logContext); OffsetCommitRequest request = handler.buildBatchedRequest(-1, singleton(CoordinatorKey.byGroupId(groupId))).build(); assertEquals(groupId, request.data().groupId()); assertEquals(2, request.data().topics().size()); assertEquals(2, request.data().topics().get(0).partitions().size()); assertEquals(offset, request.data().topics().get(0).partitions().get(0).committedOffset()); }
/**
 * Returns the boolean value of the column at the given index, converting the
 * merged result value to boolean where necessary.
 *
 * @param columnIndex 1-based column index
 * @throws SQLException if the underlying merged result set fails
 */
@Override
public boolean getBoolean(final int columnIndex) throws SQLException {
    final Object raw = mergeResultSet.getValue(columnIndex, boolean.class);
    return (boolean) ResultSetUtils.convertValue(raw, boolean.class);
}
// Column-label access must resolve to index 1 and surface the stubbed boolean value.
@Test void assertGetBooleanWithColumnLabel() throws SQLException { when(mergeResultSet.getValue(1, boolean.class)).thenReturn(true); assertTrue(shardingSphereResultSet.getBoolean("label")); }
/**
 * Opens an output stream for writing the object under the given key.
 *
 * <p>Chooses multipart upload when enabled in the UFS configuration, otherwise a
 * simple single-shot COS output stream staged through the configured temp dirs.
 *
 * @param key object key to create
 * @throws IOException if the stream cannot be created
 */
@Override
protected OutputStream createObject(String key) throws IOException {
    final boolean multipartEnabled =
        mUfsConf.getBoolean(PropertyKey.UNDERFS_COS_MULTIPART_UPLOAD_ENABLED);
    if (multipartEnabled) {
        return new COSMultipartUploadOutputStream(mBucketNameInternal, key, mClient,
            mMultipartUploadExecutor.get(), mUfsConf);
    }
    return new COSOutputStream(mBucketNameInternal, key, mClient,
        mUfsConf.getList(PropertyKey.TMP_DIRS));
}
// With multipart disabled, createObject must return a plain COSOutputStream.
@Test public void testCreateObject() throws IOException { // test successful create object Mockito.when(mClient.putObject(ArgumentMatchers.anyString(), ArgumentMatchers.anyString(), ArgumentMatchers.any(InputStream.class), ArgumentMatchers.any(ObjectMetadata.class))).thenReturn(null); OutputStream result = mCOSUnderFileSystem.createObject(KEY); Assert.assertTrue(result instanceof COSOutputStream); }
/**
 * Copies bytes from this buffer, starting at {@code index}, into the entire
 * destination array.
 *
 * @return this buffer, for chaining
 */
@Override
public ByteBuf getBytes(int index, byte[] dst) {
    // Delegate to the ranged overload, covering the whole destination array.
    getBytes(index, dst, 0, dst.length);
    return this;
}
// Reading from a released buffer must fail with IllegalReferenceCountException; the
// destination buffer is released in finally to avoid a leak in the test itself.
@Test public void testGetBytesAfterRelease() { final ByteBuf buffer = buffer(8); try { assertThrows(IllegalReferenceCountException.class, new Executable() { @Override public void execute() { releasedBuffer().getBytes(0, buffer); } }); } finally { buffer.release(); } }
/**
 * SDF tracker factory: builds a restriction tracker for the given subscription partition
 * and offset range, wiring in a freshly created backlog reader for progress estimation.
 */
@NewTracker public TrackerWithProgress newTracker( @Element SubscriptionPartition subscriptionPartition, @Restriction OffsetByteRange range) { return trackerFactory.apply(backlogReaderFactory.create(subscriptionPartition), range); }
// newTracker must delegate to the tracker factory with the reader built for the partition.
@Test public void newTrackerCallsFactory() { assertSame(tracker, sdf.newTracker(PARTITION, RESTRICTION)); verify(trackerFactory).apply(backlogReader, RESTRICTION); }
/**
 * Serializes this drag-and-drop payload as an XML document: optional ID, the drag type code,
 * and the data Base64-encoded in the file's XML encoding. The exact concatenation order and
 * spacing are part of the wire format consumed by the matching parser — do not reorder.
 * The UnsupportedEncodingException is rewrapped unchecked because Const.XML_ENCODING is a
 * fixed, known-supported encoding.
 */
public String getXML() { try { StringBuilder xml = new StringBuilder( 100 ); xml.append( XMLHandler.getXMLHeader() ); // UFT-8 XML header xml.append( XMLHandler.openTag( XML_TAG ) ).append( Const.CR ); if ( id != null ) { xml.append( "  " ).append( XMLHandler.addTagValue( "ID", id ) ); } xml.append( "  " ).append( XMLHandler.addTagValue( "DragType", getTypeCode() ) ); xml.append( "  " ).append( XMLHandler .addTagValue( "Data", new String( Base64.encodeBase64( data.getBytes( Const.XML_ENCODING ) ) ) ) ); xml.append( XMLHandler.closeTag( XML_TAG ) ).append( Const.CR ); return xml.toString(); } catch ( UnsupportedEncodingException e ) { throw new RuntimeException( "Unable to encode String in encoding [" + Const.XML_ENCODING + "]", e ); } }
// Pins the exact XML wire format, including the Base64 encoding of the step name.
@Test public void getXML() { DragAndDropContainer dnd = new DragAndDropContainer( DragAndDropContainer.TYPE_BASE_STEP_TYPE, "Step Name" ); String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + Const.CR + "<DragAndDrop>" + Const.CR + "  <DragType>BaseStep</DragType>" + Const.CR + "  <Data>U3RlcCBOYW1l</Data>" + Const.CR + "</DragAndDrop>" + Const.CR; assertEquals( xml, dnd.getXML() ); }
/**
 * Applies OSM way (and barrier-node) tags to the access encoded value for this vehicle.
 * Evaluation order is significant and must not be reordered: (1) optionally skip
 * emergency-access service roads; (2) honor the highest-priority restriction key unless a
 * temporal condition applies; (3) for synthetic barrier edges, consult the node tags
 * (restriction value, barrier type, locked flag); (4) ferries get bidirectional access
 * under the documented conditions (intended values, implied car default, hgv=yes);
 * (5) otherwise apply oneway/roundabout logic, where an explicit "no" on an
 * ignore-oneway key forces bidirectional access.
 */
@Override public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) { String highwayValue = way.getTag("highway"); if (skipEmergency && "service".equals(highwayValue) && "emergency_access".equals(way.getTag("service"))) return; int firstIndex = way.getFirstIndex(restrictionKeys); String firstValue = firstIndex < 0 ? "" : way.getTag(restrictionKeys.get(firstIndex), ""); if (restrictedValues.contains(firstValue) && !hasTemporalRestriction(way, firstIndex, restrictionKeys)) return; if (way.hasTag("gh:barrier_edge") && way.hasTag("node_tags")) { List<Map<String, Object>> nodeTags = way.getTag("node_tags", null); Map<String, Object> firstNodeTags = nodeTags.get(0); // a barrier edge has the restriction in both nodes and the tags are the same -> get(0) firstValue = getFirstPriorityNodeTag(firstNodeTags, restrictionKeys); String barrierValue = firstNodeTags.containsKey("barrier") ? (String) firstNodeTags.get("barrier") : ""; if (restrictedValues.contains(firstValue) || barriers.contains(barrierValue) || "yes".equals(firstNodeTags.get("locked")) && !INTENDED.contains(firstValue)) return; } if (FerrySpeedCalculator.isFerry(way)) { boolean isCar = restrictionKeys.contains("motorcar"); if (INTENDED.contains(firstValue) // implied default is allowed only if foot and bicycle is not specified: || isCar && firstValue.isEmpty() && !way.hasTag("foot") && !way.hasTag("bicycle") // if hgv is allowed then smaller trucks and cars are allowed too even if not specified || isCar && way.hasTag("hgv", "yes")) { accessEnc.setBool(false, edgeId, edgeIntAccess, true); accessEnc.setBool(true, edgeId, edgeIntAccess, true); } } else { boolean isRoundabout = roundaboutEnc.getBool(false, edgeId, edgeIntAccess); boolean ignoreOneway = "no".equals(way.getFirstValue(ignoreOnewayKeys)); boolean isBwd = isBackwardOneway(way); if (!ignoreOneway && (isBwd || isRoundabout || isForwardOneway(way))) { accessEnc.setBool(isBwd, edgeId, edgeIntAccess, true); } else 
{ accessEnc.setBool(false, edgeId, edgeIntAccess, true); accessEnc.setBool(true, edgeId, edgeIntAccess, true); } } }
// Covers oneway handling: plain oneway=yes, vehicle:forward/backward restrictions, and
// vehicle-specific overrides (bus:backward) that re-enable or disable each direction.
@Test public void testOneway() { ReaderWay way = new ReaderWay(1); way.setTag("highway", "primary"); way.setTag("oneway", "yes"); int edgeId = 0; EdgeIntAccess edgeIntAccess = ArrayEdgeIntAccess.createFromBytes(em.getBytesForFlags()); parser.handleWayTags(edgeId, edgeIntAccess, way, null); assertTrue(busAccessEnc.getBool(false, edgeId, edgeIntAccess)); assertFalse(busAccessEnc.getBool(true, edgeId, edgeIntAccess)); way.clearTags(); way.setTag("highway", "tertiary"); way.setTag("vehicle:forward", "no"); edgeIntAccess = ArrayEdgeIntAccess.createFromBytes(em.getBytesForFlags()); parser.handleWayTags(edgeId, edgeIntAccess, way, null); assertFalse(busAccessEnc.getBool(false, edgeId, edgeIntAccess)); assertTrue(busAccessEnc.getBool(true, edgeId, edgeIntAccess)); way.clearTags(); way.setTag("highway", "tertiary"); way.setTag("vehicle:backward", "no"); edgeIntAccess = ArrayEdgeIntAccess.createFromBytes(em.getBytesForFlags()); parser.handleWayTags(edgeId, edgeIntAccess, way, null); assertTrue(busAccessEnc.getBool(false, edgeId, edgeIntAccess)); assertFalse(busAccessEnc.getBool(true, edgeId, edgeIntAccess)); way.setTag("bus:backward", "yes"); edgeIntAccess = ArrayEdgeIntAccess.createFromBytes(em.getBytesForFlags()); parser.handleWayTags(edgeId, edgeIntAccess, way, null); assertTrue(busAccessEnc.getBool(false, edgeId, edgeIntAccess)); assertTrue(busAccessEnc.getBool(true, edgeId, edgeIntAccess)); way.clearTags(); way.setTag("highway", "tertiary"); way.setTag("vehicle:backward", "yes"); way.setTag("bus:backward", "no"); edgeIntAccess = ArrayEdgeIntAccess.createFromBytes(em.getBytesForFlags()); parser.handleWayTags(edgeId, edgeIntAccess, way, null); assertTrue(busAccessEnc.getBool(false, edgeId, edgeIntAccess)); assertFalse(busAccessEnc.getBool(true, edgeId, edgeIntAccess)); }
/**
 * Creates an empty file at the given path over SFTP, applying the requested POSIX
 * permissions at creation time when provided. An existing file is truncated.
 *
 * @param file   remote path to create
 * @param status transfer status; may carry the permission to apply
 * @return the created path
 * @throws BackgroundException when the SFTP open/close fails
 */
@Override
public Path touch(final Path file, final TransferStatus status) throws BackgroundException {
    try {
        // Translate the octal permission string into SFTP file attributes, if any.
        final FileAttributes attrs = Permission.EMPTY != status.getPermission()
            ? new FileAttributes.Builder().withPermissions(Integer.parseInt(status.getPermission().getMode(), 8)).build()
            : FileAttributes.EMPTY;
        // CREAT|TRUNC|WRITE creates the file or truncates an existing one; close immediately.
        final RemoteFile handle = session.sftp().open(file.getAbsolute(),
            EnumSet.of(OpenMode.CREAT, OpenMode.TRUNC, OpenMode.WRITE), attrs);
        handle.close();
        return file;
    }
    catch(IOException e) {
        throw new SFTPExceptionMappingService().map("Cannot create {0}", e, file);
    }
}
// Creates, re-touches (override), lists, checks the preserved 664 permission, then cleans up.
@Test public void testTouch() throws Exception { final Path home = new SFTPHomeDirectoryService(session).find(); final Path test = new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new SFTPTouchFeature(session).touch(test, new TransferStatus()); new SFTPUnixPermissionFeature(session).setUnixPermission(test, new Permission("664")); // Test override new SFTPTouchFeature(session).touch(test, new TransferStatus()); final AttributedList<Path> list = new SFTPListService(session).list(home, new DisabledListProgressListener()); assertTrue(list.contains(test)); assertEquals("664", list.get(test).attributes().getPermission().getMode()); new SFTPDeleteFeature(session).delete(Collections.<Path>singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Creates the given maintenance domain in the distributed store, or updates it
 * when an entry with the same id already exists.
 *
 * @param newMd maintenance domain to persist
 * @return the store's create/update result (false for a brand-new entry, per tests)
 * @throws CfmConfigException on configuration errors
 */
@Override
public boolean createMaintenanceDomain(MaintenanceDomain newMd) throws CfmConfigException {
    log.info("Creating/Updating MD {} in distributed store", newMd.mdId());
    final boolean storeResult = store.createUpdateMaintenanceDomain(newMd);
    return storeResult;
}
// Creating a previously unseen MD must return false (no prior value replaced in the store).
@Test public void testCreateMaintenanceDomain() throws CfmConfigException { MaintenanceAssociation maTest21 = DefaultMaintenanceAssociation .builder(MaIdCharStr.asMaId("test-ma-2-1"), 9) .ccmInterval(MaintenanceAssociation.CcmInterval.INTERVAL_1S) .maNumericId((short) 1) .addToRemoteMepIdList(MepId.valueOf((short) 101)) .addToRemoteMepIdList(MepId.valueOf((short) 102)) .addToComponentList( DefaultComponent.builder(1) .tagType(Component.TagType.VLAN_STAG) .build()) .build(); MaintenanceDomain mdTest2 = DefaultMaintenanceDomain .builder(MdIdCharStr.asMdId("test-md-2")) .mdLevel(MaintenanceDomain.MdLevel.LEVEL2) .mdNumericId((short) 2) .addToMaList(maTest21) .build(); //We expect false here because there should have been no previous value //with that ID in the store assertFalse(service.createMaintenanceDomain(mdTest2)); }
/**
 * Renders the current thread's stack trace as a multi-line string: a leading line
 * separator, then one indented line per frame, joined by line separators (no trailing
 * separator). The exact indentation literal is part of the log format relied on by tests.
 */
public static String getCurrentStackTrace() { return Arrays.stream(Thread.currentThread().getStackTrace()) .map(stack -> "        " + stack.toString()) .collect(Collectors.joining(System.lineSeparator(), System.lineSeparator(), "")); }
// The rendered trace must start with a newline + indent and include the getStackTrace frame.
@Test public void testGetCurrentStackTrace() { String trace = LogUtil.getCurrentStackTrace(); Assert.assertTrue(trace.startsWith("\n        ")); Assert.assertTrue(trace.contains("java.lang.Thread.getStackTrace")); }
/**
 * Deserializes the given JSON string into a {@link GeneratedResource}.
 *
 * <p>Jackson resolves the concrete subclass polymorphically (presumably via the
 * "step-type" discriminator used in the payloads — confirm against the type config).
 *
 * @param generatedResourceString JSON representation of the resource
 * @throws JsonProcessingException when the JSON is malformed or unmappable
 */
public static GeneratedResource getGeneratedResourceObject(String generatedResourceString) throws JsonProcessingException {
    final GeneratedResource resource = objectMapper.readValue(generatedResourceString, GeneratedResource.class);
    return resource;
}
// Each "step-type" discriminator must map to its concrete subclass: redirect, class, executable.
@Test void getGeneratedResourceObject() throws JsonProcessingException { String generatedResourceString = "{\"step-type\":\"redirect\",\"modelLocalUriId\":{\"model\":\"this\"," + "\"basePath\":\"/is/fri_foo\",\"fullPath\":\"/this/is/fri_foo\"},\"target\":\"foo\"}"; GeneratedResource retrieved = JSONUtils.getGeneratedResourceObject(generatedResourceString); assertThat(retrieved).isNotNull().isInstanceOf(GeneratedRedirectResource.class); generatedResourceString = "{\"step-type\":\"class\",\"fullClassName\":\"full.class.Name\"}\""; retrieved = JSONUtils.getGeneratedResourceObject(generatedResourceString); assertThat(retrieved).isNotNull().isInstanceOf(GeneratedClassResource.class); generatedResourceString = "{\"step-type\":\"executable\"," + "\"modelLocalUriId\":{\"model\":\"this\",\"basePath\":\"/this/is/fri_foo\",\"fullPath\":\"/this/is/fri_foo\"},\"fullClassNames\":[\"full.class.Name\"]}"; retrieved = JSONUtils.getGeneratedResourceObject(generatedResourceString); assertThat(retrieved).isNotNull().isInstanceOf(GeneratedExecutableResource.class); }
/**
 * Rebuilds a Struct value against this translator's target schema.
 *
 * <p>The target schema may declare more fields than the source struct; only fields present
 * in the source are copied, the rest keep their defaults. Formats that need different
 * handling (e.g. Avro) are expected to subclass. Non-struct values pass through unchanged.
 *
 * @param ksqlData the value to translate
 * @return a Struct conforming to the target schema, or the input unchanged
 */
@Override
public Object toConnectRow(final Object ksqlData) {
    if (!(ksqlData instanceof Struct)) {
        return ksqlData;
    }
    final Struct source = (Struct) ksqlData;
    final Schema targetSchema = getSchema();
    // Ensure the source schema is compatible with the target before copying.
    validate(source.schema(), targetSchema);
    final Struct rebuilt = new Struct(targetSchema);
    for (final Field sourceField : source.schema().fields()) {
        rebuilt.put(sourceField.name(), source.get(sourceField));
    }
    return rebuilt;
}
// A source field absent from the Schema Registry schema must fail validation with a clear message.
@Test public void shouldThrowIfMissingField() { // Given: final Schema schema = SchemaBuilder.struct() .field("f1", SchemaBuilder.OPTIONAL_STRING_SCHEMA) .field("f3", SchemaBuilder.OPTIONAL_INT64_SCHEMA) .build(); final Struct struct = new Struct(ORIGINAL_SCHEMA) .put("f1", "abc") .put("f2", 12); // When: final Exception e = assertThrows( KsqlException.class, () -> new ConnectSRSchemaDataTranslator(schema).toConnectRow(struct) ); // Then: assertThat(e.getMessage(), is("Schema from Schema Registry misses field with name: f2")); }
String messageFromFile(Locale locale, String filename, String relatedProperty) { String result = null; String bundleBase = propertyToBundles.get(relatedProperty); if (bundleBase == null) { // this property has no translation return null; } String filePath = bundleBase.replace('.', '/'); if (!"en".equals(locale.getLanguage())) { filePath += "_" + locale.getLanguage(); } filePath += "/" + filename; InputStream input = classloader.getResourceAsStream(filePath); if (input != null) { result = readInputStream(filePath, input); } return result; }
// A missing resource file must yield null rather than an exception.
@Test public void return_null_if_file_not_found() { String html = underTest.messageFromFile(Locale.ENGLISH, "UnknownRule.html", "checkstyle.rule1.name"); assertThat(html).isNull(); }
/**
 * Returns the heartbeat timer class registered under the given name, or null if none.
 *
 * <p>Synchronized — presumably against concurrent mutation of {@code sTimerClasses}
 * elsewhere in this class; confirm against the registering methods.
 */
public static synchronized Class<? extends HeartbeatTimer> getTimerClass(String name) {
    final Class<? extends HeartbeatTimer> timerClass = sTimerClasses.get(name);
    return timerClass;
}
// Inside the resource scope the timer class is swapped to ScheduledTimer; on close it
// must revert to SleepingTimer.
@Test public void canTemporarilySwitchToScheduledTimer() throws Exception { try (ManuallyScheduleHeartbeat.Resource h = new ManuallyScheduleHeartbeat.Resource(ImmutableList.of(HeartbeatContext.WORKER_CLIENT))) { assertTrue(HeartbeatContext.getTimerClass(HeartbeatContext.WORKER_CLIENT) .isAssignableFrom(ScheduledTimer.class)); } assertTrue(HeartbeatContext.getTimerClass(HeartbeatContext.WORKER_CLIENT) .isAssignableFrom(SleepingTimer.class)); }
/**
 * Deletes the given files/buckets from Google Cloud Storage. Buckets use the buckets().delete
 * API; objects use objects().delete, adding the authenticated user as the billing project for
 * requester-pays buckets, and pinning the exact generation when versioning is enabled and a
 * version id is known (permanent version delete). A NotfoundException on a directory is
 * swallowed deliberately: directories may exist only as a common prefix with no placeholder
 * object. The per-file callback fires before each deletion attempt.
 */
@Override public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException { for(Path file : files.keySet()) { try { callback.delete(file); if(containerService.isContainer(file)) { final Storage.Buckets.Delete request = session.getClient().buckets().delete(file.getName()); if(containerService.getContainer(file).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) { request.setUserProject(session.getHost().getCredentials().getUsername()); } request.execute(); } else { final Storage.Objects.Delete request = session.getClient().objects().delete(containerService.getContainer(file).getName(), containerService.getKey(file)); if(containerService.getContainer(file).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) { request.setUserProject(session.getHost().getCredentials().getUsername()); } final VersioningConfiguration versioning = null != session.getFeature(Versioning.class) ? session.getFeature(Versioning.class).getConfiguration( containerService.getContainer(file) ) : VersioningConfiguration.empty(); if(versioning.isEnabled()) { if(StringUtils.isNotBlank(file.attributes().getVersionId())) { // You permanently delete versions of objects by including the generation number in the deletion request request.setGeneration(Long.parseLong(file.attributes().getVersionId())); } } request.execute(); } } catch(IOException e) { final BackgroundException failure = new GoogleStorageExceptionMappingService().map("Cannot delete {0}", e, file); if(file.isDirectory()) { if(failure instanceof NotfoundException) { // No placeholder file may exist but we just have a common prefix continue; } } throw failure; } } }
// Deleting a non-existent object key (a plain file, not a directory) must raise NotfoundException.
@Test(expected = NotfoundException.class) public void testDeleteNotFoundKey() throws Exception { final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path test = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Attaches the per-client-IP CookieManager to the sampler so each simulated client
 * keeps its own session cookies. This "filter" never excludes anything.
 *
 * @return always false — no sample is filtered out
 */
@Override
public boolean isFiltered(String path, TestElement sampler) {
    final String clientIp = getIpAddress(path);
    final CookieManager perClientCookies = getCookieManager(clientIp);
    ((HTTPSampler) sampler).setCookieManager(perClientCookies);
    return false;
}
// The sampler must receive the CookieManager registered for its client IP, and that
// manager must be marked as in use.
@Test public void testIsFiltered() throws Exception { Map<String, CookieManager> cm = new ConcurrentHashMap<>(); Set<CookieManager> inUse = Collections .synchronizedSet(Collections.newSetFromMap(new IdentityHashMap<>())); SessionFilter filter = new SessionFilter(cm, inUse); HTTPSampler sampler = new HTTPSampler(); filter.isFiltered("1.2.3.4 ...", sampler); assertSame(cm.get("1.2.3.4"), sampler.getCookieManager()); assertTrue(inUse.contains(sampler.getCookieManager())); }
/**
 * Takes a consistent snapshot of all collected statistics entries.
 *
 * @return the snapshot as a list of (metric, value) entries
 * @throws IllegalStateException while the reporter is enabled, since a consistent
 *         snapshot cannot be taken concurrently with reporting
 */
List<StatisticsEntry> takeStatistics() {
    if (reporterEnabled) {
        throw new IllegalStateException("Cannot take consistent snapshot while reporter is enabled");
    }
    final List<StatisticsEntry> snapshot = new ArrayList<>();
    consume((metric, value) -> snapshot.add(new StatisticsEntry(metric, value)));
    return snapshot;
}
// Requests differing in scheme and HTTP method must be counted in separate statistics buckets,
// with read/write dimensions derived from the method.
@Test void statistics_are_grouped_by_http_method_and_scheme() { testRequest("http", 200, "GET"); testRequest("http", 200, "PUT"); testRequest("http", 200, "POST"); testRequest("http", 200, "POST"); testRequest("http", 404, "GET"); testRequest("https", 404, "GET"); testRequest("https", 200, "POST"); testRequest("https", 200, "POST"); testRequest("https", 200, "POST"); testRequest("https", 200, "POST"); var stats = collector.takeStatistics(); assertStatisticsEntry(stats, "http", "GET", MetricDefinitions.RESPONSES_2XX, "read", 200, 1L); assertStatisticsEntry(stats, "http", "GET", MetricDefinitions.RESPONSES_4XX, "read", 404, 1L); assertStatisticsEntry(stats, "http", "PUT", MetricDefinitions.RESPONSES_2XX, "write", 200, 1L); assertStatisticsEntry(stats, "http", "POST", MetricDefinitions.RESPONSES_2XX, "write", 200, 2L); assertStatisticsEntry(stats, "https", "GET", MetricDefinitions.RESPONSES_4XX, "read", 404, 1L); assertStatisticsEntry(stats, "https", "POST", MetricDefinitions.RESPONSES_2XX, "write", 200, 4L); }
/**
 * Unconditionally sets the counter to the given value, bypassing optimistic
 * compare-and-set semantics.
 *
 * @param newValue value to store
 * @throws Exception if the underlying store operation fails
 */
@Override
public void forceSet(Long newValue) throws Exception {
    final byte[] encoded = valueToBytes(newValue);
    value.forceSet(encoded);
}
// Races increment() against forceSet(0): because forceSet keeps resetting the counter,
// the observed value must stay small, proving the unconditional write wins.
@Test public void testForceSet() throws Exception { CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), new RetryOneTime(1)); client.start(); try { final DistributedAtomicLong dal = new DistributedAtomicLong(client, "/counter", new RetryOneTime(1)); ExecutorService executorService = Executors.newFixedThreadPool(2); executorService.submit(new Callable<Object>() { @Override public Object call() throws Exception { for (int i = 0; i < 1000; ++i) { dal.increment(); Thread.sleep(10); } return null; } }); executorService.submit(new Callable<Object>() { @Override public Object call() throws Exception { for (int i = 0; i < 1000; ++i) { dal.forceSet(0L); Thread.sleep(10); } return null; } }); assertTrue(dal.get().preValue() < 10); } finally { client.close(); } }
/**
 * Builds a factory that instantiates the UDF (configuring it if it is
 * {@link Configurable}) and optionally wraps it with metrics collection.
 *
 * @param method                   the annotated UDF method; its declaring class is instantiated
 * @param udfDescriptionAnnotation supplies the UDF name used during instantiation
 * @param functionName             key used to look up function-specific config properties
 * @param invoker                  reflective invoker bound to {@code method}
 * @param sensorName               metrics sensor to record invocations under, when metrics are enabled
 * @return a factory producing a ready-to-use {@link Kudf} for a given config
 */
private Function<KsqlConfig, Kudf> getUdfFactory(
    final Method method,
    final UdfDescription udfDescriptionAnnotation,
    final String functionName,
    final FunctionInvoker invoker,
    final String sensorName
) {
    return config -> {
        final Object udfInstance = FunctionLoaderUtils.instantiateFunctionInstance(
            method.getDeclaringClass(), udfDescriptionAnnotation.name());
        if (udfInstance instanceof Configurable) {
            // Run user-supplied configure() inside the UDF sandbox; the pop is in a
            // finally block so the sandbox is always exited, even on failure.
            ExtensionSecurityManager.INSTANCE.pushInUdf();
            try {
                ((Configurable) udfInstance)
                    .configure(config.getKsqlFunctionsConfigProps(functionName));
            } finally {
                ExtensionSecurityManager.INSTANCE.popOutUdf();
            }
        }
        final PluggableUdf pluggable = new PluggableUdf(invoker, udfInstance);
        return metrics
            .<Kudf>map(m -> new UdfMetricProducer(m.getSensor(sensorName), pluggable, Time.SYSTEM))
            .orElse(pluggable);
    };
}
@Test
public void shouldPutJarUdfsInClassLoaderForJar() throws Exception {
    // Given: two UDFs that were loaded from the same external jar.
    final UdfFactory toStringFactory = FUNC_REG.getUdfFactory(FunctionName.of("tostring"));
    final UdfFactory multiplyFactory = FUNC_REG.getUdfFactory(FunctionName.of("multiply"));

    // When: an instance of each UDF is created.
    final Kudf toStringUdf = toStringFactory
        .getFunction(ImmutableList.of(SqlArgument.of(SqlTypes.STRING)))
        .newInstance(ksqlConfig);
    final Kudf multiplyUdf = multiplyFactory
        .getFunction(Arrays.asList(SqlArgument.of(SqlTypes.INTEGER), SqlArgument.of(SqlTypes.INTEGER)))
        .newInstance(ksqlConfig);

    // Then: both share a single jar-specific class loader, distinct from the parent loader.
    final ClassLoader jarLoader = getActualUdfClassLoader(multiplyUdf);
    assertThat(jarLoader, equalTo(getActualUdfClassLoader(toStringUdf)));
    assertThat(jarLoader, not(equalTo(PARENT_CLASS_LOADER)));
}
@Override public void close() throws IOException { if (mClosed.getAndSet(true)) { return; } mLocalOutputStream.close(); try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(mFile))) { ObjectMetadata objMeta = new ObjectMetadata(); objMeta.setContentLength(mFile.length()); if (mHash != null) { byte[] hashBytes = mHash.digest(); objMeta.setContentMD5(new String(Base64.encodeBase64(hashBytes))); } mContentHash = mOssClient.putObject(mBucketName, mKey, in, objMeta).getETag(); } catch (ServiceException e) { LOG.error("Failed to upload {}.", mKey); throw new IOException(e); } finally { // Delete the temporary file on the local machine if the OSS client completed the // upload or if the upload failed. if (!mFile.delete()) { LOG.error("Failed to delete temporary file @ {}", mFile.getPath()); } } }
/**
 * Verifies that an IOException raised while opening the local temporary file
 * propagates out of the OSSOutputStream constructor with its original message.
 */
@Test
@PrepareForTest(OSSOutputStream.class)
public void testConstructor() throws Exception {
    // Intercept construction of the temp File and make its FileOutputStream fail.
    PowerMockito.whenNew(File.class).withArguments(Mockito.anyString()).thenReturn(mFile);
    String errorMessage = "protocol doesn't support output";
    PowerMockito.whenNew(FileOutputStream.class).withArguments(mFile)
        .thenThrow(new IOException(errorMessage));
    // Expect the constructor's IOException to surface unchanged.
    mThrown.expect(IOException.class);
    mThrown.expectMessage(errorMessage);
    new OSSOutputStream("testBucketName", "testKey", mOssClient,
        sConf.getList(PropertyKey.TMP_DIRS)).close();
}
/**
 * Handles {@code DROP ... DELETE TOPIC}: removes the backing Kafka topic and the
 * key/value schema-registry subjects, then rewrites the statement without the
 * DELETE TOPIC clause so downstream execution does not repeat the deletion.
 * Statements that are not drops, or drops without DELETE TOPIC, pass through untouched.
 *
 * @throws KsqlException if the source is read-only, referenced by other queries,
 *         missing (without IF EXISTS), or any deletion step fails
 */
@SuppressWarnings({"unchecked", "UnstableApiUsage"})
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
    final ConfiguredStatement<T> statement) {
    if (!(statement.getStatement() instanceof DropStatement)) {
        return statement;
    }

    final DropStatement dropStatement = (DropStatement) statement.getStatement();
    if (!dropStatement.isDeleteTopic()) {
        return statement;
    }

    final SourceName sourceName = dropStatement.getName();
    final DataSource source = metastore.getSource(sourceName);
    if (source == null) {
        if (!dropStatement.getIfExists()) {
            throw new KsqlException("Could not find source to delete topic for: " + statement);
        }
    } else {
        if (source.isSource()) {
            throw new KsqlException("Cannot delete topic for read-only source: " + sourceName.text());
        }
        checkTopicRefs(source);
        deleteTopic(source);

        // Closer guarantees both subject deletions are attempted even if one throws;
        // registered closeables run in LIFO order (value subject first, then key subject).
        final Closer closer = Closer.create();
        closer.register(() -> deleteKeySubject(source));
        closer.register(() -> deleteValueSubject(source));
        try {
            closer.close();
        } catch (final KsqlException e) {
            throw e;
        } catch (final Exception e) {
            throw new KsqlException(e);
        }
    }

    final T withoutDelete = (T) dropStatement.withoutDeleteClause();
    final String withoutDeleteText = SqlFormatter.formatSql(withoutDelete) + ";";
    return statement.withStatement(withoutDeleteText, withoutDelete);
}
@Test public void shouldDeleteProtoSchemaInSR() throws IOException, RestClientException { // Given: when(topic.getValueFormat()).thenReturn(ValueFormat.of(FormatInfo.of(FormatFactory.PROTOBUF.name()), SerdeFeatures.of())); // When: deleteInjector.inject(DROP_WITH_DELETE_TOPIC); // Then: verify(registryClient).deleteSubject(KsqlConstants.getSRSubject("something", false)); }
/**
 * Creates a product feature description.
 *
 * @param name        display name of the feature
 * @param description short human-readable summary of the feature
 * @param link        link associated with the feature (presumably a docs URL — confirm)
 */
public ProductFeature(String name, String description, String link) {
    this.name = name;
    this.description = description;
    this.link = link;
}
@Test
public void testProductFeature() {
    // Exactly one feature is currently registered.
    Assert.assertEquals(1, ProductFeature.getFeatures().size());
}
/**
 * Looks up a parameter value from a step inside a subworkflow, executing the
 * lookup on the shared executor with a bounded wait.
 *
 * @param subworkflowStepId id of the subworkflow step in the parent workflow
 * @param stepId            id of the step inside the subworkflow
 * @param paramName         name of the parameter to resolve
 * @return the resolved parameter value
 * @throws MaestroInternalError if the lookup fails, is interrupted, or times out
 */
Object getFromSubworkflow(String subworkflowStepId, String stepId, String paramName) {
    try {
        final Future<?> pending =
            executor.submit(() -> fromSubworkflow(subworkflowStepId, stepId, paramName));
        return pending.get(TIMEOUT_IN_MILLIS, TimeUnit.MILLISECONDS);
    } catch (Exception e) {
        throw new MaestroInternalError(
            e,
            "getFromSubworkflow throws an exception for subworkflowStepId=[%s], stepId=[%s], paramName=[%s]",
            subworkflowStepId, stepId, paramName);
    }
}
@Test
public void testGetFromSubworkflow() throws Exception {
    // Given: step "foo" exposes a runtime summary, and the DAO resolves the subworkflow step.
    StepRuntimeSummary runtimeSummary =
        loadObject(TEST_SUBWORKFLOW_STEP_RUNTIME_SUMMARY, StepRuntimeSummary.class);
    when(allStepOutputData.get("foo"))
        .thenReturn(Collections.singletonMap("maestro_step_runtime_summary", runtimeSummary));
    StepInstance subworkflowStep = loadObject(TEST_STEP_INSTANCE, StepInstance.class);
    when(stepInstanceDao.getStepInstance(any(), anyLong(), anyLong(), any(), any()))
        .thenReturn(subworkflowStep);

    // When/Then: "sleep_seconds" of step "job1" in subworkflow step "foo" resolves to 15.
    long resolved = (Long) paramExtension.getFromSubworkflow("foo", "job1", "sleep_seconds");
    assertEquals(15, resolved);
}
/**
 * Computes packaging data for the whole cause chain of the given throwable proxy,
 * including the frames of every suppressed throwable encountered along the way.
 *
 * @param tp head of the throwable-proxy chain; may be null (no-op)
 */
public void calculate(IThrowableProxy tp) {
    for (IThrowableProxy current = tp; current != null; current = current.getCause()) {
        populateFrames(current.getStackTraceElementProxyArray());
        // Suppressed throwables carry their own frames; getSuppressed() may be null.
        IThrowableProxy[] suppressedArray = current.getSuppressed();
        if (suppressedArray != null) {
            for (IThrowableProxy suppressed : suppressedArray) {
                populateFrames(suppressed.getStackTraceElementProxyArray());
            }
        }
    }
}
@Test // Test http://jira.qos.ch/browse/LBCLASSIC-125 public void noClassDefFoundError_LBCLASSIC_125Test() throws MalformedURLException { ClassLoader cl = (URLClassLoader) makeBogusClassLoader(); Thread.currentThread().setContextClassLoader(cl); Throwable t = new Throwable("x"); ThrowableProxy tp = new ThrowableProxy(t); StackTraceElementProxy[] stepArray = tp.getStackTraceElementProxyArray(); StackTraceElement bogusSTE = new StackTraceElement("com.Bogus", "myMethod", "myFile", 12); stepArray[0] = new StackTraceElementProxy(bogusSTE); PackagingDataCalculator pdc = tp.getPackagingDataCalculator(); // NoClassDefFoundError should be caught pdc.calculate(tp); }
/**
 * Produces a normalized copy of the query: the query string is run through the
 * configured decorators, and the filter and every search type are normalized
 * recursively. The input query is not mutated.
 */
@Override
public Query normalizeQuery(final Query query, final ParameterProvider parameterProvider) {
    final String decoratedQueryString =
        this.queryStringDecorators.decorate(query.query().queryString(), parameterProvider, query);
    return query.toBuilder()
        .query(ElasticsearchQueryString.of(decoratedQueryString))
        .filter(normalizeFilter(query.filter(), query, parameterProvider))
        .searchTypes(query.searchTypes().stream()
            .map(searchType -> normalizeSearchType(searchType, query, parameterProvider))
            .collect(Collectors.toSet()))
        .build();
}
@Test
void decoratesQueryStringFilters() {
    // Given: a query whose filter is a query-string filter.
    final Query input = Query.builder()
        .filter(QueryStringFilter.builder().query("action:index").build())
        .build();

    // When: normalized with a parameter provider that resolves nothing.
    final Query result = decorateQueryStringsNormalizer.normalizeQuery(input, name -> Optional.empty());

    // Then: the decorator stub rewrites the filter's query string to "Hey there!".
    assertThat(result)
        .extracting(Query::filter)
        .matches(filter -> filter instanceof QueryStringFilter
            && ((QueryStringFilter) filter).query().equals("Hey there!"));
}
/**
 * Computes the large-object checksum over the concatenation of each segment's
 * MD5 string, matching how Swift derives the manifest ETag.
 *
 * @param checksum compute implementation (e.g. MD5) applied to the concatenation
 * @param objects  uploaded segments in order
 * @return checksum of the concatenated per-segment MD5 sums
 * @throws BackgroundException if the checksum computation fails
 */
public Checksum checksum(final ChecksumCompute checksum, final List<StorageObject> objects) throws BackgroundException {
    final StringBuilder segmentHashes = new StringBuilder();
    for (StorageObject segment : objects) {
        segmentHashes.append(segment.getMd5sum());
    }
    return checksum.compute(
        IOUtils.toInputStream(segmentHashes.toString(), Charset.defaultCharset()),
        new TransferStatus());
}
@Test
public void testChecksum() throws Exception {
    final SwiftSegmentService service = new SwiftSegmentService(session);
    final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("IAD");
    final Path file = new Path(container, "a", EnumSet.of(Path.Type.file));

    // Two segments with known MD5 sums.
    final StorageObject first = new StorageObject("a");
    first.setMd5sum("m1");
    first.setSize(1L);
    final StorageObject second = new StorageObject("b");
    second.setMd5sum("m2");
    second.setSize(1L);

    // The segment checksum is the MD5 of the concatenated per-segment MD5 strings ("m1m2").
    final Checksum actual = service.checksum(new MD5ChecksumCompute(), Arrays.asList(first, second));
    final Checksum expected = new MD5ChecksumCompute()
        .compute(IOUtils.toInputStream("m1m2", Charset.defaultCharset()), new TransferStatus());
    assertEquals(expected, actual);
}
/**
 * Adds one multi-valued filter under the next filter index: a
 * {@code Filter.N.Name} entry plus {@code Filter.N.Value.M} entries, one per
 * value in iteration order. Does nothing when the value list is empty, and in
 * that case the filter index is not consumed.
 *
 * @param name   filter name
 * @param values filter values; must be non-null
 */
void addMulti(String name, Collection<String> values) {
    if (values.isEmpty()) {
        return;
    }
    filters.put(String.format("Filter.%d.Name", index), name);
    int valueIndex = 1;
    for (String value : values) {
        filters.put(String.format("Filter.%d.Value.%d", index, valueIndex), value);
        valueIndex++;
    }
    index++;
}
@Test public void addMulti() { // given Filter filter = new Filter(); // when filter.addMulti("key", asList("value", "second-value")); // then Map<String, String> result = filter.getFilterAttributes(); // then assertEquals(3, result.size()); assertEquals("key", result.get("Filter.1.Name")); assertEquals("value", result.get("Filter.1.Value.1")); assertEquals("second-value", result.get("Filter.1.Value.2")); }
/**
 * Builds the "##"-separated device-info string used by the channel SDK.
 * Only the android_id and the two OAID values are populated; every IMEI/MEID/MAC
 * field is deliberately emitted as an empty string.
 * NOTE(review): {@code mContext} is currently unused — presumably kept for API
 * compatibility with callers; confirm before removing.
 *
 * @param mContext       Android context (unused)
 * @param androidId      the device's android_id
 * @param oaid           OAID obtained via the SDK
 * @param reflectionOAID OAID obtained via reflection
 * @return the formatted device-info string
 */
public static String getDeviceInfo(Context mContext, String androidId, String oaid, String reflectionOAID) {
    return String.format("android_id=%s##imei=%s##imei_old=%s##imei_slot1=%s##imei_slot2=%s##imei_meid=%s##mac=%s##oaid=%s##oaid_reflection=%s",
        androidId, "", "", "", "", "", "", oaid, reflectionOAID);
}
@Test
public void getDeviceInfo() {
    // A blank android_id with matching OAIDs still yields a non-null info string.
    final String deviceInfo =
        ChannelUtils.getDeviceInfo(mApplication, "", "abdd12312838_oaid", "abdd12312838_oaid");
    Assert.assertNotNull(deviceInfo);
}
/**
 * Sets the JMS destination type this activation spec targets.
 * NOTE(review): expected values appear to be fully-qualified class names such as
 * {@code javax.jms.Topic} (inferred from the accompanying test) — confirm.
 *
 * @param destinationType the destination type class name
 */
public void setDestinationType(String destinationType) {
    this.destinationType = destinationType;
}
// Verifies that the activation spec validates successfully when the destination
// type is set to the JMS Topic class name.
@Test(timeout = 60000)
public void testTopicDestinationType() {
    activationSpec.setDestinationType(Topic.class.getName());
    assertActivationSpecValid();
}