Dataset columns:
- focal_method: string (lengths 13 to 60.9k characters)
- test_case: string (lengths 25 to 109k characters)
/**
 * Wraps {@code delegate} so callers can pass it null input safely; returns the
 * delegate unchanged when it is already null-safe (a Constants or NullSafe instance),
 * avoiding double wrapping.
 *
 * @throws NullPointerException if delegate is null
 */
public static <T> SamplerFunction<T> nullSafe(SamplerFunction<T> delegate) {
  if (delegate == null) throw new NullPointerException("delegate == null");
  boolean alreadySafe = delegate instanceof Constants || delegate instanceof NullSafe;
  return alreadySafe ? delegate : new NullSafe<T>(delegate);
}
// Verifies that nullSafe() does not wrap an already null-safe sampler a second time.
@Test void nullSafe_doesntDoubleWrap() {
  SamplerFunction<Object> sampler = nullSafe(o -> {
    throw new NullPointerException();
  });
  // Bug fix: the original asserted sampler.isSameAs(sampler), a tautology that
  // passes for any object. The intended check is that re-wrapping is a no-op.
  assertThat(nullSafe(sampler)).isSameAs(sampler);
}
// CLI entry point that starts a Hazelcast member. Config file, port, and interface
// options are forwarded to Hazelcast purely via System properties before delegating
// to the start runnable; a non-empty interface also disables bind-any.
@Command(description = "Starts a new Hazelcast member", mixinStandardHelpOptions = true, sortOptions = false) void start( @Option(names = {"-c", "--config"}, paramLabel = "<file>", description = "Use <file> for Hazelcast " + "configuration. " + "Accepted formats are XML and YAML. ") String configFilePath, @Option(names = {"-p", "--port"}, paramLabel = "<port>", description = "Bind to the specified <port>. Please note that if the specified port is in use, " + "it will auto-increment to the first free port. (default: 5701)") String port, @Option(names = {"-i", "--interface"}, paramLabel = "<interface>", description = "Bind to the specified <interface>.") String hzInterface) { if (!isNullOrEmpty(configFilePath)) { System.setProperty("hazelcast.config", configFilePath); } if (!isNullOrEmpty(port)) { System.setProperty("hz.network.port.port", port); } if (!isNullOrEmpty(hzInterface)) { System.setProperty("hz.network.interfaces.enabled", "true"); System.setProperty("hz.socket.bind.any", "false"); System.setProperty("hz.network.interfaces.interfaces.interface1", hzInterface); } start.run(); }
// Verifies that passing a config file path results in the "hazelcast.config"
// System property being set.
// NOTE(review): this mutates global System properties without cleanup — presumably
// a test fixture resets them; verify against the enclosing test class.
@Test void test_start_withConfigFile() { // given String configFile = "path/to/test-hazelcast.xml"; // when hazelcastServerCommandLine.start(configFile, null, null); assertThat(System.getProperties()).containsEntry("hazelcast.config", configFile); }
// UDF that URL-decodes a string using UTF-8. Null input yields null; the
// UnsupportedEncodingException (impossible for UTF-8 in practice) is wrapped
// in a KsqlFunctionException carrying the offending input.
@Udf public String decodeParam( @UdfParameter(description = "the value to decode") final String input) { if (input == null) { return null; } try { return URLDecoder.decode(input, UTF_8.name()); } catch (final UnsupportedEncodingException e) { throw new KsqlFunctionException( "url_decode udf encountered an encoding exception while decoding: " + input, e); } }
// Verifies that URL-safe special characters (.-*_) pass through decoding unchanged.
// NOTE(review): the assertion message says "without being encoded" although this
// exercises the decode UDF — the message text looks copied from an encode test.
@Test public void shouldReturnSpecialCharsIntact() { assertThat(".-*_ should all pass through without being encoded", decodeUdf.decodeParam("foo.-*_bar"), equalTo("foo.-*_bar")); }
// UDF that splits "k1<kv>v1<entry>k2<kv>v2" into a map. Returns null for null
// arguments, empty delimiters, or identical entry/kv delimiters. Entries lacking
// the kv delimiter are skipped; duplicate keys keep the last value ((v1, v2) -> v2).
// Note: the toMap(Iterator::next, Iterator::next, ...) trick relies on the key
// mapper being invoked before the value mapper on the same iterator.
@Udf public Map<String, String> splitToMap( @UdfParameter( description = "Separator string and values to join") final String input, @UdfParameter( description = "Separator string and values to join") final String entryDelimiter, @UdfParameter( description = "Separator string and values to join") final String kvDelimiter) { if (input == null || entryDelimiter == null || kvDelimiter == null) { return null; } if (entryDelimiter.isEmpty() || kvDelimiter.isEmpty() || entryDelimiter.equals(kvDelimiter)) { return null; } final Iterable<String> entries = Splitter.on(entryDelimiter).omitEmptyStrings().split(input); return StreamSupport.stream(entries.spliterator(), false) .filter(e -> e.contains(kvDelimiter)) .map(kv -> Splitter.on(kvDelimiter).split(kv).iterator()) .collect(Collectors.toMap( Iterator::next, Iterator::next, (v1, v2) -> v2)); }
// Verifies that whitespace adjacent to delimiters is preserved in both keys and values.
@Test public void shouldRetainWhitespacebetweenDelimiters() { Map<String, String> result = udf.splitToMap("foo :=\tapple || bar:=cherry", "||", ":="); assertThat(result, hasEntry("foo ", "\tapple ")); assertThat(result, hasEntry(" bar", "cherry")); assertThat(result.size(), equalTo(2)); }
// Computes forward groups by union-merging each vertex's group with the groups of
// its forward producers, iterating vertices in topological order so every producer
// group already exists (a missing one indicates a builder bug and throws).
// Only groups with more than one vertex are materialized into the result map.
public static Map<JobVertexID, ForwardGroup> computeForwardGroups( final Iterable<JobVertex> topologicallySortedVertices, final Function<JobVertex, Set<JobVertex>> forwardProducersRetriever) { final Map<JobVertex, Set<JobVertex>> vertexToGroup = new IdentityHashMap<>(); // iterate all the vertices which are topologically sorted for (JobVertex vertex : topologicallySortedVertices) { Set<JobVertex> currentGroup = new HashSet<>(); currentGroup.add(vertex); vertexToGroup.put(vertex, currentGroup); for (JobVertex producerVertex : forwardProducersRetriever.apply(vertex)) { final Set<JobVertex> producerGroup = vertexToGroup.get(producerVertex); if (producerGroup == null) { throw new IllegalStateException( "Producer task " + producerVertex.getID() + " forward group is null" + " while calculating forward group for the consumer task " + vertex.getID() + ". This should be a forward group building bug."); } if (currentGroup != producerGroup) { currentGroup = VertexGroupComputeUtil.mergeVertexGroups( currentGroup, producerGroup, vertexToGroup); } } } final Map<JobVertexID, ForwardGroup> ret = new HashMap<>(); for (Set<JobVertex> vertexGroup : VertexGroupComputeUtil.uniqueVertexGroups(vertexToGroup)) { if (vertexGroup.size() > 1) { ForwardGroup forwardGroup = new ForwardGroup(vertexGroup); for (JobVertexID jobVertexId : forwardGroup.getJobVertexIds()) { ret.put(jobVertexId, forwardGroup); } } } return ret; }
// Vertices with no forward edges form only singleton groups, which are not
// materialized — so the result must be empty.
@Test void testIsolatedVertices() throws Exception { JobVertex v1 = new JobVertex("v1"); JobVertex v2 = new JobVertex("v2"); JobVertex v3 = new JobVertex("v3"); Set<ForwardGroup> groups = computeForwardGroups(v1, v2, v3); checkGroupSize(groups, 0); }
/**
 * Creates a WithKeys transform that keys every element by applying {@code fn}.
 * Rejects a null function with a hint toward the constant-null-key overload.
 */
public static <K, V> WithKeys<K, V> of(SerializableFunction<V, K> fn) {
  checkNotNull(
      fn, "WithKeys constructed with null function. Did you mean WithKeys.of((Void) null)?");
  WithKeys<K, V> keyed = new WithKeys<>(fn, null);
  return keyed;
}
// Verifies that WithKeys.of((Void) null) assigns a constant null key to every element.
@Test @Category(NeedsRunner.class) public void testConstantVoidKeys() { PCollection<String> input = p.apply(Create.of(Arrays.asList(COLLECTION)).withCoder(StringUtf8Coder.of())); PCollection<KV<Void, String>> output = input.apply(WithKeys.of((Void) null)); PAssert.that(output).containsInAnyOrder(WITH_CONST_NULL_KEYS); p.run(); }
public static int colNameToIndex(String colName) { int length = colName.length(); char c; int index = -1; for (int i = 0; i < length; i++) { c = Character.toUpperCase(colName.charAt(i)); if (Character.isDigit(c)) { break;// 确定指定的char值是否为数字 } index = (index + 1) * 26 + (int) c - 'A'; } return index; }
// Spot-checks the column-name to zero-based-index conversion across one-, two-,
// and three-letter names.
@Test public void colNameToIndexTest() { assertEquals(704, ExcelUtil.colNameToIndex("AAC")); assertEquals(703, ExcelUtil.colNameToIndex("AAB")); assertEquals(702, ExcelUtil.colNameToIndex("AAA")); assertEquals(28, ExcelUtil.colNameToIndex("AC")); assertEquals(27, ExcelUtil.colNameToIndex("AB")); assertEquals(26, ExcelUtil.colNameToIndex("AA")); assertEquals(2, ExcelUtil.colNameToIndex("C")); assertEquals(1, ExcelUtil.colNameToIndex("B")); assertEquals(0, ExcelUtil.colNameToIndex("A")); }
/** Extracts the effective predicate of {@code node} by visiting the plan tree. */
public RowExpression extract(PlanNode node) {
    final Visitor extractor = new Visitor(domainTranslator, functionAndTypeManager);
    return node.accept(extractor, null);
}
// For a RIGHT join whose left (inner) side is filtered by FALSE, the effective
// predicate must keep the right side's conjuncts and null-extend the join key,
// while the impossible left-side predicate is ignored.
@Test public void testRightJoinWithFalseInner() { List<EquiJoinClause> criteria = ImmutableList.of(new EquiJoinClause(AV, DV)); Map<VariableReferenceExpression, ColumnHandle> leftAssignments = Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(AV, BV, CV))); TableScanNode leftScan = tableScanNode(leftAssignments); Map<VariableReferenceExpression, ColumnHandle> rightAssignments = Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(DV, EV, FV))); TableScanNode rightScan = tableScanNode(rightAssignments); FilterNode left = filter(leftScan, FALSE_CONSTANT); FilterNode right = filter(rightScan, and( equals(DV, EV), lessThan(FV, bigintLiteral(100)))); PlanNode node = new JoinNode( Optional.empty(), newId(), JoinType.RIGHT, left, right, criteria, ImmutableList.<VariableReferenceExpression>builder() .addAll(left.getOutputVariables()) .addAll(right.getOutputVariables()) .build(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), ImmutableMap.of()); RowExpression effectivePredicate = effectivePredicateExtractor.extract(node); // False literal on the left side should be ignored assertEquals(normalizeConjuncts(effectivePredicate), normalizeConjuncts(equals(DV, EV), lessThan(FV, bigintLiteral(100)), or(equals(AV, DV), isNull(AV)))); }
/**
 * Sets the application config on this builder.
 *
 * @param application the application configuration to use
 * @return this builder, for chaining
 */
public B application(ApplicationConfig application) {
    this.application = application;
    return getThis();
}
// Verifies that the application config set on the builder is carried into the built object.
@Test void application() { ApplicationConfig applicationConfig = new ApplicationConfig("AbtractInterfaceBuilderTest"); InterfaceBuilder builder = new InterfaceBuilder(); builder.application(applicationConfig); Assertions.assertEquals(applicationConfig, builder.build().getApplication()); }
// Best-effort legacy decryption: delegates to tryDecrypt and converts any
// failure into a logged error plus a null return (callers must handle null).
@Nullable public static String decrypt(String cipherText, String encryptionKey, String salt) { try { return tryDecrypt(cipherText, encryptionKey, salt); } catch (Exception ex) { LOG.error("Could not decrypt (legacy) value.", ex); return null; } }
// Regression test pinning the legacy AES/CBC/ISO10126Padding transformation
// against a fixed ciphertext; see the in-test comment for migration implications.
@Test public void testDecryptStaticISO10126PaddedLongCipherText() { // The cipherText was encrypted using the legacy AES/CBC/ISO10126Padding transformation. // If this test fails, we changed the transformation. If the change was intentional, this test must // be updated, and we need to create a migration to re-encrypt all existing secrets in the database. // Otherwise, existing secrets cannot be decrypted anymore! final String cipherText = "d8d9ade5456543950e7ff7441b7157ed71564f5b7d656098a8ec87c074ed2d333a797711e06817135ef6d7cfce0a2eb6"; final String salt = "17c4bd3761b530f7"; final String decrypt = AESTools.decrypt(cipherText, "1234567890123456", salt); Assert.assertEquals("I am a very very very long secret", decrypt); }
/**
 * Persists every API doc registration in the incoming batch via the client
 * register repository.
 */
@Override
public void executor(final Collection<ApiDocRegisterDTO> dataList) {
    dataList.forEach(shenyuClientRegisterRepository::persistApiDoc);
}
// Verifies that each DTO in the batch is persisted exactly once through the repository.
@Test public void testExecutorValidData() { Collection<ApiDocRegisterDTO> apiDocRegisterDTOList = new ArrayList<>(); ApiDocRegisterDTO apiDocRegisterDTO = ApiDocRegisterDTO.builder().contextPath("/test").apiPath("/api").httpMethod(0).consume("application/json").produce("application/json").version("V0.01").rpcType("http").state(1).ext("test") .apiOwner("test").apiDesc("test").apiSource(0).document("test").eventType(EventType.UPDATED).tags(new ArrayList<>()).build(); apiDocRegisterDTOList.add(apiDocRegisterDTO); executorSubscriber.executor(apiDocRegisterDTOList); verify(shenyuClientRegisterRepository, times(1)).persistApiDoc(apiDocRegisterDTO); }
// Demo entry point: wires an event-emitter chain (scout/varys -> kingsHand ->
// kingJoffrey) with per-event subscriptions, then drives all emitters through
// each weekday in order.
public static void main(String[] args) { var kingJoffrey = new KingJoffrey(); var kingsHand = new KingsHand(); kingsHand.registerObserver(kingJoffrey, Event.TRAITOR_DETECTED); kingsHand.registerObserver(kingJoffrey, Event.STARK_SIGHTED); kingsHand.registerObserver(kingJoffrey, Event.WARSHIPS_APPROACHING); kingsHand.registerObserver(kingJoffrey, Event.WHITE_WALKERS_SIGHTED); var varys = new LordVarys(); varys.registerObserver(kingsHand, Event.TRAITOR_DETECTED); varys.registerObserver(kingsHand, Event.WHITE_WALKERS_SIGHTED); var scout = new Scout(); scout.registerObserver(kingsHand, Event.WARSHIPS_APPROACHING); scout.registerObserver(varys, Event.WHITE_WALKERS_SIGHTED); var baelish = new LordBaelish(kingsHand, Event.STARK_SIGHTED); var emitters = List.of( kingsHand, baelish, varys, scout ); Arrays.stream(Weekday.values()) .<Consumer<? super EventEmitter>>map(day -> emitter -> emitter.timePasses(day)) .forEachOrdered(emitters::forEach); }
// Smoke test: the demo main must run to completion without throwing.
@Test void shouldExecuteApplicationWithoutException() { assertDoesNotThrow(() -> App.main(new String[]{})); }
/** Returns the config node path for a job: {@code <jobRootPath>/config}. */
public static String getJobConfigurationPath(final String jobId) {
    return getJobRootPath(jobId) + "/config";
}
// Verifies the job configuration path is the job root path with a "/config" suffix.
@Test void assertGetJobConfigPath() { assertThat(PipelineMetaDataNode.getJobConfigurationPath(jobId), is(jobRootPath + "/config")); }
/**
 * Reads the next byte as an unsigned value (0-255), or returns -1 once the
 * position reaches the end of the backing data.
 */
@Override
public int read() throws EOFException {
    if (pos >= size) {
        return -1;
    }
    return data[pos++] & 0xff;
}
// Reading into a null buffer must fail fast with a NullPointerException.
@Test(expected = NullPointerException.class) public void testReadForBOffLen_null_array() throws Exception { in.read(null, 0, 1); }
/**
 * Notifies all registered post-analysis tasks that the analysis finished,
 * reporting SUCCESS only when every step executed. No-op when no tasks are
 * registered (avoids building the project analysis needlessly).
 */
@Override
public void finished(boolean allStepsExecuted) {
    if (postProjectAnalysisTasks.length == 0) {
        return;
    }
    ProjectAnalysisImpl analysis = createProjectAnalysis(allStepsExecuted ? SUCCESS : FAILED);
    for (PostProjectAnalysisTask task : postProjectAnalysisTasks) {
        executeTask(analysis, task);
    }
}
// Verifies that a successful finish exposes a fully populated quality gate to the task context.
@Test public void qualityGate_is_populated_when_finished_method_argument_is_true() { underTest.finished(true); verify(postProjectAnalysisTask).finished(taskContextCaptor.capture()); org.sonar.api.ce.posttask.QualityGate qualityGate = taskContextCaptor.getValue().getProjectAnalysis().getQualityGate(); assertThat(qualityGate.getStatus()).isEqualTo(org.sonar.api.ce.posttask.QualityGate.Status.OK); assertThat(qualityGate.getId()).isEqualTo(QUALITY_GATE_UUID); assertThat(qualityGate.getName()).isEqualTo(QUALITY_GATE_NAME); assertThat(qualityGate.getConditions()).hasSize(2); }
// Resolves an app id or name, then queries the service REST API for status.
// Returns a "not found" message for HTTP 404, an error message for other non-200
// codes, the raw response body on success, and an empty string if the HTTP call
// itself throws (the exception is only logged).
@Override public String getStatusString(String appIdOrName) throws IOException, YarnException { String output = ""; String appName; try { ApplicationId appId = ApplicationId.fromString(appIdOrName); ApplicationReport appReport = yarnClient.getApplicationReport(appId); appName = appReport.getName(); } catch (IllegalArgumentException e) { // not app Id format, may be app name appName = appIdOrName; ServiceApiUtil.validateNameFormat(appName, getConfig()); } try { ClientResponse response = getApiClient(getServicePath(appName)) .get(ClientResponse.class); if (response.getStatus() == 404) { StringBuilder sb = new StringBuilder(); sb.append(" Service ") .append(appName) .append(" not found"); return sb.toString(); } if (response.getStatus() != 200) { StringBuilder sb = new StringBuilder(); sb.append(appName) .append(" Failed : HTTP error code : ") .append(response.getStatus()); return sb.toString(); } output = response.getEntity(String.class); } catch (Exception e) { LOG.error("Fail to check application status: ", e); } return output; }
// Verifies that querying the status of a nonexistent service yields the
// "not found" message rather than an exception.
@Test
void testStatus() {
  String appName = "nonexistent-app";
  try {
    String result = asc.getStatusString(appName);
    // Typo fix in the failure message ("Status reponse don't match").
    assertEquals(" Service " + appName + " not found", result,
        "Status response doesn't match");
  } catch (IOException | YarnException e) {
    fail();
  }
}
// Convenience overload: truncates each line of the message to the default
// MAX_TRUNCATED_LENGTH limit.
public static String truncateMessageLineLength(Object message) { return truncateMessageLineLength(message, MAX_TRUNCATED_LENGTH); }
@Test public void truncateLongLines() throws Exception { for (int i = 0; i < 20; i++) { String s = ""; // generate lines both short and long int lines = ThreadLocalRandom.current().nextInt(5, 10); for (int j = 0; j < lines; j++) { s += "\n"; if (j % 2 == 0) { s += CommonUtils.randomAlphaNumString( ThreadLocalRandom.current().nextInt(1, LogUtils.MAX_TRUNCATED_LENGTH + 1)); } else { s = CommonUtils.randomAlphaNumString(ThreadLocalRandom.current() .nextInt(LogUtils.MAX_TRUNCATED_LENGTH + 1, 2 + LogUtils.MAX_TRUNCATED_LENGTH)); } } String truncated = LogUtils.truncateMessageLineLength(s); String[] expectedLines = s.split("\n"); String[] actualLines = truncated.split("\n"); assertEquals(expectedLines.length, actualLines.length); // check each line for (int j = 0; j < expectedLines.length; j++) { if (expectedLines[j].length() <= LogUtils.MAX_TRUNCATED_LENGTH) { assertEquals(expectedLines[j], actualLines[j]); } else { assertTrue(actualLines[j] .startsWith(expectedLines[j].substring(0, LogUtils.MAX_TRUNCATED_LENGTH) + " ...")); } } } }
/**
 * Delegates the managed-groups SQL filter to the active managed-instance
 * service, or throws the not-managed exception when no service applies.
 */
@Override
public String getManagedGroupsSqlFilter(boolean filterByManaged) {
    return findManagedInstanceService()
        .map(service -> service.getManagedGroupsSqlFilter(filterByManaged))
        .orElseThrow(() -> NOT_MANAGED_INSTANCE_EXCEPTION);
}
// With one never-managed and one always-managed service registered, the call must
// be routed to the managed one and its answer returned untouched.
@Test public void getManagedGroupsSqlFilter_delegatesToRightService_andPropagateAnswer() { AlwaysManagedInstanceService alwaysManagedInstanceService = new AlwaysManagedInstanceService(); DelegatingManagedServices managedInstanceService = new DelegatingManagedServices(Set.of(new NeverManagedInstanceService(), alwaysManagedInstanceService)); assertThat(managedInstanceService.getManagedGroupsSqlFilter(true)).isNotNull().isEqualTo(alwaysManagedInstanceService.getManagedGroupsSqlFilter( true)); }
// Resolves attributes for a B2 path: EMPTY for root; pending-large-upload size for
// upload placeholders; bucket metadata for containers; otherwise file info looked up
// by version id (with one retry after a cache reset on NotfoundException). A latest
// version carrying a hide marker is reported as not found unless an explicit
// version id was requested.
@Override public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException { if(file.isRoot()) { return PathAttributes.EMPTY; } if(file.getType().contains(Path.Type.upload)) { // Pending large file upload final Write.Append append = new B2LargeUploadService(session, fileid, new B2WriteFeature(session, fileid)).append(file, new TransferStatus()); if(append.append) { return new PathAttributes().withSize(append.offset); } return PathAttributes.EMPTY; } if(containerService.isContainer(file)) { try { final B2BucketResponse info = session.getClient().listBucket(file.getName()); if(null == info) { throw new NotfoundException(file.getAbsolute()); } return this.toAttributes(info); } catch(B2ApiException e) { throw new B2ExceptionMappingService(fileid).map("Failure to read attributes of {0}", e, file); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map(e); } } else { final String id = fileid.getVersionId(file); if(null == id) { return PathAttributes.EMPTY; } B2FileResponse response; try { response = this.findFileInfo(file, id); } catch(NotfoundException e) { // Try with reset cache after failure finding node id response = this.findFileInfo(file, fileid.getVersionId(file)); } final PathAttributes attr = this.toAttributes(response); if(attr.isDuplicate()) { // Throw failure if latest version has hide marker set and lookup was without explicit version if(StringUtils.isBlank(file.attributes().getVersionId())) { if(log.isDebugEnabled()) { log.debug(String.format("Latest version of %s is duplicate", file)); } throw new NotfoundException(file.getAbsolute()); } } return attr; } }
// Creates a directory, verifies non-empty attributes are found for it, checks that
// looking it up as a file type fails with NotfoundException, then cleans up.
@Test public void testFindDirectory() throws Exception { final B2VersionIdProvider fileid = new B2VersionIdProvider(session); final Path bucket = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path directory = new B2DirectoryFeature(session, fileid).mkdir(new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final B2AttributesFinderFeature f = new B2AttributesFinderFeature(session, fileid); final PathAttributes attributes = f.find(directory); assertNotNull(attributes); assertNotEquals(PathAttributes.EMPTY, attributes); // Test wrong type try { f.find(new Path(directory.getAbsolute(), EnumSet.of(Path.Type.file))); fail(); } catch(NotfoundException e) { // Expected } new B2DeleteFeature(session, fileid).delete(Collections.singletonList(directory), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
// Determines the Kafka offset ranges to read next: an unbounded source limit
// (Long.MAX_VALUE) falls back to the configured max-events cap, otherwise the
// caller's limit is used verbatim; the configured minimum partition count is then
// applied by the delegated overload.
public OffsetRange[] getNextOffsetRanges(Option<String> lastCheckpointStr, long sourceLimit, HoodieIngestionMetrics metrics) { // Come up with final set of OffsetRanges to read (account for new partitions, limit number of events) long maxEventsToReadFromKafka = getLongWithAltKeys(props, KafkaSourceConfig.MAX_EVENTS_FROM_KAFKA_SOURCE); long numEvents; if (sourceLimit == Long.MAX_VALUE) { numEvents = maxEventsToReadFromKafka; LOG.info("SourceLimit not configured, set numEvents to default value : {}", maxEventsToReadFromKafka); } else { numEvents = sourceLimit; } long minPartitions = getLongWithAltKeys(props, KafkaSourceConfig.KAFKA_SOURCE_MIN_PARTITIONS); LOG.info("getNextOffsetRanges set config {} to {}", KafkaSourceConfig.KAFKA_SOURCE_MIN_PARTITIONS.key(), minPartitions); return getNextOffsetRanges(lastCheckpointStr, numEvents, minPartitions, metrics); }
// For a single-partition topic: without minPartitions one range of 300 events is
// produced; with minPartitions=2 the same 300 events are split into two ranges.
@Test public void testGetNextOffsetRangesWithMinPartitionsForSinglePartition() { HoodieTestDataGenerator dataGenerator = new HoodieTestDataGenerator(); testUtils.createTopic(testTopicName, 1); testUtils.sendMessages(testTopicName, Helpers.jsonifyRecords(dataGenerator.generateInserts("000", 1000))); TypedProperties props = getConsumerConfigs("earliest", KAFKA_CHECKPOINT_TYPE_STRING); // default no minPartition set KafkaOffsetGen kafkaOffsetGen = new KafkaOffsetGen(props); OffsetRange[] nextOffsetRanges = kafkaOffsetGen.getNextOffsetRanges(Option.empty(), 300, metrics); assertEquals(0, nextOffsetRanges[0].fromOffset()); assertEquals(300, nextOffsetRanges[0].untilOffset()); props.put(KafkaSourceConfig.KAFKA_SOURCE_MIN_PARTITIONS.key(), 2L); // just to check warn-message manually if props contains deprecated config props.put(KafkaSourceConfig.KAFKA_FETCH_PARTITION_TIME_OUT.key(), 1L); kafkaOffsetGen = new KafkaOffsetGen(props); nextOffsetRanges = kafkaOffsetGen.getNextOffsetRanges(Option.empty(), 300, metrics); assertEquals(0, nextOffsetRanges[0].fromOffset()); assertEquals(150, nextOffsetRanges[0].untilOffset()); assertEquals(150, nextOffsetRanges[1].fromOffset()); assertEquals(300, nextOffsetRanges[1].untilOffset()); }
// Translates a Beam pipeline into a Flink job: forces streaming mode when any
// unbounded PCollection is present, stages files for remote execution, picks the
// stream or batch environment (warning if checkpointing is off for unbounded
// sources), applies default transform overrides, then runs the translator.
public void translate(Pipeline pipeline) { this.flinkBatchEnv = null; this.flinkStreamEnv = null; final boolean hasUnboundedOutput = PipelineTranslationModeOptimizer.hasUnboundedOutput(pipeline); if (hasUnboundedOutput) { LOG.info("Found unbounded PCollection. Switching to streaming execution."); options.setStreaming(true); } // Staged files need to be set before initializing the execution environments prepareFilesToStageForRemoteClusterExecution(options); FlinkPipelineTranslator translator; if (options.isStreaming() || options.getUseDataStreamForBatch()) { this.flinkStreamEnv = FlinkExecutionEnvironments.createStreamExecutionEnvironment(options); if (hasUnboundedOutput && !flinkStreamEnv.getCheckpointConfig().isCheckpointingEnabled()) { LOG.warn( "UnboundedSources present which rely on checkpointing, but checkpointing is disabled."); } translator = new FlinkStreamingPipelineTranslator(flinkStreamEnv, options, options.isStreaming()); if (!options.isStreaming()) { flinkStreamEnv.setRuntimeMode(RuntimeExecutionMode.BATCH); } } else { this.flinkBatchEnv = FlinkExecutionEnvironments.createBatchExecutionEnvironment(options); translator = new FlinkBatchPipelineTranslator(flinkBatchEnv, options); } // Transform replacements need to receive the finalized PipelineOptions // including execution mode (batch/streaming) and parallelism. pipeline.replaceAll(FlinkTransformOverrides.getDefaultOverrides(options)); translator.translate(pipeline); }
// In both batch and streaming modes, translate() must apply the full default set
// of transform overrides to the pipeline.
@Test public void shouldUseTransformOverrides() { boolean[] testParameters = {true, false}; for (boolean streaming : testParameters) { FlinkPipelineOptions options = getDefaultPipelineOptions(); options.setStreaming(streaming); options.setRunner(FlinkRunner.class); FlinkPipelineExecutionEnvironment flinkEnv = new FlinkPipelineExecutionEnvironment(options); Pipeline p = Mockito.spy(Pipeline.create(options)); flinkEnv.translate(p); ArgumentCaptor<ImmutableList> captor = ArgumentCaptor.forClass(ImmutableList.class); Mockito.verify(p).replaceAll(captor.capture()); ImmutableList<PTransformOverride> overridesList = captor.getValue(); assertThat(overridesList.isEmpty(), is(false)); assertThat( overridesList.size(), is(FlinkTransformOverrides.getDefaultOverrides(options).size())); } }
// Forwards a job-completion report to the elastic agent extension, with debug
// logging before and after the call.
public void reportJobCompletion(String pluginId, String elasticAgentId, JobIdentifier jobIdentifier, Map<String, String> elasticProfileConfiguration, Map<String, String> clusterProfileConfiguration) { LOGGER.debug("Processing report job completion for plugin: {} for elasticAgentId: {} for job: {} with configuration: {} in cluster: {}", pluginId, elasticAgentId, jobIdentifier, elasticProfileConfiguration, clusterProfileConfiguration); extension.reportJobCompletion(pluginId, elasticAgentId, jobIdentifier, elasticProfileConfiguration, clusterProfileConfiguration); LOGGER.debug("Done processing report job completion for plugin: {} for elasticAgentId: {} for job: {} with configuration: {} in cluster: {}", pluginId, elasticAgentId, jobIdentifier, elasticProfileConfiguration, clusterProfileConfiguration); }
// Verifies the registry forwards the report exactly once with unmodified arguments
// and performs no other extension interaction.
@Test public void shouldTalkToExtensionToReportJobCompletion() { final JobIdentifier jobIdentifier = new JobIdentifier(); final String elasticAgentId = "ea_1"; final Map<String, String> elasticProfileConfiguration = Map.of("Image", "alpine:latest"); final Map<String, String> clusterProfileConfiguration = Map.of("ServerURL", "https://example.com/go"); elasticAgentPluginRegistry.reportJobCompletion(PLUGIN_ID, elasticAgentId, jobIdentifier, elasticProfileConfiguration, clusterProfileConfiguration); verify(elasticAgentExtension, times(1)).reportJobCompletion(PLUGIN_ID, elasticAgentId, jobIdentifier, elasticProfileConfiguration, clusterProfileConfiguration); verifyNoMoreInteractions(elasticAgentExtension); }
// Validates the setting (standard key/value constraints) before storing it.
@Override public Long put(char key, Long value) { verifyStandardSetting(key, value); return super.put(key, value); }
// Keys above Short.MAX_VALUE (unsigned 16-bit range) must be accepted and retrievable.
@Test public void settingsShouldSupportUnsignedShort() { char key = (char) (Short.MAX_VALUE + 1); settings.put(key, (Long) 123L); assertEquals(123L, (long) settings.get(key)); }
/**
 * Returns the execution with the given id within the resolved tenant, or null
 * (HTTP empty body) when no such execution exists.
 */
@ExecuteOn(TaskExecutors.IO)
@Get(uri = "/{executionId}")
@Operation(tags = {"Executions"}, summary = "Get an execution")
public Execution get(
    @Parameter(description = "The execution id") @PathVariable String executionId
) {
    String tenantId = tenantService.resolveTenant();
    return executionRepository.findById(tenantId, executionId).orElse(null);
}
// Integration test (only when SECRET_WEBHOOK_KEY is set): triggering a webhook whose
// key comes from a secret must start an execution with a non-null id.
@Test @EnabledIfEnvironmentVariable(named = "SECRET_WEBHOOK_KEY", matches = ".*") void webhookDynamicKeyFromASecret() { Execution execution = client.toBlocking().retrieve( GET( "/api/v1/executions/webhook/" + TESTS_FLOW_NS + "/webhook-secret-key/secretKey" ), Execution.class ); assertThat(execution, notNullValue()); assertThat(execution.getId(), notNullValue()); }
/**
 * Serializes outbound XMPP messages to UTF-8 XML bytes: stream events via
 * toXml(), packets via toXML(). A message of neither type trips checkNotNull
 * and is rejected with a NullPointerException.
 */
@Override
protected void encode(ChannelHandlerContext ctx, Object msg, ByteBuf out) throws Exception {
    byte[] encoded = null;
    if (msg instanceof XmppStreamEvent) {
        XmppStreamEvent event = (XmppStreamEvent) msg;
        logger.info("SENDING: {}", event.toXml());
        encoded = event.toXml().getBytes(CharsetUtil.UTF_8);
    }
    if (msg instanceof Packet) {
        Packet packet = (Packet) msg;
        logger.info("SENDING /n, {}", packet.toString());
        encoded = packet.toXML().getBytes(CharsetUtil.UTF_8);
    }
    out.writeBytes(checkNotNull(encoded));
}
// Encoding an IQ packet must write bytes into the output buffer.
@Test public void testEncode() throws Exception { Packet iq = new IQ(); ByteBuf buffer = Unpooled.buffer(); xmppEncoder.encode(channelHandlerContext, iq, buffer); assertThat(buffer.hasArray(), Matchers.is(true)); }
public static String formatHostnameForHttp(InetSocketAddress addr) { String hostString = NetUtil.getHostname(addr); if (NetUtil.isValidIpV6Address(hostString)) { if (!addr.isUnresolved()) { hostString = NetUtil.toAddressString(addr.getAddress()); } else if (hostString.charAt(0) == '[' && hostString.charAt(hostString.length() - 1) == ']') { // If IPv6 address already contains brackets, let's return as is. return hostString; } return '[' + hostString + ']'; } return hostString; }
// An unresolved IPv4 literal must be returned unchanged (no brackets).
@Test public void testIpv4Unresolved() { InetSocketAddress socketAddress = InetSocketAddress.createUnresolved("10.0.0.1", 8080); assertEquals("10.0.0.1", HttpUtil.formatHostnameForHttp(socketAddress)); }
// Hashes the pool class name together with a concatenation of all local
// property key/value pairs.
// NOTE(review): the result depends on the iteration order of
// getAllLocalProperties() — presumably an order-preserving map; verify, since an
// unordered map would make equal objects hash differently.
@Override public int hashCode() { StringBuilder stringBuilder = new StringBuilder(); for (Entry<String, Object> entry : getAllLocalProperties().entrySet()) { stringBuilder.append(entry.getKey()).append(entry.getValue()); } return Objects.hashCode(poolClassName, stringBuilder.toString()); }
// Properties differing only in data source class name should hash differently.
@Test void assertDifferentHashCodeWithDifferentDataSourceClassName() { assertThat(new DataSourcePoolProperties("FooDataSourceClass", createUserProperties("foo")).hashCode(), not(new DataSourcePoolProperties("BarDataSourceClass", createUserProperties("foo")).hashCode())); }
/**
 * Classifies a send request by its message properties: TRANSACTION when the
 * prepared-transaction flag parses true, FIFO when a sharding key is present,
 * DELAY when any delay/timer property is set, otherwise NORMAL.
 */
public static TopicMessageType getMessageType(SendMessageRequestHeader requestHeader) {
    Map<String, String> properties = MessageDecoder.string2messageProperties(requestHeader.getProperties());
    if (Boolean.parseBoolean(properties.get(MessageConst.PROPERTY_TRANSACTION_PREPARED))) {
        return TopicMessageType.TRANSACTION;
    }
    if (properties.containsKey(MessageConst.PROPERTY_SHARDING_KEY)) {
        return TopicMessageType.FIFO;
    }
    boolean delayed = properties.get("__STARTDELIVERTIME") != null
        || properties.get(MessageConst.PROPERTY_DELAY_TIME_LEVEL) != null
        || properties.get(MessageConst.PROPERTY_TIMER_DELIVER_MS) != null
        || properties.get(MessageConst.PROPERTY_TIMER_DELAY_SEC) != null
        || properties.get(MessageConst.PROPERTY_TIMER_DELAY_MS) != null;
    return delayed ? TopicMessageType.DELAY : TopicMessageType.NORMAL;
}
// A request carrying the timer-deliver-ms property must be classified as DELAY.
@Test public void testGetMessageTypeAsDeliverMS() { SendMessageRequestHeader requestHeader = new SendMessageRequestHeader(); Map<String, String> map = new HashMap<>(); map.put(MessageConst.PROPERTY_TIMER_DELIVER_MS, "10"); requestHeader.setProperties(MessageDecoder.messageProperties2String(map)); TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader); assertThat(TopicMessageType.DELAY).isEqualTo(result); }
/**
 * Loads the UuidGenerator to use: the class selected in options when one is
 * configured, otherwise the single generator discovered on the classpath (or
 * the default when none is discovered).
 */
UuidGenerator loadUuidGenerator() {
    ClassLoader classLoader = classLoaderSupplier.get();
    ServiceLoader<UuidGenerator> loader = ServiceLoader.load(UuidGenerator.class, classLoader);
    Class<? extends UuidGenerator> selected = options.getUuidGeneratorClass();
    if (selected == null) {
        return loadSingleUuidGeneratorOrDefault(loader);
    }
    return loadSelectedUuidGenerator(loader, selected);
}
// When options select RandomUuidGenerator explicitly, the loader must return that type.
@Test void test_case_3() { Options options = () -> RandomUuidGenerator.class; UuidGeneratorServiceLoader loader = new UuidGeneratorServiceLoader( UuidGeneratorServiceLoaderTest.class::getClassLoader, options); assertThat(loader.loadUuidGenerator(), instanceOf(RandomUuidGenerator.class)); }
/**
 * Convenience overload: a dummy stream seeded from the current thread's random
 * source that keeps producing data for up to 60 seconds from now.
 */
static InputStream createDummyInputStream(int payloadSize) {
    Instant deadline = Instant.now().plusSeconds(60);
    return createDummyInputStream(payloadSize,
                                  ThreadLocalRandom.current(),
                                  () -> Instant.now().isBefore(deadline));
}
// With a fixed seed and a three-document budget, the dummy stream must emit this
// exact deterministic JSON feed.
@Test void testDummyStream() throws IOException { AtomicInteger count = new AtomicInteger(3); InputStream in = CliClient.createDummyInputStream(4, new Random(0), () -> count.decrementAndGet() >= 0); byte[] buffer = new byte[1 << 20]; int offset = 0, read; while ((read = in.read(buffer, offset, buffer.length - offset)) >= 0) offset += read; assertEquals("{ \"put\": \"id:test:test::ssxvnjhp\", \"fields\": { \"test\": \"dqdx\" } }\n" + "{ \"put\": \"id:test:test::vcrastvy\", \"fields\": { \"test\": \"bcwv\" } }\n" + "{ \"put\": \"id:test:test::mgnykrxv\", \"fields\": { \"test\": \"zxkg\" } }\n", new String(buffer, 0, offset, StandardCharsets.UTF_8)); }
/**
 * Builds a COUNT(*) SQL statement for this mapper's table; when filter columns
 * are supplied, a WHERE clause is appended for them. The generated SQL always
 * carries a trailing space after the table name, matching the original output.
 */
@Override
public String count(List<String> where) {
    // Idiom fix: the unused "method" temp variable is removed and the
    // size() == 0 check replaced with isEmpty(); output is byte-identical.
    StringBuilder sql = new StringBuilder("SELECT COUNT(*) FROM ")
            .append(getTableName())
            .append(" ");
    if (null == where || where.isEmpty()) {
        return sql.toString();
    }
    appendWhereClause(where, sql);
    return sql.toString();
}
// A null filter list yields the bare count statement (note the trailing space).
@Test void testCountAll() { String sql = abstractMapper.count(null); assertEquals("SELECT COUNT(*) FROM tenant_info ", sql); }
// Creates the next span from extraction results: a full extracted context becomes a
// child; an extracted trace-id context starts a span reusing that trace id; otherwise
// a new span is derived from the current in-progress context (inheriting its sampling
// flags, trace ids and extra state) or, absent one, from the extracted sampling flags.
public Span nextSpan(TraceContextOrSamplingFlags extracted) { if (extracted == null) throw new NullPointerException("extracted == null"); TraceContext context = extracted.context(); if (context != null) return newChild(context); TraceIdContext traceIdContext = extracted.traceIdContext(); if (traceIdContext != null) { return _toSpan(null, decorateContext( InternalPropagation.instance.flags(extracted.traceIdContext()), traceIdContext.traceIdHigh(), traceIdContext.traceId(), 0L, 0L, 0L, extracted.extra() )); } SamplingFlags samplingFlags = extracted.samplingFlags(); List<Object> extra = extracted.extra(); TraceContext parent = currentTraceContext.get(); int flags; long traceIdHigh = 0L, traceId = 0L, localRootId = 0L, spanId = 0L; if (parent != null) { // At this point, we didn't extract trace IDs, but do have a trace in progress. Since typical // trace sampling is up front, we retain the decision from the parent. flags = InternalPropagation.instance.flags(parent); traceIdHigh = parent.traceIdHigh(); traceId = parent.traceId(); localRootId = parent.localRootId(); spanId = parent.spanId(); extra = concat(extra, parent.extra()); } else { flags = InternalPropagation.instance.flags(samplingFlags); } return _toSpan(parent, decorateContext(flags, traceIdHigh, traceId, localRootId, spanId, 0L, extra)); }
// Verifies that when a full TraceContext was extracted, nextSpan creates a child carrying
// the extracted trace id (1) and parent id (2).
@Test void nextSpan_extractedTraceContext() { TraceContextOrSamplingFlags extracted = TraceContextOrSamplingFlags.create(context); assertThat(tracer.nextSpan(extracted).context()) .extracting(TraceContext::traceId, TraceContext::parentId) .containsExactly(1L, 2L); }
// Thin delegation: forwards the statement and context to the helper together with this
// interpreter's session.
@Override public InterpreterResult interpret(String st, InterpreterContext context) { return helper.interpret(session, st, context); }
// Verifies that a @prepare directive followed by a @bind with no bound values executes the
// prepared statement as-is and renders the three expected result rows as tab-separated text.
@Test void should_execute_bound_statement_with_no_bound_value() { // Given String queries = "@prepare[select_no_bound_value]=SELECT name,country,styles " + "FROM zeppelin.artists LIMIT 3\n" + "@bind[select_no_bound_value]"; // When final InterpreterResult actual = interpreter.interpret(queries, intrContext); // Then assertEquals(Code.SUCCESS, actual.code()); assertEquals("name\tcountry\tstyles\n" + "Bogdan Raczynski\tPoland\t[Dance, Electro]\n" + "Krishna Das\tUSA\t[Unknown]\n" + "Sheryl Crow\tUSA\t[Classic, Rock, Country, Blues, Pop, Folk]\n", actual.message().get(0).getData()); }
// Deprecated pass-through: resolves secret interpolations in the given string via the
// context's SecretSourceResolver. Marked @Restricted(DoNotUse) — callers should use the
// resolver on the context directly.
@Deprecated @Restricted(DoNotUse.class) public static String resolve(ConfigurationContext context, String toInterpolate) { return context.getSecretSourceResolver().resolve(toInterpolate); }
// Verifies that an interpolation with an empty variable name falls back to the ":-" default.
@Test public void resolve_nothingDefault() { assertThat(resolve("${:-default}"), equalTo("default")); }
/**
 * Decides whether a file is covered by the configured inclusion patterns.
 *
 * <p>MAIN files are checked against the main inclusions, all other types against the test
 * inclusions. An empty pattern set means "include everything"; otherwise the file is
 * included if any pattern matches its absolute or relative path.
 *
 * @param absolutePath absolute path of the candidate file
 * @param relativePath path of the candidate file relative to the project base
 * @param type         whether the file is a MAIN or TEST input
 * @return {@code true} if the file is included
 */
public boolean isIncluded(Path absolutePath, Path relativePath, InputFile.Type type) {
    PathPattern[] patterns = InputFile.Type.MAIN == type ? mainInclusionsPattern : testInclusionsPattern;
    // No configured pattern: everything is included.
    boolean included = patterns.length == 0;
    for (int i = 0; !included && i < patterns.length; i++) {
        included = patterns[i].match(absolutePath, relativePath);
    }
    return included;
}
// Verifies that the legacy sonar.tests.inclusions alias is still honored for filtering
// (FooDao.java matches, Foo.java does not) while emitting exactly one deprecation warning
// both to the log and to the analysis warnings sink.
@Test public void should_handleAliasForTestInclusionsProperty() { settings.setProperty(PROJECT_TESTS_INCLUSIONS_PROPERTY, "**/*Dao.java"); AbstractExclusionFilters filter = new AbstractExclusionFilters(analysisWarnings, settings.asConfig()::getStringArray) { }; IndexedFile indexedFile = new DefaultIndexedFile("foo", moduleBaseDir, "test/main/java/com/mycompany/FooDao.java", null); assertThat(filter.isIncluded(indexedFile.path(), Paths.get(indexedFile.relativePath()), InputFile.Type.TEST)).isTrue(); indexedFile = new DefaultIndexedFile("foo", moduleBaseDir, "test/main/java/com/mycompany/Foo.java", null); assertThat(filter.isIncluded(indexedFile.path(), Paths.get(indexedFile.relativePath()), InputFile.Type.TEST)).isFalse(); String expectedWarn = "Use of sonar.tests.inclusions detected. " + "While being taken into account, the only supported property is sonar.test.inclusions. Consider updating your configuration."; assertThat(logTester.logs(Level.WARN)).hasSize(1) .contains(expectedWarn); verify(analysisWarnings).addUnique(expectedWarn); }
// Returns the shared default UNIX configuration, lazily initialized via the holder idiom.
public static Configuration unix() { return UnixHolder.UNIX; }
// Verifies the defaults of Configuration.unix(): single "/" root, working directory
// "/work", 4 GiB store, only the "basic" attribute view, and case-sensitive names
// (creating both /foo and /FOO succeeds).
@Test public void testFileSystemForDefaultUnixConfiguration() throws IOException { FileSystem fs = Jimfs.newFileSystem(Configuration.unix()); assertThat(fs.getRootDirectories()) .containsExactlyElementsIn(ImmutableList.of(fs.getPath("/"))) .inOrder(); assertThatPath(fs.getPath("").toRealPath()).isEqualTo(fs.getPath("/work")); assertThat(Iterables.getOnlyElement(fs.getFileStores()).getTotalSpace()) .isEqualTo(4L * 1024 * 1024 * 1024); assertThat(fs.supportedFileAttributeViews()).containsExactly("basic"); Files.createFile(fs.getPath("/foo")); Files.createFile(fs.getPath("/FOO")); }
// Convenience overload: derives the group name from the team's organization id and team id.
public static String toGroupName(GsonTeam team) { return toGroupName(team.getOrganizationId(), team.getId()); }
// Verifies that a GsonTeam maps to the "organization/team-id" group-name format.
@Test public void toGroupName_withGsonTeam_returnsCorrectGroupName() { GsonTeam team = new GsonTeam("team-1", new GsonTeam.GsonOrganization("Org1")); assertThat(GithubTeamConverter.toGroupName(team)).isEqualTo("Org1/team-1"); }
// Convenience overload: sends the record with no completion callback.
@Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return this.send(record, null); }
// Verifies that the traced producer reports a PRODUCER-kind span tagged with exactly the
// kafka.topic and kafka.key of the sent record.
@Test void send_should_tag_topic_and_key() { tracingProducer.send(new ProducerRecord<>(TEST_TOPIC, TEST_KEY, TEST_VALUE)); mockProducer.completeNext(); MutableSpan producerSpan = spans.get(0); assertThat(producerSpan.kind()).isEqualTo(PRODUCER); assertThat(producerSpan.tags()) .containsOnly(entry("kafka.topic", TEST_TOPIC), entry("kafka.key", TEST_KEY)); }
/**
 * Validates the new-code-definition request parameters.
 *
 * <p>A definition value only makes sense together with a definition type, so a non-null
 * value paired with a null type is rejected. All other combinations (both null, type only,
 * or both present) are accepted.
 *
 * @throws IllegalArgumentException if a value is supplied without a type
 */
public static void checkNewCodeDefinitionParam(@Nullable String newCodeDefinitionType, @Nullable String newCodeDefinitionValue) {
    boolean valueWithoutType = newCodeDefinitionValue != null && newCodeDefinitionType == null;
    if (valueWithoutType) {
        throw new IllegalArgumentException("New code definition type is required when new code definition value is provided");
    }
}
// Verifies that supplying a new-code-definition value without a type raises
// IllegalArgumentException with the documented message.
@Test public void checkNewCodeDefinitionParam_throw_IAE_if_newCodeDefinitionValue_is_provided_without_newCodeDefinitionType() { assertThatThrownBy(() -> newCodeDefinitionResolver.checkNewCodeDefinitionParam(null, "anyvalue")) .isInstanceOf(IllegalArgumentException.class) .hasMessageContaining("New code definition type is required when new code definition value is provided"); }
// Downloads a file from Brick: first resolves the file's download URI through the Files API,
// then issues an HTTP GET against it. When the transfer is an append/resume, a Range header
// is added (open-ended if the end offset is unknown) and compression is disabled so byte
// offsets line up. 200 and 206 responses are wrapped in a release-on-close stream; any other
// status, API error, or I/O failure is mapped to a BackgroundException.
@Override public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException { try { final FileEntity entity = new FilesApi(new BrickApiClient(session)) .download(StringUtils.removeStart(file.getAbsolute(), String.valueOf(Path.DELIMITER)), null, null, null, null); final HttpUriRequest request = new HttpGet(entity.getDownloadUri()); if(status.isAppend()) { final HttpRange range = HttpRange.withStatus(status); final String header; if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) { header = String.format("bytes=%d-", range.getStart()); } else { header = String.format("bytes=%d-%d", range.getStart(), range.getEnd()); } if(log.isDebugEnabled()) { log.debug(String.format("Add range header %s for file %s", header, file)); } request.addHeader(new BasicHeader(HttpHeaders.RANGE, header)); // Disable compression request.addHeader(new BasicHeader(HttpHeaders.ACCEPT_ENCODING, "identity")); } final HttpResponse response = session.getClient().execute(request); switch(response.getStatusLine().getStatusCode()) { case HttpStatus.SC_OK: case HttpStatus.SC_PARTIAL_CONTENT: return new HttpMethodReleaseInputStream(response, status); default: throw new DefaultHttpResponseExceptionMappingService().map("Download {0} failed", new HttpResponseException( response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()), file); } } catch(ApiException e) { throw new BrickExceptionMappingService().map("Download {0} failed", e, file); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map("Download {0} failed", e, file); } }
// Integration test: uploads ~32 KiB of random content, then opens the read feature twice —
// once reading a single byte before closing, once closing immediately — to verify that an
// interrupted/early-closed download does not break subsequent reads. Cleans up the room
// afterwards.
@Test public void testReadInterrupt() throws Exception { final byte[] content = RandomUtils.nextBytes(32769); final TransferStatus writeStatus = new TransferStatus(); writeStatus.setLength(content.length); final Path room = new BrickDirectoryFeature(session).mkdir( new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final Path test = new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); final BrickMultipartWriteFeature writer = new BrickMultipartWriteFeature(session); final HttpResponseOutputStream<FileEntity> out = writer.write(test, writeStatus, new DisabledConnectionCallback()); assertNotNull(out); new StreamCopier(writeStatus, writeStatus).transfer(new ByteArrayInputStream(content), out); // Unknown length in status final TransferStatus readStatus = new TransferStatus(); // Read a single byte { final InputStream in = new BrickReadFeature(session).read(test, readStatus, new DisabledConnectionCallback()); assertNotNull(in.read()); in.close(); } { final InputStream in = new BrickReadFeature(session).read(test, readStatus, new DisabledConnectionCallback()); assertNotNull(in); in.close(); } new BrickDeleteFeature(session).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
// Accessor for the exclude-path configuration section.
public ExcludePath getExclude() { return exclude; }
// Verifies the ExcludePath section round-trips its setters: enabled flag and (empty) path
// list are readable back and pass the shared notEmptyElements check.
@Test public void testExcludePath() { ShenyuConfig.ExcludePath exclude = config.getExclude(); exclude.setEnabled(true); exclude.setPaths(Collections.emptyList()); List<String> paths = exclude.getPaths(); Boolean enabled = exclude.getEnabled(); notEmptyElements(paths, enabled); }
// Executes a configured KSQL plan via an EngineExecutor bound to the primary context.
// KsqlStatementException is rethrown untouched; any other KsqlException is rewrapped as a
// KsqlStatementException so the failing statement text is attached for the caller.
@Override public ExecuteResult execute(final ServiceContext serviceContext, final ConfiguredKsqlPlan plan, final boolean restoreInProgress) { try { final ExecuteResult result = EngineExecutor .create(primaryContext, serviceContext, plan.getConfig()) .execute(plan.getPlan(), restoreInProgress); return result; } catch (final KsqlStatementException e) { throw e; } catch (final KsqlException e) { // add the statement text to the KsqlException throw new KsqlStatementException( e.getMessage(), e.getMessage(), plan.getPlan().getStatementText(), e.getCause() ); } }
// Verifies that INSERT INTO with a PARTITION BY that changes the key schema is rejected:
// the KsqlStatementException must describe the result/sink schema mismatch and carry the
// offending statement text.
@Test public void shouldThrowOnInsertIntoWithKeyMismatch() { // Given: setupKsqlEngineWithSharedRuntimeEnabled(); execute( serviceContext, ksqlEngine, "create stream bar as select * from orders;", ksqlConfig, emptyMap() ); // When: final KsqlStatementException e = assertThrows( KsqlStatementException.class, () -> KsqlEngineTestUtil.execute( serviceContext, ksqlEngine, "insert into bar select * from orders partition by orderid;", ksqlConfig, Collections.emptyMap() ) ); // Then: assertThat(e, rawMessage(containsString("Incompatible schema between results and sink."))); assertThat(e, rawMessage(containsString("Result schema is `ORDERID` BIGINT KEY, "))); assertThat(e, rawMessage(containsString("Sink schema is `ORDERTIME` BIGINT KEY, "))); assertThat(e, statementText(is( "insert into bar select * from orders partition by orderid;"))); }
// Pre-update validation for CREATE MASK RULE: duplicate rule names are only checked when
// IF NOT EXISTS is absent; algorithm validity is always checked.
@Override public void checkBeforeUpdate(final CreateMaskRuleStatement sqlStatement) { ifNotExists = sqlStatement.isIfNotExists(); if (!ifNotExists) { checkDuplicatedRuleNames(sqlStatement); } checkAlgorithms(sqlStatement); }
// Verifies that creating a mask rule whose name already exists (without IF NOT EXISTS)
// raises DuplicateRuleException.
@Test void assertCheckSQLStatementWithDuplicateMaskRule() { MaskRule rule = mock(MaskRule.class); when(rule.getConfiguration()).thenReturn(getCurrentRuleConfiguration()); executor.setRule(rule); assertThrows(DuplicateRuleException.class, () -> executor.checkBeforeUpdate(createDuplicatedSQLStatement(false, "MD5"))); }
// Delegates to the generic metadata reader, extracting each column's name from the JDBC
// ResultSetMetaData.
@Override public List<String> getColumnNames(Configuration conf) throws HiveJdbcDatabaseAccessException { return getColumnMetadata(conf, ResultSetMetaData::getColumnName); }
// Verifies that an invalid query configured for the accessor surfaces as
// HiveJdbcDatabaseAccessException when column names are requested.
@Test(expected = HiveJdbcDatabaseAccessException.class) public void testGetColumnNames_invalidQuery() throws HiveJdbcDatabaseAccessException { Configuration conf = buildConfiguration(); conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from invalid_strategy"); DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); @SuppressWarnings("unused") List<String> columnNames = accessor.getColumnNames(conf); }
// Encodes a dotted OID string to its DER byte form by delegating to the stream-based
// overload. The buffer is presized heuristically (~one output byte per three input chars);
// an IOException from the in-memory stream is unexpected and rewrapped as Asn1Exception.
public static byte[] encodeObjectIdentifier(String oid) { try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(oid.length() / 3 + 1)) { encodeObjectIdentifier(oid, bos); return bos.toByteArray(); } catch (IOException e) { throw new Asn1Exception("Unexpected IO exception", e); } }
// Verifies multi-byte base-128 encoding of the first OID component pair:
// "2.2096950" -> 40*2 + 2096950 = 2097030 = 0xFF 0xFF 0x06 with continuation bits.
@Test public void encodeObjectIdentifierWithTripleFirst() { assertArrayEquals(new byte[] { (byte) 0xff, (byte) 0xff, 6 }, Asn1Utils.encodeObjectIdentifier("2.2096950")); }
// Resolves the OpenStack region for a file via its container: if the container's region
// attribute is unknown, the location feature is queried first; otherwise the attribute's
// region is resolved directly.
public Region lookup(final Path file) throws BackgroundException { final Path container = containerService.getContainer(file); if(Location.unknown.equals(new SwiftLocationFeature.SwiftRegion(container.attributes().getRegion()))) { return this.lookup(location.getLocation(container)); } return this.lookup(new SwiftLocationFeature.SwiftRegion(container.attributes().getRegion())); }
// Verifies that looking up the unknown location yields the account's default region (DFW)
// with CDN and storage endpoints populated.
@Test public void testLookupDefault() throws Exception { final Region lookup = new SwiftRegionService(session).lookup(Location.unknown); assertTrue(lookup.isDefault()); assertEquals("DFW", lookup.getRegionId()); assertNotNull(lookup.getCDNManagementUrl()); assertNotNull(lookup.getStorageUrl()); }
// FEEL max(list): null/empty lists are invalid-parameter errors; otherwise returns the
// maximum element. A ClassCastException from the intercepting comparator (raised when
// elements are mutually non-comparable) is converted to an invalid-parameter error rather
// than propagating.
public FEELFnResult<Object> invoke(@ParameterName("list") List list) { if ( list == null || list.isEmpty() ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null or empty")); } else { try { return FEELFnResult.ofResult(Collections.max(list, new InterceptNotComparableComparator())); } catch (ClassCastException e) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "contains items that are not comparable")); } } }
// Verifies max() over string lists: singleton returns its element, and "c" wins regardless
// of the input ordering.
@Test void invokeListOfStrings() { FunctionTestUtil.assertResult(maxFunction.invoke(Collections.singletonList("a")), "a"); FunctionTestUtil.assertResult(maxFunction.invoke(Arrays.asList("a", "b", "c")), "c"); FunctionTestUtil.assertResult(maxFunction.invoke(Arrays.asList("a", "c", "b")), "c"); FunctionTestUtil.assertResult(maxFunction.invoke(Arrays.asList("c", "a", "b")), "c"); }
// REST endpoint returning the full application registry. Behavior:
//  - optional "regions" query param selects remote regions (lower-cased and sorted so the
//    response cache key is order-independent) and switches the monitor counter used;
//  - access is refused with 403 while the registry is not ready to serve traffic;
//  - content type is chosen from the Accept header (JSON only when it mentions it,
//    XML otherwise) and a gzip body is served when Accept-Encoding allows it;
//  - responses come from the response cache keyed by (ALL_APPS, key type, version,
//    eureka accept, regions). CurrentRequestVersion is set for the call and removed
//    in all paths after the response is built.
@GET public Response getContainers(@PathParam("version") String version, @HeaderParam(HEADER_ACCEPT) String acceptHeader, @HeaderParam(HEADER_ACCEPT_ENCODING) String acceptEncoding, @HeaderParam(EurekaAccept.HTTP_X_EUREKA_ACCEPT) String eurekaAccept, @Context UriInfo uriInfo, @Nullable @QueryParam("regions") String regionsStr) { boolean isRemoteRegionRequested = null != regionsStr && !regionsStr.isEmpty(); String[] regions = null; if (!isRemoteRegionRequested) { EurekaMonitors.GET_ALL.increment(); } else { regions = regionsStr.toLowerCase().split(","); Arrays.sort(regions); // So we don't have different caches for same regions queried in different order. EurekaMonitors.GET_ALL_WITH_REMOTE_REGIONS.increment(); } // Check if the server allows the access to the registry. The server can // restrict access if it is not // ready to serve traffic depending on various reasons. if (!registry.shouldAllowAccess(isRemoteRegionRequested)) { return Response.status(Status.FORBIDDEN).build(); } CurrentRequestVersion.set(Version.toEnum(version)); KeyType keyType = Key.KeyType.JSON; String returnMediaType = MediaType.APPLICATION_JSON; if (acceptHeader == null || !acceptHeader.contains(HEADER_JSON_VALUE)) { keyType = Key.KeyType.XML; returnMediaType = MediaType.APPLICATION_XML; } Key cacheKey = new Key(Key.EntityType.Application, ResponseCacheImpl.ALL_APPS, keyType, CurrentRequestVersion.get(), EurekaAccept.fromString(eurekaAccept), regions ); Response response; if (acceptEncoding != null && acceptEncoding.contains(HEADER_GZIP_VALUE)) { response = Response.ok(responseCache.getGZIP(cacheKey)) .header(HEADER_CONTENT_ENCODING, HEADER_GZIP_VALUE) .header(HEADER_CONTENT_TYPE, returnMediaType) .build(); } else { response = Response.ok(responseCache.get(cacheKey)) .build(); } CurrentRequestVersion.remove(); logger.debug("Sent registry information to client."); return response; }
// Verifies the "compact" (mini) registry representation: decoded apps must NOT fully equal
// the originals (fields are stripped), but names match and each instance passes the
// mini-equality comparison.
@Test public void testMiniAppsGet() throws Exception { Response response = applicationsResource.getContainers( Version.V2.name(), MediaType.APPLICATION_JSON, null, // encoding EurekaAccept.compact.name(), null, // uriInfo null // remote regions ); String json = String.valueOf(response.getEntity()); DecoderWrapper decoder = CodecWrappers.getDecoder(CodecWrappers.LegacyJacksonJson.class); Applications decoded = decoder.decode(json, Applications.class); // test per app as the full apps list include the mock server that is not part of the test apps for (Application application : testApplications.getRegisteredApplications()) { Application decodedApp = decoded.getRegisteredApplications(application.getName()); // assert false as one is mini, so should NOT equal assertThat(EurekaEntityComparators.equal(application, decodedApp), is(false)); } for (Application application : testApplications.getRegisteredApplications()) { Application decodedApp = decoded.getRegisteredApplications(application.getName()); assertThat(application.getName(), is(decodedApp.getName())); // now do mini equals for (InstanceInfo instanceInfo : application.getInstances()) { InstanceInfo decodedInfo = decodedApp.getByInstanceId(instanceInfo.getId()); assertThat(EurekaEntityComparators.equalMini(instanceInfo, decodedInfo), is(true)); } } }
// Extracts the PIP structure from a signed-PIP ASN.1 sequence: element 1 holds the signed
// content, whose first element is the PIP; its leading object identifier must match
// PIP_OID or a BsnkException("SignedPipNoPipFault") is thrown.
public ASN1Sequence retrievePipFromSignedPip(ASN1Sequence signedPip) throws BsnkException { ASN1Sequence signedPipContent = (ASN1Sequence) signedPip.getObjectAt(1); ASN1Sequence pip = (ASN1Sequence) signedPipContent.getObjectAt(0); ASN1ObjectIdentifier objectIdentifier = (ASN1ObjectIdentifier) pip.getObjectAt(0); if (!objectIdentifier.getId().equals(PIP_OID)) { throw new BsnkException("SignedPipNoPipFault", String.format("Signed pip doesnt contain a pip. Expected identifier: '%s'. Found identifier: '%s'", PIP_OID, objectIdentifier.toString()), null); } return pip; }
// Verifies that the PIP extracted from the signed structure re-encodes to the known base64
// fixture. NOTE(review): if this is JUnit, the assertEquals arguments appear reversed
// (expected value pipbase64 should come first); under TestNG the (actual, expected) order
// is correct — confirm which framework is imported.
@Test public void retrievePipFromSignedPipTest() throws IOException, BsnkException { ASN1Sequence result = bsnkUtil.retrievePipFromSignedPip(signedPip); assertEquals(Base64.getEncoder().encodeToString(result.getEncoded()), pipbase64); }
// Reads a PostgreSQL binary DATE parameter: consumes exactly 4 bytes from the payload and
// converts them with the driver's TimestampUtils. PSQLException from the conversion is
// rethrown unchecked via @SneakyThrows.
@SneakyThrows(PSQLException.class) @Override public Object read(final PostgreSQLPacketPayload payload, final int parameterValueLength) { byte[] binaryDate = new byte[4]; payload.getByteBuf().readBytes(binaryDate); return new TimestampUtils(false, null).toDateBin(null, binaryDate); }
// Round-trip check: encodes a known Date into the 4-byte binary form with the driver's own
// TimestampUtils, then asserts the protocol value decodes it back to the same Date.
@Test void assertRead() throws PSQLException { byte[] payloadBytes = new byte[4]; Date expected = Date.valueOf("2023-01-30"); new TimestampUtils(false, null).toBinDate(null, payloadBytes, expected); PostgreSQLPacketPayload payload = new PostgreSQLPacketPayload(Unpooled.wrappedBuffer(payloadBytes), StandardCharsets.UTF_8); assertThat(new PostgreSQLDateBinaryProtocolValue().read(payload, 4), is(expected)); }
// Raises the push-query capacity error. Two message variants are built: a sanitized one
// (no statement text, safe to log/return) and an unlogged one that embeds the statement;
// both report the live push-query count and the configured limit before throwing
// KsqlStatementException.
public static void throwTooManyActivePushQueriesException( final KsqlExecutionContext executionContext, final KsqlRestConfig ksqlRestConfig, final String statementStr ) { final String sanitizedMessage = String.format( "Not executing statement(s) as it would cause the number " + "of active, push queries to exceed the configured limit. " + "Terminate existing PUSH queries, " + "or increase the '%s' setting via the 'ksql-server.properties' file. " + "Current push query count: %d. Configured limit: %d.", KsqlRestConfig.MAX_PUSH_QUERIES, getNumLivePushQueries(executionContext), getPushQueryLimit(ksqlRestConfig) ); final String unloggedMessage = String.format( "Not executing statement(s) '%s' as it would cause the number " + "of active, push queries to exceed the configured limit. " + "Terminate existing PUSH queries, " + "or increase the '%s' setting via the 'ksql-server.properties' file. " + "Current push query count: %d. Configured limit: %d.", statementStr, KsqlRestConfig.MAX_PUSH_QUERIES, getNumLivePushQueries(executionContext), getPushQueryLimit(ksqlRestConfig) ); throw new KsqlStatementException( sanitizedMessage, unloggedMessage, statementStr ); }
// Verifies the capacity exception when live push queries (10 total - 4 persistent = 6)
// exceed the configured limit (3): the unlogged message contains the statement text, the
// sanitized message does not, and the statement is attached to the exception.
@Test public void shouldThrowWhenPushQueryLimitExceeded() { // Given: final String statementStr = "my statement"; givenAllLiveQueries(10); givenActivePersistentQueries(4); givenPushQueryLimit(3); // When: final KsqlStatementException e = assertThrows( KsqlStatementException.class, () -> QueryCapacityUtil.throwTooManyActivePushQueriesException(ksqlEngine, ksqlRestConfig, statementStr) ); // Then: assertThat(e.getUnloggedMessage(), containsString( "Not executing statement(s) 'my statement' as it would cause the number " + "of active, push queries to exceed the configured limit. " + "Terminate existing PUSH queries, " + "or increase the 'ksql.max.push.queries' setting " + "via the 'ksql-server.properties' file. " + "Current push query count: 6. Configured limit: 3.")); assertThat(e.getMessage(), containsString( "Not executing statement(s) as it would cause the number " + "of active, push queries to exceed the configured limit. " + "Terminate existing PUSH queries, " + "or increase the 'ksql.max.push.queries' setting " + "via the 'ksql-server.properties' file. " + "Current push query count: 6. Configured limit: 3.")); assertThat(e.getSqlStatement(), containsString("my statement")); }
// Frees an allocated or pending slot. Unknown allocation ids are logged and ignored.
// A PENDING slot's in-flight allocation is cancelled, the resource tracker is told the
// job's resource is lost, and the slot is marked FREE in the task-manager tracker.
@Override public void freeSlot(AllocationID allocationId) { Preconditions.checkNotNull(allocationId); checkStarted(); LOG.info("Freeing slot {}.", allocationId); final Optional<TaskManagerSlotInformation> slotOptional = taskManagerTracker.getAllocatedOrPendingSlot(allocationId); if (!slotOptional.isPresent()) { LOG.warn("Try to free unknown slot {}.", allocationId); return; } final TaskManagerSlotInformation slot = slotOptional.get(); if (slot.getState() == SlotState.PENDING) { pendingSlotAllocations.remove(allocationId); } resourceTracker.notifyLostResource(slot.getJobId(), slot.getResourceProfile()); taskManagerTracker.notifySlotStatus( allocationId, slot.getJobId(), slot.getInstanceId(), slot.getResourceProfile(), SlotState.FREE); }
// Verifies that freeing a slot during an in-flight allocation removes it from the tracker,
// so later allocation updates have nothing to apply to.
@Test void testAllocationUpdatesIgnoredIfSlotFreed() throws Exception { testSlotAllocation( (slotStatusSyncer, taskManagerTracker, ignored, allocationId) -> { slotStatusSyncer.freeSlot(allocationId); assertThat(taskManagerTracker.getAllocatedOrPendingSlot(allocationId)) .isEmpty(); }); }
// Alters committed offsets for a consumer group: builds the coordinator-keyed admin-API
// future and handler, drives the request through invokeDriver with the options' timeout,
// and wraps the per-group future in the public result type.
@Override public AlterConsumerGroupOffsetsResult alterConsumerGroupOffsets( String groupId, Map<TopicPartition, OffsetAndMetadata> offsets, AlterConsumerGroupOffsetsOptions options ) { SimpleAdminApiFuture<CoordinatorKey, Map<TopicPartition, Errors>> future = AlterConsumerGroupOffsetsHandler.newFuture(groupId); AlterConsumerGroupOffsetsHandler handler = new AlterConsumerGroupOffsetsHandler(groupId, offsets, logContext); invokeDriver(handler, future, options.timeoutMs); return new AlterConsumerGroupOffsetsResult(future.get(CoordinatorKey.byGroupId(groupId))); }
// Verifies that a non-retriable FindCoordinator error (GROUP_AUTHORIZATION_FAILED) fails
// both the all() future and the per-partition future with GroupAuthorizationException.
@Test public void testAlterConsumerGroupOffsetsFindCoordinatorNonRetriableErrors() throws Exception { // Non-retriable FindCoordinatorResponse errors throw an exception final TopicPartition tp1 = new TopicPartition("foo", 0); try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(1, 0))) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); env.kafkaClient().prepareResponse( prepareFindCoordinatorResponse(Errors.GROUP_AUTHORIZATION_FAILED, Node.noNode())); Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); offsets.put(tp1, new OffsetAndMetadata(123L)); final AlterConsumerGroupOffsetsResult errorResult = env.adminClient() .alterConsumerGroupOffsets(GROUP_ID, offsets); TestUtils.assertFutureError(errorResult.all(), GroupAuthorizationException.class); TestUtils.assertFutureError(errorResult.partitionResult(tp1), GroupAuthorizationException.class); } }
// Initializes the customizer from a config map by converting it to RuntimeOpts with the
// shared ObjectMapper; a null map is only logged, and a null conversion result leaves the
// existing runtimeOpts untouched.
@Override public void initialize(Map<String, Object> config) { if (config != null) { RuntimeOpts opts = ObjectMapperFactory.getMapper().getObjectMapper().convertValue(config, RuntimeOpts.class); if (opts != null) { runtimeOpts = opts; } } else { log.warn("initialize with null config"); } }
// Verifies that initialize() maps the config entries jobNamespace/jobName onto the
// RuntimeOpts fields (TestNG assertEquals uses (actual, expected) order).
@Test public void TestInitializeWithData() { BasicKubernetesManifestCustomizer customizer = new BasicKubernetesManifestCustomizer(); Map<String, Object> confs = new HashMap<>(); confs.put("jobNamespace", "custom-ns"); confs.put("jobName", "custom-name"); customizer.initialize(confs); assertNotEquals(customizer.getRuntimeOpts(), null); assertEquals(customizer.getRuntimeOpts().getJobName(), "custom-name"); assertEquals(customizer.getRuntimeOpts().getJobNamespace(), "custom-ns"); }
// Builds the SSH client configuration before connecting: compression factories chosen from
// the "ssh.compression" preference (zlib variants with none as fallback, or none only),
// client version string from the user-agent provider, keep-alive vs heartbeat from
// "ssh.heartbeat.provider", and CBC ciphers demoted to lowest priority; then delegates to
// the configuration-taking connect overload.
@Override protected SSHClient connect(final ProxyFinder proxy, final HostKeyCallback key, final LoginCallback prompt, final CancelCallback cancel) throws BackgroundException { final DefaultConfig configuration = new DefaultConfig(); if("zlib".equals(preferences.getProperty("ssh.compression"))) { configuration.setCompressionFactories(Arrays.asList( new JcraftDelayedZlibCompression.Factory(), new JcraftZlibCompression.Factory(), new NoneCompression.Factory())); } else { configuration.setCompressionFactories(Collections.singletonList(new NoneCompression.Factory())); } configuration.setVersion(new PreferencesUseragentProvider().get()); final KeepAliveProvider heartbeat; if(preferences.getProperty("ssh.heartbeat.provider").equals("keep-alive")) { heartbeat = KeepAliveProvider.KEEP_ALIVE; } else { heartbeat = KeepAliveProvider.HEARTBEAT; } configuration.setKeepAliveProvider(heartbeat); configuration.setCipherFactories(this.lowestPriorityForCBC(configuration.getCipherFactories())); return this.connect(key, prompt, configuration); }
// Ignored integration test: connects with bad credentials, supplies new credentials on the
// first login failure, and expects the second prompt to report that changing the username
// on an open SSH connection is not allowed.
@Test @Ignore public void testUsernameChangeReconnect() throws Exception { final Host host = new Host(new SFTPProtocol(), "test.cyberduck.ch", new Credentials("u1", "p1")); final Session session = new SFTPSession(host, new DisabledX509TrustManager(), new DefaultX509KeyManager()); final AtomicBoolean change = new AtomicBoolean(); final LoginConnectionService login = new LoginConnectionService(new DisabledLoginCallback() { @Override public Local select(final Local identity) throws LoginCanceledException { return new NullLocal("k"); } @Override public Credentials prompt(final Host bookmark, String username, String title, String reason, LoginOptions options) { if(change.get()) { assertEquals("Change of username or service not allowed: (u1,ssh-connection) -> (jenkins,ssh-connection). Please contact your web hosting service provider for assistance.", reason); return null; } else { assertEquals("Login failed", title); // assertEquals("Too many authentication failures for u1. Please contact your web hosting service provider for assistance.", reason); // assertEquals("Exhausted available authentication methods. Please contact your web hosting service provider for assistance.", reason); change.set(true); return new Credentials(System.getProperties().getProperty("sftp.user"), System.getProperties().getProperty("sftp.password")); } } }, new DisabledHostKeyCallback(), new DisabledPasswordStore(), new DisabledProgressListener()); login.connect(session, new DisabledCancelCallback()); assertTrue(change.get()); }
// Stub implementation: this headers variant stores no byte values, so lookups always
// return null (callers use the defaulting overload for a fallback).
@Override public Byte getByte(K name) { return null; }
// Verifies that getByte(name, default) falls back to the supplied default when no byte
// value is stored for the name.
@Test public void testGetByteDefault() { assertEquals((byte) 0, HEADERS.getByte("name1", (byte) 0)); }
// Thin delegation: reports the remaining backoff delay for the node's connection state.
@Override public long connectionDelay(Node node, long now) { return connectionStates.connectionDelay(node.idString(), now); }
// Verifies that a node with no prior connection attempts has zero connection delay.
@Test public void testConnectionDelay() { long now = time.milliseconds(); long delay = client.connectionDelay(node, now); assertEquals(0, delay); }
// Builds the operator config from environment-style entries: warns about removed variables,
// parses the Kafka/Connect/MirrorMaker image maps into a version lookup, and delegates to
// the lookup-taking overload.
public static ClusterOperatorConfig buildFromMap(Map<String, String> map) { warningsForRemovedEndVars(map); KafkaVersion.Lookup lookup = parseKafkaVersions(map.get(STRIMZI_KAFKA_IMAGES), map.get(STRIMZI_KAFKA_CONNECT_IMAGES), map.get(STRIMZI_KAFKA_MIRROR_MAKER_IMAGES), map.get(STRIMZI_KAFKA_MIRROR_MAKER_2_IMAGES)); return buildFromMap(map, lookup); }
// Verifies that mixing the "*" (any-namespace) wildcard into a namespace list is rejected
// with InvalidConfigurationException.
@Test public void testAnyNamespaceInList() { assertThrows(InvalidConfigurationException.class, () -> { Map<String, String> envVars = new HashMap<>(ClusterOperatorConfigTest.ENV_VARS); envVars.put(ClusterOperatorConfig.NAMESPACE.key(), "foo,*,bar,baz"); ClusterOperatorConfig.buildFromMap(envVars, KafkaVersionTestUtils.getKafkaVersionLookup()); }); }
// Diagnostic dump of all local map statistics. Latency fields are stored in nanoseconds
// and converted to milliseconds for display; a null nearCacheStats renders as empty.
@Override public String toString() { return "LocalMapStatsImpl{" + "lastAccessTime=" + lastAccessTime + ", lastUpdateTime=" + lastUpdateTime + ", hits=" + hits + ", numberOfOtherOperations=" + numberOfOtherOperations + ", numberOfEvents=" + numberOfEvents + ", getCount=" + getCount + ", putCount=" + putCount + ", setCount=" + setCount + ", removeCount=" + removeCount + ", evictionCount=" + evictionCount + ", expirationCount=" + expirationCount + ", totalGetLatencies=" + convertNanosToMillis(totalGetLatenciesNanos) + ", totalPutLatencies=" + convertNanosToMillis(totalPutLatenciesNanos) + ", totalSetLatencies=" + convertNanosToMillis(totalSetLatenciesNanos) + ", totalRemoveLatencies=" + convertNanosToMillis(totalRemoveLatenciesNanos) + ", maxGetLatency=" + convertNanosToMillis(maxGetLatency) + ", maxPutLatency=" + convertNanosToMillis(maxPutLatency) + ", maxSetLatency=" + convertNanosToMillis(maxSetLatency) + ", maxRemoveLatency=" + convertNanosToMillis(maxRemoveLatency) + ", ownedEntryCount=" + ownedEntryCount + ", backupEntryCount=" + backupEntryCount + ", backupCount=" + backupCount + ", ownedEntryMemoryCost=" + ownedEntryMemoryCost + ", backupEntryMemoryCost=" + backupEntryMemoryCost + ", creationTime=" + creationTime + ", lockedEntryCount=" + lockedEntryCount + ", dirtyEntryCount=" + dirtyEntryCount + ", heapCost=" + heapCost + ", merkleTreesCost=" + merkleTreesCost + ", nearCacheStats=" + (nearCacheStats != null ? nearCacheStats : "") + ", queryCount=" + queryCount + ", indexedQueryCount=" + indexedQueryCount + ", indexStats=" + indexStats + ", replicationStats=" + replicationStats + '}'; }
// Verifies that toString() includes every statistic field, with latency values already
// converted from nanoseconds to the expected millisecond figures.
@Test public void testToString() { String printed = localMapStats.toString(); assertTrue(printed.contains("lastAccessTime")); assertTrue(printed.contains("lastUpdateTime")); assertTrue(printed.contains("hits=12314")); assertTrue(printed.contains("numberOfOtherOperations=5")); assertTrue(printed.contains("numberOfEvents=2")); assertTrue(printed.contains("getCount=3")); assertTrue(printed.contains("putCount=2")); assertTrue(printed.contains("setCount=2")); assertTrue(printed.contains("removeCount=1")); assertTrue(printed.contains("totalGetLatencies=1247")); assertTrue(printed.contains("totalPutLatencies=5632")); assertTrue(printed.contains("totalSetLatencies=8722")); assertTrue(printed.contains("totalRemoveLatencies=1238")); assertTrue(printed.contains("maxGetLatency=1233")); assertTrue(printed.contains("maxPutLatency=5631")); assertTrue(printed.contains("maxSetLatency=8721")); assertTrue(printed.contains("maxRemoveLatency=1238")); assertTrue(printed.contains("ownedEntryCount=5")); assertTrue(printed.contains("backupEntryCount=3")); assertTrue(printed.contains("backupCount=4")); assertTrue(printed.contains("ownedEntryMemoryCost=1234")); assertTrue(printed.contains("backupEntryMemoryCost=4321")); assertTrue(printed.contains("creationTime")); assertTrue(printed.contains("lockedEntryCount=1231")); assertTrue(printed.contains("dirtyEntryCount=4252")); assertTrue(printed.contains("heapCost=7461762")); assertTrue(printed.contains("merkleTreesCost=0")); assertTrue(printed.contains("nearCacheStats")); assertTrue(printed.contains("queryCount=10")); assertTrue(printed.contains("indexedQueryCount=5")); assertTrue(printed.contains("indexStats")); }
// Adds a strip-action view supplied by the provider. Idempotent per provider (an existing
// view tagged with the same provider short-circuits). The inflated view must be
// parent-less; it is tagged with its provider, attached to the container only while the
// action strip is shown (high-priority views at FIRST_PROVIDER_VIEW_INDEX, others at the
// end), and always recorded in mStripActionViews in matching order before invalidating.
public void addStripAction(@NonNull StripActionProvider provider, boolean highPriority) { for (var stripActionView : mStripActionViews) { if (stripActionView.getTag(PROVIDER_TAG_ID) == provider) { return; } } var actionView = provider.inflateActionView(this); if (actionView.getParent() != null) throw new IllegalStateException("StripActionProvider inflated a view with a parent!"); actionView.setTag(PROVIDER_TAG_ID, provider); if (mShowActionStrip) { if (highPriority) { addView(actionView, FIRST_PROVIDER_VIEW_INDEX); } else { addView(actionView); } } if (highPriority) { mStripActionViews.add(0, actionView); } else { mStripActionViews.add(actionView); } invalidate(); }
// Verifies insertion order: a normal-priority action view is appended after the two
// built-in children, while a later high-priority view is inserted before it (index 2).
@Test public void testHighPriority() { View view = new View(mUnderTest.getContext()); View viewHigh = new View(mUnderTest.getContext()); KeyboardViewContainerView.StripActionProvider provider = Mockito.mock(KeyboardViewContainerView.StripActionProvider.class); Mockito.doReturn(view).when(provider).inflateActionView(any()); KeyboardViewContainerView.StripActionProvider providerHigh = Mockito.mock(KeyboardViewContainerView.StripActionProvider.class); Mockito.doReturn(viewHigh).when(providerHigh).inflateActionView(any()); mUnderTest.addStripAction(provider, false); Assert.assertEquals(3, mUnderTest.getChildCount()); Assert.assertSame(view, mUnderTest.getChildAt(2)); mUnderTest.addStripAction(providerHigh, true); Assert.assertEquals(4, mUnderTest.getChildCount()); Assert.assertSame(viewHigh, mUnderTest.getChildAt(2)); Assert.assertSame(view, mUnderTest.getChildAt(3)); }
// Tokenizes "k=v"-style pairs from the message: null/empty input is returned as-is, input
// without '=' yields an empty map, otherwise each PATTERN match contributes a
// quote-stripped key/value pair. NOTE(review): m.groupCount() reflects the pattern, not
// the current match, so the != 2 guard inside the loop is either always or never taken —
// presumably defensive; confirm against PATTERN's definition.
@Override public Object convert(String value) { if (isNullOrEmpty(value)) { return value; } if (value.contains("=")) { final Map<String, String> fields = new HashMap<>(); Matcher m = PATTERN.matcher(value); while (m.find()) { if (m.groupCount() != 2) { continue; } fields.put(removeQuotes(m.group(1)), removeQuotes(m.group(2))); } return fields; } else { return Collections.emptyMap(); } }
// Verifies that free-standing words between key=value pairs are ignored and
// only the two pairs are extracted.
@Test
public void testFilterWithStringInBetween() {
    TokenizerConverter f = new TokenizerConverter(new HashMap<String, Object>());
    @SuppressWarnings("unchecked")
    Map<String, String> result = (Map<String, String>) f.convert("foo k2=v2 lolwat Awesome! k1=v1");
    assertEquals(2, result.size());
    assertEquals("v1", result.get("k1"));
    assertEquals("v2", result.get("k2"));
}
/**
 * Parses a dynamic voter string of the form
 * {@code <nodeId>@<host>:<port>:<directoryId>}, where {@code host} may be a
 * bracketed literal (e.g. an IPv6 address) such as {@code [::1]}.
 *
 * @param input the voter string; surrounding whitespace is ignored
 * @return the parsed voter
 * @throws IllegalArgumentException if any component is missing or malformed
 */
public static DynamicVoter parse(String input) {
    input = input.trim();
    // --- node id: everything before the first '@' ---
    int atIndex = input.indexOf("@");
    if (atIndex < 0) {
        throw new IllegalArgumentException("No @ found in dynamic voter string.");
    }
    if (atIndex == 0) {
        throw new IllegalArgumentException("Invalid @ at beginning of dynamic voter string.");
    }
    String idString = input.substring(0, atIndex);
    int nodeId;
    try {
        nodeId = Integer.parseInt(idString);
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Failed to parse node id in dynamic voter string.", e);
    }
    if (nodeId < 0) {
        throw new IllegalArgumentException("Invalid negative node id " + nodeId + " in dynamic voter string.");
    }
    input = input.substring(atIndex + 1);
    if (input.isEmpty()) {
        throw new IllegalArgumentException("No hostname found after node id.");
    }
    // --- host: either a bracketed literal or text up to the next ':' ---
    String host;
    if (input.startsWith("[")) {
        int endBracketIndex = input.indexOf("]");
        if (endBracketIndex < 0) {
            throw new IllegalArgumentException("Hostname began with left bracket, but no right " +
                "bracket was found.");
        }
        host = input.substring(1, endBracketIndex);
        input = input.substring(endBracketIndex + 1);
    } else {
        int endColonIndex = input.indexOf(":");
        if (endColonIndex < 0) {
            throw new IllegalArgumentException("No colon following hostname could be found.");
        }
        host = input.substring(0, endColonIndex);
        // Keep the colon so both branches leave input starting with ':'.
        input = input.substring(endColonIndex);
    }
    if (!input.startsWith(":")) {
        throw new IllegalArgumentException("Port section must start with a colon.");
    }
    input = input.substring(1);
    // --- port: digits up to the next ':' ---
    int endColonIndex = input.indexOf(":");
    if (endColonIndex < 0) {
        throw new IllegalArgumentException("No colon following port could be found.");
    }
    String portString = input.substring(0, endColonIndex);
    int port;
    try {
        port = Integer.parseInt(portString);
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Failed to parse port in dynamic voter string.", e);
    }
    if (port < 0 || port > 65535) {
        throw new IllegalArgumentException("Invalid port " + port + " in dynamic voter string.");
    }
    // --- directory id: the remainder, parsed as a Uuid ---
    String directoryIdString = input.substring(endColonIndex + 1);
    Uuid directoryId;
    try {
        directoryId = Uuid.fromString(directoryIdString);
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Failed to parse directory ID in dynamic voter string.", e);
    }
    return new DynamicVoter(directoryId, nodeId, host, port);
}
// A voter string with no "<id>@" prefix must be rejected with a clear message.
@Test
public void testParseDynamicVoterWithoutId() {
    final IllegalArgumentException thrown = assertThrows(
        IllegalArgumentException.class,
        () -> DynamicVoter.parse("localhost:8020:K90IZ-0DRNazJ49kCZ1EMQ"));
    assertEquals("No @ found in dynamic voter string.", thrown.getMessage());
}
/**
 * Runs the configured action up to {@code attempts} times, sleeping
 * {@code delay} ms between failed attempts, until {@code checkResult}
 * reports success or the attempts are exhausted (or the thread is
 * interrupted while sleeping, which ends the loop early).
 *
 * @return the last result produced by the action or by
 *         {@code duringActionExceptionListener}; may be null
 * @throws Exception rethrown when the action throws an exception that is
 *         not listed in {@code duringActionExceptions}
 */
public @CheckForNull V start() throws Exception {
    V result = null;
    int currentAttempt = 0;
    boolean success = false;
    while (currentAttempt < attempts && !success) {
        currentAttempt++;
        try {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.log(Level.INFO, Messages.Retrier_Attempt(currentAttempt, action));
            }
            result = callable.call();
        } catch (Exception e) {
            if (duringActionExceptions == null || Stream.of(duringActionExceptions).noneMatch(exception -> exception.isAssignableFrom(e.getClass()))) {
                // if the raised exception is not considered as a controlled exception doing the action, rethrow it
                LOGGER.log(Level.WARNING, Messages.Retrier_ExceptionThrown(currentAttempt, action), e);
                throw e;
            } else {
                // if the exception is considered as a failed action, notify it to the listener
                LOGGER.log(Level.INFO, Messages.Retrier_ExceptionFailed(currentAttempt, action), e);
                if (duringActionExceptionListener != null) {
                    LOGGER.log(Level.INFO, Messages.Retrier_CallingListener(e.getLocalizedMessage(), currentAttempt, action));
                    // the listener may turn the failure into a usable result
                    result = duringActionExceptionListener.apply(currentAttempt, e);
                }
            }
        }
        // After the call and the call to the listener, which can change the result, test the result
        success = checkResult.test(currentAttempt, result);
        if (!success) {
            if (currentAttempt < attempts) {
                LOGGER.log(Level.WARNING, Messages.Retrier_AttemptFailed(currentAttempt, action));
                LOGGER.log(Level.FINE, Messages.Retrier_Sleeping(delay, action));
                try {
                    Thread.sleep(delay);
                } catch (InterruptedException ie) {
                    LOGGER.log(Level.FINE, Messages.Retrier_Interruption(action));
                    Thread.currentThread().interrupt(); // flag this thread as interrupted
                    currentAttempt = attempts; // finish
                }
            } else {
                // Failed to perform the action
                LOGGER.log(Level.INFO, Messages.Retrier_NoSuccess(action, attempts));
            }
        } else {
            LOGGER.log(Level.INFO, Messages.Retrier_Success(action, currentAttempt));
        }
    }
    return result;
}
// An allowed exception thrown by the action is handed to the listener, which
// converts the failure into a success; the log must reflect both events.
@Test
public void failedActionWithAllowedExceptionWithListenerChangingResultTest() throws Exception {
    final int ATTEMPTS = 1;
    final String ACTION = "print";
    RingBufferLogHandler handler = new RingBufferLogHandler(20);
    Logger.getLogger(Retrier.class.getName()).addHandler(handler);
    // Set the required params
    Retrier<Boolean> r = new Retrier.Builder<>(
        // action to perform
        (Callable<Boolean>) () -> {
            throw new IndexOutOfBoundsException("Exception allowed considered as failure");
        },
        // check the result and return true if success
        (currentAttempt, result) -> result,
        //name of the action
        ACTION
    )
        // Set the optional parameters
        .withAttempts(ATTEMPTS)
        // Exceptions allowed
        .withDuringActionExceptions(new Class[]{IndexOutOfBoundsException.class})
        // Listener to call. It change the result to success
        .withDuringActionExceptionListener((attempt, exception) -> true)
        // Construct the object
        .build();
    // Begin the process catching the allowed exception
    Boolean finalResult = r.start();
    Assert.assertTrue(finalResult != null && finalResult);
    // The action was a success
    String textSuccess = Messages.Retrier_Success(ACTION, ATTEMPTS);
    assertTrue(String.format("The log should contain '%s'", textSuccess),
        handler.getView().stream().anyMatch(m -> m.getMessage().contains(textSuccess)));
    // And the message talking about the allowed raised is also there
    String testException = Messages.Retrier_ExceptionFailed(ATTEMPTS, ACTION);
    assertTrue(String.format("The log should contain '%s'", testException),
        handler.getView().stream().anyMatch(m -> m.getMessage().startsWith(testException)));
}
/**
 * Fetches the counters for {@code arg0} by converting the job id to its YARN
 * form, issuing a getCounters RPC, and converting the response back to the
 * MapReduce counter type.
 */
public org.apache.hadoop.mapreduce.Counters getJobCounters(JobID arg0) throws IOException, InterruptedException {
    org.apache.hadoop.mapreduce.v2.api.records.JobId yarnJobId = TypeConverter.toYarn(arg0);
    GetCountersRequest countersRequest = recordFactory.newRecordInstance(GetCountersRequest.class);
    countersRequest.setJobId(yarnJobId);
    GetCountersResponse response =
        (GetCountersResponse) invoke("getCounters", GetCountersRequest.class, countersRequest);
    Counters yarnCounters = response.getCounters();
    return TypeConverter.fromYarn(yarnCounters);
}
// When the RM has no report for the application, counters must be served by
// the history server proxy and converted intact.
@Test
public void testCountersFromHistoryServer() throws Exception {
    MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);
    when(historyServerProxy.getCounters(getCountersRequest())).thenReturn(
        getCountersResponseFromHistoryServer());
    ResourceMgrDelegate rm = mock(ResourceMgrDelegate.class);
    when(rm.getApplicationReport(TypeConverter.toYarn(oldJobId).getAppId()))
        .thenReturn(null);
    ClientServiceDelegate clientServiceDelegate = getClientServiceDelegate(
        historyServerProxy, rm);
    Counters counters = TypeConverter.toYarn(clientServiceDelegate.getJobCounters(oldJobId));
    Assert.assertNotNull(counters);
    Assert.assertEquals(1001, counters.getCounterGroup("dummyCounters").getCounter("dummyCounter").getValue());
}
public static long getFileSize(Page statisticsPage, int position) { // FileStatistics page layout: // // fileSize rowCount // X X if (position < 0 || position >= statisticsPage.getPositionCount()) { throw new PrestoException(MALFORMED_HIVE_FILE_STATISTICS, format("Invalid position: %d specified for FileStatistics page", position)); } return BIGINT.getLong(statisticsPage.getBlock(FILE_SIZE_CHANNEL), position); }
// A one-row statistics page must yield the expected file size for row 0.
@Test
public void testGetFileSize() {
    Page singleRowPage = createTestStatisticsPageWithOneRow(
        ImmutableList.of(BIGINT, BIGINT),
        ImmutableList.of(FILE_SIZE, ROW_COUNT));
    assertEquals(getFileSize(singleRowPage, 0), FILE_SIZE);
}
/**
 * Looks up the remote peer address recorded for the given local address.
 *
 * @param localAddress local address of an established connection
 * @return the remote address, or empty when no channel is registered for
 *         {@code localAddress} or its address attribute has not been set yet
 */
public Optional<InetAddress> getRemoteAddress(final LocalAddress localAddress) {
    return Optional.ofNullable(remoteChannelsByLocalAddress.get(localAddress))
        .map(remoteChannel -> remoteChannel.attr(REMOTE_ADDRESS_ATTRIBUTE_KEY).get());
}
// Before the attribute is set the lookup is empty; after setting it the same
// lookup returns the recorded remote address.
@Test
void getRemoteAddress() {
    clientConnectionManager.handleConnectionEstablished(localChannel, remoteChannel, Optional.empty());
    assertEquals(Optional.empty(), clientConnectionManager.getRemoteAddress(localChannel.localAddress()));
    final InetAddress remoteAddress = InetAddresses.forString("6.7.8.9");
    remoteChannel.attr(ClientConnectionManager.REMOTE_ADDRESS_ATTRIBUTE_KEY).set(remoteAddress);
    assertEquals(Optional.of(remoteAddress), clientConnectionManager.getRemoteAddress(localChannel.localAddress()));
}
/**
 * Loads the service definition from the local JSON file, marks it as
 * upgrading (auto-finalizing when requested), and delegates to
 * {@link #initiateUpgrade(Service)}.
 *
 * @param appName      name of the application to upgrade
 * @param fileName     path to the service definition JSON on the local FS
 * @param autoFinalize whether the upgrade finalizes automatically
 * @return the delegate's result code
 */
@Override
public int initiateUpgrade(String appName, String fileName, boolean autoFinalize) throws IOException, YarnException {
    Service upgradeService = loadAppJsonFromLocalFS(fileName, appName, null, null);
    ServiceState targetState =
        autoFinalize ? ServiceState.UPGRADING_AUTO_FINALIZE : ServiceState.UPGRADING;
    upgradeService.setState(targetState);
    return initiateUpgrade(upgradeService);
}
// With upgrades disabled (the default), initiating an upgrade must fail with
// the SERVICE_UPGRADE_DISABLED error message.
@Test
public void testUpgradeDisabledByDefault() throws Exception {
    Service service = createService();
    ServiceClient client = MockServiceClient.create(rule, service, false);
    //upgrade the service
    service.setVersion("v2");
    try {
        client.initiateUpgrade(service);
    } catch (YarnException ex) {
        Assert.assertEquals(ErrorStrings.SERVICE_UPGRADE_DISABLED, ex.getMessage());
        return;
    }
    // Reaching here means no exception was thrown, which is a failure.
    Assert.fail();
}
/**
 * Returns the shared {@code Gson} instance held by {@code SingletonHolder}
 * (presumably the lazy holder-class idiom — confirm against SingletonHolder).
 */
public static Gson instance() {
    return SingletonHolder.INSTANCE;
}
// Serializing a GoCipher must be refused outright to avoid leaking security
// material; the exception message names the offending class.
@Test
void rejectsSerializationOfGoCipher() {
    final IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () ->
        Serialization.instance().toJson(new GoCipher(mock(Encrypter.class))));
    assertEquals(format("Refusing to serialize a %s instance and leak security details!",
        GoCipher.class.getName()), e.getMessage());
}
/**
 * Packs the primary-key values into a compact byte array by parsing each
 * value as a UUID and writing its two big-endian longs (16 bytes per value).
 * If any value is not a parseable UUID, falls back to the key's own byte
 * representation ({@code primaryKey.asBytes()}).
 *
 * @param primaryKey key whose values are expected to be UUID strings
 * @return 16 bytes per value on success, otherwise {@code asBytes()}
 * @throws IllegalArgumentException if any value is null
 */
public static byte[] hashUUID(PrimaryKey primaryKey) {
    Object[] values = primaryKey.getValues();
    byte[] result = new byte[values.length * 16];
    ByteBuffer byteBuffer = ByteBuffer.wrap(result).order(ByteOrder.BIG_ENDIAN);
    for (Object value : values) {
        if (value == null) {
            throw new IllegalArgumentException("Found null value in primary key");
        }
        UUID uuid;
        try {
            uuid = UUID.fromString(value.toString());
        } catch (IllegalArgumentException e) {
            // Was `catch (Throwable)`, which also swallowed JVM Errors such as
            // OutOfMemoryError. UUID.fromString signals a malformed UUID with
            // IllegalArgumentException, so catch only that and fall back.
            return primaryKey.asBytes();
        }
        byteBuffer.putLong(uuid.getMostSignificantBits());
        byteBuffer.putLong(uuid.getLeastSignificantBits());
    }
    return result;
}
// Covers the happy path (valid UUIDs), the fallback for a non-UUID value
// (asBytes representation), and the null-value failure.
@Test
public void testHashUUID() {
    // Test happy cases: when all UUID values are valid
    testHashUUID(new UUID[]{UUID.randomUUID()});
    testHashUUID(new UUID[]{UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID()});
    // Test failure scenario when there's a non-null invalid uuid value
    PrimaryKey invalidUUIDs = new PrimaryKey(new String[]{"some-random-string"});
    byte[] hashResult = HashUtils.hashUUID(invalidUUIDs);
    // In case of failures, each element is prepended with length
    byte[] expectedResult = invalidUUIDs.asBytes();
    assertEquals(hashResult, expectedResult);
    // Test failure scenario when one of the values is null
    try {
        PrimaryKey pKeyWithNull = new PrimaryKey(new String[]{UUID.randomUUID().toString(), null});
        HashUtils.hashUUID(pKeyWithNull);
        fail("Should have thrown an exception");
    } catch (IllegalArgumentException e) {
        assertTrue(e.getMessage().contains("Found null value"));
    }
}
// SQL scalar function ST_GeometryFromText: parses WKT into the serialized
// geometry representation used by the engine.
@Description("Returns a Geometry type object from Well-Known Text representation (WKT)")
@ScalarFunction("ST_GeometryFromText")
@SqlType(GEOMETRY_TYPE_NAME)
public static Slice stGeometryFromText(@SqlType(VARCHAR) Slice input) {
    return serialize(jtsGeometryFromWkt(input.toStringUtf8()));
}
// Each malformed WKT input must be rejected with the exact parser error
// message (unknown type, too few points, degenerate rings, bad tokens).
@Test
public void testSTGeometryFromText() {
    assertInvalidFunction("ST_GeometryFromText('xyz')", "Invalid WKT: Unknown geometry type: XYZ (line 1)");
    assertInvalidFunction("ST_GeometryFromText('LINESTRING (-71.3839245 42.3128124)')", "Invalid WKT: Invalid number of points in LineString (found 1 - must be 0 or >= 2)");
    assertInvalidFunction("ST_GeometryFromText('POLYGON ((-13.719076 9.508430, -13.723493 9.510049, -13.719076 9.508430))')", "corrupted geometry");
    assertInvalidFunction("ST_GeometryFromText('POLYGON ((-13.637339 9.617113, -13.637339 9.617113))')", "Invalid WKT: Invalid number of points in LinearRing (found 2 - must be 0 or >= 3)");
    assertInvalidFunction("ST_GeometryFromText('POLYGON(0 0)')", INVALID_FUNCTION_ARGUMENT, "Invalid WKT: Expected EMPTY or ( but found '0' (line 1)");
    assertInvalidFunction("ST_GeometryFromText('POLYGON((0 0))')", INVALID_FUNCTION_ARGUMENT, "Invalid WKT: Invalid number of points in LineString (found 1 - must be 0 or >= 2)");
}
/**
 * Synchronously sends this request over the configured web3j service and
 * returns the deserialized response of type {@code T}.
 *
 * @throws IOException if the underlying transport fails
 */
public T send() throws IOException {
    return web3jService.send(this, responseType);
}
// eth_getStorageAt must serialize the address, hex-encoded slot (0x0), and
// block parameter into the expected JSON-RPC payload.
@Test
public void testEthGetStorageAt() throws Exception {
    web3j.ethGetStorageAt(
            "0x295a70b2de5e3953354a6a8344e616ed314d7251",
            BigInteger.ZERO,
            DefaultBlockParameterName.LATEST)
        .send();
    verifyResult(
        "{\"jsonrpc\":\"2.0\",\"method\":\"eth_getStorageAt\","
            + "\"params\":[\"0x295a70b2de5e3953354a6a8344e616ed314d7251\",\"0x0\",\"latest\"],"
            + "\"id\":1}");
}
/**
 * Encodes a non-negative quantity as a 0x-prefixed lowercase hex string.
 *
 * @param value quantity to encode; must not be negative
 * @throws MessageEncodingException when {@code value} is negative
 */
public static String encodeQuantity(BigInteger value) {
    if (value.signum() == -1) {
        throw new MessageEncodingException("Negative values are not supported");
    }
    return HEX_PREFIX + value.toString(16);
}
// Negative quantities have no defined hex encoding and must be rejected.
@Test
public void testQuantityEncodeNegative() {
    final BigInteger negativeOne = BigInteger.valueOf(-1);
    assertThrows(MessageEncodingException.class, () -> Numeric.encodeQuantity(negativeOne));
}
/**
 * Resolves secret/variable interpolations in {@code toInterpolate} via the
 * context's secret source resolver.
 *
 * @deprecated call {@code context.getSecretSourceResolver().resolve(...)}
 *             directly; kept only for compatibility.
 */
@Deprecated
@Restricted(DoNotUse.class)
public static String resolve(ConfigurationContext context, String toInterpolate) {
    return context.getSecretSourceResolver().resolve(toInterpolate);
}
// When the environment variable is defined, its value wins over the
// ":-default" fallback in the interpolation expression.
@Test
public void resolve_singleEntryWithDefaultValueAndWithEnvDefined() {
    environment.set("FOO", "hello");
    assertThat(resolve("${FOO:-default}"), equalTo("hello"));
}
/**
 * Parses a line, enforcing termination rules only when the line is being
 * accepted: unclosed quotes and missing termination chars raise
 * {@link EOFError} so the reader keeps prompting for more input. Comment
 * text and bare CLI commands are exempt from the termination requirement.
 */
@Override
public ParsedLine parse(final String line, final int cursor, final ParseContext context) {
    final ParsedLine parsed = delegate.parse(line, cursor, context);
    // Only ACCEPT_LINE triggers the completeness checks below.
    if (context != ParseContext.ACCEPT_LINE) {
        return parsed;
    }
    if (UnclosedQuoteChecker.isUnclosedQuote(line)) {
        throw new EOFError(-1, -1, "Missing end quote", "end quote char");
    }
    // Strip trailing comments before testing for emptiness/termination.
    final String bare = CommentStripper.strip(parsed.line());
    if (bare.isEmpty()) {
        return parsed;
    }
    // CLI commands don't require a termination char.
    if (cliCmdPredicate.test(bare)) {
        return parsed;
    }
    if (!bare.endsWith(TERMINATION_CHAR)) {
        throw new EOFError(-1, -1, "Missing termination char", "termination char");
    }
    return parsed;
}
// A properly terminated statement followed by a trailing comment must still
// be accepted (comments are stripped before the termination check).
@Test
public void shouldAcceptTerminatedLineEndingInComment() {
    // Given:
    givenDelegateWillReturn(TERMINATED_LINE + " -- this is a comment");
    // When:
    final ParsedLine result = parser.parse("what ever", 0, ParseContext.ACCEPT_LINE);
    // Then:
    assertThat(result, is(parsedLine));
}
/**
 * Creates {@code path} (including parents) on the filesystem resolved from
 * its URI, wrapping any failure — I/O error or a false return from
 * {@code mkdirs} — in a {@link StarRocksConnectorException}.
 */
public static void createDirectory(Path path, Configuration conf) {
    try {
        FileSystem fileSystem = FileSystem.get(path.toUri(), conf);
        if (fileSystem.mkdirs(path)) {
            return;
        }
        // A false return means the directory was not created; route it
        // through the same failure path as an I/O error.
        LOG.error("Mkdir {} returned false", path);
        throw new IOException("mkdirs returned false");
    } catch (IOException e) {
        LOG.error("Failed to create directory: {}", path);
        throw new StarRocksConnectorException("Failed to create directory: " + path, e);
    }
}
// createDirectory must wrap failures in StarRocksConnectorException both when
// the real FS is unreachable and when the mocked remote FS rejects mkdirs.
@Test
public void testCreateDirectory() {
    Path path = new Path("hdfs://127.0.0.1:9000/user/hive/warehouse/db");
    ExceptionChecker.expectThrowsWithMsg(StarRocksConnectorException.class,
        "Failed to create directory",
        () -> HiveWriteUtils.createDirectory(path, new Configuration()));
    new MockUp<FileSystem>() {
        @Mock
        public FileSystem get(URI uri, Configuration conf) {
            return new MockedRemoteFileSystem(HDFS_HIVE_TABLE);
        }
    };
    ExceptionChecker.expectThrowsWithMsg(StarRocksConnectorException.class,
        "Failed to create directory",
        () -> HiveWriteUtils.createDirectory(path, new Configuration()));
}
/**
 * CLI entry point: prints the JVM classpath ({@code -glob}) or writes it into
 * the manifest of a jar file ({@code -jar <path>}). Prints usage when called
 * with no args or {@code -h}/{@code --help}; terminates with exit code 1 on
 * unrecognized options or I/O failures.
 */
public static void main(String[] args) {
    if (args.length < 1 || args[0].equals("-h") || args[0].equals("--help")) {
        System.out.println(usage);
        return;
    }
    // Copy args, because CommandFormat mutates the list.
    List<String> argsList = new ArrayList<String>(Arrays.asList(args));
    CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, "-glob", "-jar");
    try {
        cf.parse(argsList);
    } catch (UnknownOptionException e) {
        terminate(1, "unrecognized option");
        return;
    }
    String classPath = System.getProperty("java.class.path");
    if (cf.getOpt("-glob")) {
        // The classpath returned from the property has been globbed already.
        System.out.println(classPath);
    } else if (cf.getOpt("-jar")) {
        if (argsList.isEmpty() || argsList.get(0) == null || argsList.get(0).isEmpty()) {
            terminate(1, "-jar option requires path of jar file to write");
            return;
        }
        // Write the classpath into the manifest of a temporary jar file.
        Path workingDir = new Path(System.getProperty("user.dir"));
        final String tmpJarPath;
        try {
            tmpJarPath = FileUtil.createJarWithClassPath(classPath, workingDir, System.getenv())[0];
        } catch (IOException e) {
            terminate(1, "I/O error creating jar: " + e.getMessage());
            return;
        }
        // Rename the temporary file to its final location.
        String jarPath = argsList.get(0);
        try {
            FileUtil.replaceFile(new File(tmpJarPath), new File(jarPath));
        } catch (IOException e) {
            terminate(1, "I/O error renaming jar temporary file to path: " + e.getMessage());
            return;
        }
    }
}
// An unknown option must terminate with an "unrecognized option" message on
// stderr and nothing on stdout.
@Test
public void testUnrecognized() {
    try {
        Classpath.main(new String[] { "--notarealoption" });
        fail("expected exit");
    } catch (ExitUtil.ExitException e) {
        assertTrue(stdout.toByteArray().length == 0);
        String strErr = new String(stderr.toByteArray(), UTF8);
        assertTrue(strErr.contains("unrecognized option"));
    }
}
/**
 * Creates a config item in the given namespace, rejecting duplicates by key,
 * and records a commit describing the change. The namespace lock is acquired
 * before the handler runs ({@code @PreAcquireNamespaceLock}).
 *
 * @throws BadRequestException when an item with the same key already exists
 */
@PreAcquireNamespaceLock
@PostMapping("/apps/{appId}/clusters/{clusterName}/namespaces/{namespaceName}/items")
public ItemDTO create(@PathVariable("appId") String appId,
                      @PathVariable("clusterName") String clusterName,
                      @PathVariable("namespaceName") String namespaceName,
                      @RequestBody ItemDTO dto) {
    Item entity = BeanUtils.transform(Item.class, dto);
    // Duplicate-key guard: one item per key within a namespace.
    Item managedEntity = itemService.findOne(appId, clusterName, namespaceName, entity.getKey());
    if (managedEntity != null) {
        throw BadRequestException.itemAlreadyExists(entity.getKey());
    }
    entity = itemService.save(entity);
    dto = BeanUtils.transform(ItemDTO.class, entity);
    // Record an audit commit for the newly created item.
    commitService.createCommit(appId, clusterName, namespaceName,
        new ConfigChangeContentBuilder().createItem(entity).build(),
        dto.getDataChangeLastModifiedBy()
    );
    return dto;
}
// End-to-end create: POSTing a new item must return 200 with the item key and
// produce exactly one commit whose change set contains the key and value.
@Test
@Sql(scripts = "/controller/test-itemset.sql", executionPhase = ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = "/controller/cleanup.sql", executionPhase = ExecutionPhase.AFTER_TEST_METHOD)
public void testCreate() {
    String appId = "someAppId";
    AppDTO app = restTemplate.getForObject(appBaseUrl(), AppDTO.class, appId);
    assert app != null;
    ClusterDTO cluster = restTemplate.getForObject(clusterBaseUrl(), ClusterDTO.class, app.getAppId(), "default");
    assert cluster != null;
    NamespaceDTO namespace = restTemplate.getForObject(namespaceBaseUrl(), NamespaceDTO.class, app.getAppId(), cluster.getName(), "application");
    String itemKey = "test-key";
    String itemValue = "test-value";
    ItemDTO item = new ItemDTO(itemKey, itemValue, "", 1);
    assert namespace != null;
    item.setNamespaceId(namespace.getId());
    item.setDataChangeLastModifiedBy("apollo");
    ResponseEntity<ItemDTO> response = restTemplate.postForEntity(itemBaseUrl(), item, ItemDTO.class, app.getAppId(), cluster.getName(), namespace.getNamespaceName());
    Assert.assertEquals(HttpStatus.OK, response.getStatusCode());
    Assert.assertEquals(itemKey, Objects.requireNonNull(response.getBody()).getKey());
    // The create must have been audited as a single commit mentioning the item.
    List<Commit> commitList = commitRepository.findByAppIdAndClusterNameAndNamespaceNameOrderByIdDesc(app.getAppId(), cluster.getName(), namespace.getNamespaceName(), Pageable.ofSize(10));
    Assert.assertEquals(1, commitList.size());
    Commit commit = commitList.get(0);
    Assert.assertTrue(commit.getChangeSets().contains(itemKey));
    Assert.assertTrue(commit.getChangeSets().contains(itemValue));
}
static void scan(Class<?> aClass, BiConsumer<Method, Annotation> consumer) { // prevent unnecessary checking of Object methods if (Object.class.equals(aClass)) { return; } if (!isInstantiable(aClass)) { return; } for (Method method : safelyGetMethods(aClass)) { scan(consumer, aClass, method); } }
// Scanning Object.class must be a no-op: no methods are reported.
@Test
void scan_ignores_object() {
    MethodScanner.scan(Object.class, backend);
    assertThat(scanResult, empty());
}
/**
 * Computes the z-order search curve for {@code ranges} and narrows each
 * resulting address range from long to int.
 *
 * @return the address ranges with int endpoints
 * @throws IllegalArgumentException (via checkArgument) if any address
 *         exceeds {@code Integer.MAX_VALUE}
 */
public List<ZAddressRange<Integer>> zOrderSearchCurveIntegers(List<ZValueRange> ranges) {
    List<ZAddressRange<Long>> addressRanges = zOrderSearchCurve(ranges);
    List<ZAddressRange<Integer>> integerAddressRanges = new ArrayList<>();
    for (ZAddressRange<Long> addressRange : addressRanges) {
        // Guard the long -> int narrowing before converting.
        checkArgument(
            (addressRange.getMinimumAddress() <= Integer.MAX_VALUE) && (addressRange.getMaximumAddress() <= Integer.MAX_VALUE),
            format("The address range [%d, %d] contains addresses greater than integers.", addressRange.getMinimumAddress(), addressRange.getMaximumAddress()));
        integerAddressRanges.add(new ZAddressRange<>(addressRange.getMinimumAddress().intValue(), addressRange.getMaximumAddress().intValue()));
    }
    return integerAddressRanges;
}
// Values outside the 1-bit encodable domain (negative or too large) must
// produce an empty list of address ranges rather than fail.
@Test
public void testZOrderSearchCurveOutOfBounds() {
    List<Integer> bitPositions = ImmutableList.of(1);
    ZOrder zOrder = new ZOrder(bitPositions);
    List<ZValueRange> ranges = ImmutableList.of(new ZValueRange(ImmutableList.of(Optional.of(-3)), ImmutableList.of(Optional.of(-3))));
    List<ZAddressRange<Integer>> addresses = zOrder.zOrderSearchCurveIntegers(ranges);
    assertEquals(addresses, ImmutableList.of());
    ranges = ImmutableList.of(new ZValueRange(ImmutableList.of(Optional.of(3)), ImmutableList.of(Optional.of(3))));
    addresses = zOrder.zOrderSearchCurveIntegers(ranges);
    assertEquals(addresses, ImmutableList.of());
}
/**
 * Extracts the effective predicate implied by {@code node} by visiting the
 * plan with the predicate-extraction visitor.
 */
public RowExpression extract(PlanNode node) {
    return node.accept(new Visitor(domainTranslator, functionAndTypeManager), null);
}
// For a RIGHT join, the extracted effective predicate must keep the right
// side's conjuncts intact while weakening every left-side conjunct (and each
// equi-join clause) with an IS NULL alternative, since left columns can be
// null-extended.
@Test
public void testRightJoin() {
    ImmutableList.Builder<EquiJoinClause> criteriaBuilder = ImmutableList.builder();
    criteriaBuilder.add(new EquiJoinClause(AV, DV));
    criteriaBuilder.add(new EquiJoinClause(BV, EV));
    List<EquiJoinClause> criteria = criteriaBuilder.build();
    Map<VariableReferenceExpression, ColumnHandle> leftAssignments = Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(AV, BV, CV)));
    TableScanNode leftScan = tableScanNode(leftAssignments);
    Map<VariableReferenceExpression, ColumnHandle> rightAssignments = Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(DV, EV, FV)));
    TableScanNode rightScan = tableScanNode(rightAssignments);
    FilterNode left = filter(leftScan,
        and(
            lessThan(BV, AV),
            lessThan(CV, bigintLiteral(10)),
            equals(GV, bigintLiteral(10))));
    FilterNode right = filter(rightScan,
        and(
            equals(DV, EV),
            lessThan(FV, bigintLiteral(100))));
    PlanNode node = new JoinNode(
        Optional.empty(),
        newId(),
        JoinType.RIGHT,
        left,
        right,
        criteria,
        ImmutableList.<VariableReferenceExpression>builder()
            .addAll(left.getOutputVariables())
            .addAll(right.getOutputVariables())
            .build(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        Optional.empty(),
        ImmutableMap.of());
    RowExpression effectivePredicate = effectivePredicateExtractor.extract(node);
    // All left side symbols should be checked against NULL
    assertEquals(normalizeConjuncts(effectivePredicate),
        normalizeConjuncts(or(lessThan(BV, AV), and(isNull(BV), isNull(AV))),
            or(lessThan(CV, bigintLiteral(10)), isNull(CV)),
            equals(DV, EV),
            lessThan(FV, bigintLiteral(100)),
            or(equals(AV, DV), isNull(AV)),
            or(equals(BV, EV), isNull(BV))));
}
/** Static factory: builds a {@code RowCoder} for rows of the given schema. */
public static RowCoder of(Schema schema) {
    return new RowCoder(schema);
}
// A row with an enumeration (logical type) field must round-trip through the
// schema's RowCoder unchanged.
@Test
public void testLogicalType() throws Exception {
    EnumerationType enumeration = EnumerationType.create("one", "two", "three");
    Schema schema = Schema.builder().addLogicalTypeField("f_enum", enumeration).build();
    Row row = Row.withSchema(schema).addValue(enumeration.valueOf("two")).build();
    CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row);
}
/**
 * Builds the combine operator for this plan node, wrapping construction in a
 * tracing scope so the work is attributed to CombinePlanNode.
 */
@Override
public BaseCombineOperator run() {
    try (InvocationScope ignored = Tracing.getTracer().createScope(CombinePlanNode.class)) {
        return getCombineOperator();
    }
}
// Runs several rounds with a random number of trivial plan nodes and asserts
// that CombinePlanNode executes every one of them (counter reaches numPlans).
@Test
public void testParallelExecution() {
    AtomicInteger count = new AtomicInteger(0);
    Random rand = new Random();
    for (int i = 0; i < 5; i++) {
        count.set(0);
        int numPlans = rand.nextInt(5000);
        List<PlanNode> planNodes = new ArrayList<>();
        for (int index = 0; index < numPlans; index++) {
            planNodes.add(() -> {
                count.incrementAndGet();
                return null;
            });
        }
        _queryContext.setEndTimeMs(System.currentTimeMillis() + Server.DEFAULT_QUERY_EXECUTOR_TIMEOUT_MS);
        CombinePlanNode combinePlanNode = new CombinePlanNode(planNodes, _queryContext, _executorService, null);
        combinePlanNode.run();
        Assert.assertEquals(numPlans, count.get());
    }
}
/**
 * Reports whether {@code returnType} is (a subtype of) one of the supported
 * Rx return types.
 */
@Override
public boolean canHandleReturnType(Class<?> returnType) {
    for (Class<?> supportedType : rxSupportedTypes) {
        if (supportedType.isAssignableFrom(returnType)) {
            return true;
        }
    }
    return false;
}
// Every RxJava2 reactive return type must be recognized as handleable.
@Test
public void testCheckTypes() {
    assertThat(rxJava2TimeLimiterAspectExt.canHandleReturnType(Flowable.class)).isTrue();
    assertThat(rxJava2TimeLimiterAspectExt.canHandleReturnType(Single.class)).isTrue();
    assertThat(rxJava2TimeLimiterAspectExt.canHandleReturnType(Observable.class)).isTrue();
    assertThat(rxJava2TimeLimiterAspectExt.canHandleReturnType(Completable.class)).isTrue();
    assertThat(rxJava2TimeLimiterAspectExt.canHandleReturnType(Maybe.class)).isTrue();
}
/** Returns all configuration keys of the wrapped configuration. */
public List<String> getKeys() {
    return CommonsConfigurationUtils.getKeys(_configuration);
}
// getKeys must surface exactly the four keys supplied via the properties map.
@Test
public void assertGetKeys() {
    Map<String, Object> properties = new HashMap<>();
    properties.put("property.1.key", "val1");
    properties.put("property.2.key", "val1");
    properties.put("property.3.key", "val1");
    properties.put("property.4.key", "val1");
    PinotConfiguration pinotConfiguration = new PinotConfiguration(properties, new HashMap<>());
    List<String> keys = pinotConfiguration.getKeys();
    Assert.assertTrue(keys.contains("property.1.key"));
    Assert.assertTrue(keys.contains("property.2.key"));
    Assert.assertTrue(keys.contains("property.3.key"));
    Assert.assertTrue(keys.contains("property.4.key"));
    Assert.assertEquals(keys.size(), 4);
}
/**
 * Normalizes an endpoint URI, preferring the fast Camel parser and falling
 * back to the legacy normalizer for complex/unsafe URIs.
 *
 * @throws URISyntaxException if the URI cannot be parsed
 */
public static String normalizeUri(String uri) throws URISyntaxException {
    // try to parse using the simpler and faster Camel URI parser
    String[] parts = CamelURIParser.fastParseUri(uri);
    if (parts != null) {
        // we optimized specially if an empty array is returned
        if (parts == URI_ALREADY_NORMALIZED) {
            return uri;
        }
        // use the faster and more simple normalizer
        return doFastNormalizeUri(parts);
    } else {
        // use the legacy normalizer as the uri is complex and may have unsafe URL characters
        return doComplexNormalizeUri(uri);
    }
}
// Query parameters must be sorted alphabetically so the two orderings
// normalize to the same canonical URI.
@Test
public void testNormalizeEndpointUriSort() throws Exception {
    String out1 = URISupport.normalizeUri("smtp://localhost?to=foo&from=me");
    assertEquals("smtp://localhost?from=me&to=foo", out1);
    String out2 = URISupport.normalizeUri("smtp://localhost?from=me&to=foo");
    assertEquals("smtp://localhost?from=me&to=foo", out2);
    assertEquals(out1, out2);
}
/**
 * Validates and loads the migration config, then delegates to the
 * parameterized overload with the real ksql client factory, migrations
 * directory, and system clock.
 *
 * @return 0 on success, 1 when the config file is missing or fails to load
 */
@Override
protected int command() {
    if (!validateConfigFilePresent()) {
        return 1;
    }
    final MigrationConfig config;
    try {
        config = MigrationConfig.load(getConfigFile());
    } catch (KsqlException | MigrationException e) {
        LOGGER.error(e.getMessage());
        return 1;
    }
    return command(
        config,
        MigrationsUtil::getKsqlClient,
        getMigrationsDir(getConfigFile(), config),
        Clock.systemDefaultZone()
    );
}
// A migration whose statement is rejected must report exit code 1, record the
// version in ERROR state with the failure message, and close the client.
@Test
public void shouldLogErrorStateIfMigrationFails() throws Exception {
    // Given:
    command = PARSER.parse("-n");
    createMigrationFile(1, NAME, migrationsDir, COMMAND);
    when(versionQueryResult.get()).thenReturn(ImmutableList.of());
    when(statementResultCf.get()).thenThrow(new ExecutionException("sql rejected", new RuntimeException()));
    // When:
    final int result = command.command(config, (cfg, headers) -> ksqlClient, migrationsDir, Clock.fixed(
        Instant.ofEpochMilli(1000), ZoneId.systemDefault()));
    // Then:
    assertThat(result, is(1));
    final InOrder inOrder = inOrder(ksqlClient);
    verifyMigratedVersion(
        inOrder, 1, "<none>", MigrationState.ERROR,
        Optional.of("Failed to execute sql: " + COMMAND + ". Error: sql rejected"));
    inOrder.verify(ksqlClient).close();
    inOrder.verifyNoMoreInteractions();
}
/**
 * Queues the message for per-originator processing: messages sharing an
 * originator are serialized through that originator's
 * {@code SemaphoreWithTbMsgQueue}, while different originators proceed
 * independently.
 */
@Override
public void onMsg(TbContext ctx, TbMsg msg) {
    locks.computeIfAbsent(msg.getOriginator(), SemaphoreWithTbMsgQueue::new)
        .addToQueueAndTryProcess(msg, ctx, this::processMsgAsync);
}
// SQRT over a message-body argument: sqrt(5) rounded to 3 decimals (2.236)
// must be written back into the body under "result".
@Test
public void test_sqrt_5_body() {
    var node = initNode(TbRuleNodeMathFunctionType.SQRT,
        new TbMathResult(TbMathArgumentType.MESSAGE_BODY, "result", 3, false, false, null),
        new TbMathArgument(TbMathArgumentType.MESSAGE_BODY, "a")
    );
    TbMsg msg = TbMsg.newMsg(TbMsgType.POST_TELEMETRY_REQUEST, originator, TbMsgMetaData.EMPTY, JacksonUtil.newObjectNode().put("a", 5).toString());
    node.onMsg(ctx, msg);
    ArgumentCaptor<TbMsg> msgCaptor = ArgumentCaptor.forClass(TbMsg.class);
    verify(ctx, timeout(TIMEOUT)).tellSuccess(msgCaptor.capture());
    TbMsg resultMsg = msgCaptor.getValue();
    assertNotNull(resultMsg);
    assertNotNull(resultMsg.getData());
    var resultJson = JacksonUtil.toJsonNode(resultMsg.getData());
    assertTrue(resultJson.has("result"));
    assertEquals(2.236, resultJson.get("result").asDouble(), 0.0);
}
/**
 * Adds the usrlib directory (if present) to the set of files shipped to the
 * cluster so user classes can be loaded by the user classloader.
 */
@VisibleForTesting
void addUsrLibFolderToShipFiles(Collection<File> effectiveShipFiles) {
    // Add usrlib folder to the ship files if it exists
    // Classes in the folder will be loaded by UserClassLoader if CLASSPATH_INCLUDE_USER_JAR is
    // DISABLED.
    ClusterEntrypointUtils.tryFindUserLibDirectory()
        .ifPresent(
            usrLibDirFile -> {
                effectiveShipFiles.add(usrLibDirFile);
                LOG.info(
                    "usrlib: {} will be shipped automatically.",
                    usrLibDirFile.getAbsolutePath());
            });
}
// Sets up a fake FLINK_LIB_DIR layout with a sibling usrlib directory and
// verifies that exactly that directory is added to the ship files. The
// original process environment is restored afterwards.
@Test
void testShipUsrLib() throws IOException {
    final Map<String, String> oldEnv = System.getenv();
    final Map<String, String> env = new HashMap<>(1);
    final File homeFolder = Files.createTempDirectory(temporaryFolder, UUID.randomUUID().toString()).toFile();
    final File libFolder = new File(homeFolder.getAbsolutePath(), "lib");
    assertThat(libFolder.createNewFile()).isTrue();
    final File usrLibFolder = new File(homeFolder.getAbsolutePath(), ConfigConstants.DEFAULT_FLINK_USR_LIB_DIR);
    assertThat(usrLibFolder.mkdirs()).isTrue();
    final File usrLibFile = new File(usrLibFolder, "usrLibFile.jar");
    assertThat(usrLibFile.createNewFile()).isTrue();
    env.put(ConfigConstants.ENV_FLINK_LIB_DIR, libFolder.getAbsolutePath());
    CommonTestUtils.setEnv(env);
    try (YarnClusterDescriptor descriptor = createYarnClusterDescriptor()) {
        final Set<File> effectiveShipFiles = new HashSet<>();
        descriptor.addUsrLibFolderToShipFiles(effectiveShipFiles);
        assertThat(effectiveShipFiles).containsExactlyInAnyOrder(usrLibFolder);
    } finally {
        CommonTestUtils.setEnv(oldEnv);
    }
}
/**
 * Maps a hash code to a slot index in {@code [0, length)}.
 *
 * <p>{@code Integer.MIN_VALUE} is special-cased to 0 because
 * {@code abs(Integer.MIN_VALUE)} is still negative.
 *
 * @param length number of slots; must be positive
 */
public static int hashToIndex(int hash, int length) {
    checkPositive("length", length);
    return hash == Integer.MIN_VALUE ? 0 : abs(hash) % length;
}
// NOTE(review): the method name says "whenHashZero" but the input hash is
// 420 — the name and the scenario disagree; confirm intent and rename or
// add a hash==0 case.
@Test
public void hashToIndex_whenHashZero() {
    int result = hashToIndex(420, 100);
    assertEquals(20, result);
}
/**
 * Generates the Java statements that declare a KiePMMLCompoundPredicate
 * variable named {@code variableName}: first the declarations of every
 * nested predicate (named {@code variableName_<i>}), then the compound
 * declaration cloned from the template with the boolean operator and the
 * nested-variable references spliced in.
 *
 * @throws KiePMMLException when the template is missing the expected body,
 *         variable, or initializer
 */
static BlockStmt getCompoundPredicateVariableDeclaration(final String variableName, final CompoundPredicate compoundPredicate, final List<Field<?>> fields) {
    final MethodDeclaration methodDeclaration = COMPOUND_PREDICATE_TEMPLATE.getMethodsByName(GETKIEPMMLCOMPOUNDPREDICATE).get(0).clone();
    final BlockStmt compoundPredicateBody = methodDeclaration.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
    final VariableDeclarator variableDeclarator = getVariableDeclarator(compoundPredicateBody, COMPOUND_PREDICATE).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, COMPOUND_PREDICATE, compoundPredicateBody)));
    variableDeclarator.setName(variableName);
    final BlockStmt toReturn = new BlockStmt();
    int counter = 0;
    final NodeList<Expression> arguments = new NodeList<>();
    // Emit each nested predicate's declaration first and remember its name
    // so it can be referenced from the compound predicate's argument list.
    for (Predicate predicate : compoundPredicate.getPredicates()) {
        String nestedVariableName = String.format(VARIABLE_NAME_TEMPLATE, variableName, counter);
        arguments.add(new NameExpr(nestedVariableName));
        BlockStmt toAdd = getKiePMMLPredicate(nestedVariableName, predicate, fields);
        toAdd.getStatements().forEach(toReturn::addStatement);
        counter ++;
    }
    final BOOLEAN_OPERATOR booleanOperator = BOOLEAN_OPERATOR.byName(compoundPredicate.getBooleanOperator().value());
    final NameExpr booleanOperatorExpr = new NameExpr(BOOLEAN_OPERATOR.class.getName() + "." + booleanOperator.name());
    final MethodCallExpr initializer = variableDeclarator.getInitializer()
        .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, COMPOUND_PREDICATE, compoundPredicateBody)))
        .asMethodCallExpr();
    // Patch the cloned template: operator into builder(...), nested
    // variable references into asList(...).
    final MethodCallExpr builder = getChainedMethodCallExprFrom("builder", initializer);
    builder.setArgument(1, booleanOperatorExpr);
    getChainedMethodCallExprFrom("asList", initializer).setArguments(arguments);
    compoundPredicateBody.getStatements().forEach(toReturn::addStatement);
    return toReturn;
}
/**
 * Builds an AND compound predicate (two simple predicates + one simple-set
 * predicate), generates its variable declaration, and checks the produced AST
 * against a source template file, then verifies the generated code compiles.
 */
@Test
void getCompoundPredicateVariableDeclaration() throws IOException {
    String variableName = "variableName";
    SimplePredicate simplePredicate1 = getSimplePredicate(PARAM_1, value1, operator1);
    SimplePredicate simplePredicate2 = getSimplePredicate(PARAM_2, value2, operator2);
    Array.Type arrayType = Array.Type.STRING;
    List<String> values = getStringObjects(arrayType, 4);
    SimpleSetPredicate simpleSetPredicate = getSimpleSetPredicate(values, arrayType, SimpleSetPredicate.BooleanOperator.IS_IN);
    CompoundPredicate compoundPredicate = new CompoundPredicate();
    compoundPredicate.setBooleanOperator(CompoundPredicate.BooleanOperator.AND);
    compoundPredicate.getPredicates().add(0, simplePredicate1);
    compoundPredicate.getPredicates().add(1, simplePredicate2);
    compoundPredicate.getPredicates().add(2, simpleSetPredicate);
    // A DataField per referenced predicate field, all DOUBLE, collected in a dictionary.
    DataField dataField1 = new DataField();
    dataField1.setName(simplePredicate1.getField());
    dataField1.setDataType(DataType.DOUBLE);
    DataField dataField2 = new DataField();
    dataField2.setName(simplePredicate2.getField());
    dataField2.setDataType(DataType.DOUBLE);
    DataField dataField3 = new DataField();
    dataField3.setName(simpleSetPredicate.getField());
    dataField3.setDataType(DataType.DOUBLE);
    DataDictionary dataDictionary = new DataDictionary();
    dataDictionary.addDataFields(dataField1, dataField2, dataField3);
    // Expected fully-qualified enum reference and quoted value list, substituted
    // into the expected-source template below.
    String booleanOperatorString = BOOLEAN_OPERATOR.class.getName() + "."
            + BOOLEAN_OPERATOR.byName(compoundPredicate.getBooleanOperator().value()).name();
    String valuesString = values.stream()
            .map(valueString -> "\"" + valueString + "\"")
            .collect(Collectors.joining(","));
    final List<Field<?>> fields = getFieldsFromDataDictionary(dataDictionary);
    BlockStmt retrieved = KiePMMLCompoundPredicateFactory.getCompoundPredicateVariableDeclaration(variableName, compoundPredicate, fields);
    // Compare against the expected source template, ignoring formatting (AST equality).
    String text = getFileContent(TEST_01_SOURCE);
    Statement expected = JavaParserUtils.parseBlock(String.format(text, variableName, valuesString, booleanOperatorString));
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
    // The generated block must also compile with the imports it relies on.
    List<Class<?>> imports = Arrays.asList(KiePMMLCompoundPredicate.class, KiePMMLSimplePredicate.class, KiePMMLSimpleSetPredicate.class, Arrays.class, Collections.class);
    commonValidateCompilationWithImports(retrieved, imports);
}
/**
 * Hash code consistent with {@code equals}: combines the (nullable) state with the
 * completed and total counters using the conventional 31-multiplier scheme.
 */
@Override
public int hashCode() {
    int hash = (state == null) ? 0 : state.hashCode();
    hash = 31 * hash + completed;
    hash = 31 * hash + total;
    return hash;
}
/**
 * hashCode contract: repeated calls and attribute-equal instances hash equally;
 * instances differing in a single attribute (state / completed / total) should
 * hash differently when distinct hash codes are attainable.
 */
@Test
public void testHashCode() {
    assertEquals(backupTaskStatus.hashCode(), backupTaskStatus.hashCode());
    assertEquals(backupTaskStatus.hashCode(), backupTaskStatusWithSameAttributes.hashCode());
    // Inequality is not guaranteed by the hashCode contract; presumably this
    // assumption skips the negative checks when the fixtures collide — confirm.
    assumeDifferentHashCodes();
    assertNotEquals(backupTaskStatus.hashCode(), backupTaskStatusOtherState.hashCode());
    assertNotEquals(backupTaskStatus.hashCode(), backupTaskStatusOtherCompleted.hashCode());
    assertNotEquals(backupTaskStatus.hashCode(), backupTaskStatusOtherTotal.hashCode());
}
/**
 * Returns {@code true} when the given Data object graph contains no reference
 * cycles. Primitives and {@code null} are trivially acyclic; complex objects are
 * fully traversed with a no-op callback, so a completed walk implies no cycle.
 *
 * @param object a Data primitive or DataComplex; anything else is a programming error
 * @throws IllegalStateException if {@code object} is neither primitive nor complex
 */
static boolean objectIsAcyclic(Object object) {
    if (object == null) {
        return true;
    }
    final Class<?> type = object.getClass();
    if (isPrimitiveClass(type)) {
        // Primitives cannot reference other objects, hence cannot form a cycle.
        return true;
    }
    if (!isComplexClass(type)) {
        throw new IllegalStateException("Object of unknown type: " + object);
    }
    // NOTE(review): traverse presumably reports a detected cycle by throwing
    // IOException — confirm against Data.traverse's contract.
    try {
        Data.traverse((DataComplex) object, new TraverseCallback() {});
        return true;
    } catch (IOException e) {
        return false;
    }
}
/**
 * Exercises objectIsAcyclic over primitives, shared-node DAGs (legal), and every
 * placement of a single back-edge from list e or map f (illegal). Each back-edge
 * is inserted, the reachability of the cycle is asserted from every node, and the
 * edge is removed again before the next case. Checkers are disabled first so the
 * cycles can be constructed at all.
 */
@Test
public void testObjectIsAcyclic() {
    // Primitives and empty containers are trivially acyclic.
    assertTrue(Data.objectIsAcyclic(true));
    assertTrue(Data.objectIsAcyclic(1));
    assertTrue(Data.objectIsAcyclic(1L));
    assertTrue(Data.objectIsAcyclic(1.0f));
    assertTrue(Data.objectIsAcyclic(1.0));
    assertTrue(Data.objectIsAcyclic("string"));
    assertTrue(Data.objectIsAcyclic(new DataMap()));
    assertTrue(Data.objectIsAcyclic(new DataList()));
    // Graph: a -> {b, c}
    DataMap a = new DataMap();
    DataList b = new DataList();
    DataMap c = new DataMap();
    a.put("b", b);
    a.put("c", c);
    assertTrue(Data.objectIsAcyclic(a));
    assertTrue(Data.objectIsAcyclic(b));
    assertTrue(Data.objectIsAcyclic(c));
    // dm is shared by b and c — a diamond, still acyclic.
    DataMap dm = new DataMap();
    b.add(dm);
    c.put("d", dm);
    assertTrue(Data.objectIsAcyclic(a));
    assertTrue(Data.objectIsAcyclic(b));
    assertTrue(Data.objectIsAcyclic(c));
    assertTrue(Data.objectIsAcyclic(dm));
    // dm -> {e, f}
    DataList e = new DataList();
    DataMap f = new DataMap();
    dm.put("e", e);
    dm.put("f", f);
    assertTrue(Data.objectIsAcyclic(a));
    assertTrue(Data.objectIsAcyclic(b));
    assertTrue(Data.objectIsAcyclic(c));
    assertTrue(Data.objectIsAcyclic(dm));
    assertTrue(Data.objectIsAcyclic(e));
    assertTrue(Data.objectIsAcyclic(f));
    // Disable the cycle checkers so back-edges can be inserted below.
    a.disableChecker();
    b.disableChecker();
    c.disableChecker();
    dm.disableChecker();
    e.disableChecker();
    f.disableChecker();
    // loop from e to e
    assertTrue(Data.objectIsAcyclic(a));
    e.add(e);
    assertFalse(Data.objectIsAcyclic(a));
    assertFalse(Data.objectIsAcyclic(b));
    assertFalse(Data.objectIsAcyclic(c));
    assertFalse(Data.objectIsAcyclic(dm));
    assertFalse(Data.objectIsAcyclic(e));
    assertTrue(Data.objectIsAcyclic(f));
    e.remove(0);
    // loop from e to dm
    assertTrue(Data.objectIsAcyclic(a));
    e.add(dm);
    assertFalse(Data.objectIsAcyclic(a));
    assertFalse(Data.objectIsAcyclic(b));
    assertFalse(Data.objectIsAcyclic(c));
    assertFalse(Data.objectIsAcyclic(dm));
    assertFalse(Data.objectIsAcyclic(e));
    assertTrue(Data.objectIsAcyclic(f));
    e.remove(0);
    // loop from e to c
    assertTrue(Data.objectIsAcyclic(a));
    e.add(c);
    assertFalse(Data.objectIsAcyclic(a));
    assertFalse(Data.objectIsAcyclic(b));
    assertFalse(Data.objectIsAcyclic(c));
    assertFalse(Data.objectIsAcyclic(dm));
    assertFalse(Data.objectIsAcyclic(e));
    assertTrue(Data.objectIsAcyclic(f));
    e.remove(0);
    // loop from e to b
    assertTrue(Data.objectIsAcyclic(a));
    e.add(b);
    assertFalse(Data.objectIsAcyclic(a));
    assertFalse(Data.objectIsAcyclic(b));
    assertFalse(Data.objectIsAcyclic(c));
    assertFalse(Data.objectIsAcyclic(dm));
    assertFalse(Data.objectIsAcyclic(e));
    assertTrue(Data.objectIsAcyclic(f));
    e.remove(0);
    // loop from e to a
    assertTrue(Data.objectIsAcyclic(a));
    e.add(a);
    assertFalse(Data.objectIsAcyclic(a));
    assertFalse(Data.objectIsAcyclic(b));
    assertFalse(Data.objectIsAcyclic(c));
    assertFalse(Data.objectIsAcyclic(dm));
    assertFalse(Data.objectIsAcyclic(e));
    assertTrue(Data.objectIsAcyclic(f));
    e.remove(0);
    // loop from f to f
    assertTrue(Data.objectIsAcyclic(a));
    f.put("f", f);
    assertFalse(Data.objectIsAcyclic(a));
    assertFalse(Data.objectIsAcyclic(b));
    assertFalse(Data.objectIsAcyclic(c));
    assertFalse(Data.objectIsAcyclic(dm));
    assertTrue(Data.objectIsAcyclic(e));
    assertFalse(Data.objectIsAcyclic(f));
    f.remove("f");
    // loop from f to dm
    assertTrue(Data.objectIsAcyclic(a));
    f.put("d", dm);
    assertFalse(Data.objectIsAcyclic(a));
    assertFalse(Data.objectIsAcyclic(b));
    assertFalse(Data.objectIsAcyclic(c));
    assertFalse(Data.objectIsAcyclic(dm));
    assertTrue(Data.objectIsAcyclic(e));
    assertFalse(Data.objectIsAcyclic(f));
    f.remove("d");
    // loop from f to c
    assertTrue(Data.objectIsAcyclic(a));
    f.put("c", c);
    assertFalse(Data.objectIsAcyclic(a));
    assertFalse(Data.objectIsAcyclic(b));
    assertFalse(Data.objectIsAcyclic(c));
    assertFalse(Data.objectIsAcyclic(dm));
    assertTrue(Data.objectIsAcyclic(e));
    assertFalse(Data.objectIsAcyclic(f));
    f.remove("c");
    // loop from f to b
    assertTrue(Data.objectIsAcyclic(a));
    f.put("b", b);
    assertFalse(Data.objectIsAcyclic(a));
    assertFalse(Data.objectIsAcyclic(b));
    assertFalse(Data.objectIsAcyclic(c));
    assertFalse(Data.objectIsAcyclic(dm));
    assertTrue(Data.objectIsAcyclic(e));
    assertFalse(Data.objectIsAcyclic(f));
    f.remove("b");
    // loop from f to a
    assertTrue(Data.objectIsAcyclic(a));
    f.put("a", a);
    assertFalse(Data.objectIsAcyclic(a));
    assertFalse(Data.objectIsAcyclic(b));
    assertFalse(Data.objectIsAcyclic(c));
    assertFalse(Data.objectIsAcyclic(dm));
    assertTrue(Data.objectIsAcyclic(e));
    assertFalse(Data.objectIsAcyclic(f));
    f.remove("a");
}
/**
 * Replaces the currently shown watermark drawables with the supplied list, sizes
 * each one to the square watermark dimension, and schedules a redraw.
 *
 * @param watermarks drawables to show; the list is copied, the drawables are not
 */
@Override
public void setWatermark(@NonNull List<Drawable> watermarks) {
    // Snapshot the incoming list before touching bounds, in case the caller
    // passed our own backing list.
    mWatermarks.clear();
    mWatermarks.addAll(watermarks);
    // Every watermark is drawn inside a mWatermarkDimen x mWatermarkDimen box.
    for (Drawable drawable : mWatermarks) {
        drawable.setBounds(0, 0, mWatermarkDimen, mWatermarkDimen);
    }
    invalidate();
}
/**
 * setWatermark must size every supplied drawable to the configured watermark
 * dimension (R.dimen.watermark_size) via setBounds(0, 0, dimen, dimen).
 */
@Test
public void testWatermarkSetsBounds() {
    final int dimen = getApplicationContext().getResources().getDimensionPixelOffset(R.dimen.watermark_size);
    List<Drawable> watermarks = Arrays.asList(Mockito.mock(Drawable.class), Mockito.mock(Drawable.class));
    mViewUnderTest.setWatermark(watermarks);
    // Each mock must have received exactly one setBounds call with the dimension.
    for (Drawable watermark : watermarks) {
        Mockito.verify(watermark).setBounds(0, 0, dimen, dimen);
    }
}
/**
 * Resolves the HPACK static-table index for the given header name/value pair, or
 * {@code NOT_FOUND} when the table has no such entry. Empty values are looked up
 * by name only; non-empty values go through a value-keyed bucket. Comparison uses
 * equalsVariableTime (presumably a case-tolerant, non-constant-time compare —
 * confirm its exact semantics).
 */
static int getIndexInsensitive(CharSequence name, CharSequence value) {
    if (value.length() == 0) {
        // Headers with empty values are indexed by name alone.
        HeaderNameIndex entry = getEntry(name);
        return (entry != null && entry.emptyValue) ? entry.index : NOT_FOUND;
    }
    // Non-empty values live in a single-slot bucket keyed by the value.
    HeaderIndex header = HEADERS_WITH_NON_EMPTY_VALUES[headerBucket(value)];
    if (header != null
            && equalsVariableTime(header.name, name)
            && equalsVariableTime(header.value, value)) {
        return header.index;
    }
    return NOT_FOUND;
}
/**
 * A name present in the static table (":scheme") paired with a value the table
 * does not contain must not resolve to any index (-1).
 */
@Test
public void testExistingHeaderNameButMissingValue() {
    final int index = HpackStaticTable.getIndexInsensitive(":scheme", "missing");
    assertEquals(-1, index);
}
/**
 * Returns the health service held by this instance.
 *
 * @return the health service (nullability not established here — confirm)
 */
public Health getHealthService() {
    return this.healthService;
}
/**
 * The manager must always expose a non-null health service.
 */
@Test
void getHealthService() {
    Assertions.assertNotNull(manager.getHealthService());
}
/**
 * Routes the available invokers through the single router chain selected for this
 * invocation and returns the surviving invokers.
 *
 * @deprecated retained for backward compatibility; the original deprecation
 *     rationale is not visible here — presumably callers should obtain and use
 *     the chain from getSingleChain directly (confirm).
 */
@Deprecated
public List<Invoker<T>> route(URL url, BitList<Invoker<T>> availableInvokers, Invocation invocation) {
    return getSingleChain(url, availableInvokers, invocation).route(url, availableInvokers, invocation);
}
/**
 * End-to-end router-chain test: six invokers are configured so that each router
 * stage (mock selector, mesh, tag, service-condition, app-condition) filters out
 * exactly one, leaving only invoker5. Also verifies the router snapshot log and
 * that destroy() empties the chain.
 */
@Test
void testRoute() {
    RouterChain<DemoService> routerChain = createRouterChanin();
    // mockInvoker will be filtered out by MockInvokersSelector
    Invoker<DemoService> mockInvoker = createMockInvoker();
    // invoker1 will be filtered out by MeshStateRouter
    Map<String, String> map1 = new HashMap<>();
    map1.put("env-sign", "yyyyyyy");
    Invoker<DemoService> invoker1 = createNormalInvoker(map1);
    // invoker2 will be filtered out by TagStateRouter
    Map<String, String> map2 = new HashMap<>();
    map2.put("env-sign", "xxx");
    map2.put("tag1", "hello");
    Invoker<DemoService> invoker2 = createNormalInvoker(map2);
    // invoker3 will be filtered out by AppStateRouter
    Map<String, String> map3 = new HashMap<>();
    map3.put("env-sign", "xxx");
    map3.put("tag1", "hello");
    map3.put(TAG_KEY, "TAG_");
    Invoker<DemoService> invoker3 = createNormalInvoker(map3);
    // invoker4 will be filtered out by ServiceStateRouter
    Map<String, String> map4 = new HashMap<>();
    map4.put("env-sign", "xxx");
    map4.put("tag1", "hello");
    map4.put(TAG_KEY, "TAG_");
    map4.put("timeout", "5000");
    Invoker<DemoService> invoker4 = createNormalInvoker(map4);
    // invoker5 is the only one returned at the end that is not filtered out
    Map<String, String> map5 = new HashMap<>();
    map5.put("env-sign", "xxx");
    map5.put("tag1", "hello");
    map5.put(TAG_KEY, "TAG_");
    map5.put("timeout", "5000");
    map5.put("serialization", "hessian2");
    Invoker<DemoService> invoker5 = createNormalInvoker(map5);
    BitList<Invoker<DemoService>> invokers = new BitList<>(Arrays.asList(mockInvoker, invoker1, invoker2, invoker3, invoker4, invoker5));
    routerChain.setInvokers(invokers, () -> {});
    // mesh rule for MeshStateRouter
    MeshRuleManager meshRuleManager = mockInvoker.getUrl().getOrDefaultModuleModel().getBeanFactory().getBean(MeshRuleManager.class);
    ConcurrentHashMap<String, MeshAppRuleListener> appRuleListeners = meshRuleManager.getAppRuleListeners();
    MeshAppRuleListener meshAppRuleListener = appRuleListeners.get(invoker1.getUrl().getRemoteApplication());
    ConfigChangedEvent configChangedEvent = new ConfigChangedEvent(
            "demo-route" + MESH_RULE_DATA_ID_SUFFIX,
            DynamicConfiguration.DEFAULT_GROUP,
            MESH_RULE1 + "---\n" + MESH_RULE2,
            ConfigChangeType.ADDED);
    meshAppRuleListener.process(configChangedEvent);
    // condition rule for AppStateRouter&ServiceStateRouter
    ListenableStateRouter serviceRouter = routerChain.getStateRouters().stream()
            .filter(s -> s instanceof ServiceStateRouter)
            .map(s -> (ListenableStateRouter) s)
            .findAny()
            .orElse(null);
    ConfigChangedEvent serviceConditionEvent = new ConfigChangedEvent(
            DynamicConfiguration.getRuleKey(mockInvoker.getUrl()) + ".condition-router",
            DynamicConfiguration.DEFAULT_GROUP,
            SERVICE_CONDITION_RULE,
            ConfigChangeType.ADDED);
    serviceRouter.process(serviceConditionEvent);
    ListenableStateRouter appRouter = routerChain.getStateRouters().stream()
            .filter(s -> s instanceof AppStateRouter)
            .map(s -> (ListenableStateRouter) s)
            .findAny()
            .orElse(null);
    ConfigChangedEvent appConditionEvent = new ConfigChangedEvent(
            "app.condition-router",
            DynamicConfiguration.DEFAULT_GROUP,
            APP_CONDITION_RULE,
            ConfigChangeType.ADDED);
    appRouter.process(appConditionEvent);
    // prepare consumerUrl and RpcInvocation
    URL consumerUrl = URL.valueOf("consumer://localhost/DemoInterface?remote.application=app1");
    RpcInvocation rpcInvocation = new RpcInvocation();
    rpcInvocation.setServiceName("DemoService");
    rpcInvocation.setObjectAttachment("trafficLabel", "xxx");
    rpcInvocation.setObjectAttachment(TAG_KEY, "TAG_");
    // Enable snapshot recording so the per-stage routing trace can be asserted below.
    RpcContext.getServiceContext().setNeedPrintRouterSnapshot(true);
    RouterSnapshotSwitcher routerSnapshotSwitcher = FrameworkModel.defaultModel().getBeanFactory().getBean(RouterSnapshotSwitcher.class);
    routerSnapshotSwitcher.addEnabledService("org.apache.dubbo.demo.DemoService");
    // route
    List<Invoker<DemoService>> result = routerChain
            .getSingleChain(consumerUrl, invokers, rpcInvocation)
            .route(consumerUrl, invokers, rpcInvocation);
    Assertions.assertEquals(result.size(), 1);
    Assertions.assertTrue(result.contains(invoker5));
    // Expected per-stage trace: input/output counts at every router node.
    String snapshotLog = "[ Parent (Input: 6) (Current Node Output: 6) (Chain Node Output: 1) ] Input: localhost:9103,localhost:9103,localhost:9103,localhost:9103,localhost:9103 -> Chain Node Output: localhost:9103...\n"
            + "  [ MockInvokersSelector (Input: 6) (Current Node Output: 5) (Chain Node Output: 1) Router message: invocation.need.mock not set. Return normal Invokers. ] Current Node Output: localhost:9103,localhost:9103,localhost:9103,localhost:9103,localhost:9103\n"
            + "  [ StandardMeshRuleRouter (Input: 5) (Current Node Output: 4) (Chain Node Output: 1) Router message: Match App: app Subset: isolation ] Current Node Output: localhost:9103,localhost:9103,localhost:9103,localhost:9103\n"
            + "  [ TagStateRouter (Input: 4) (Current Node Output: 3) (Chain Node Output: 1) Router message: Disable Tag Router. Reason: tagRouterRule is invalid or disabled ] Current Node Output: localhost:9103,localhost:9103,localhost:9103\n"
            + "  [ ServiceStateRouter (Input: 3) (Current Node Output: 3) (Chain Node Output: 1) Router message: null ] Current Node Output: localhost:9103,localhost:9103,localhost:9103\n"
            + "    [ ConditionStateRouter (Input: 3) (Current Node Output: 2) (Chain Node Output: 2) Router message: Match return. ] Current Node Output: localhost:9103,localhost:9103\n"
            + "  [ ProviderAppStateRouter (Input: 2) (Current Node Output: 2) (Chain Node Output: 1) Router message: Directly return. Reason: Invokers from previous router is empty or conditionRouters is empty. ] Current Node Output: localhost:9103,localhost:9103\n"
            + "  [ AppStateRouter (Input: 2) (Current Node Output: 2) (Chain Node Output: 1) Router message: null ] Current Node Output: localhost:9103,localhost:9103\n"
            + "    [ ConditionStateRouter (Input: 2) (Current Node Output: 1) (Chain Node Output: 1) Router message: Match return. ] Current Node Output: localhost:9103\n"
            + "  [ AppScriptStateRouter (Input: 1) (Current Node Output: 1) (Chain Node Output: 1) Router message: Directly return from script router. Reason: Invokers from previous router is empty or script is not enabled. Script rule is: null ] Current Node Output: localhost:9103";
    String[] snapshot = routerSnapshotSwitcher.cloneSnapshot();
    Assertions.assertTrue(snapshot[0].contains(snapshotLog));
    // Routing must still yield the same single invoker with snapshots disabled.
    RpcContext.getServiceContext().setNeedPrintRouterSnapshot(false);
    result = routerChain
            .getSingleChain(consumerUrl, invokers, rpcInvocation)
            .route(consumerUrl, invokers, rpcInvocation);
    Assertions.assertEquals(result.size(), 1);
    Assertions.assertTrue(result.contains(invoker5));
    // destroy() must clear both router lists.
    routerChain.destroy();
    Assertions.assertEquals(routerChain.getRouters().size(), 0);
    Assertions.assertEquals(routerChain.getStateRouters().size(), 0);
}
/**
 * Static factory for a compound-assignment template expression
 * (e.g. {@code x += 2}).
 *
 * @param variable   the assignment target
 * @param operator   a compound assignment tree kind (must be a key of TAG)
 * @param expression the right-hand side
 * @throws IllegalArgumentException if {@code operator} is not a compound
 *     assignment operator
 */
public static UAssignOp create(UExpression variable, Kind operator, UExpression expression) {
    final boolean isCompoundAssignment = TAG.containsKey(operator);
    checkArgument(
        isCompoundAssignment,
        "Tree kind %s does not represent a compound assignment operator",
        operator);
    return new AutoValue_UAssignOp(variable, operator, expression);
}
/**
 * A UAssignOp tree (here {@code x += 2}) must survive a Java serialization
 * round-trip per SerializableTester's contract.
 */
@Test
public void serialization() {
    SerializableTester.reserializeAndAssert(
        UAssignOp.create(UFreeIdent.create("x"), Kind.PLUS_ASSIGNMENT, ULiteral.intLit(2)));
}
/**
 * Creates a logical segment in the reserved (negative) id space, backed by the
 * shared physical store, and registers it under its id.
 *
 * @param segmentId   the reserved segment id; must be negative
 * @param segmentName name of the new logical segment
 * @return the newly created reserved segment
 * @throws IllegalArgumentException if {@code segmentId} is not negative
 * @throws IllegalStateException    if a reserved segment with this id already exists
 */
LogicalKeyValueSegment createReservedSegment(final long segmentId, final String segmentName) {
    if (segmentId >= 0) {
        throw new IllegalArgumentException("segmentId for a reserved segment must be negative");
    }
    final LogicalKeyValueSegment newSegment = new LogicalKeyValueSegment(segmentId, segmentName, physicalStore);
    // Fix: use putIfAbsent so a duplicate id fails WITHOUT replacing the segment
    // already registered under it. The previous put()-then-check replaced the
    // existing mapping before throwing, leaving the map inconsistent with the error.
    if (reservedSegments.putIfAbsent(segmentId, newSegment) != null) {
        throw new IllegalStateException("LogicalKeyValueSegment already exists.");
    }
    return newSegment;
}
/**
 * Creating reserved (negative-id) segments must materialize the backing RocksDB
 * directory and leave both segments open.
 */
@Test
public void shouldCreateReservedSegments() {
    final LogicalKeyValueSegment reservedSegment1 = segments.createReservedSegment(-1, "reserved-1");
    final LogicalKeyValueSegment reservedSegment2 = segments.createReservedSegment(-2, "reserved-2");
    // The shared physical store directory must exist on disk.
    final File rocksdbDir = new File(new File(context.stateDir(), DB_FILE_DIR), STORE_NAME);
    assertTrue(rocksdbDir.isDirectory());
    assertTrue(reservedSegment1.isOpen());
    assertTrue(reservedSegment2.isOpen());
}
/**
 * Parses forwarded / non-forwarded / read-field annotation strings into
 * {@code result} for a single-input function. Delegates to the full overload with
 * the trailing boolean set to {@code false} (presumably a strict/relaxed parsing
 * flag — confirm against the overload's parameter name).
 *
 * @param result       semantic properties to populate (mutated in place)
 * @param forwarded    forwarded-field annotation strings, may be null
 * @param nonForwarded non-forwarded-field annotation strings, may be null
 * @param readSet      read-field annotation strings, may be null
 * @param inType       input type of the function
 * @param outType      output type of the function
 */
public static void getSemanticPropsSingleFromString(
        SingleInputSemanticProperties result,
        String[] forwarded,
        String[] nonForwarded,
        String[] readSet,
        TypeInformation<?> inType,
        TypeInformation<?> outType) {
    getSemanticPropsSingleFromString(
            result, forwarded, nonForwarded, readSet, inType, outType, false);
}
/**
 * A syntactically invalid read-fields annotation string must be rejected with an
 * InvalidSemanticAnnotationException.
 */
@Test
void testReadFieldsInvalidString() {
    final SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
    final String[] readFields = {"notValid"};
    assertThatThrownBy(
                    () ->
                            SemanticPropUtil.getSemanticPropsSingleFromString(
                                    sp, null, null, readFields, threeIntTupleType, threeIntTupleType))
            .isInstanceOf(InvalidSemanticAnnotationException.class);
}
public LoginCallback create(final Controller controller) { try { final Constructor<? extends LoginCallback> constructor = ConstructorUtils.getMatchingAccessibleConstructor(clazz, controller.getClass()); if(null == constructor) { log.warn(String.format("No matching constructor for parameter %s", controller.getClass())); // Call default constructor for disabled implementations return clazz.getDeclaredConstructor().newInstance(); } return constructor.newInstance(controller); } catch(InstantiationException | InvocationTargetException | IllegalAccessException | NoSuchMethodException e) { log.error(String.format("Failure loading callback class %s. %s", clazz, e.getMessage())); return new DisabledLoginCallback(); } }
/**
 * The factory must produce a non-null callback for any controller implementation,
 * even a minimal anonymous one whose invoke is a no-op.
 */
@Test
public void testCreate() {
    assertNotNull(LoginCallbackFactory.get(new AbstractController() {
        @Override
        public void invoke(final MainAction runnable, final boolean wait) {
            // no-op controller: the factory must still resolve a callback
        }
    }));
}