focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Asynchronously acks a message on the broker at {@code addr}.
 * Pure delegation to the five-argument overload, passing {@code null} for the
 * trailing argument (its semantics are defined by that overload — see its doc).
 *
 * @param addr          broker address to send the ack to
 * @param timeOut       request timeout in milliseconds
 * @param ackCallback   invoked on success or failure of the ack
 * @param requestHeader ack request metadata
 * @throws RemotingException    on transport-level failure
 * @throws MQBrokerException    on broker-side failure
 * @throws InterruptedException if the calling thread is interrupted
 */
public void ackMessageAsync(
        final String addr,
        final long timeOut,
        final AckCallback ackCallback,
        final AckMessageRequestHeader requestHeader
) throws RemotingException, MQBrokerException, InterruptedException {
    ackMessageAsync(addr, timeOut, ackCallback, requestHeader, null);
}
// Stubs remotingClient.invokeAsync to synchronously complete the InvokeCallback with a
// SUCCESS response, then asserts the AckCallback observes AckStatus.OK; a latch makes
// the (possibly async) callback observable before the test returns.
@Test public void testAckMessageAsync_Success() throws Exception { doAnswer((Answer<Void>) mock -> { InvokeCallback callback = mock.getArgument(3); RemotingCommand request = mock.getArgument(1); ResponseFuture responseFuture = new ResponseFuture(null, request.getOpaque(), 3 * 1000, null, null); RemotingCommand response = RemotingCommand.createResponseCommand(ResponseCode.SUCCESS, null); response.setOpaque(request.getOpaque()); response.setCode(ResponseCode.SUCCESS); responseFuture.setResponseCommand(response); callback.operationSucceed(responseFuture.getResponseCommand()); return null; }).when(remotingClient).invokeAsync(anyString(), any(RemotingCommand.class), anyLong(), any(InvokeCallback.class)); final CountDownLatch done = new CountDownLatch(1); mqClientAPI.ackMessageAsync(brokerAddr, 10 * 1000, new AckCallback() { @Override public void onSuccess(AckResult ackResult) { assertThat(ackResult.getStatus()).isEqualTo(AckStatus.OK); done.countDown(); } @Override public void onException(Throwable e) { Assertions.fail("want no exception but got one", e); done.countDown(); } }, new AckMessageRequestHeader()); done.await(); }
// Number of events currently buffered; queue.size() returns an int which is
// widened to long to satisfy the interface's return type.
@Override
public long currentEventSize() {
    return queue.size();
}
// Size starts at 0 and grows by one per published event.
@Test void testCurrentEventSize() { assertEquals(0, publisher.currentEventSize()); publisher.publish(new MockEvent()); assertEquals(1, publisher.currentEventSize()); }
/**
 * Hash is derived solely from the wrapped {@code server}; a {@code null}
 * server hashes to 0 (same contract as {@code Objects.hashCode(server)}).
 */
@Override
public int hashCode() {
    return server == null ? 0 : server.hashCode();
}
// A DiscoveryResult wrapping null must hash to 0 (Objects.hashCode(null) contract).
@Test void hashCodeForNull() { final DiscoveryResult discoveryResult = new DiscoveryResult(null); assertNotNull(discoveryResult.hashCode()); assertEquals(0, discoveryResult.hashCode()); }
/**
 * Serializes this function's state into a RemoteData builder.
 * NOTE: the add order is significant — deserialization reads the repeated
 * fields back by index, so value, time bucket, entityId and serviceId must
 * stay in exactly this order (verify against the matching deserialize()).
 */
@Override
public RemoteData.Builder serialize() {
    final RemoteData.Builder remoteBuilder = RemoteData.newBuilder();
    remoteBuilder.addDataObjectStrings(value.toStorageData());
    remoteBuilder.addDataLongs(getTimeBucket());
    remoteBuilder.addDataStrings(entityId);
    remoteBuilder.addDataStrings(serviceId);
    return remoteBuilder;
}
// Round-trip: serialize one populated instance, deserialize into a fresh one,
// and check every serialized field survives intact.
@Test public void testSerialize() { function.accept(MeterEntity.newService("service-test", Layer.GENERAL), HTTP_CODE_COUNT_1); MaxLabeledFunction function2 = new MaxLabeledFunctionInst(); function2.deserialize(function.serialize().build()); assertThat(function2.getEntityId()).isEqualTo(function.getEntityId()); assertThat(function2.getTimeBucket()).isEqualTo(function.getTimeBucket()); assertThat(function2.getServiceId()).isEqualTo(function.getServiceId()); assertThat(function2.getValue()).isEqualTo(function.getValue()); }
/**
 * Checks a plaintext password against a stored bcrypt hash.
 * Re-hashes the candidate with the salt embedded in {@code hashed}, then
 * compares without early return to reduce timing side channels.
 *
 * @param plaintext password to verify
 * @param hashed    stored bcrypt hash (carries its own salt)
 * @return true if the password matches
 */
public static boolean checkpw(String plaintext, String hashed) {
    final String candidate = hashpw(plaintext, hashed);
    return equalsNoEarlyReturn(hashed, candidate);
}
// Wrong-format hash fails; freshly hashed password verifies; wrong password fails.
@Test public void testCheckpw() { Assert.assertFalse(BCrypt.checkpw("foo", "$2a$10$......................")); final String hashed = BCrypt.hashpw("foo", BCrypt.gensalt()); Assert.assertTrue(BCrypt.checkpw("foo", hashed)); Assert.assertFalse(BCrypt.checkpw("bar", hashed)); }
/**
 * Compiles the parsed pattern node list into a linked chain of converters,
 * returning the head of the chain. Unknown or failing keywords are replaced
 * with literal %PARSER_ERROR[...] markers rather than aborting compilation.
 */
Converter<E> compile() {
    // Reset the output chain; addToList appends to head/tail.
    head = tail = null;
    for (Node n = top; n != null; n = n.next) {
        switch (n.type) {
        case Node.LITERAL:
            // Plain text between conversion words.
            addToList(new LiteralConverter<E>((String) n.getValue()));
            break;
        case Node.COMPOSITE_KEYWORD:
            CompositeNode cn = (CompositeNode) n;
            CompositeConverter<E> compositeConverter = createCompositeConverter(cn);
            if (compositeConverter == null) {
                // Keyword unknown: record the error and emit a visible marker instead.
                addError("Failed to create converter for [%" + cn.getValue() + "] keyword");
                addToList(new LiteralConverter<E>("%PARSER_ERROR[" + cn.getValue() + "]"));
                break;
            }
            compositeConverter.setFormattingInfo(cn.getFormatInfo());
            compositeConverter.setOptionList(cn.getOptions());
            // Recursively compile the composite's child pattern with the same map/context.
            Compiler<E> childCompiler = new Compiler<E>(cn.getChildNode(), converterMap);
            childCompiler.setContext(context);
            Converter<E> childConverter = childCompiler.compile();
            compositeConverter.setChildConverter(childConverter);
            addToList(compositeConverter);
            break;
        case Node.SIMPLE_KEYWORD:
            SimpleKeywordNode kn = (SimpleKeywordNode) n;
            DynamicConverter<E> dynaConverter = createConverter(kn);
            if (dynaConverter != null) {
                dynaConverter.setFormattingInfo(kn.getFormatInfo());
                dynaConverter.setOptionList(kn.getOptions());
                addToList(dynaConverter);
            } else {
                // if the appropriate dynaconverter cannot be found, then replace
                // it with a dummy LiteralConverter indicating an error.
                Converter<E> errConveter = new LiteralConverter<E>("%PARSER_ERROR[" + kn.getValue() + "]");
                addStatus(new ErrorStatus("[" + kn.getValue() + "] is not a valid conversion word", this));
                addToList(errConveter);
            }
        }
    }
    return head;
}
// An unknown conversion word must surface as an error status, not a crash.
@Test public void testUnknownWord() throws Exception { Parser<Object> p = new Parser<Object>("%unknown"); p.setContext(context); Node t = p.parse(); p.compile(t, converterMap); StatusChecker checker = new StatusChecker(context.getStatusManager()); checker.assertContainsMatch("\\[unknown] is not a valid conversion word"); }
// Thread-safety wrapper: serializes access to the wrapped partition group
// by synchronizing on this instance, then delegates unchanged.
@Override
synchronized int addRawRecords(final TopicPartition partition, final Iterable<ConsumerRecord<byte[], byte[]>> rawRecords) {
    return wrapped.addRawRecords(partition, rawRecords);
}
// The synchronized wrapper must forward arguments unchanged and return the delegate's result.
@Test public void testAddRawRecords() { final TopicPartition partition = new TopicPartition("topic", 0); @SuppressWarnings("unchecked") final Iterable<ConsumerRecord<byte[], byte[]>> rawRecords = (Iterable<ConsumerRecord<byte[], byte[]>>) mock(Iterable.class); when(wrapped.addRawRecords(partition, rawRecords)).thenReturn(1); final int result = synchronizedPartitionGroup.addRawRecords(partition, rawRecords); assertEquals(1, result); verify(wrapped, times(1)).addRawRecords(partition, rawRecords); }
/**
 * Returns whether {@code item}'s class declares the named capability.
 * A {@code null} item or capability never matches.
 */
public static boolean hasCapability(Object item, String capability) {
    // Guard first so the class-based lookup only runs on real arguments.
    if (item == null || capability == null) {
        return false;
    }
    return Capabilities.hasCapability(item.getClass(), capability);
}
// Capability lookup must work on derived classes and reject unknown capability names.
@Test public void shouldDetectAnnotationsOnDerivedClasses() { BravoClass bravo = new BravoClass(); CharlieClass charlie = new CharlieClass(); assertTrue("should find bravo cap on BravoClass", Capabilities.hasCapability(bravo, "bravo")); assertFalse("should not find blahblah cap on BravoClass", Capabilities.hasCapability(bravo, "blahblah")); assertTrue("should find charlie cap on CharlieClass", Capabilities.hasCapability(charlie, "charlie")); assertFalse("should not find blahblah cap on CharlieClass", Capabilities.hasCapability(charlie, "blahblah")); }
public static void checkMetaDir() throws InvalidMetaDirException, IOException { // check meta dir // if metaDir is the default config: StarRocksFE.STARROCKS_HOME_DIR + "/meta", // we should check whether both the new default dir (STARROCKS_HOME_DIR + "/meta") // and the old default dir (DORIS_HOME_DIR + "/doris-meta") are present. If both are present, // we need to let users keep only one to avoid starting from outdated metadata. Path oldDefaultMetaDir = Paths.get(System.getenv("DORIS_HOME") + "/doris-meta"); Path newDefaultMetaDir = Paths.get(System.getenv("STARROCKS_HOME") + "/meta"); Path metaDir = Paths.get(Config.meta_dir); if (metaDir.equals(newDefaultMetaDir)) { File oldMeta = new File(oldDefaultMetaDir.toUri()); File newMeta = new File(newDefaultMetaDir.toUri()); if (oldMeta.exists() && newMeta.exists()) { LOG.error("New default meta dir: {} and Old default meta dir: {} are both present. " + "Please make sure {} has the latest data, and remove the another one.", newDefaultMetaDir, oldDefaultMetaDir, newDefaultMetaDir); throw new InvalidMetaDirException(); } } File meta = new File(metaDir.toUri()); if (!meta.exists()) { // If metaDir is not the default config, it means the user has specified the other directory // We should not use the oldDefaultMetaDir. 
// Just exit in this case if (!metaDir.equals(newDefaultMetaDir)) { LOG.error("meta dir {} dose not exist", metaDir); throw new InvalidMetaDirException(); } File oldMeta = new File(oldDefaultMetaDir.toUri()); if (oldMeta.exists()) { // For backward compatible Config.meta_dir = oldDefaultMetaDir.toString(); } else { LOG.error("meta dir {} does not exist", meta.getAbsolutePath()); throw new InvalidMetaDirException(); } } long lowerFreeDiskSize = Long.parseLong(EnvironmentParams.FREE_DISK.getDefault()); FileStore store = Files.getFileStore(Paths.get(Config.meta_dir)); if (store.getUsableSpace() < lowerFreeDiskSize) { LOG.error("Free capacity left for meta dir: {} is less than {}", Config.meta_dir, new ByteSizeValue(lowerFreeDiskSize)); throw new InvalidMetaDirException(); } Path imageDir = Paths.get(Config.meta_dir + GlobalStateMgr.IMAGE_DIR); Path bdbDir = Paths.get(BDBEnvironment.getBdbDir()); boolean haveImageData = false; if (Files.exists(imageDir)) { try (Stream<Path> stream = Files.walk(imageDir)) { haveImageData = stream.anyMatch(path -> path.getFileName().toString().startsWith("image.")); } } boolean haveBDBData = false; if (Files.exists(bdbDir)) { try (Stream<Path> stream = Files.walk(bdbDir)) { haveBDBData = stream.anyMatch(path -> path.getFileName().toString().endsWith(".jdb")); } } if (haveImageData && !haveBDBData && !Config.start_with_incomplete_meta) { LOG.error("image exists, but bdb dir is empty, " + "set start_with_incomplete_meta to true if you want to forcefully recover from image data, " + "this may end with stale meta data, so please be careful."); throw new InvalidMetaDirException(); } }
// With start_with_incomplete_meta=true, an image without BDB data must NOT throw.
@Test public void testImageExistBDBNotExistWithConfig() throws IOException, InvalidMetaDirException { Config.start_with_incomplete_meta = true; Config.meta_dir = testDir + "/meta"; mkdir(Config.meta_dir + "/image"); File file = new File(Config.meta_dir + "/image/image.123"); Assert.assertTrue(file.createNewFile()); try { MetaHelper.checkMetaDir(); } finally { deleteDir(new File(testDir + "/")); } }
// Convenience overload: delegates with a false flag (the flag's meaning is
// defined by lu(boolean) — presumably "overwrite in place"; TODO confirm).
public LU lu() {
    return lu(false);
}
// LU decomposition: solve a vector system and a multi-RHS matrix system,
// comparing against precomputed solutions to 1E-7.
@Test public void testLU() { System.out.println("LU"); double[][] A = { {0.9000, 0.4000, 0.7000f}, {0.4000, 0.5000, 0.3000f}, {0.7000, 0.3000, 0.8000f} }; double[] b = {0.5, 0.5, 0.5f}; double[] x = {-0.2027027, 0.8783784, 0.4729730f}; BigMatrix a = BigMatrix.of(A); BigMatrix.LU lu = a.lu(); double[] x2 = lu.solve(b); assertEquals(x.length, x2.length); for (int i = 0; i < x.length; i++) { assertEquals(x[i], x2[i], 1E-7); } double[][] B = { {0.5, 0.2f}, {0.5, 0.8f}, {0.5, 0.3f} }; double[][] X = { {-0.2027027, -1.2837838f}, { 0.8783784, 2.2297297f}, { 0.4729730, 0.6621622f} }; BigMatrix X2 = BigMatrix.of(B); lu.solve(X2); assertEquals(X.length, X2.nrow()); assertEquals(X[0].length, X2.ncol()); for (int i = 0; i < X.length; i++) { for (int j = 0; j < X[i].length; j++) { assertEquals(X[i][j], X2.get(i, j), 1E-7); } } }
// Delegates with an empty (mutable) cluster list — i.e. unsubscribe from all clusters
// of the service. A fresh ArrayList is allocated per call.
@Override
public void unsubscribe(String serviceName, EventListener listener) throws NacosException {
    unsubscribe(serviceName, new ArrayList<>(), listener);
}
// Unsubscribe with group: listener is deregistered and the proxy unsubscribe is issued
// once the change notifier reports no remaining subscription.
@Test void testUnSubscribe2() throws NacosException { //given String serviceName = "service1"; String groupName = "group1"; EventListener listener = event -> { }; when(changeNotifier.isSubscribed(groupName, serviceName)).thenReturn(false); //when client.unsubscribe(serviceName, groupName, listener); NamingSelectorWrapper wrapper = new NamingSelectorWrapper( NamingSelectorFactory.newClusterSelector(Collections.emptyList()), listener); //then verify(changeNotifier, times(1)).deregisterListener(groupName, serviceName, wrapper); verify(proxy, times(1)).unsubscribe(serviceName, groupName, Constants.NULL); }
/**
 * Synchronously commits the given offsets, retrying with backoff until the
 * timer expires. Returns true on success, false on timeout or when the
 * coordinator cannot be readied; rethrows non-retriable commit failures.
 */
public boolean commitOffsetsSync(Map<TopicPartition, OffsetAndMetadata> offsets, Timer timer) {
    invokeCompletedOffsetCommitCallbacks();
    if (offsets.isEmpty()) {
        // We guarantee that the callbacks for all commitAsync() will be invoked when
        // commitSync() completes, even if the user tries to commit empty offsets.
        return invokePendingAsyncCommits(timer);
    }
    long attempts = 0L;
    do {
        if (coordinatorUnknownAndUnreadySync(timer)) {
            return false;
        }
        RequestFuture<Void> future = sendOffsetCommitRequest(offsets);
        client.poll(future, timer);
        // We may have had in-flight offset commits when the synchronous commit began. If so, ensure that
        // the corresponding callbacks are invoked prior to returning in order to preserve the order that
        // the offset commits were applied.
        invokeCompletedOffsetCommitCallbacks();
        if (future.succeeded()) {
            if (interceptors != null)
                interceptors.onCommit(offsets);
            return true;
        }
        // Non-retriable failures propagate to the caller immediately.
        if (future.failed() && !future.isRetriable())
            throw future.exception();
        // Retriable failure: back off (exponentially, by attempt count) and try again.
        timer.sleep(retryBackoff.backoff(attempts++));
    } while (timer.notExpired());
    return false;
}
// UNKNOWN_TOPIC_OR_PARTITION is retriable: a failed first response followed by NONE
// must still yield a successful synchronous commit.
@Test public void testRetryCommitUnknownTopicOrPartition() { client.prepareResponse(groupCoordinatorResponse(node, Errors.NONE)); coordinator.ensureCoordinatorReady(time.timer(Long.MAX_VALUE)); client.prepareResponse(offsetCommitResponse(singletonMap(t1p, Errors.UNKNOWN_TOPIC_OR_PARTITION))); client.prepareResponse(offsetCommitResponse(singletonMap(t1p, Errors.NONE))); assertTrue(coordinator.commitOffsetsSync(singletonMap(t1p, new OffsetAndMetadata(100L, "metadata")), time.timer(10000))); }
/**
 * Lists all currently connected client ids.
 * Requires admin read permission; the returned list is a detached snapshot
 * of the manager's client-id collection.
 */
@GetMapping("/list")
@Secured(action = ActionTypes.READ, resource = "nacos/admin")
public Result<List<String>> getClientList() {
    final List<String> clientIds = new ArrayList<>(clientManager.allClientId());
    return Result.success(clientIds);
}
// GET /list returns 200 and a JSON "data" array sized by the stubbed client manager.
@Test void testGetClientList() throws Exception { MockHttpServletRequestBuilder mockHttpServletRequestBuilder = MockMvcRequestBuilders.get(URL + "/list"); MockHttpServletResponse response = mockmvc.perform(mockHttpServletRequestBuilder).andReturn().getResponse(); assertEquals(200, response.getStatus()); JsonNode jsonNode = JacksonUtils.toObj(response.getContentAsString()).get("data"); assertEquals(2, jsonNode.size()); }
// Thin static facade over UserAgentParser; parses a raw User-Agent header string.
public static UserAgent parse(String userAgentString) {
    return UserAgentParser.parse(userAgentString);
}
// Regression for issue I8X5XQ: QQBrowser UA string must resolve browser, version,
// engine, OS and platform fields correctly.
@Test public void issueI8X5XQTest() { final String s = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 " + "Safari/537.36 Core/1.94.218.400 QQBrowser/12.1.5496.400"; final UserAgent ua2 = UserAgentUtil.parse(s); assertEquals("QQBrowser", ua2.getBrowser().toString()); assertEquals("12.1.5496.400", ua2.getVersion()); assertEquals("Webkit", ua2.getEngine().toString()); assertEquals("537.36", ua2.getEngineVersion()); assertEquals("Windows 10 or Windows Server 2016", ua2.getOs().toString()); assertEquals("10.0", ua2.getOsVersion()); assertEquals("Windows", ua2.getPlatform().toString()); assertFalse(ua2.isMobile()); }
/**
 * Validates a comma-separated list of DNS server addresses (optionally with ports).
 * Empty fragments are ignored; a null/empty input is considered invalid.
 *
 * @param ipAddresses e.g. {@code "8.8.4.4:53, 8.8.8.8"}
 * @return true if every entry's host part is a syntactically valid IP address
 */
public static boolean allIpAddressesValid(String ipAddresses) {
    if (Strings.isNullOrEmpty(ipAddresses)) {
        return false;
    }
    // Split on commas, trim whitespace, drop empty fragments, default the DNS port,
    // then require every host to be a valid IP (hostnames are rejected).
    return Lists.newArrayList(Splitter.on(",")
            .trimResults()
            .omitEmptyStrings()
            .split(ipAddresses)).stream()
            .map(hostAndPort -> HostAndPort.fromString(hostAndPort).withDefaultPort(DnsClient.DEFAULT_DNS_PORT))
            .allMatch(hostAndPort -> isValidIpAddress(hostAndPort.getHost()));
}
// Accepts ports, trailing commas and bare IPs; rejects malformed IPs, missing
// separators and hostnames.
@Test public void testValidCommaSeparatedIps() { assertTrue(DnsClient.allIpAddressesValid("8.8.4.4:53, 8.8.8.8")); // Custom port. assertTrue(DnsClient.allIpAddressesValid("8.8.4.4, ")); // Extra comma assertTrue(DnsClient.allIpAddressesValid("8.8.4.4")); // Pure IP assertFalse(DnsClient.allIpAddressesValid("8.8.4.4dfs:53, 8.8.4.4:59")); // Not an IP address assertFalse(DnsClient.allIpAddressesValid("8.8.4.4 8.8.8.8")); // No comma separator assertFalse(DnsClient.allIpAddressesValid("8.8.4.4, google.com")); // Hostname is not an IP address }
// Builds an immutable two-entry result map. Map.of rejects null values, so
// trimContent must map null content to a non-null string — TODO confirm against
// trimContent's implementation.
protected Map<String, String> formatResult(String url, String content) {
    return Map.of(
            "url", url,
            "content", trimContent(content)
    );
}
// Null content must be normalized to an empty string, not propagate as null.
@Test void testFormatResultWithNullContent() { String url = "http://example.com"; Map<String, String> result = rawBrowserAction.formatResult(url, null); assertEquals(url, result.get("url")); assertEquals("", result.get("content")); }
// Pairs a compiled JsonPath with the matcher applied to its evaluation result.
public WithJsonPath(JsonPath jsonPath, Matcher<T> resultMatcher) {
    this.jsonPath = jsonPath;
    this.resultMatcher = resultMatcher;
}
// Invalid path syntax must fail fast at construction, not at match time.
@Test public void shouldFailOnInvalidJsonPath() { assertThrows(InvalidPathException.class, () -> withJsonPath("$[}")); }
@VisibleForTesting boolean isDatabaseWithNameExist( DatabaseMeta databaseMeta, boolean isNew ) { for ( int i = 0; i < repositoriesMeta.nrDatabases(); i++ ) { final DatabaseMeta iterDatabase = repositoriesMeta.getDatabase( i ); if ( iterDatabase.getName().trim().equalsIgnoreCase( databaseMeta.getName().trim() ) ) { if ( isNew || databaseMeta != iterDatabase ) { // do not check the same instance return true; } } } return false; }
// Name clashes: editing an existing connection tolerates its own name; renaming
// onto another connection, or adding a new one with a taken name, is a clash.
@Test public void testIsDatabaseWithNameExist() throws Exception { final DatabaseMeta databaseMeta1 = new DatabaseMeta(); databaseMeta1.setName( "TestDB1" ); controller.addDatabase( databaseMeta1 ); final DatabaseMeta databaseMeta2 = new DatabaseMeta(); databaseMeta2.setName( "TestDB2" ); controller.addDatabase( databaseMeta2 ); when( repositoriesMeta.nrDatabases() ).thenReturn( 2 ); when( repositoriesMeta.getDatabase( 0 ) ).thenReturn( databaseMeta1 ); when( repositoriesMeta.getDatabase( 1 ) ).thenReturn( databaseMeta2 ); //existing databases assertFalse( controller.isDatabaseWithNameExist( databaseMeta1, false ) ); databaseMeta2.setName( "TestDB1" ); assertTrue( controller.isDatabaseWithNameExist( databaseMeta2, false ) ); //new databases final DatabaseMeta databaseMeta3 = new DatabaseMeta(); databaseMeta3.setName( "TestDB3" ); assertFalse( controller.isDatabaseWithNameExist( databaseMeta3, true ) ); databaseMeta3.setName( "TestDB1" ); assertTrue( controller.isDatabaseWithNameExist( databaseMeta3, true ) ); }
// Static factory entry point for the async builder; C is the per-call context type.
public static <C> AsyncBuilder<C> builder() {
    return new AsyncBuilder<>();
}
// With UNWRAP propagation, exhausting retries must surface the original cause
// (TestInterfaceException), not the wrapping RetryableException.
@Test void throwsOriginalExceptionAfterFailedRetries() throws Throwable { server.enqueue(new MockResponse().setResponseCode(503).setBody("foo 1")); server.enqueue(new MockResponse().setResponseCode(503).setBody("foo 2")); final String message = "the innerest"; TestInterfaceAsync api = AsyncFeign.builder().exceptionPropagationPolicy(UNWRAP) .retryer(new Retryer.Default(1, 1, 2)) .errorDecoder( (methodKey, response) -> new RetryableException(response.status(), "play it again sam!", HttpMethod.POST, new TestInterfaceException(message), NON_RETRYABLE, response.request())) .target(TestInterfaceAsync.class, "http://localhost:" + server.getPort()); Throwable exception = assertThrows(TestInterfaceException.class, () -> unwrap(api.post())); assertThat(exception.getMessage()).contains(message); }
// Constant operation name used for instrumentation/naming of this request type.
@Override
public String operation() {
    return "receive";
}
// The operation name is the fixed string "receive".
@Test void operation() { assertThat(request.operation()).isEqualTo("receive"); }
/**
 * Handles a Py4J constructor command.
 * Wire format: the first line is the fully-qualified class name, followed by
 * the encoded argument list; the encoded return object is written back and flushed.
 *
 * @throws Py4JException on protocol/invocation errors
 * @throws IOException   on stream errors
 */
@Override
public void execute(String commandName, BufferedReader reader, BufferedWriter writer)
        throws Py4JException, IOException {
    final String className = reader.readLine();
    final List<Object> constructorArgs = getArguments(reader);
    final ReturnObject result = invokeConstructor(className, constructorArgs);
    final String outputCommand = Protocol.getOutputCommand(result);
    logger.finest("Returning command: " + outputCommand);
    writer.write(outputCommand);
    writer.flush();
}
// One int argument constructor call must yield a reference-object reply ("!yro0").
@Test public void testConstructor1Arg() { String inputCommand = "py4j.examples.ExampleClass\ni5\ne\n"; try { command.execute("i", new BufferedReader(new StringReader(inputCommand)), writer); assertEquals("!yro0\n", sWriter.toString()); } catch (Exception e) { e.printStackTrace(); fail(); } }
/**
 * Builds Kafka consumer properties from generic options plus key/value
 * serde settings derived from the given schemas.
 */
static Properties resolveConsumerProperties(Map<String, String> options, Object keySchema, Object valueSchema) {
    final Properties consumerProperties = from(options);
    // Layer on deserializer config: key side first, then value side.
    withSerdeConsumerProperties(true, options, keySchema, consumerProperties);
    withSerdeConsumerProperties(false, options, valueSchema, consumerProperties);
    return consumerProperties;
}
// Avro format must select the KafkaAvroDeserializer for the relevant side and
// pass the schema.registry.url through untouched.
@Test public void test_consumerProperties_avro_schemaRegistry() { // key assertThat(resolveConsumerProperties(Map.of( OPTION_KEY_FORMAT, AVRO_FORMAT, "schema.registry.url", "http://localhost:8081" ))).containsExactlyInAnyOrderEntriesOf(Map.of( KEY_DESERIALIZER, KafkaAvroDeserializer.class.getCanonicalName(), "schema.registry.url", "http://localhost:8081" )); // value assertThat(resolveConsumerProperties(Map.of( OPTION_KEY_FORMAT, UNKNOWN_FORMAT, OPTION_VALUE_FORMAT, AVRO_FORMAT, "schema.registry.url", "http://localhost:8081" ))).containsExactlyInAnyOrderEntriesOf(Map.of( VALUE_DESERIALIZER, KafkaAvroDeserializer.class.getCanonicalName(), "schema.registry.url", "http://localhost:8081" )); }
// Private constructor (instances come from a factory/singleton accessor);
// pins the inspector to Hive's timestamp type info.
private IcebergTimestampObjectInspector() {
    super(TypeInfoFactory.timestampTypeInfo);
}
// Exercises the full ObjectInspector contract: category/type metadata, null handling,
// LocalDateTime->Timestamp conversion in both directions, and defensive copyObject.
@Test public void testIcebergTimestampObjectInspector() { IcebergTimestampObjectInspector oi = IcebergTimestampObjectInspector.get(); assertThat(oi.getCategory()).isEqualTo(ObjectInspector.Category.PRIMITIVE); assertThat(oi.getPrimitiveCategory()) .isEqualTo(PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP); assertThat(oi.getTypeInfo()).isEqualTo(TypeInfoFactory.timestampTypeInfo); assertThat(oi.getTypeName()).isEqualTo(TypeInfoFactory.timestampTypeInfo.getTypeName()); assertThat(oi.getJavaPrimitiveClass()).isEqualTo(Timestamp.class); assertThat(oi.getPrimitiveWritableClass()).isEqualTo(TimestampWritable.class); assertThat(oi.copyObject(null)).isNull(); assertThat(oi.getPrimitiveJavaObject(null)).isNull(); assertThat(oi.getPrimitiveWritableObject(null)).isNull(); assertThat(oi.convert(null)).isNull(); LocalDateTime local = LocalDateTime.of(2020, 1, 1, 12, 55, 30, 5560000); Timestamp ts = Timestamp.valueOf(local); assertThat(oi.getPrimitiveJavaObject(local)).isEqualTo(ts); assertThat(oi.getPrimitiveWritableObject(local)).isEqualTo(new TimestampWritable(ts)); Timestamp copy = (Timestamp) oi.copyObject(ts); assertThat(copy).isEqualTo(ts); assertThat(copy).isNotSameAs(ts); assertThat(oi.preferWritable()).isFalse(); assertThat(oi.convert(ts)).isEqualTo(local); }
/**
 * Resolves the client configuration for an issuer: statically registered
 * clients take precedence; the dynamic service is consulted only as a fallback.
 */
@Override
public RegisteredClient getClientConfiguration(ServerConfiguration issuer) {
    final RegisteredClient staticClient = staticClientService.getClientConfiguration(issuer);
    // Dynamic lookup is evaluated lazily — only when no static entry exists.
    return staticClient != null
            ? staticClient
            : dynamicClientService.getClientConfiguration(issuer);
}
// When the static service yields null, the dynamic service's client must be returned.
@Test public void getClientConfiguration_useDynamic() { Mockito.when(mockStaticService.getClientConfiguration(mockServerConfig)).thenReturn(null); Mockito.when(mockDynamicService.getClientConfiguration(mockServerConfig)).thenReturn(mockClient); RegisteredClient result = hybridService.getClientConfiguration(mockServerConfig); Mockito.verify(mockStaticService).getClientConfiguration(mockServerConfig); Mockito.verify(mockDynamicService).getClientConfiguration(mockServerConfig); assertEquals(mockClient, result); }
/**
 * Checks the optional-flag rules for every module's bundled dependencies.
 *
 * @param bundledDependenciesByModule dependencies bundled by the shade plugin, per module
 * @param dependenciesByModule        full dependency tree, per module
 * @return per-module violations; modules without violations are omitted
 * @throws IllegalStateException if a shaded module has no dependency tree entry
 */
private static Map<String, Set<Dependency>> checkOptionalFlags(
        Map<String, Set<Dependency>> bundledDependenciesByModule,
        Map<String, DependencyTree> dependenciesByModule) {
    final Map<String, Set<Dependency>> allViolations = new HashMap<>();
    // Iterate entries instead of keySet()+get() to avoid a redundant map lookup per module.
    for (Map.Entry<String, Set<Dependency>> entry : bundledDependenciesByModule.entrySet()) {
        final String module = entry.getKey();
        LOG.debug("Checking module '{}'.", module);
        if (!dependenciesByModule.containsKey(module)) {
            throw new IllegalStateException(
                    String.format(
                            "Module %s listed by shade-plugin, but not dependency-plugin.",
                            module));
        }
        final Collection<Dependency> bundledDependencies = entry.getValue();
        final DependencyTree dependencyTree = dependenciesByModule.get(module);
        final Set<Dependency> violations =
                checkOptionalFlags(module, bundledDependencies, dependencyTree);
        if (violations.isEmpty()) {
            LOG.info("OK: {}", module);
        } else {
            allViolations.put(module, violations);
        }
    }
    return allViolations;
}
// A bundled transitive dependency that is marked optional must produce no violation.
@Test void testTransitiveBundledOptionalDependencyIsAccepted() { final Dependency dependencyA = createMandatoryDependency("a"); final Dependency dependencyB = createOptionalDependency("b"); final Set<Dependency> bundled = Collections.singleton(dependencyB); final DependencyTree dependencyTree = new DependencyTree() .addDirectDependency(dependencyA) .addTransitiveDependencyTo(dependencyB, dependencyA); final Set<Dependency> violations = ShadeOptionalChecker.checkOptionalFlags(MODULE, bundled, dependencyTree); assertThat(violations).isEmpty(); }
// Static factory for a fresh, empty builder instance.
public static Builder newBuilder() {
    return new Builder();
}
// Omitting createdAt in the builder must default it to Value.COMMIT_TIMESTAMP.
@Test public void testBuilderDefaultsToCommitTimestampWhenCreatedAtIsNotGiven() { PartitionMetadata expectedPartitionMetadata = new PartitionMetadata( PARTITION_TOKEN, Sets.newHashSet(PARENT_TOKEN), START_TIMESTAMP, END_TIMESTAMP, 10, State.RUNNING, WATERMARK, Value.COMMIT_TIMESTAMP, SCHEDULED_AT, RUNNING_AT, FINISHED_AT); PartitionMetadata actualPartitionMetadata = PartitionMetadata.newBuilder() .setPartitionToken(PARTITION_TOKEN) .setParentTokens(Sets.newHashSet(PARENT_TOKEN)) .setStartTimestamp(START_TIMESTAMP) .setEndTimestamp(END_TIMESTAMP) .setHeartbeatMillis(10) .setState(State.RUNNING) .setWatermark(WATERMARK) .setScheduledAt(SCHEDULED_AT) .setRunningAt(RUNNING_AT) .setFinishedAt(FINISHED_AT) .build(); assertEquals(expectedPartitionMetadata, actualPartitionMetadata); }
/**
 * Copies variable values from {@code replaceBy} into {@code childTransMeta},
 * but only for variables the child already defines and that are not internal
 * (internal-ness decided by {@code isInternalVariable} for the given type).
 * A null {@code replaceBy} is a no-op.
 */
public static void replaceVariableValues( VariableSpace childTransMeta, VariableSpace replaceBy, String type ) {
  if ( replaceBy == null ) {
    return;
  }
  for ( String name : replaceBy.listVariables() ) {
    final boolean definedInChild = childTransMeta.getVariable( name ) != null;
    // Never clobber internal variables; only overwrite what the child already has.
    if ( definedInChild && !isInternalVariable( name, type ) ) {
      childTransMeta.setVariable( name, replaceBy.getVariable( name ) );
    }
  }
}
// Internal job variables must not be overwritten by the parent; ordinary
// variables are replaced; child-only variables are preserved.
@Test public void replaceVariablesWithJobInternalVariablesTest() { String variableOverwrite = "paramOverwrite"; String variableChildOnly = "childValueVariable"; String [] jobVariables = Const.INTERNAL_JOB_VARIABLES; VariableSpace ChildVariables = new Variables(); VariableSpace replaceByParentVariables = new Variables(); for ( String internalVariable : jobVariables ) { ChildVariables.setVariable( internalVariable, "childValue" ); replaceByParentVariables.setVariable( internalVariable, "parentValue" ); } ChildVariables.setVariable( variableChildOnly, "childValueVariable" ); ChildVariables.setVariable( variableOverwrite, "childNotInternalValue" ); replaceByParentVariables.setVariable( variableOverwrite, "parentNotInternalValue" ); StepWithMappingMeta.replaceVariableValues( ChildVariables, replaceByParentVariables ); // do not replace internal variables Assert.assertEquals( "childValue", ChildVariables.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY ) ); // replace non internal variables Assert.assertEquals( "parentNotInternalValue", ChildVariables.getVariable( variableOverwrite ) ); // keep child only variables Assert.assertEquals( variableChildOnly, ChildVariables.getVariable( variableChildOnly ) ); }
public static int byteSize(HyperLogLog value) { // 8 bytes header (log2m & register set size) & register set data return value.sizeof() + 2 * Integer.BYTES; }
// For every log2m, the computed byte size must match the actual serialized length.
@Test public void testByteSize() throws IOException { for (int log2m = 0; log2m < 16; log2m++) { HyperLogLog hll = new HyperLogLog(log2m); int expectedByteSize = hll.getBytes().length; assertEquals(HyperLogLogUtils.byteSize(log2m), expectedByteSize); assertEquals(HyperLogLogUtils.byteSize(hll), expectedByteSize); } }
/**
 * Factory that builds the concrete MQTT message subtype for the fixed header's
 * message type, downcasting the variable header / payload to the type each
 * subtype expects (a wrong pairing fails with ClassCastException at the cast).
 */
public static MqttMessage newMessage(MqttFixedHeader mqttFixedHeader, Object variableHeader, Object payload) {
    switch (mqttFixedHeader.messageType()) {
        case CONNECT :
            return new MqttConnectMessage(
                    mqttFixedHeader,
                    (MqttConnectVariableHeader) variableHeader,
                    (MqttConnectPayload) payload);
        case CONNACK:
            return new MqttConnAckMessage(mqttFixedHeader, (MqttConnAckVariableHeader) variableHeader);
        case SUBSCRIBE:
            return new MqttSubscribeMessage(
                    mqttFixedHeader,
                    (MqttMessageIdVariableHeader) variableHeader,
                    (MqttSubscribePayload) payload);
        case SUBACK:
            return new MqttSubAckMessage(
                    mqttFixedHeader,
                    (MqttMessageIdVariableHeader) variableHeader,
                    (MqttSubAckPayload) payload);
        case UNSUBACK:
            return new MqttUnsubAckMessage(
                    mqttFixedHeader,
                    (MqttMessageIdVariableHeader) variableHeader,
                    (MqttUnsubAckPayload) payload);
        case UNSUBSCRIBE:
            return new MqttUnsubscribeMessage(
                    mqttFixedHeader,
                    (MqttMessageIdVariableHeader) variableHeader,
                    (MqttUnsubscribePayload) payload);
        case PUBLISH:
            return new MqttPublishMessage(
                    mqttFixedHeader,
                    (MqttPublishVariableHeader) variableHeader,
                    (ByteBuf) payload);
        case PUBACK:
            //Having MqttPubReplyMessageVariableHeader or MqttMessageIdVariableHeader
            return new MqttPubAckMessage(mqttFixedHeader, (MqttMessageIdVariableHeader) variableHeader);
        case PUBREC:
        case PUBREL:
        case PUBCOMP:
            //Having MqttPubReplyMessageVariableHeader or MqttMessageIdVariableHeader
            return new MqttMessage(mqttFixedHeader, variableHeader);
        case PINGREQ:
        case PINGRESP:
            // Ping messages carry neither variable header nor payload.
            return new MqttMessage(mqttFixedHeader);
        case DISCONNECT:
        case AUTH:
            //Having MqttReasonCodeAndPropertiesVariableHeader
            return new MqttMessage(mqttFixedHeader, variableHeader);
        default:
            throw new IllegalArgumentException("unknown message type: " + mqttFixedHeader.messageType());
    }
}
// MQTT5 UNSUBACK must preserve message id, properties (reason string) and
// the unsubscribe reason codes in the payload.
@Test public void createUnsubAckV5() { MqttFixedHeader fixedHeader = new MqttFixedHeader(MqttMessageType.UNSUBACK, false, MqttQoS.AT_MOST_ONCE, false, 0); MqttProperties properties = new MqttProperties(); String reasonString = "All right"; properties.add(new MqttProperties.StringProperty( MqttProperties.MqttPropertyType.REASON_STRING.value(), reasonString)); MqttMessageIdAndPropertiesVariableHeader variableHeader = new MqttMessageIdAndPropertiesVariableHeader(SAMPLE_MESSAGE_ID, properties); MqttUnsubAckPayload payload = new MqttUnsubAckPayload((short) 0x80 /*unspecified error*/); MqttMessage unsuback = MqttMessageFactory.newMessage(fixedHeader, variableHeader, payload); assertEquals(MqttMessageType.UNSUBACK, unsuback.fixedHeader().messageType()); MqttMessageIdAndPropertiesVariableHeader actualVariableHeader = (MqttMessageIdAndPropertiesVariableHeader) unsuback.variableHeader(); assertEquals(SAMPLE_MESSAGE_ID, actualVariableHeader.messageId()); validateProperties(properties, actualVariableHeader.properties()); MqttUnsubAckPayload actualPayload = (MqttUnsubAckPayload) unsuback.payload(); assertEquals(payload.unsubscribeReasonCodes(), actualPayload.unsubscribeReasonCodes()); }
// Applies the base class configuration first, then this sampler's own settings.
@Override
public void configure(Map<String, ?> configs) {
    super.configure(configs);
    configureSamplingInterval(configs);
    configurePrometheusAdapter(configs);
    configureQueryMap(configs);
}
// A non-numeric sampling-interval value must be rejected with ConfigException.
@Test(expected = ConfigException.class) public void testGetSamplesWithCustomMalformedSamplingInterval() throws Exception { Map<String, Object> config = new HashMap<>(); config.put(PROMETHEUS_SERVER_ENDPOINT_CONFIG, "http://kafka-cluster-1.org:9090"); config.put(PROMETHEUS_QUERY_RESOLUTION_STEP_MS_CONFIG, "non-number"); addCapacityConfig(config); _prometheusMetricSampler.configure(config); }
/**
 * Populates {@code localResources} with YARN LocalResource entries for the
 * job's distributed-cache archives and files, reusing one builder for both
 * passes (only the type and source metadata change between passes).
 */
@SuppressWarnings("deprecation")
public static void setupDistributedCache(Configuration conf, Map<String, LocalResource> localResources)
        throws IOException {
    LocalResourceBuilder lrb = new LocalResourceBuilder();
    lrb.setConf(conf);
    // Cache archives
    lrb.setType(LocalResourceType.ARCHIVE);
    lrb.setUris(JobContextImpl.getCacheArchives(conf));
    lrb.setTimestamps(JobContextImpl.getArchiveTimestamps(conf));
    lrb.setSizes(getFileSizes(conf, MRJobConfig.CACHE_ARCHIVES_SIZES));
    lrb.setVisibilities(DistributedCache.getArchiveVisibilities(conf));
    lrb.setSharedCacheUploadPolicies(
        Job.getArchiveSharedCacheUploadPolicies(conf));
    lrb.createLocalResources(localResources);
    // Cache files
    lrb.setType(LocalResourceType.FILE);
    lrb.setUris(JobContextImpl.getCacheFiles(conf));
    lrb.setTimestamps(JobContextImpl.getFileTimestamps(conf));
    lrb.setSizes(getFileSizes(conf, MRJobConfig.CACHE_FILES_SIZES));
    lrb.setVisibilities(DistributedCache.getFileVisibilities(conf));
    lrb.setSharedCacheUploadPolicies(
        Job.getFileSharedCacheUploadPolicies(conf));
    lrb.createLocalResources(localResources);
}
// When an archive and a file resolve to the same symlink name, the archive
// entry must win in the resulting local-resource map.
@SuppressWarnings("deprecation") @Test @Timeout(120000) public void testSetupDistributedCacheConflicts() throws Exception { Configuration conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); URI mockUri = URI.create("mockfs://mock/"); FileSystem mockFs = ((FilterFileSystem) FileSystem.get(mockUri, conf)) .getRawFileSystem(); URI archive = new URI("mockfs://mock/tmp/something.zip#something"); Path archivePath = new Path(archive); URI file = new URI("mockfs://mock/tmp/something.txt#something"); Path filePath = new Path(file); when(mockFs.resolvePath(archivePath)).thenReturn(archivePath); when(mockFs.resolvePath(filePath)).thenReturn(filePath); Job.addCacheArchive(archive, conf); conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10"); conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10"); conf.set(MRJobConfig.CACHE_ARCHIVES_VISIBILITIES, "true"); Job.addCacheFile(file, conf); conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11"); conf.set(MRJobConfig.CACHE_FILES_SIZES, "11"); conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true"); Map<String, LocalResource> localResources = new HashMap<String, LocalResource>(); MRApps.setupDistributedCache(conf, localResources); assertEquals(1, localResources.size()); LocalResource lr = localResources.get("something"); //Archive wins assertNotNull(lr); assertEquals(10l, lr.getSize()); assertEquals(10l, lr.getTimestamp()); assertEquals(LocalResourceType.ARCHIVE, lr.getType()); }
@Override public void onPartitionsAssigned(final Collection<TopicPartition> partitions) { // NB: all task management is already handled by: // org.apache.kafka.streams.processor.internals.StreamsPartitionAssignor.onAssignment if (assignmentErrorCode.get() == AssignorError.INCOMPLETE_SOURCE_TOPIC_METADATA.code()) { log.error("Received error code {}. {}", AssignorError.INCOMPLETE_SOURCE_TOPIC_METADATA.codeName(), AssignorError.INCOMPLETE_SOURCE_TOPIC_METADATA.description()); taskManager.handleRebalanceComplete(); throw new MissingSourceTopicException("One or more source topics were missing during rebalance"); } else if (assignmentErrorCode.get() == AssignorError.VERSION_PROBING.code()) { log.info("Received version probing code {}", AssignorError.VERSION_PROBING); } else if (assignmentErrorCode.get() == AssignorError.ASSIGNMENT_ERROR.code()) { log.error("Received error code {}", AssignorError.ASSIGNMENT_ERROR); taskManager.handleRebalanceComplete(); throw new TaskAssignmentException("Hit an unexpected exception during task assignment phase of rebalance"); } else if (assignmentErrorCode.get() == AssignorError.SHUTDOWN_REQUESTED.code()) { log.error("A Kafka Streams client in this Kafka Streams application is requesting to shutdown the application"); taskManager.handleRebalanceComplete(); streamThread.shutdownToError(); return; } else if (assignmentErrorCode.get() != AssignorError.NONE.code()) { log.error("Received unknown error code {}", assignmentErrorCode.get()); throw new TaskAssignmentException("Hit an unrecognized exception during rebalance"); } streamThread.setState(State.PARTITIONS_ASSIGNED); streamThread.setPartitionAssignedTime(time.milliseconds()); taskManager.handleRebalanceComplete(); }
@Test
public void shouldHandleAssignedPartitions() {
    // With no assignor error recorded, the listener must simply transition the
    // thread to PARTITIONS_ASSIGNED, stamp the assignment time, and complete
    // the rebalance on the task manager.
    assignmentErrorCode.set(AssignorError.NONE.code());
    streamsRebalanceListener.onPartitionsAssigned(Collections.emptyList());
    verify(streamThread).setState(State.PARTITIONS_ASSIGNED);
    verify(streamThread).setPartitionAssignedTime(time.milliseconds());
    verify(taskManager).handleRebalanceComplete();
}
/**
 * Returns a connection for the given logical database, delegating to the
 * driver-state implementation registered in {@code STATES} for the compute
 * node instance's current state (e.g. CIRCUIT_BREAK maps to an implementation
 * that hands out circuit-breaker connections).
 */
public static Connection getConnection(final String databaseName, final ContextManager contextManager) {
    return STATES.get(contextManager.getComputeNodeInstanceContext().getInstance().getState().getCurrentState()).getConnection(databaseName, contextManager);
}
@Test
void assertGetConnectionWithCircuitBreakState() {
    // A node in CIRCUIT_BREAK state must hand out a CircuitBreakerConnection
    // rather than a real connection.
    ContextManager contextManager = mockContextManager(InstanceState.CIRCUIT_BREAK);
    assertThat(DriverStateContext.getConnection(DefaultDatabase.LOGIC_NAME, contextManager), instanceOf(CircuitBreakerConnection.class));
}
/**
 * Determines the charset to decode an HTML response with.
 * Detection order: Content-Type header / page bytes first, then the site's
 * configured default charset, finally the JVM default charset name.
 */
private String getHtmlCharset(String contentType, byte[] contentBytes, Task task) throws IOException {
    final String detected = CharsetUtils.detectCharset(contentType, contentBytes);
    if (detected != null) {
        return detected;
    }
    final String siteDefault = task.getSite().getDefaultCharset();
    return siteDefault != null ? siteDefault : Charset.defaultCharset().name();
}
/**
 * Charset detection against a local Moco stub server: the charset must be
 * found whether it is declared in the Content-Type header, a
 * {@code <meta charset>} tag, or a {@code <meta http-equiv>} tag.
 */
@Test
public void testGetHtmlCharset() throws Exception {
    HttpServer server = httpServer(13423);
    // charset only in the HTTP header
    server.get(by(uri("/header"))).response(header("Content-Type", "text/html; charset=gbk"));
    // charset in both header and <meta charset>
    server.get(by(uri("/meta4"))).response(with(text("<html>\n" + "  <head>\n" + "    <meta charset='gbk'/>\n" + "  </head>\n" + "  <body></body>\n" + "</html>")), header("Content-Type", "text/html; charset=gbk"));
    // charset only in <meta http-equiv>, header has no charset
    server.get(by(uri("/meta5"))).response(with(text("<html>\n" + "  <head>\n" + "    <meta http-equiv=\"Content-Type\" content=\"text/html; charset=gbk\" />\n" + "  </head>\n" + "  <body></body>\n" + "</html>")), header("Content-Type", "text/html"));
    Runner.running(server, new Runnable() {
        @Override
        public void run() {
            String charset = getCharsetByUrl("http://127.0.0.1:13423/header");
            assertEquals(charset, "gbk");
            charset = getCharsetByUrl("http://127.0.0.1:13423/meta4");
            assertEquals(charset, "gbk");
            charset = getCharsetByUrl("http://127.0.0.1:13423/meta5");
            assertEquals(charset, "gbk");
        }

        // Fetches the URL and runs charset detection on the response entity.
        private String getCharsetByUrl(String url) {
            HttpClientDownloader downloader = new HttpClientDownloader();
            Site site = Site.me();
            CloseableHttpClient httpClient = new HttpClientGenerator().getClient(site);
            // encoding in http header Content-Type
            Request requestGBK = new Request(url);
            CloseableHttpResponse httpResponse = null;
            try {
                httpResponse = httpClient.execute(new HttpUriRequestConverter().convert(requestGBK, site, null).getHttpUriRequest());
            } catch (IOException e) {
                e.printStackTrace();
            }
            String charset = null;
            try {
                byte[] contentBytes = IOUtils.toByteArray(httpResponse.getEntity().getContent());
                charset = CharsetUtils.detectCharset(httpResponse.getEntity().getContentType().getValue(), contentBytes);
            } catch (IOException e) {
                e.printStackTrace();
            }
            return charset;
        }
    });
}
/**
 * Parses a DICOM DA (date) string in the given time zone.
 * Delegates to the three-argument overload with the trailing flag set to
 * {@code false} (presumably the "ceil" flag — see the
 * {@code parseDA(TimeZone, String, boolean)} overload used in tests).
 */
public static Date parseDA(TimeZone tz, String s) {
    return parseDA(tz, s, false);
}
@Test
public void testParseDAceil() {
    // With ceil=true the parsed date is pushed to the end of the day; the
    // expected value folds in the fixture time zone's offset (see DAY/HOUR
    // constants and the tz field set up elsewhere in this test class).
    assertEquals(DAY - 2 * HOUR - 1, DateUtils.parseDA(tz, "19700101", true).getTime());
}
/**
 * Returns whether the given URL matches this pattern.
 * Textually identical URLs match immediately; otherwise the URLs must have
 * the same number of path parts and every corresponding pair must match.
 */
public boolean matches(String matchUrl) {
    // Fast path: exact string equality always matches.
    if (url.equals(matchUrl)) {
        return true;
    }
    final Iterator<UrlPathPart> candidateParts = new MatchUrl(matchUrl).pathParts.iterator();
    final Iterator<UrlPathPart> patternParts = pathParts.iterator();
    while (candidateParts.hasNext() && patternParts.hasNext()) {
        final boolean partMatches = candidateParts.next().matches(patternParts.next());
        if (!partMatches) {
            return false;
        }
    }
    // Both iterators must be exhausted: a length mismatch is a non-match.
    return !candidateParts.hasNext() && !patternParts.hasNext();
}
@Test
void testNoMatch() {
    // Two completely different single-segment paths must not match.
    final MatchUrl apiUrl = new MatchUrl("/api");
    final boolean matches = apiUrl.matches("/dashboard");
    assertThat(matches).isFalse();
}
/**
 * Validates the GitHub app settings currently stored in {@code gitHubSettings}.
 * Bails out early with a failed application status when the app id is missing
 * or invalid; otherwise assembles a {@code GithubAppConfiguration} from the
 * stored private key and API URL and runs the full configuration check.
 */
public ConfigCheckResult checkConfig() {
    Optional<Long> appId = getAppId();
    if (appId.isEmpty()) {
        return failedApplicationStatus(INVALID_APP_ID_STATUS);
    }
    GithubAppConfiguration githubAppConfiguration = new GithubAppConfiguration(appId.get(), gitHubSettings.privateKey(), gitHubSettings.apiURLOrDefault());
    return checkConfig(githubAppConfiguration);
}
@Test
public void checkConfig_whenInstallationSuspended_shouldReturnFailedInstallationAutoProvisioningCheck() {
    // App-level configuration is valid, but the organization's installation is
    // suspended: the app check must succeed while auto-provisioning for that
    // installation is reported as failed with SUSPENDED_INSTALLATION.
    mockGithubConfiguration();
    ArgumentCaptor<GithubAppConfiguration> appConfigurationCaptor = ArgumentCaptor.forClass(GithubAppConfiguration.class);
    mockGithubAppWithValidConfig(appConfigurationCaptor);
    mockSuspendedOrganizations("org1");
    ConfigCheckResult checkResult = configValidator.checkConfig();
    assertSuccessfulAppConfig(checkResult);
    assertThat(checkResult.installations())
        .extracting(InstallationStatus::organization, InstallationStatus::autoProvisioning)
        .containsExactly(tuple("org1", ConfigStatus.failed(SUSPENDED_INSTALLATION)));
    // The app configuration passed to the GitHub client must be the one built
    // from the mocked settings.
    verify(githubClient).getWhitelistedGithubAppInstallations(appConfigurationCaptor.capture());
    verifyAppConfiguration(appConfigurationCaptor.getValue());
}
/**
 * This realm only handles bearer-token authentication.
 * {@code Class.isInstance} returns false for null, matching the semantics of
 * the {@code instanceof} operator.
 */
@Override
public boolean supports(final AuthenticationToken token) {
    return BearerToken.class.isInstance(token);
}
@Test
public void testSupports() {
    // Any BearerToken instance must be accepted by the realm.
    final BearerToken bearerToken = mock(BearerToken.class);
    assertTrue(shiroRealm.supports(bearerToken));
}
/**
 * Builds the final parameter map for a step by merging sources in increasing
 * precedence: system default step params, params injected by the step runtime
 * (template schema), step-type defaults, injected workflow/step info,
 * restart/run params that are NOT defined on the step, the step definition's
 * own params, explicit step run params, user-provided restart params, and
 * finally system-injected CONSTANT params re-tagged as RESTART. The merge
 * order is the contract — do not reorder these stages. Missing params are
 * cleaned up before conversion to {@code Parameter} instances.
 */
public Map<String, Parameter> generateMergedStepParams(
    WorkflowSummary workflowSummary,
    Step stepDefinition,
    StepRuntime stepRuntime,
    StepRuntimeSummary runtimeSummary) {
  Map<String, ParamDefinition> allParamDefs = new LinkedHashMap<>();
  // Start with default step level params if present
  Map<String, ParamDefinition> globalDefault = defaultParamManager.getDefaultStepParams();
  if (globalDefault != null) {
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        globalDefault,
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_DEFAULT));
  }

  // Merge in injected params returned by step if present (template schema)
  Map<String, ParamDefinition> injectedParams =
      stepRuntime.injectRuntimeParams(workflowSummary, stepDefinition);
  maybeOverrideParamType(allParamDefs);
  if (injectedParams != null) {
    maybeOverrideParamType(injectedParams);
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        injectedParams,
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.TEMPLATE_SCHEMA));
  }

  // Merge in params applicable to step type
  Optional<Map<String, ParamDefinition>> defaultStepTypeParams =
      defaultParamManager.getDefaultParamsForType(stepDefinition.getType());
  if (defaultStepTypeParams.isPresent()) {
    LOG.debug("Merging step level default for {}", stepDefinition.getType());
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        defaultStepTypeParams.get(),
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_DEFAULT));
  }

  // Merge in workflow and step info
  ParamsMergeHelper.mergeParams(
      allParamDefs,
      injectWorkflowAndStepInfoParams(workflowSummary, runtimeSummary),
      ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_INJECTED));

  // merge step run param and user provided restart step run params
  // first to get undefined params from both run param and restart params
  Map<String, ParamDefinition> undefinedRestartParams = new LinkedHashMap<>();
  Optional<Map<String, ParamDefinition>> stepRestartParams =
      getUserStepRestartParam(workflowSummary, runtimeSummary);
  stepRestartParams.ifPresent(undefinedRestartParams::putAll);
  Optional<Map<String, ParamDefinition>> stepRunParams =
      getStepRunParams(workflowSummary, runtimeSummary);

  // During a restart, reserved params that were system-injected as CONSTANT are
  // pulled aside, re-tagged with source RESTART, and re-applied last so they
  // cannot be overridden by the definition merge below.
  Map<String, ParamDefinition> systemInjectedRestartRunParams = new LinkedHashMap<>();
  stepRunParams.ifPresent(
      params -> {
        params.forEach(
            (key, val) -> {
              if (runtimeSummary.getRestartConfig() != null
                  && Constants.RESERVED_PARAM_NAMES.contains(key)
                  && val.getMode() == ParamMode.CONSTANT
                  && val.getSource() == ParamSource.SYSTEM_INJECTED) {
                ((AbstractParamDefinition) val)
                    .getMeta()
                    .put(Constants.METADATA_SOURCE_KEY, ParamSource.RESTART.name());
                systemInjectedRestartRunParams.put(key, val);
              }
            });
        systemInjectedRestartRunParams.keySet().forEach(params::remove);
      });
  stepRunParams.ifPresent(undefinedRestartParams::putAll);

  // Params already declared on the step definition are not "undefined".
  Optional.ofNullable(stepDefinition.getParams())
      .ifPresent(
          stepDefParams ->
              stepDefParams.keySet().stream()
                  .filter(undefinedRestartParams::containsKey)
                  .forEach(undefinedRestartParams::remove));

  // Then merge undefined restart params
  if (!undefinedRestartParams.isEmpty()) {
    mergeUserProvidedStepParams(allParamDefs, undefinedRestartParams, workflowSummary);
  }

  // Final merge from step definition
  if (stepDefinition.getParams() != null) {
    maybeOverrideParamType(stepDefinition.getParams());
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        stepDefinition.getParams(),
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.DEFINITION));
  }

  // merge step run params
  stepRunParams.ifPresent(
      stepParams -> mergeUserProvidedStepParams(allParamDefs, stepParams, workflowSummary));

  // merge all user provided restart step run params
  stepRestartParams.ifPresent(
      stepParams -> mergeUserProvidedStepParams(allParamDefs, stepParams, workflowSummary));

  // merge all system injected restart step run params with mode and source already set.
  allParamDefs.putAll(systemInjectedRestartRunParams);

  // Cleanup any params that are missing and convert to params
  return ParamsMergeHelper.convertToParameters(ParamsMergeHelper.cleanupParams(allParamDefs));
}
@Test
public void testRestartWithSystemInjectedConstantStepParam() {
    // A reserved, system-injected CONSTANT param provided via step run params
    // during a restart must survive the merge with its value and CONSTANT mode
    // intact, but its source re-tagged as RESTART.
    ParamDefinition param =
        StringParamDefinition.builder()
            .name("AUTHORIZED_MANAGERS")
            .value("test-auth-manager")
            .addMetaField(Constants.METADATA_SOURCE_KEY, ParamSource.SYSTEM_INJECTED.name())
            .mode(ParamMode.CONSTANT)
            .build();
    Map<String, Map<String, ParamDefinition>> stepRunParams =
        singletonMap(
            "stepid",
            twoItemMap(
                "p1", ParamDefinition.buildParamDefinition("p1", "d1"), param.getName(), param));
    workflowSummary.setStepRunParams(stepRunParams);
    runtimeSummary.setRestartConfig(RestartConfig.builder().build());
    Map<String, Parameter> stepParams =
        paramsManager.generateMergedStepParams(workflowSummary, step, stepRuntime, runtimeSummary);
    Assert.assertFalse(stepParams.isEmpty());
    // Ordinary run param merged normally.
    Assert.assertEquals("d1", stepParams.get("p1").asStringParam().getValue());
    Assert.assertEquals(
        "test-auth-manager", stepParams.get("AUTHORIZED_MANAGERS").asStringParam().getValue());
    Assert.assertEquals(ParamMode.CONSTANT, stepParams.get("AUTHORIZED_MANAGERS").getMode());
    Assert.assertEquals(ParamSource.RESTART, stepParams.get("AUTHORIZED_MANAGERS").getSource());
}
/**
 * Extends the base source-connector config validation with exactly-once
 * specific checks: exactly-once support and transaction boundary settings are
 * validated on top of the standard result.
 */
@Override
protected Map<String, ConfigValue> validateSourceConnectorConfig(SourceConnector connector, ConfigDef configDef, Map<String, String> config) {
    Map<String, ConfigValue> result = super.validateSourceConnectorConfig(connector, configDef, config);
    validateSourceConnectorExactlyOnceSupport(config, result, connector);
    validateSourceConnectorTransactionBoundary(config, result, connector);
    return result;
}
@Test
public void testExactlyOnceSourceSupportValidation() {
    // When exactly-once support is REQUIRED and the connector reports
    // SUPPORTED, validation must produce no errors for that config key.
    herder = exactlyOnceHerder();
    Map<String, String> config = new HashMap<>();
    config.put(SourceConnectorConfig.EXACTLY_ONCE_SUPPORT_CONFIG, REQUIRED.toString());
    SourceConnector connectorMock = mock(SourceConnector.class);
    when(connectorMock.exactlyOnceSupport(eq(config))).thenReturn(ExactlyOnceSupport.SUPPORTED);
    Map<String, ConfigValue> validatedConfigs = herder.validateSourceConnectorConfig(connectorMock, SourceConnectorConfig.configDef(), config);
    List<String> errors = validatedConfigs.get(SourceConnectorConfig.EXACTLY_ONCE_SUPPORT_CONFIG).errorMessages();
    assertEquals(Collections.emptyList(), errors);
}
/**
 * Decodes a hex-encoded string into the bytes it represents.
 * Each pair of hex digits becomes one byte (high nibble first).
 *
 * @param hexEncodedBinary hex string; length must be even and every character
 *                         a valid hex digit
 * @return the decoded bytes (empty array for an empty input)
 * @throws IllegalArgumentException on odd length or a non-hex character
 */
public static byte[] hexStringToByteArray(String hexEncodedBinary) {
    // Guard clause: a valid encoding has exactly two digits per byte.
    if (hexEncodedBinary.length() % 2 != 0) {
        throw new IllegalArgumentException(
                "Hex-encoded binary string contains an uneven no. of digits");
    }
    final char[] digits = hexEncodedBinary.toCharArray();
    final byte[] decoded = new byte[digits.length / 2];
    for (int i = 0; i < decoded.length; i++) {
        final int highNibble = Character.digit(digits[i * 2], 16);
        final int lowNibble = Character.digit(digits[i * 2 + 1], 16);
        // Character.digit returns -1 for characters outside the radix.
        if (highNibble == -1 || lowNibble == -1) {
            throw new IllegalArgumentException(
                    "Hex-encoded binary string contains an invalid hex digit in '" + digits[i * 2] + digits[i * 2 + 1] + "'");
        }
        decoded[i] = (byte) ((highNibble << 4) | lowNibble);
    }
    return decoded;
}
@Test
public void testHexStringToByteArray() throws Exception {
    byte[] ba;
    // empty input -> empty array
    ba = BinaryTCPClientImpl.hexStringToByteArray("");
    assertEquals(0, ba.length);
    // single byte
    ba = BinaryTCPClientImpl.hexStringToByteArray("00");
    assertEquals(1, ba.length);
    assertEquals(0, ba[0]);
    // mixed-case digits, including values that decode to negative bytes
    ba = BinaryTCPClientImpl.hexStringToByteArray("0f107F8081ff");
    assertEquals(6, ba.length);
    assertEquals(15, ba[0]);
    assertEquals(16, ba[1]);
    assertEquals(127, ba[2]);
    assertEquals(-128, ba[3]);
    assertEquals(-127, ba[4]);
    assertEquals(-1, ba[5]);
    try {
        ba = BinaryTCPClientImpl.hexStringToByteArray("0f107f8081ff1");// odd chars
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
        // ignored
    }
    try {
        BinaryTCPClientImpl.hexStringToByteArray("0f107xxf8081ff"); // invalid
        fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
        // ignored
    }
}
/**
 * Static factory returning a fresh {@link Builder} for constructing
 * Changeset instances.
 */
public static Builder newChangesetBuilder() {
    return new Builder();
}
@Test
public void fail_with_NPE_when_setting_null_date() {
    // The builder must reject a null date eagerly with a descriptive NPE.
    assertThatThrownBy(() -> Changeset.newChangesetBuilder().setDate(null))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("Date cannot be null");
}
/**
 * Assigns a generated instance id to the instance when it does not already
 * have one; null instances and instances with an existing id are untouched.
 * The service name is back-filled with {@code groupedServiceName} first when
 * blank — note the ordering: the id is generated only after the back-fill, so
 * the generator sees the populated instance (presumably it derives the id from
 * the instance's fields — confirm in InstanceIdGeneratorManager).
 */
public static void setInstanceIdIfEmpty(Instance instance, String groupedServiceName) {
    if (null != instance && StringUtils.isEmpty(instance.getInstanceId())) {
        if (StringUtils.isBlank(instance.getServiceName())) {
            instance.setServiceName(groupedServiceName);
        }
        instance.setInstanceId(InstanceIdGeneratorManager.generateInstanceId(instance));
    }
}
@Test
void testSetInstanceIdIfEmpty() {
    // An instance without an id gets one generated...
    Instance instance = new Instance();
    instance.setIp("1.1.1.1");
    instance.setPort(8890);
    String groupedServiceName = "test";
    instance.setClusterName("testCluster");
    InstanceUtil.setInstanceIdIfEmpty(instance, groupedServiceName);
    assertNotNull(instance.getInstanceId());
    assertEquals(instance.getInstanceId(), InstanceIdGeneratorManager.generateInstanceId(instance));
    // ...while a pre-set custom id is preserved untouched.
    String customInsId = "customInstanceId_1";
    Instance instance1 = new Instance();
    instance1.setInstanceId(customInsId);
    InstanceUtil.setInstanceIdIfEmpty(instance1, groupedServiceName);
    assertEquals(instance1.getInstanceId(), customInsId);
}
/**
 * Asynchronously loads the distinct asset types (entity subtypes) for a
 * tenant, derived from the tenant's active asset profile names; the lookup
 * runs on the DAO's executor service.
 */
@Override
public ListenableFuture<List<EntitySubtype>> findTenantAssetTypesAsync(UUID tenantId) {
    return service.submit(() -> convertTenantEntityInfosToDto(tenantId, EntityType.ASSET, assetProfileRepository.findActiveTenantAssetProfileNames(tenantId)));
}
@Test
public void testFindTenantAssetTypesAsync() throws ExecutionException, InterruptedException, TimeoutException {
    // Assets with type "TYPE_1" added in setUp method
    assets.add(saveAsset(Uuids.timeBased(), tenantId1, customerId1, "TEST_ASSET_3", "TYPE_2"));
    assets.add(saveAsset(Uuids.timeBased(), tenantId1, customerId1, "TEST_ASSET_4", "TYPE_3"));
    assets.add(saveAsset(Uuids.timeBased(), tenantId1, customerId1, "TEST_ASSET_5", "TYPE_3"));
    assets.add(saveAsset(Uuids.timeBased(), tenantId1, customerId1, "TEST_ASSET_6", "TYPE_3"));
    assets.add(saveAsset(Uuids.timeBased(), tenantId2, customerId2, "TEST_ASSET_7", "TYPE_4"));
    // Each tenant must only see its own distinct types.
    List<EntitySubtype> tenant1Types = assetDao.findTenantAssetTypesAsync(tenantId1).get(30, TimeUnit.SECONDS);
    assertNotNull(tenant1Types);
    List<EntitySubtype> tenant2Types = assetDao.findTenantAssetTypesAsync(tenantId2).get(30, TimeUnit.SECONDS);
    assertNotNull(tenant2Types);
    List<String> types = List.of("default", "TYPE_1", "TYPE_2", "TYPE_3", "TYPE_4");
    assertEquals(getDifferentTypesCount(types, tenant1Types), tenant1Types.size());
    assertEquals(getDifferentTypesCount(types, tenant2Types), tenant2Types.size());
}
/**
 * Executes a web-service request through the underlying connector.
 * Calls are forbidden in medium test mode; a response that failed
 * authentication is turned into an error before any auth-related warnings
 * are surfaced.
 */
public WsResponse call(WsRequest request) {
    checkState(!globalMode.isMediumTest(), "No WS call should be made in medium test mode");
    WsResponse response = target.wsConnector().call(request);
    // Fail fast on an unauthorized response, then collect warnings.
    failIfUnauthorized(response);
    checkAuthenticationWarnings(response);
    return response;
}
@Test
public void call_whenUnauthenticatedAndDebugEnabled_shouldLogResponseDetails() {
    // A 401 response must raise a MessageException with credential guidance,
    // and — at DEBUG level — log the response body and headers.
    WsRequest request = newRequest();
    server.stubFor(get(urlEqualTo(URL_ENDPOINT))
        .willReturn(aResponse()
            .withStatus(401)
            .withBody("Missing authentication")
            .withHeader("X-Test-Header", "ImATestHeader")));
    logTester.setLevel(Level.DEBUG);
    DefaultScannerWsClient client = new DefaultScannerWsClient(wsClient, false, new GlobalAnalysisMode(new ScannerProperties(Collections.emptyMap())), analysisWarnings);
    assertThatThrownBy(() -> client.call(request))
        .isInstanceOf(MessageException.class)
        .hasMessage("Not authorized. Analyzing this project requires authentication. Please check the user token in the property 'sonar.token' " +
            "or the credentials in the properties 'sonar.login' and 'sonar.password'.");
    List<String> debugLogs = logTester.logs(Level.DEBUG);
    assertThat(debugLogs).hasSize(3);
    assertThat(debugLogs.get(2)).matches("Error response content: Missing authentication, headers: \\{.*x-test-header=\\[ImATestHeader\\].*");
}
/**
 * Returns the server time of the given cluster node by issuing the Redis
 * TIME command against the client entry owning that node, blocking until
 * the result is available.
 */
@Override
public Long time(RedisClusterNode node) {
    RedisClient entry = getEntry(node);
    RFuture<Long> f = executorService.readAsync(entry, LongCodec.INSTANCE, RedisCommands.TIME_LONG);
    // Convert the async future into a synchronous result for the Spring API.
    return syncFuture(f);
}
@Test
public void testTime() {
    // The TIME command against a live master must return a plausible
    // (post-epoch) timestamp.
    RedisClusterNode master = getFirstMaster();
    Long time = connection.time(master);
    assertThat(time).isGreaterThan(1000);
}
/**
 * Creates the file-merge cache manager: sets up the in-memory Guava cache
 * (bounded by entry count, expiring on access TTL), records executors for
 * flushing/removal/size accounting, ensures the on-disk cache directory
 * exists (or schedules stale files in it for deletion), and starts a
 * periodic task that refreshes per-scope cache sizes every 15 seconds.
 */
@Inject
public FileMergeCacheManager(
        CacheConfig cacheConfig,
        FileMergeCacheConfig fileMergeCacheConfig,
        CacheStats stats,
        ExecutorService cacheFlushExecutor,
        ExecutorService cacheRemovalExecutor,
        ScheduledExecutorService cacheSizeCalculateExecutor) {
    // NOTE(review): message says "directory is null" but the check is on
    // cacheConfig — consider "cacheConfig is null".
    requireNonNull(cacheConfig, "directory is null");
    this.cacheFlushExecutor = cacheFlushExecutor;
    this.cacheRemovalExecutor = cacheRemovalExecutor;
    this.cacheSizeCalculateExecutor = cacheSizeCalculateExecutor;
    this.cache = CacheBuilder.newBuilder()
            .maximumSize(fileMergeCacheConfig.getMaxCachedEntries())
            .expireAfterAccess(fileMergeCacheConfig.getCacheTtl().toMillis(), MILLISECONDS)
            .removalListener(new CacheRemovalListener())
            .recordStats()
            .build();
    this.stats = requireNonNull(stats, "stats is null");
    this.baseDirectory = new Path(cacheConfig.getBaseDirectory());
    checkArgument(fileMergeCacheConfig.getMaxInMemoryCacheSize().toBytes() >= 0, "maxInflightBytes is negative");
    this.maxInflightBytes = fileMergeCacheConfig.getMaxInMemoryCacheSize().toBytes();
    File target = new File(baseDirectory.toUri());
    if (!target.exists()) {
        try {
            Files.createDirectories(target.toPath());
        } catch (IOException e) {
            throw new PrestoException(GENERIC_INTERNAL_ERROR, "cannot create cache directory " + target, e);
        }
    } else {
        File[] files = target.listFiles();
        if (files == null) {
            // NOTE(review): returning here also skips scheduling the periodic
            // size-calculation task below — confirm that is intentional.
            return;
        }
        // Asynchronously purge leftover cache files from a previous run.
        this.cacheRemovalExecutor.submit(() -> Arrays.stream(files).forEach(file -> {
            try {
                Files.delete(file.toPath());
            } catch (IOException e) {
                // ignore
            }
        }));
    }
    // Periodically recompute per-scope sizes and drop entries for scopes that
    // no longer have files.
    this.cacheSizeCalculateExecutor.scheduleAtFixedRate(
            () -> {
                try {
                    cacheScopeFiles.keySet().forEach(cacheIdentifier -> cacheScopeSizeInBytes.put(cacheIdentifier, getCacheScopeSizeInBytes(cacheIdentifier)));
                    cacheScopeSizeInBytes.keySet().removeIf(key -> !cacheScopeFiles.containsKey(key));
                } catch (Throwable t) {
                    log.error(t, "Error calculating cache size");
                }
            },
            0, 15, TimeUnit.SECONDS);
}
/**
 * Exercises the basic hit/miss bookkeeping of the file-merge cache: a cold
 * read misses, a fully-covered read hits, partially-covered and disjoint
 * reads miss again, and in-memory retained bytes drop to zero after each
 * flush trigger. Buffer contents are validated against the source data after
 * every read.
 */
@Test(timeOut = 30_000)
public void testBasic() throws InterruptedException, ExecutionException, IOException {
    TestingCacheStats stats = new TestingCacheStats();
    CacheManager cacheManager = fileMergeCacheManager(stats);
    byte[] buffer = new byte[1024];
    // new read
    assertFalse(readFully(cacheManager, NO_CACHE_CONSTRAINTS, 42, buffer, 0, 100));
    assertEquals(stats.getCacheMiss(), 1);
    assertEquals(stats.getCacheHit(), 0);
    stats.trigger();
    assertEquals(stats.getInMemoryRetainedBytes(), 0);
    validateBuffer(data, 42, buffer, 0, 100);
    // within the range of the cache
    assertTrue(readFully(cacheManager, NO_CACHE_CONSTRAINTS, 47, buffer, 0, 90));
    assertEquals(stats.getCacheMiss(), 1);
    assertEquals(stats.getCacheHit(), 1);
    assertEquals(stats.getInMemoryRetainedBytes(), 0);
    validateBuffer(data, 47, buffer, 0, 90);
    // partially within the range of the cache
    assertFalse(readFully(cacheManager, NO_CACHE_CONSTRAINTS, 52, buffer, 0, 100));
    assertEquals(stats.getCacheMiss(), 2);
    assertEquals(stats.getCacheHit(), 1);
    stats.trigger();
    assertEquals(stats.getInMemoryRetainedBytes(), 0);
    validateBuffer(data, 52, buffer, 0, 100);
    // partially within the range of the cache
    assertFalse(readFully(cacheManager, NO_CACHE_CONSTRAINTS, 32, buffer, 10, 50));
    assertEquals(stats.getCacheMiss(), 3);
    assertEquals(stats.getCacheHit(), 1);
    stats.trigger();
    assertEquals(stats.getInMemoryRetainedBytes(), 0);
    validateBuffer(data, 32, buffer, 10, 50);
    // create a hole within two caches
    assertFalse(readFully(cacheManager, NO_CACHE_CONSTRAINTS, 200, buffer, 40, 50));
    assertEquals(stats.getCacheMiss(), 4);
    assertEquals(stats.getCacheHit(), 1);
    stats.trigger();
    assertEquals(stats.getInMemoryRetainedBytes(), 0);
    validateBuffer(data, 200, buffer, 40, 50);
    // use a range to cover the hole
    assertFalse(readFully(cacheManager, NO_CACHE_CONSTRAINTS, 40, buffer, 400, 200));
    assertEquals(stats.getCacheMiss(), 5);
    assertEquals(stats.getCacheHit(), 1);
    stats.trigger();
    assertEquals(stats.getInMemoryRetainedBytes(), 0);
    validateBuffer(data, 40, buffer, 400, 200);
}
/**
 * Pushes an NNI-link configuration change to the device over NETCONF.
 * The target has the form "nnilink-id:param-name:param-value" (three
 * colon-separated parts). Returns false — after logging — when this node is
 * not the device master, the target is malformed, the link id is not a
 * positive integer, the parameter fails validation, or the NETCONF edit
 * fails; true on success.
 */
@Override
public boolean setNniLink(String target) {
    DriverHandler handler = handler();
    NetconfController controller = handler.get(NetconfController.class);
    MastershipService mastershipService = handler.get(MastershipService.class);
    DeviceId ncDeviceId = handler.data().deviceId();
    checkNotNull(controller, "Netconf controller is null");
    // Only the master for this device may change its configuration.
    if (!mastershipService.isLocalMaster(ncDeviceId)) {
        log.warn("Not master for {} Use {} to execute command", ncDeviceId, mastershipService.getMasterFor(ncDeviceId));
        return false;
    }
    String[] data = target.split(COLON);
    if (data.length != THREE) {
        log.error("Invalid number of arguments {}", target);
        return false;
    }
    // The link id must parse as a positive integer.
    try {
        int nni = Integer.parseInt(data[FIRST_PART]);
        if (nni <= ZERO) {
            log.error("Invalid integer for nnilink-id:{}", target);
            return false;
        }
    } catch (NumberFormatException e) {
        log.error("Non-number input for nnilink-id:{}", target);
        return false;
    }
    // Validate the parameter name/value pair before building the request.
    if (!checkSetParam(data[SECOND_PART], data[THIRD_PART])) {
        log.error("Failed to check input {}", target);
        return false;
    }
    try {
        // Assemble the NETCONF edit-config XML payload for the NNI link port.
        StringBuilder request = new StringBuilder();
        request.append(VOLT_NE_OPEN + VOLT_NE_NAMESPACE)
            .append(ANGLE_RIGHT + NEW_LINE)
            .append(buildStartTag(VOLT_PORTS))
            .append(buildStartTag(ETH_NNILINK_PORTS))
            .append(buildStartTag(ETH_NNILINK_PORT))
            .append(buildStartTag(NNILINK_ID, false))
            .append(data[FIRST_PART])
            .append(buildEndTag(NNILINK_ID))
            .append(buildStartTag(data[SECOND_PART], false))
            .append(data[THIRD_PART])
            .append(buildEndTag(data[SECOND_PART]))
            .append(buildEndTag(ETH_NNILINK_PORT))
            .append(buildEndTag(ETH_NNILINK_PORTS))
            .append(buildEndTag(VOLT_PORTS))
            .append(VOLT_NE_CLOSE);
        controller.getDevicesMap()
            .get(ncDeviceId)
            .getSession()
            .editConfig(RUNNING, null, request.toString());
    } catch (NetconfException e) {
        log.error("Cannot communicate to device {} exception {}", ncDeviceId, e);
        return false;
    }
    return true;
}
@Test
public void testInvalidSetNniLinkInput() throws Exception {
    // Every malformed target string in the fixture must be rejected.
    for (String target : INVALID_SET_TCS) {
        final boolean result = voltConfig.setNniLink(target);
        assertFalse("Incorrect response for INVALID_SET_TCS", result);
    }
}
/**
 * Coerces the left/right typed expressions of a comparison to compatible
 * types. Same-class pairs and unification expressions pass through untouched;
 * incompatible pairs raise CoercedExpressionException. Otherwise the right
 * side (and in one Character/String case the left side) is rewritten — cast,
 * literal conversion, or string/date/boolean coercion — with the branch order
 * below defining precedence; do not reorder.
 */
public CoercedExpressionResult coerce() {
    final Class<?> leftClass = left.getRawClass();
    final Class<?> nonPrimitiveLeftClass = toNonPrimitiveType(leftClass);
    final Class<?> rightClass = right.getRawClass();
    final Class<?> nonPrimitiveRightClass = toNonPrimitiveType(rightClass);
    boolean sameClass = leftClass == rightClass;
    boolean isUnificationExpression = left instanceof UnificationTypedExpression || right instanceof UnificationTypedExpression;
    if (sameClass || isUnificationExpression) {
        // Nothing to coerce.
        return new CoercedExpressionResult(left, right);
    }
    if (!canCoerce()) {
        throw new CoercedExpressionException(new InvalidExpressionErrorResult("Comparison operation requires compatible types. Found " + leftClass + " and " + rightClass));
    }
    // int/long compared to Double: widen the LEFT side to double.
    if ((nonPrimitiveLeftClass == Integer.class || nonPrimitiveLeftClass == Long.class) && nonPrimitiveRightClass == Double.class) {
        CastExpr castExpression = new CastExpr(PrimitiveType.doubleType(), this.left.getExpression());
        return new CoercedExpressionResult(
                new TypedExpression(castExpression, double.class, left.getType()),
                right,
                false);
    }
    final boolean leftIsPrimitive = leftClass.isPrimitive() || Number.class.isAssignableFrom(leftClass);
    final boolean canCoerceLiteralNumberExpr = canCoerceLiteralNumberExpr(leftClass);
    boolean rightAsStaticField = false;
    final Expression rightExpression = right.getExpression();
    final TypedExpression coercedRight;
    if (leftIsPrimitive && canCoerceLiteralNumberExpr && rightExpression instanceof LiteralStringValueExpr) {
        // Rewrite a numeric literal on the right to the left-hand type.
        final Expression coercedLiteralNumberExprToType = coerceLiteralNumberExprToType((LiteralStringValueExpr) right.getExpression(), leftClass);
        coercedRight = right.cloneWithNewExpression(coercedLiteralNumberExprToType);
        coercedRight.setType(leftClass);
    } else if (shouldCoerceBToString(left, right)) {
        coercedRight = coerceToString(right);
    } else if (isNotBinaryExpression(right) && canBeNarrowed(leftClass, rightClass) && right.isNumberLiteral()) {
        coercedRight = castToClass(leftClass);
    } else if (leftClass == long.class && rightClass == int.class) {
        coercedRight = right.cloneWithNewExpression(new CastExpr(PrimitiveType.longType(), right.getExpression()));
    } else if (leftClass == Date.class && rightClass == String.class) {
        // Date-from-string coercions are materialized as static fields.
        coercedRight = coerceToDate(right);
        rightAsStaticField = true;
    } else if (leftClass == LocalDate.class && rightClass == String.class) {
        coercedRight = coerceToLocalDate(right);
        rightAsStaticField = true;
    } else if (leftClass == LocalDateTime.class && rightClass == String.class) {
        coercedRight = coerceToLocalDateTime(right);
        rightAsStaticField = true;
    } else if (shouldCoerceBToMap()) {
        coercedRight = castToClass(toNonPrimitiveType(leftClass));
    } else if (isBoolean(leftClass) && !isBoolean(rightClass)) {
        coercedRight = coerceBoolean(right);
    } else {
        coercedRight = right;
    }
    // Only Character-vs-String comparisons rewrite the left side.
    final TypedExpression coercedLeft;
    if (nonPrimitiveLeftClass == Character.class && shouldCoerceBToString(right, left)) {
        coercedLeft = coerceToString(left);
    } else {
        coercedLeft = left;
    }
    return new CoercedExpressionResult(coercedLeft, coercedRight, rightAsStaticField);
}
@Test
public void testIntegerToBooleanError() {
    // Boolean vs Integer is not coercible and must raise the dedicated
    // CoercedExpressionException.
    final TypedExpression left = expr(THIS_PLACEHOLDER + ".getBooleanValue", Boolean.class);
    final TypedExpression right = expr("1", Integer.class);
    assertThatThrownBy(() -> new CoercedExpression(left, right, false).coerce())
        .isInstanceOf(CoercedExpression.CoercedExpressionException.class);
}
/**
 * Always returns a sharding schema-table aggregation reviser, configured with
 * the CHECK_TABLE_METADATA_ENABLED property from the given configuration.
 */
@Override
public Optional<ShardingSchemaTableAggregationReviser> getSchemaTableAggregationReviser(final ConfigurationProperties props) {
    return Optional.of(new ShardingSchemaTableAggregationReviser(props.getValue(ConfigurationPropertyKey.CHECK_TABLE_METADATA_ENABLED)));
}
@Test
void assertGetSchemaTableAggregationReviser() {
    // The entry must always supply a ShardingSchemaTableAggregationReviser.
    Optional<ShardingSchemaTableAggregationReviser> schemaTableAggregationReviser = reviseEntry.getSchemaTableAggregationReviser(new ConfigurationProperties(null));
    assertTrue(schemaTableAggregationReviser.isPresent());
    assertThat(schemaTableAggregationReviser.get().getClass(), is(ShardingSchemaTableAggregationReviser.class));
}
/**
 * Records the timeout both as a millisecond duration (for reporting) and as
 * an absolute deadline on the monotonic {@code System.nanoTime()} clock
 * (for expiry checks, immune to wall-clock adjustments).
 */
private TimeoutCountDown(long timeout, TimeUnit unit) {
    timeoutInMillis = TimeUnit.MILLISECONDS.convert(timeout, unit);
    deadlineInNanos = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeout, unit);
}
@Test
void testTimeoutCountDown() throws InterruptedException {
    // 5 second countdown: not expired before the deadline, expired after.
    // NOTE: this test sleeps 6 real seconds by design.
    TimeoutCountDown timeoutCountDown = TimeoutCountDown.newCountDown(5, TimeUnit.SECONDS);
    Assertions.assertEquals(5 * 1000, timeoutCountDown.getTimeoutInMilli());
    Assertions.assertFalse(timeoutCountDown.isExpired());
    Assertions.assertTrue(timeoutCountDown.timeRemaining(TimeUnit.SECONDS) > 0);
    Assertions.assertTrue(timeoutCountDown.elapsedMillis() < TimeUnit.MILLISECONDS.convert(5, TimeUnit.SECONDS));
    Thread.sleep(6 * 1000);
    Assertions.assertTrue(timeoutCountDown.isExpired());
    Assertions.assertTrue(timeoutCountDown.timeRemaining(TimeUnit.SECONDS) <= 0);
    Assertions.assertTrue(timeoutCountDown.elapsedMillis() > TimeUnit.MILLISECONDS.convert(5, TimeUnit.SECONDS));
}
/**
 * Convenience overload that reads the user's preferences (whether to show
 * higher resolutions, and the default video format) from shared preferences,
 * then delegates to the preference-free sorting overload.
 */
@NonNull
public static List<VideoStream> getSortedStreamVideosList(
        @NonNull final Context context,
        @Nullable final List<VideoStream> videoStreams,
        @Nullable final List<VideoStream> videoOnlyStreams,
        final boolean ascendingOrder,
        final boolean preferVideoOnlyStreams) {
    final SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(context);
    final boolean showHigherResolutions = preferences.getBoolean(context.getString(R.string.show_higher_resolutions_key), false);
    final MediaFormat defaultFormat = getDefaultFormat(context, R.string.default_video_format_key, R.string.default_video_format_value);
    return getSortedStreamVideosList(defaultFormat, showHigherResolutions, videoStreams, videoOnlyStreams, ascendingOrder, preferVideoOnlyStreams);
}
@Test
public void getSortedStreamVideosExceptHighResolutionsTest() {
    ////////////////////////////////////
    // Don't show Higher resolutions //
    //////////////////////////////////
    final List<VideoStream> result = ListHelper.getSortedStreamVideosList(MediaFormat.MPEG_4, false, VIDEO_STREAMS_TEST_LIST, VIDEO_ONLY_STREAMS_TEST_LIST, false, false);
    // With showHigherResolutions=false, anything above 1080p60 is filtered
    // out and the rest is sorted in descending resolution order.
    final List<String> expected = List.of("1080p60", "1080p", "720p60", "720p", "480p", "360p", "240p", "144p");
    assertEquals(expected.size(), result.size());
    for (int i = 0; i < result.size(); i++) {
        assertEquals(expected.get(i), result.get(i).getResolution());
    }
}
/**
 * Returns the current KsqlConfig. The internal reference is intentionally
 * exposed as-is, hence the suppressed EI_EXPOSE_REP SpotBugs warning.
 */
@Override
@SuppressFBWarnings(value = "EI_EXPOSE_REP")
public KsqlConfig getKsqlConfig() {
    return ksqlConfig;
}
@Test
public void shouldWriteConfigIfNoConfigWritten() {
    // Given: the config topic is empty on the first read, a config message is
    // produced, and a second read returns the newly written config.
    expectRead(consumerBefore);
    addPollResult(KafkaConfigStore.CONFIG_MSG_KEY, properties);
    expectRead(consumerAfter);

    // When:
    getKsqlConfig();

    // Then: nothing was drained from the pre-write consumer, and the config
    // was produced to the topic.
    verifyDrainLog(consumerBefore, 0);
    verifyProduce();
}
/**
 * Resolves the output schema of an execution step by dispatching to the
 * handler registered for the step's concrete class in {@code HANDLERS}.
 *
 * @throws IllegalStateException if no handler is registered for the step class
 */
public LogicalSchema resolve(final ExecutionStep<?> step, final LogicalSchema schema) {
    return Optional.ofNullable(HANDLERS.get(step.getClass()))
        .map(h -> h.handle(this, schema, step))
        .orElseThrow(() -> new IllegalStateException("Unhandled step class: " + step.getClass()));
}
@Test
public void shouldResolveSchemaForStreamAggregate() {
    // Given: SUM aggregation grouped on K0, keeping non-aggregate column ORANGE.
    givenAggregateFunction("SUM");
    final StreamAggregate step = new StreamAggregate(
        PROPERTIES,
        groupedStreamSource,
        formats,
        ImmutableList.of(ColumnName.of("ORANGE")),
        ImmutableList.of(functionCall("SUM", "APPLE"))
    );

    // When:
    final LogicalSchema result = resolver.resolve(step, SCHEMA);

    // Then: key column, the non-aggregate column, then the generated
    // aggregate column (SUM over INTEGER yields BIGINT).
    assertThat(result, is(
        LogicalSchema.builder()
            .keyColumn(ColumnName.of("K0"), SqlTypes.INTEGER)
            .valueColumn(ColumnName.of("ORANGE"), SqlTypes.INTEGER)
            .valueColumn(ColumnNames.aggregateColumn(0), SqlTypes.BIGINT)
            .build())
    );
}
@Override
public void run() {
    // CAS guard: only the first caller wins; concurrent invocations are skipped.
    // NOTE(review): the flag is presumably reset elsewhere once the delegated
    // execution finishes — confirm against the enclosing class.
    if (isAlreadyRunning.compareAndSet(false, true)) {
        if (throwable != null) {
            // Capturing & throwing the exception to propagate the failure to the scheduler (suppress future executions)
            // instead of hiding in the delegated executor.
            rethrow(throwable);
            return;
        }
        executor.execute(runnable);
    }
}
// While the task is mid-execution on one thread, a second invocation must be
// skipped (neither blocks nor re-executes); the queue counter proves only one
// runnable reached the executor.
@Test
public void givenTheTaskIsAlreadyRunning_whenThreadAttemptToExecuteIt_theExutionWillBeSkipped() throws InterruptedException {
    final ResumableCountingRunnable task = new ResumableCountingRunnable();
    final AtomicInteger counter = new AtomicInteger();
    SynchronousQueue<Runnable> queue = new SynchronousQueue<>() {
        @Override
        public boolean offer(Runnable runnable) {
            counter.incrementAndGet();
            return super.offer(runnable);
        }
    };
    ThreadPoolExecutor executor = new ThreadPoolExecutor(0, Integer.MAX_VALUE, 60L, TimeUnit.SECONDS, queue);
    //start first task
    DelegateAndSkipOnConcurrentExecutionDecorator decoratedTask = decorateAndInvokeRunOnDifferentThread(task, executor);
    //wait until the task is running
    task.awaitExecutionStarted();
    //attempt to start execution from the test thread. this execution should be skipped -> it won't block
    decoratedTask.run();
    //resume the original task
    task.resumeExecution();
    assertEquals(1, task.getExecutionCount());
    assertEquals(1, counter.get());
}
/**
 * Config-driven initialization for the default-constructor path.
 * Rejects reconfiguration when listClass/inner were already fixed by a
 * non-default constructor, then resolves the list class and inner serde.
 */
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
    if (listClass != null || inner != null) {
        log.error("Could not configure ListDeserializer as some parameters were already set -- listClass: {}, inner: {}", listClass, inner);
        throw new ConfigException("List deserializer was already initialized using a non-default constructor");
    }
    configureListClass(configs, isKey);
    configureInnerSerde(configs, isKey);
}
// Configuring with only the list class (no inner serde class) must fail clearly.
@Test
public void testListKeyDeserializerNoArgConstructorsShouldThrowConfigExceptionDueMissingInnerClassProp() {
    props.put(CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_TYPE_CLASS, ArrayList.class);
    final ConfigException exception = assertThrows(
        ConfigException.class,
        () -> listDeserializer.configure(props, true)
    );
    assertEquals("Not able to determine the inner serde class because "
        + "it was neither passed via the constructor nor set in the config.", exception.getMessage());
}
/**
 * Converts {@code originalObject} to an instance of {@code expectedClass}.
 * Null passes through; assignable (or boxing-equivalent) values are returned
 * unchanged; a String target uses {@code toString()}; otherwise conversion is
 * dispatched on the source class name.
 *
 * @throws KiePMMLException when no converter exists for the source type
 */
public static Object convert(Class<?> expectedClass, Object originalObject) {
    if (originalObject == null) {
        return null;
    }
    Class<?> currentClass = originalObject.getClass();
    if (expectedClass.isAssignableFrom(currentClass)) {
        return originalObject;
    }
    if (PrimitiveBoxedUtils.areSameWithBoxing(expectedClass, originalObject.getClass())) {
        // No cast/transformation originalObject
        return originalObject;
    }
    if (expectedClass == String.class) {
        return originalObject.toString();
    }
    Object toReturn;
    String currentClassName = currentClass.getName();
    // NOTE(review): getClass().getName() on an Object reference always yields the
    // boxed class name, so the bare "int"/"double"/"float" labels look unreachable — confirm.
    switch (currentClassName) {
        case "java.lang.String":
            toReturn = convertFromString(expectedClass, (String) originalObject);
            break;
        case "int":
        case "java.lang.Integer":
            toReturn = convertFromInteger(expectedClass, (Integer) originalObject);
            break;
        case "double":
        case "java.lang.Double":
            toReturn = convertFromDouble(expectedClass, (Double) originalObject);
            break;
        case "float":
        case "java.lang.Float":
            toReturn = convertFromFloat(expectedClass, (Float) originalObject);
            break;
        default:
            throw new KiePMMLException(String.format(FAILED_CONVERSION, originalObject, expectedClass.getName()));
    }
    return toReturn;
}
// Every (double input -> expected value) pair in the table must convert exactly.
@Test
void convertConvertibleFromDouble() {
    CONVERTIBLE_FROM_DOUBLE.forEach((s, expected) -> {
        Class<?> expectedClass = expected.getClass();
        Object retrieved = ConverterTypeUtil.convert(expectedClass, s);
        assertThat(retrieved).isEqualTo(expected);
    });
}
/**
 * Decodes a protobuf payload into {@code destination} by reflectively
 * invoking the code-generated static decode method; any reflective or
 * decoding failure is wrapped in an unchecked RuntimeException.
 */
@Nullable
@Override
public GenericRow decode(byte[] payload, GenericRow destination) {
    try {
        destination = (GenericRow) _decodeMethod.invoke(null, payload, destination);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return destination;
}
// Happy path: a serialized sample record round-trips through the generated decoder.
@Test
public void testHappyCase() throws Exception {
    ProtoBufCodeGenMessageDecoder messageDecoder = setupDecoder("sample.jar",
        "org.apache.pinot.plugin.inputformat.protobuf.Sample$SampleRecord", getFieldsInSampleRecord());
    Sample.SampleRecord sampleRecord = getSampleRecordMessage();
    GenericRow destination = new GenericRow();
    messageDecoder.decode(sampleRecord.toByteArray(), destination);
    assertNotNull(destination.getValue("email"));
    assertNotNull(destination.getValue("name"));
    assertNotNull(destination.getValue("id"));
    assertEquals(destination.getValue("email"), "foobar@hello.com");
    assertEquals(destination.getValue("name"), "Alice");
    assertEquals(destination.getValue("id"), 18);
}
/**
 * Deletes a role. With a blank username the role is removed for all users;
 * otherwise only that user's grant of the role is removed.
 */
@DeleteMapping
@Secured(resource = AuthConstants.CONSOLE_RESOURCE_NAME_PREFIX + "roles", action = ActionTypes.WRITE)
public Object deleteRole(@RequestParam String role,
        @RequestParam(name = "username", defaultValue = StringUtils.EMPTY) String username) {
    if (StringUtils.isBlank(username)) {
        roleService.deleteRole(role);
    } else {
        roleService.deleteRole(role, username);
    }
    return RestResultUtils.success("delete role of user " + username + " ok!");
}
// A null username takes the "delete role for everyone" branch.
@Test
void testDeleteRole1() {
    RestResult<String> result = (RestResult<String>) roleController.deleteRole("test", null);
    verify(roleService, times(1)).deleteRole(anyString());
    assertEquals(200, result.getCode());
}
/**
 * Returns the elements of {@code coll1} that are not in {@code coll2}
 * (multiset semantics via removeAll). If either collection is empty,
 * {@code coll1} itself is returned unchanged.
 */
public static <T> Collection<T> subtract(Collection<T> coll1, Collection<T> coll2) {
    if(isEmpty(coll1) || isEmpty(coll2)){
        return coll1;
    }
    // Work on a clone so the caller's collection is not mutated.
    Collection<T> result = ObjectUtil.clone(coll1);
    try {
        if (null == result) {
            // Not cloneable: build a fresh collection of the same type.
            result = CollUtil.create(coll1.getClass());
            result.addAll(coll1);
        }
        result.removeAll(coll2);
    } catch (UnsupportedOperationException e) {
        // 针对 coll1 为只读集合的补偿
        // (Compensation when coll1 is a read-only collection: copy into a mutable one.)
        result = CollUtil.create(AbstractCollection.class);
        result.addAll(coll1);
        result.removeAll(coll2);
    }
    return result;
}
// Only "x" survives: every other element of list1 also appears in list2.
@Test
public void subtractTest() {
    final List<String> list1 = CollUtil.newArrayList("a", "b", "b", "c", "d", "x");
    final List<String> list2 = CollUtil.newArrayList("a", "b", "b", "b", "c", "d", "x2");
    final Collection<String> subtract = CollUtil.subtract(list1, list2);
    assertEquals(1, subtract.size());
    assertEquals("x", subtract.iterator().next());
}
// Convenience overload: match the whole pattern (group 0).
public static Matches matches(String regex) {
    return matches(regex, 0);
}
// No input element matches the pattern, so the named-group output is empty.
@Test
@Category(NeedsRunner.class)
public void testMatchesNameNone() {
    PCollection<String> output =
        p.apply(Create.of("a", "b", "c", "d"))
            .apply(Regex.matches("x (?<namedgroup>[xyz]*)", "namedgroup"));
    PAssert.that(output).empty();
    p.run();
}
/**
 * Builds SourceToTargetMapping entries for the given rename definitions by
 * resolving each source/target name to its index in the respective field list.
 * Names not present in a list resolve to -1 (the {@code List.indexOf} contract)
 * and are still added.
 *
 * @param sourceFields  names of the available source fields; may be null
 * @param targetFields  names of the available target fields; may be null
 * @param mappingValues rename definitions to resolve; may be null
 * @return a mutable list of mappings; empty when any argument is null
 */
public static List<SourceToTargetMapping> getCurrentMappings(
  List<String> sourceFields,
  List<String> targetFields,
  List<MappingValueRename> mappingValues ) {
  List<SourceToTargetMapping> sourceToTargetMapping = new ArrayList<>();
  if ( sourceFields == null || targetFields == null || mappingValues == null ) {
    return sourceToTargetMapping;
  }
  // Iterating an empty list is a no-op, so no explicit isEmpty() guard is needed.
  for ( MappingValueRename mappingValue : mappingValues ) {
    int sourceIndex = sourceFields.indexOf( mappingValue.getSourceValueName() );
    int targetIndex = targetFields.indexOf( mappingValue.getTargetValueName() );
    sourceToTargetMapping.add( new SourceToTargetMapping( sourceIndex, targetIndex ) );
  }
  return sourceToTargetMapping;
}
// An empty mapping list yields an empty result.
@Test
public void getCurrentMappingEmptyMappings() {
    List<SourceToTargetMapping> currentMapping =
        MappingUtil.getCurrentMappings( sourceFields, targetFields, new ArrayList<>( ) );
    assertEquals( 0, currentMapping.size() );
}
// Convenience overload: use the configured default API timeout.
@Override
public Map<TopicPartition, Long> beginningOffsets(Collection<TopicPartition> partitions) {
    return beginningOffsets(partitions, Duration.ofMillis(defaultApiTimeoutMs));
}
// A zero timeout must not throw: the partition is returned with a null offset
// while the lookup event is still dispatched.
@Test
public void testBeginningOffsetsWithZeroTimeout() {
    consumer = newConsumer();
    TopicPartition tp = new TopicPartition("topic1", 0);
    Map<TopicPartition, Long> result =
        assertDoesNotThrow(() -> consumer.beginningOffsets(Collections.singletonList(tp), Duration.ZERO));
    // The result should be {tp=null}
    assertTrue(result.containsKey(tp));
    assertNull(result.get(tp));
    verify(applicationEventHandler).add(ArgumentMatchers.isA(ListOffsetsEvent.class));
}
// Synchronous facade over the async variant: blocks until the Redis call completes.
@Override
public boolean offerFirst(V e) {
    return get(offerFirstAsync(e));
}
// Baseline against java.util.ArrayDeque: offerFirst prepends, so iteration
// order is the reverse of insertion order.
@Test
public void testOfferFirstOrigin() {
    Deque<Integer> queue = new ArrayDeque<Integer>();
    queue.offerFirst(1);
    queue.offerFirst(2);
    queue.offerFirst(3);
    assertThat(queue).containsExactly(3, 2, 1);
}
/**
 * Reads a 32-bit little-endian integer and widens it to an unsigned long in
 * the range [0, 2^32 - 1].
 */
@Override
public long readUnsignedIntLE() {
    return Integer.toUnsignedLong(readIntLE());
}
// Reading from a released buffer must fail with IllegalReferenceCountException.
@Test
public void testReadUnsignedIntLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, new Executable() {
        @Override
        public void execute() {
            releasedBuffer().readUnsignedIntLE();
        }
    });
}
// Formats the event via the configured converter chain; a layout that has not
// been started (or was stopped) yields the empty string.
public String doLayout(ILoggingEvent event) {
    if (!isStarted()) {
        return CoreConstants.EMPTY_STRING;
    }
    return writeLoopOnConverters(event);
}
// %replace converter: a four-digit run is substituted with "XXXX".
@Test
public void smokeReplace() {
    pl.setPattern("%replace(a1234b){'\\d{4}', 'XXXX'}");
    pl.start();
    StatusPrinter.print(lc);
    String val = pl.doLayout(getEventObject());
    assertEquals("aXXXXb", val);
}
// Returns the earliest scheduled time at or after t, expressed as a
// US-locale Gregorian calendar; delegates to the Calendar overload.
public Calendar ceil(long t) {
    Calendar cal = new GregorianCalendar(Locale.US);
    cal.setTimeInMillis(t);
    return ceil(cal);
}
// "0,30 * * * *": 1:10 rounds up to 1:30, and 1:40 rolls over to 2:00.
@Test
public void testCeil1() throws Exception {
    CronTab x = new CronTab("0,30 * * * *");
    Calendar c = new GregorianCalendar(2000, Calendar.MARCH, 1, 1, 10);
    compare(new GregorianCalendar(2000, Calendar.MARCH, 1, 1, 30), x.ceil(c));

    // roll up test
    c = new GregorianCalendar(2000, Calendar.MARCH, 1, 1, 40);
    compare(new GregorianCalendar(2000, Calendar.MARCH, 1, 2, 0), x.ceil(c));
}
// A GCS path denotes a directory when it ends with the "/" separator.
// NOTE(review): tests expect a bucket-only path ("gs://my_bucket") to count as
// a directory too — presumably the constructor normalizes it with a trailing
// slash; confirm against the factory.
@Override
public boolean isDirectory() {
    return gcsPath.endsWith("/");
}
// Trailing-slash and bucket-only paths are directories; object paths are not.
@Test
public void testIsDirectory() {
    assertTrue(toResourceIdentifier("gs://my_bucket/tmp dir/").isDirectory());
    assertTrue(toResourceIdentifier("gs://my_bucket/").isDirectory());
    assertTrue(toResourceIdentifier("gs://my_bucket").isDirectory());
    assertFalse(toResourceIdentifier("gs://my_bucket/file").isDirectory());
}
// Varargs convenience wrapper around the ImmutableList-based factory.
@VisibleForTesting
static UIntersectionType create(UExpression... bounds) {
    return create(ImmutableList.copyOf(bounds));
}
// Intersection types with different bound sets must not be equal.
@Test
public void equality() {
    new EqualsTester()
        .addEqualityGroup(
            UIntersectionType.create(
                UClassIdent.create("java.lang.CharSequence"),
                UClassIdent.create("java.io.Serializable")))
        .addEqualityGroup(
            UIntersectionType.create(
                UClassIdent.create("java.lang.Number"),
                UClassIdent.create("java.io.Serializable")))
        .testEquals();
}
/**
 * Converts an arbitrary label representation into a MultiLabel: a Collection
 * is parsed element-by-element into (name, value) pairs; anything else is
 * parsed from its string form.
 */
@Override
public <V> MultiLabel generateOutput(V label) {
    if (label instanceof Collection) {
        Collection<?> c = (Collection<?>) label;
        List<Pair<String,Boolean>> dimensions = new ArrayList<>();
        for (Object o : c) {
            dimensions.add(MultiLabel.parseElement(o.toString()));
        }
        return MultiLabel.createFromPairList(dimensions);
    }
    return MultiLabel.parseString(label.toString());
}
// String parsing: explicit "=true" pairs and bare names both yield labels;
// "=1" parses as Boolean false, so those labels are dropped.
@Test
public void testGenerateOutput_str() {
    MultiLabelFactory factory = new MultiLabelFactory();
    MultiLabel output = factory.generateOutput("a=true,b=true,c=true");
    assertEquals(3, output.getLabelSet().size());
    assertEquals("a,b,c", output.getLabelString());

    output = factory.generateOutput("a,b,c");
    assertEquals(3, output.getLabelSet().size());
    assertEquals("a,b,c", output.getLabelString());

    output = factory.generateOutput("a,b");
    assertEquals(2, output.getLabelSet().size());
    assertEquals("a,b", output.getLabelString());

    output = factory.generateOutput("a");
    assertEquals(1, output.getLabelSet().size());
    assertEquals("a", output.getLabelString());

    //
    // Boolean.parseBoolean("integer") resolves to false.
    output = factory.generateOutput("a=1,b=1,c=0");
    assertEquals(0, output.getLabelSet().size());
    assertEquals("", output.getLabelString());
}
// Public entry point: formats a SQL expression into generated Java code.
public String process(final Expression expression) {
    return formatExpression(expression);
}
// A map subscript compiles to a null-safe Map.get with the expected cast chain.
@Test
public void shouldProcessMapExpressionCorrectly() {
    // Given:
    final Expression expression = new SubscriptExpression(MAPCOL, new StringLiteral("key1"));

    // When:
    final String javaExpression = sqlToJavaVisitor.process(expression);

    // Then:
    assertThat(javaExpression,
        equalTo("((Double) (((java.util.Map) arguments.get(\"COL5\")) == null ? null : ((java.util.Map)((java.util.Map) arguments.get(\"COL5\"))).get(\"key1\")))"));
}
// Delegates the module-function call to the underlying encrypt API implementation.
@Override
public <T> T invokeModuleFunction(String methodName, Object... argv) {
    return mEncryptAPIImpl.invokeModuleFunction(methodName, argv);
}
// Smoke test: invoking the load-secret-key function through the protocol
// facade must not throw.
@Test
public void invokeModuleFunction() {
    SAHelper.initSensors(mApplication);
    SAEncryptProtocolImpl encryptProtocol = new SAEncryptProtocolImpl();
    encryptProtocol.install(SensorsDataAPI.sharedInstance(mApplication).getSAContextManager());
    encryptProtocol.invokeModuleFunction(Modules.Encrypt.METHOD_LOAD_SECRET_KEY);
}
/**
 * Lists a remote directory over SFTP. Entries are scanned in one pass, then
 * processed in chunks of the configured size so the listener is notified
 * incrementally. The directory handle is closed by try-with-resources.
 *
 * @throws BackgroundException when the SFTP listing fails
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    final AttributedList<Path> children = new AttributedList<Path>();
    try (RemoteDirectory handle = session.sftp().openDir(directory.getAbsolute())) {
        for(List<RemoteResourceInfo> list : ListUtils.partition(handle.scan(new RemoteResourceFilter() {
            @Override
            public boolean accept(RemoteResourceInfo remoteResourceInfo) {
                // Accept everything; filtering happens downstream.
                return true;
            }
        }), new HostPreferences(session.getHost()).getInteger("sftp.listing.chunksize"))) {
            for(RemoteResourceInfo f : list) {
                final PathAttributes attr = attributes.toAttributes(f.getAttributes());
                final EnumSet<Path.Type> type = EnumSet.noneOf(Path.Type.class);
                // Map the SFTP entry type onto the local path-type flags.
                switch(f.getAttributes().getType()) {
                    case DIRECTORY:
                        type.add(Path.Type.directory);
                        break;
                    case SYMLINK:
                        type.add(Path.Type.symboliclink);
                        break;
                    default:
                        type.add(Path.Type.file);
                        break;
                }
                final Path file = new Path(directory, f.getName(), type, attr);
                if(this.post(file)) {
                    children.add(file);
                    // Incremental notification after each accepted entry.
                    listener.chunk(directory, children);
                }
            }
        }
        return children;
    }
    catch(IOException e) {
        throw new SFTPExceptionMappingService().map("Listing directory {0} failed", e, directory);
    }
}
// End-to-end listing: a file, a directory, and relative/absolute symlinks are
// all returned, with permissions and symlink targets resolved correctly.
@Test
public void testList() throws Exception {
    final Path home = new SFTPHomeDirectoryService(session).find();
    final String filename = String.format("%s%s", new AlphanumericRandomStringService().random(),
        new NFDNormalizer().normalize("ä"));
    final Path file = new Path(home, filename, EnumSet.of(Path.Type.file));
    final Path symlinkRelative = new Path(home, new AlphanumericRandomStringService().random(),
        EnumSet.of(Path.Type.file, AbstractPath.Type.symboliclink));
    final Path symlinkAbsolute = new Path(home, new AlphanumericRandomStringService().random(),
        EnumSet.of(Path.Type.file, AbstractPath.Type.symboliclink));
    final Path directory = new Path(home, new AlphanumericRandomStringService().random(),
        EnumSet.of(Path.Type.directory));
    new SFTPTouchFeature(session).touch(file, new TransferStatus());
    new SFTPSymlinkFeature(session).symlink(symlinkRelative, file.getName());
    new SFTPSymlinkFeature(session).symlink(symlinkAbsolute, file.getAbsolute());
    new SFTPDirectoryFeature(session).mkdir(directory, new TransferStatus());
    final Permission permission = new Permission(Permission.Action.read_write,
        Permission.Action.read_write, Permission.Action.read_write);
    new SFTPUnixPermissionFeature(session).setUnixPermission(file, permission);
    final AttributedList<Path> list = new SFTPListService(session).list(home, new DisabledListProgressListener());
    assertTrue(list.contains(file));
    assertEquals(permission, list.get(file).attributes().getPermission());
    assertTrue(list.contains(directory));
    assertTrue(list.contains(symlinkRelative));
    assertEquals(file, list.get(symlinkRelative).getSymlinkTarget());
    assertTrue(list.contains(symlinkAbsolute));
    assertEquals(file, list.get(symlinkAbsolute).getSymlinkTarget());
    new SFTPDeleteFeature(session).delete(Arrays.asList(file, symlinkAbsolute, symlinkRelative, directory),
        new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Populates this work center with {@code numberOfWorkers} workers (ids 1..n,
 * in order) sharing the given task set and handler, then elects a leader.
 */
public void createWorkers(int numberOfWorkers, TaskSet taskSet, TaskHandler taskHandler) {
    int id = 0;
    while (id < numberOfWorkers) {
        id++;
        workers.add(new Worker(id, this, taskSet, taskHandler));
    }
    promoteLeader();
}
// Five workers are created and the first one becomes the leader.
@Test
void testCreateWorkers() {
    var taskSet = new TaskSet();
    var taskHandler = new TaskHandler();
    var workCenter = new WorkCenter();
    workCenter.createWorkers(5, taskSet, taskHandler);
    assertEquals(5, workCenter.getWorkers().size());
    assertEquals(workCenter.getWorkers().get(0), workCenter.getLeader());
}
// Builds a field-backed Getter: delegates to the generic factory, supplying
// the field's type, a field::get extractor, and a FieldGetter constructor.
public static Getter newFieldGetter(Object object, Getter parent, Field field, String modifier) throws Exception {
    return newGetter(object, parent, modifier, field.getType(), field::get,
            (t, et) -> new FieldGetter(parent, field, modifier, t, et));
}
// Chained [any] getters over arrays infer the element type (boxed Integer).
@Test
public void newFieldGetter_whenExtractingFromNonEmpty_Array_FieldAndParentIsNonEmptyMultiResult_thenInferReturnType() throws Exception {
    OuterObject object = new OuterObject("name", new InnerObject("inner", 0, 1, 2, 3));
    Getter parentGetter = GetterFactory.newFieldGetter(object, null, innersArrayField, "[any]");
    Getter innerObjectNameGetter = GetterFactory.newFieldGetter(object, parentGetter, innerAttributesArrayField, "[any]");
    Class<?> returnType = innerObjectNameGetter.getReturnType();
    assertEquals(Integer.class, returnType);
}
// Wraps a delegate listener so connector status transitions are also recorded
// as worker metrics.
ConnectorStatus.Listener wrapStatusListener(ConnectorStatus.Listener delegateListener) {
    return new ConnectorStatusListener(delegateListener);
}
// A failure after a successful startup must not count as a startup failure.
@Test
public void testConnectorFailureAfterStartupRecordedMetrics() {
    WorkerMetricsGroup workerMetricsGroup = new WorkerMetricsGroup(new HashMap<>(), new HashMap<>(), connectMetrics);
    final ConnectorStatus.Listener connectorListener = workerMetricsGroup.wrapStatusListener(delegateConnectorListener);

    connectorListener.onStartup(connector);
    connectorListener.onFailure(connector, exception);

    verify(delegateConnectorListener).onStartup(connector);
    verifyRecordConnectorStartupSuccess();
    verify(delegateConnectorListener).onFailure(connector, exception);
    // recordConnectorStartupFailure() should not be called if failure happens after a successful startup.
    verify(connectorStartupFailures, never()).record(anyDouble());
}
/**
 * Produces manual compaction tasks: collects all non-L0 SST files (L0 is left
 * to RocksDB), sorts them by column family and key-range start, groups them
 * into tasks, and returns the largest tasks first, capped at the configured
 * maximum number of manual compactions.
 */
public List<CompactionTask> produce() {
    // get all CF files sorted by key range start (L1+)
    List<SstFileMetaData> sstSortedByCfAndStartingKeys =
            metadataSupplier.get().stream()
                    .filter(l -> l.level() > 0) // let RocksDB deal with L0
                    .sorted(SST_COMPARATOR)
                    .collect(Collectors.toList());
    LOG.trace("Input files: {}", sstSortedByCfAndStartingKeys.size());
    List<CompactionTask> tasks = groupIntoTasks(sstSortedByCfAndStartingKeys);
    // Biggest tasks first so the cap keeps the most impactful compactions.
    tasks.sort(Comparator.<CompactionTask>comparingInt(t -> t.files.size()).reversed());
    return tasks.subList(0, Math.min(tasks.size(), settings.maxManualCompactions));
}
// With at most one file per task, two input files yield two tasks.
@Test
void testMaxFilesToCompact() {
    assertThat(
            produce(
                    configBuilder().setMaxFilesToCompact(1).build(),
                    sstBuilder().build(),
                    sstBuilder().build()))
            .hasSize(2);
}
/**
 * Runs the map task: operations are started in reverse execution order (so
 * consumers exist before producers emit) and finished in forward order; on
 * any failure every operation is aborted and suppressed exceptions are
 * attached to the primary one. Interruption is checked between operations.
 */
@Override
public void execute() throws Exception {
    LOG.debug("Executing map task");
    try (Closeable stateCloser = executionStateTracker.activate()) {
        try {
            // Start operations, in reverse-execution-order, so that a
            // consumer is started before a producer might output to it.
            // Starting a root operation such as a ReadOperation does the work
            // of processing the input dataset.
            LOG.debug("Starting operations");
            ListIterator<Operation> iterator = operations.listIterator(operations.size());
            while (iterator.hasPrevious()) {
                if (Thread.currentThread().isInterrupted()) {
                    throw new InterruptedException("Worker aborted");
                }
                Operation op = iterator.previous();
                op.start();
            }

            // Finish operations, in forward-execution-order, so that a
            // producer finishes outputting to its consumers before those
            // consumers are themselves finished.
            LOG.debug("Finishing operations");
            for (Operation op : operations) {
                if (Thread.currentThread().isInterrupted()) {
                    throw new InterruptedException("Worker aborted");
                }
                op.finish();
            }
        } catch (Exception | Error exn) {
            LOG.debug("Aborting operations", exn);
            // Best-effort abort of every operation; secondary failures are
            // suppressed onto the primary exception so nothing is lost.
            for (Operation op : operations) {
                try {
                    op.abort();
                } catch (Exception | Error exn2) {
                    exn.addSuppressed(exn2);
                    if (exn2 instanceof InterruptedException) {
                        // Re-assert interrupt status swallowed by abort().
                        Thread.currentThread().interrupt();
                    }
                }
            }
            throw exn;
        }
    }
    LOG.debug("Map task execution complete");

    // TODO: support for success / failure ports?
}
// Verifies activation, reverse-order start, forward-order finish, and deactivation.
@Test
public void testExecuteMapTaskExecutor() throws Exception {
    Operation o1 = Mockito.mock(Operation.class);
    Operation o2 = Mockito.mock(Operation.class);
    Operation o3 = Mockito.mock(Operation.class);
    List<Operation> operations = Arrays.asList(new Operation[] {o1, o2, o3});

    ExecutionStateTracker stateTracker = Mockito.spy(ExecutionStateTracker.newForTest());
    try (MapTaskExecutor executor = new MapTaskExecutor(operations, counterSet, stateTracker)) {
        executor.execute();
    }

    InOrder inOrder = Mockito.inOrder(stateTracker, o1, o2, o3);
    inOrder.verify(stateTracker).activate();
    inOrder.verify(o3).start();
    inOrder.verify(o2).start();
    inOrder.verify(o1).start();
    inOrder.verify(o1).finish();
    inOrder.verify(o2).finish();
    inOrder.verify(o3).finish();
    inOrder.verify(stateTracker).deactivate();
}
/**
 * Runs aggregate analysis over the final projection. Requires the analysis to
 * contain a GROUP BY; callers must not invoke this for non-aggregate queries.
 */
public AggregateAnalysisResult analyze(
    final ImmutableAnalysis analysis,
    final List<SelectExpression> finalProjection
) {
    if (!analysis.getGroupBy().isPresent()) {
        throw new IllegalArgumentException("Not an aggregate query");
    }

    final AggAnalyzer aggAnalyzer = new AggAnalyzer(analysis, functionRegistry);
    aggAnalyzer.process(finalProjection);
    return aggAnalyzer.result();
}
// COUNT() with no arguments picks up the implicit default argument column.
@Test
public void shouldCaptureDefaultFunctionArguments() {
    // Given:
    final FunctionCall emptyFunc = new FunctionCall(FunctionName.of("COUNT"), new ArrayList<>());
    givenSelectExpression(emptyFunc);

    // When:
    final AggregateAnalysisResult result = analyzer.analyze(analysis, selects);

    // Then:
    assertThat(result.getRequiredColumns(), hasItem(DEFAULT_ARGUMENT));
    assertThat(result.getAggregateFunctionArguments(), hasItem(DEFAULT_ARGUMENT));
}
@Override
public void deleteTag(Long id) {
    // Validate the tag exists
    validateTagExists(id);
    // Validate no users are still assigned to the tag
    validateTagHasUser(id);
    // Delete the tag
    memberTagMapper.deleteById(id);
}
// Deleting a non-existent tag must raise TAG_NOT_EXISTS.
@Test
public void testDeleteTag_notExists() {
    // Prepare parameters
    Long id = randomLongId();

    // Call and assert the expected exception
    assertServiceException(() -> tagService.deleteTag(id), TAG_NOT_EXISTS);
}
// Order-independent hash: sum of the element hashes over occupied slots,
// plus the hash of MISSING_VALUE when that sentinel is logically present.
@DoNotSub public int hashCode() {
    @DoNotSub int hashCode = 0;
    for (final int value : values) {
        if (MISSING_VALUE != value) {
            hashCode += Integer.hashCode(value);
        }
    }

    if (containsMissingValue) {
        hashCode += Integer.hashCode(MISSING_VALUE);
    }

    return hashCode;
}
// Empty sets hash equally regardless of capacity.
@Test
void twoEmptySetsHaveTheSameHashcode() {
    assertEquals(testSet.hashCode(), new IntHashSet(100).hashCode());
}
// Read-only delegation to the wrapped variables container.
@Override
public Object getObject(String key) {
    return variables.getObject(key);
}
// The unmodifiable view returns the same object as the backing variables.
@Test
public void testGetObject() {
    assertThat(unmodifiables.getObject(MY_OBJECT_KEY), CoreMatchers.is(vars.getObject(MY_OBJECT_KEY)));
}
/**
 * Extracts a zip stream into {@code toDir}. Rejects entries that would escape
 * the target directory (zip-slip), restores entry timestamps best-effort, and
 * applies POSIX permissions for entries written on a UNIX platform.
 *
 * @throws IOException on extraction failure or a path-escape attempt
 */
public static void unZip(InputStream inputStream, File toDir) throws IOException {
    try (ZipArchiveInputStream zip = new ZipArchiveInputStream(inputStream)) {
        int numOfFailedLastModifiedSet = 0;
        String targetDirPath = toDir.getCanonicalPath() + File.separator;
        for(ZipArchiveEntry entry = zip.getNextZipEntry();
            entry != null;
            entry = zip.getNextZipEntry()) {
            if (!entry.isDirectory()) {
                File file = new File(toDir, entry.getName());
                // Zip-slip guard: canonical path must stay inside the target dir.
                if (!file.getCanonicalPath().startsWith(targetDirPath)) {
                    throw new IOException("expanding " + entry.getName()
                        + " would create file outside of " + toDir);
                }
                File parent = file.getParentFile();
                if (!parent.mkdirs() &&
                    !parent.isDirectory()) {
                    throw new IOException("Mkdirs failed to create " +
                        parent.getAbsolutePath());
                }
                try (OutputStream out = Files.newOutputStream(file.toPath())) {
                    IOUtils.copyBytes(zip, out, BUFFER_SIZE);
                }
                // Timestamp restore is best-effort; failures are only counted.
                if (!file.setLastModified(entry.getTime())) {
                    numOfFailedLastModifiedSet++;
                }
                if (entry.getPlatform() == ZipArchiveEntry.PLATFORM_UNIX) {
                    Files.setPosixFilePermissions(file.toPath(), permissionsFromMode(entry.getUnixMode()));
                }
            }
        }
        if (numOfFailedLastModifiedSet > 0) {
            LOG.warn("Could not set last modfied time for {} file(s)",
                numOfFailedLastModifiedSet);
        }
    }
}
// Builds a small zip with seven entries carrying distinct UNIX modes, unzips
// it, and verifies content size plus each permission combination; also checks
// that unzipping onto an existing regular file fails.
@Test (timeout = 30000)
public void testUnZip() throws Exception {
    // make a simple zip
    final File simpleZip = new File(del, FILE);
    try (OutputStream os = new FileOutputStream(simpleZip);
         ZipArchiveOutputStream tos = new ZipArchiveOutputStream(os)) {
        List<ZipArchiveEntry> ZipArchiveList = new ArrayList<>(7);
        int count = 0;
        // create 7 files to verify permissions
        for (int i = 0; i < 7; i++) {
            ZipArchiveList.add(new ZipArchiveEntry("foo_" + i));
            ZipArchiveEntry archiveEntry = ZipArchiveList.get(i);
            archiveEntry.setUnixMode(count += 0100);
            byte[] data = "some-content".getBytes(StandardCharsets.UTF_8);
            archiveEntry.setSize(data.length);
            tos.putArchiveEntry(archiveEntry);
            tos.write(data);
        }
        tos.closeArchiveEntry();
        tos.flush();
        tos.finish();
    }

    // successfully unzip it into an existing dir:
    FileUtil.unZip(simpleZip, tmp);
    File foo0 = new File(tmp, "foo_0");
    File foo1 = new File(tmp, "foo_1");
    File foo2 = new File(tmp, "foo_2");
    File foo3 = new File(tmp, "foo_3");
    File foo4 = new File(tmp, "foo_4");
    File foo5 = new File(tmp, "foo_5");
    File foo6 = new File(tmp, "foo_6");
    // check result:
    assertTrue(foo0.exists());
    assertTrue(foo1.exists());
    assertTrue(foo2.exists());
    assertTrue(foo3.exists());
    assertTrue(foo4.exists());
    assertTrue(foo5.exists());
    assertTrue(foo6.exists());
    assertEquals(12, foo0.length());
    // tests whether file foo_0 has executable permissions
    assertTrue("file lacks execute permissions", foo0.canExecute());
    assertFalse("file has write permissions", foo0.canWrite());
    assertFalse("file has read permissions", foo0.canRead());
    // tests whether file foo_1 has writable permissions
    assertFalse("file has execute permissions", foo1.canExecute());
    assertTrue("file lacks write permissions", foo1.canWrite());
    assertFalse("file has read permissions", foo1.canRead());
    // tests whether file foo_2 has executable and writable permissions
    assertTrue("file lacks execute permissions", foo2.canExecute());
    assertTrue("file lacks write permissions", foo2.canWrite());
    assertFalse("file has read permissions", foo2.canRead());
    // tests whether file foo_3 has readable permissions
    assertFalse("file has execute permissions", foo3.canExecute());
    assertFalse("file has write permissions", foo3.canWrite());
    assertTrue("file lacks read permissions", foo3.canRead());
    // tests whether file foo_4 has readable and executable permissions
    assertTrue("file lacks execute permissions", foo4.canExecute());
    assertFalse("file has write permissions", foo4.canWrite());
    assertTrue("file lacks read permissions", foo4.canRead());
    // tests whether file foo_5 has readable and writable permissions
    assertFalse("file has execute permissions", foo5.canExecute());
    assertTrue("file lacks write permissions", foo5.canWrite());
    assertTrue("file lacks read permissions", foo5.canRead());
    // tests whether file foo_6 has readable, writable and executable permissions
    assertTrue("file lacks execute permissions", foo6.canExecute());
    assertTrue("file lacks write permissions", foo6.canWrite());
    assertTrue("file lacks read permissions", foo6.canRead());

    // unzipping onto an existing regular file must fail
    final File regularFile =
        Verify.createNewFile(new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog"));
    LambdaTestUtils.intercept(IOException.class,
        () -> FileUtil.unZip(simpleZip, regularFile));
}
/**
 * Lists consumer groups cluster-wide: first fetches broker metadata, then
 * fans out a ListGroups request to every broker, merging listings and errors
 * into a shared, synchronized results accumulator. Coordinator-loading errors
 * are rethrown to trigger the call's retry path; other errors are recorded
 * per node.
 */
@Override
public ListConsumerGroupsResult listConsumerGroups(ListConsumerGroupsOptions options) {
    final KafkaFutureImpl<Collection<Object>> all = new KafkaFutureImpl<>();
    final long nowMetadata = time.milliseconds();
    final long deadline = calcDeadlineMs(nowMetadata, options.timeoutMs());
    runnable.call(new Call("findAllBrokers", deadline, new LeastLoadedNodeProvider()) {
        @Override
        MetadataRequest.Builder createRequest(int timeoutMs) {
            return new MetadataRequest.Builder(new MetadataRequestData()
                .setTopics(Collections.emptyList())
                .setAllowAutoTopicCreation(true));
        }

        @Override
        void handleResponse(AbstractResponse abstractResponse) {
            MetadataResponse metadataResponse = (MetadataResponse) abstractResponse;
            Collection<Node> nodes = metadataResponse.brokers();
            if (nodes.isEmpty())
                throw new StaleMetadataException("Metadata fetch failed due to missing broker list");

            HashSet<Node> allNodes = new HashSet<>(nodes);
            final ListConsumerGroupsResults results = new ListConsumerGroupsResults(allNodes, all);

            // Fan out one ListGroups request per broker.
            for (final Node node : allNodes) {
                final long nowList = time.milliseconds();
                runnable.call(new Call("listConsumerGroups", deadline, new ConstantNodeIdProvider(node.id())) {
                    @Override
                    ListGroupsRequest.Builder createRequest(int timeoutMs) {
                        List<String> states = options.states()
                            .stream()
                            .map(ConsumerGroupState::toString)
                            .collect(Collectors.toList());
                        List<String> groupTypes = options.types()
                            .stream()
                            .map(GroupType::toString)
                            .collect(Collectors.toList());
                        return new ListGroupsRequest.Builder(new ListGroupsRequestData()
                            .setStatesFilter(states)
                            .setTypesFilter(groupTypes)
                        );
                    }

                    // Only consumer-protocol groups (or "simple" groups with an
                    // empty protocol type) are reported as consumer groups.
                    private void maybeAddConsumerGroup(ListGroupsResponseData.ListedGroup group) {
                        String protocolType = group.protocolType();
                        if (protocolType.equals(ConsumerProtocol.PROTOCOL_TYPE) || protocolType.isEmpty()) {
                            final String groupId = group.groupId();
                            final Optional<ConsumerGroupState> state = group.groupState().isEmpty() ?
                                Optional.empty() :
                                Optional.of(ConsumerGroupState.parse(group.groupState()));
                            final Optional<GroupType> type = group.groupType().isEmpty() ?
                                Optional.empty() :
                                Optional.of(GroupType.parse(group.groupType()));
                            final ConsumerGroupListing groupListing = new ConsumerGroupListing(
                                groupId,
                                protocolType.isEmpty(),
                                state,
                                type
                            );
                            results.addListing(groupListing);
                        }
                    }

                    @Override
                    void handleResponse(AbstractResponse abstractResponse) {
                        final ListGroupsResponse response = (ListGroupsResponse) abstractResponse;
                        synchronized (results) {
                            Errors error = Errors.forCode(response.data().errorCode());
                            if (error == Errors.COORDINATOR_LOAD_IN_PROGRESS || error == Errors.COORDINATOR_NOT_AVAILABLE) {
                                // Retriable: rethrow so the call machinery retries this node.
                                throw error.exception();
                            } else if (error != Errors.NONE) {
                                results.addError(error.exception(), node);
                            } else {
                                for (ListGroupsResponseData.ListedGroup group : response.data().groups()) {
                                    maybeAddConsumerGroup(group);
                                }
                            }
                            results.tryComplete(node);
                        }
                    }

                    @Override
                    void handleFailure(Throwable throwable) {
                        synchronized (results) {
                            results.addError(throwable, node);
                            results.tryComplete(node);
                        }
                    }
                }, nowList);
            }
        }

        @Override
        void handleFailure(Throwable throwable) {
            // Without brokers there is nothing to fan out to: fail the whole result.
            KafkaException exception = new KafkaException("Failed to find brokers to send ListGroups", throwable);
            all.complete(Collections.singletonList(exception));
        }
    }, nowMetadata);
    return new ListConsumerGroupsResult(all);
}
// An empty broker list from metadata must surface as a KafkaException.
@Test
public void testListConsumerGroupsMetadataFailure() throws Exception {
    final Cluster cluster = mockCluster(3, 0);
    final Time time = new MockTime();

    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(time, cluster,
            AdminClientConfig.RETRIES_CONFIG, "0")) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());

        // Empty metadata causes the request to fail since we have no list of brokers
        // to send the ListGroups requests to
        env.kafkaClient().prepareResponse(
            RequestTestUtils.metadataResponse(
                Collections.emptyList(),
                env.cluster().clusterResource().clusterId(),
                -1,
                Collections.emptyList()));

        final ListConsumerGroupsResult result = env.adminClient().listConsumerGroups();
        TestUtils.assertFutureError(result.all(), KafkaException.class);
    }
}
// Synchronous facade over the async lock attempt: blocks until Redis replies.
@Override
public boolean tryLock() {
    return get(tryLockAsync());
}
// Killing the Redis server mid-session makes subsequent lock attempts fail
// with a RedisException.
@Test
public void testRedisFailed() {
    GenericContainer<?> redis = createRedis();
    redis.start();

    Config config = createConfig(redis);
    RedissonClient redisson = Redisson.create(config);

    Assertions.assertThrows(RedisException.class, () -> {
        RLock lock = redisson.getLock("myLock");
        // kill RedisServer while main thread is sleeping.
        redis.stop();
        Thread.sleep(3000);
        lock.tryLock(5, 10, TimeUnit.SECONDS);
    });

    redisson.shutdown();
}
/**
 * Renders the filter as a comma-separated list of its ignore patterns, or
 * the empty string when the filter has no entries.
 */
public String getStringForDisplay() {
    if (isEmpty()) {
        return "";
    }
    final StringBuilder display = new StringBuilder();
    String separator = "";
    for (IgnoredFiles ignoredFiles : this) {
        display.append(separator).append(ignoredFiles.getPattern());
        separator = ",";
    }
    return display.toString();
}
// An empty filter renders as the empty string.
@Test
public void shouldReturnEmptyTextToDisplayWhenFilterIsEmpty() {
    assertThat(new Filter().getStringForDisplay(), is(""));
}
/**
 * Parses an Accept-Language header value into the supported locales that
 * match it (RFC 4647 filtering). A null/blank header falls back to the
 * default locale; an unparsable header raises a ValidationException.
 */
static List<Locale> negotiatePreferredLocales(String headerValue) {
    if (headerValue == null || headerValue.isBlank()) {
        headerValue = DEFAULT_LOCALE.toLanguageTag();
    }
    try {
        var languageRanges = Locale.LanguageRange.parse(headerValue);
        return Locale.filter(languageRanges, supportedLocales);
    } catch (IllegalArgumentException e) {
        throw new ValidationException(new Message("error.unparsableHeader"));
    }
}
// Neither requested locale is supported, so negotiation must yield no matches.
@Test
void test_negotiatePreferredLocales_noValidSizeZero() {
    var header = "el-GR;q=0.5,ja-JP;q=0.8";
    var negotiated = LocaleUtils.negotiatePreferredLocales(header);
    assertThat(negotiated.size(), is(0));
}
/**
 * Discovers the preset plugins bundled on the classpath.
 * Wrapped in {@code Flux.defer} so the jar scan happens lazily, once per
 * subscription, rather than eagerly at assembly time.
 */
@Override
public Flux<Plugin> getPresets() {
    // list presets from classpath
    return Flux.defer(() -> getPresetJars()
        .map(this::toPath)
        // A fresh YamlPluginFinder is created per jar path.
        .map(path -> new YamlPluginFinder().find(path)));
}
// The preset jar bundled on the test classpath should be discovered and parsed
// into a single plugin named "fake-plugin", version 0.0.2, in PENDING phase.
@Test
void getPresetsTest() {
    var presets = pluginService.getPresets();
    StepVerifier.create(presets)
        .assertNext(plugin -> {
            assertEquals("fake-plugin", plugin.getMetadata().getName());
            assertEquals("0.0.2", plugin.getSpec().getVersion());
            assertEquals(Plugin.Phase.PENDING, plugin.getStatus().getPhase());
        })
        .verifyComplete();
}
/**
 * Maps each line of {@code right} (the report copy) to the 1-based line number
 * of its matching line in {@code left} (the database copy) using a Myers diff.
 * Entries with no match keep their default value of 0. If the diff computation
 * fails, the partially filled (possibly all-zero) index is returned as-is.
 *
 * @param left  the reference lines (e.g. from the database)
 * @param right the lines to map (e.g. from the report)
 * @return an array of size {@code right.size()} with 1-based left line numbers
 */
public int[] findMatchingLines(List<String> left, List<String> right) {
    int[] index = new int[right.size()];
    // Walk the diff path backwards, starting from the end of both inputs.
    int dbLine = left.size();
    int reportLine = right.size();
    try {
        PathNode node = new MyersDiff<String>().buildPath(left, right);
        while (node.prev != null) {
            PathNode prevNode = node.prev;
            if (!node.isSnake()) {
                // additions
                reportLine -= (node.j - prevNode.j);
                // removals
                dbLine -= (node.i - prevNode.i);
            } else {
                // matches: record the 1-based database line for each matched report line
                for (int i = node.i; i > prevNode.i; i--) {
                    index[reportLine - 1] = dbLine;
                    reportLine--;
                    dbLine--;
                }
            }
            node = prevNode;
        }
    } catch (DifferentiationFailedException e) {
        LOG.error("Error finding matching lines", e);
        return index;
    }
    return index;
}
// Lines 2 and 3 were removed from the middle of the report: the surviving report
// lines must still map onto their original 1-based database line numbers.
@Test
public void shouldIgnoreDeletedLinesInTheMiddleOfFile() {
    List<String> database = new ArrayList<>();
    for (int lineNumber = 0; lineNumber <= 5; lineNumber++) {
        database.add("line - " + lineNumber);
    }
    List<String> report = new ArrayList<>();
    for (int lineNumber : new int[] {0, 1, 4, 5}) {
        report.add("line - " + lineNumber);
    }
    int[] diff = new SourceLinesDiffFinder().findMatchingLines(database, report);
    assertThat(diff).containsExactly(1, 2, 5, 6);
}
/**
 * Returns whether the given index belongs to any index set known to this registry,
 * delegating to the overload that checks against all Mongo-backed index sets.
 */
@Override
public boolean isManagedIndex(String indexName) {
    return isManagedIndex(findAllMongoIndexSets(), indexName);
}
// With a single configured index set that claims "index" as managed, the registry
// (which consults all Mongo-backed index sets) must report it as managed too.
@Test
public void isManagedIndexWithManagedIndexReturnsTrue() {
    final IndexSetConfig indexSetConfig = mock(IndexSetConfig.class);
    final List<IndexSetConfig> indexSetConfigs = Collections.singletonList(indexSetConfig);
    final MongoIndexSet indexSet = mock(MongoIndexSet.class);
    // The registry builds its index sets from the persisted configs via the factory.
    when(mongoIndexSetFactory.create(indexSetConfig)).thenReturn(indexSet);
    when(indexSetService.findAll()).thenReturn(indexSetConfigs);
    when(indexSet.isManagedIndex("index")).thenReturn(true);
    assertThat(indexSetRegistry.isManagedIndex("index")).isTrue();
}
/**
 * Fetches the bucket metadata for the given GCS path, retrying transient
 * failures using the default backoff policy and sleeper.
 *
 * @param path the GCS path whose bucket is looked up
 * @return the bucket, per the contract of the underlying overload (nullable)
 * @throws IOException if the lookup ultimately fails
 */
@Nullable
public Bucket getBucket(GcsPath path) throws IOException {
    return getBucket(path, createBackOff(), Sleeper.DEFAULT);
}
// getBucket() must retry through a transient SocketTimeoutException and succeed
// when the second attempt returns a bucket.
@Test
public void testGetBucket() throws IOException {
    GcsOptions pipelineOptions = gcsOptionsWithTestCredential();
    GcsUtil gcsUtil = pipelineOptions.getGcsUtil();
    Storage mockStorage = Mockito.mock(Storage.class);
    gcsUtil.setStorageClient(mockStorage);
    Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class);
    Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class);
    BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff());
    when(mockStorage.buckets()).thenReturn(mockStorageObjects);
    when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet);
    // First call fails with a retryable timeout, second call succeeds.
    when(mockStorageGet.execute())
        .thenThrow(new SocketTimeoutException("SocketException"))
        .thenReturn(new Bucket());
    assertNotNull(
        gcsUtil.getBucket(
            GcsPath.fromComponents("testbucket", "testobject"),
            mockBackOff,
            new FastNanoClockAndSleeper()::sleep));
}
/**
 * Validates an update of an existing function config and merges the permitted
 * changes into a copy of the existing config.
 *
 * <p>Identity fields (tenant, namespace, name) and structural fields (input
 * topics, output serde/schema, processing guarantees, ordering, runtime,
 * auto-ack, subscription name) must not change and raise
 * {@link IllegalArgumentException} if they differ. Mutable fields (class name,
 * jar, log topic, output, user config, secrets, retries, parallelism,
 * resources, window config, timeouts, flags, producer config, ...) are copied
 * from {@code newConfig} into the merged result when present.
 *
 * <p>NOTE(review): this method normalizes {@code newConfig} in place (its
 * input specs map is created and populated) — callers should not rely on
 * {@code newConfig} being untouched.
 *
 * @param existingConfig the currently deployed config
 * @param newConfig the requested update
 * @return a merged config based on {@code existingConfig} with allowed updates applied
 * @throws IllegalArgumentException if an immutable field was altered
 */
public static FunctionConfig validateUpdate(FunctionConfig existingConfig, FunctionConfig newConfig) {
    FunctionConfig mergedConfig = existingConfig.toBuilder().build();
    // Identity fields can never change across an update.
    if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
        throw new IllegalArgumentException("Tenants differ");
    }
    if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
        throw new IllegalArgumentException("Namespaces differ");
    }
    if (!existingConfig.getName().equals(newConfig.getName())) {
        throw new IllegalArgumentException("Function Names differ");
    }
    if (!StringUtils.isEmpty(newConfig.getClassName())) {
        mergedConfig.setClassName(newConfig.getClassName());
    }
    if (!StringUtils.isEmpty(newConfig.getJar())) {
        mergedConfig.setJar(newConfig.getJar());
    }
    // Ensure both input-spec maps exist before normalization below.
    if (newConfig.getInputSpecs() == null) {
        newConfig.setInputSpecs(new HashMap<>());
    }
    if (mergedConfig.getInputSpecs() == null) {
        mergedConfig.setInputSpecs(new HashMap<>());
    }
    // Normalize every input declaration style (plain inputs, topics pattern,
    // custom serde, custom schema) into newConfig's input-spec map.
    if (newConfig.getInputs() != null) {
        newConfig.getInputs().forEach((topicName -> {
            newConfig.getInputSpecs().put(topicName,
                    ConsumerConfig.builder().isRegexPattern(false).build());
        }));
    }
    if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
        newConfig.getInputSpecs().put(newConfig.getTopicsPattern(), ConsumerConfig.builder()
                .isRegexPattern(true)
                .build());
    }
    if (newConfig.getCustomSerdeInputs() != null) {
        newConfig.getCustomSerdeInputs().forEach((topicName, serdeClassName) -> {
            newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder()
                    .serdeClassName(serdeClassName)
                    .isRegexPattern(false)
                    .build());
        });
    }
    if (newConfig.getCustomSchemaInputs() != null) {
        newConfig.getCustomSchemaInputs().forEach((topicName, schemaClassname) -> {
            newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder()
                    .schemaType(schemaClassname)
                    .isRegexPattern(false)
                    .build());
        });
    }
    // Input topics may only be refined, never added or have their regex flag flipped.
    if (!newConfig.getInputSpecs().isEmpty()) {
        newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
            if (!existingConfig.getInputSpecs().containsKey(topicName)) {
                throw new IllegalArgumentException("Input Topics cannot be altered");
            }
            if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
                throw new IllegalArgumentException(
                        "isRegexPattern for input topic " + topicName + " cannot be altered");
            }
            mergedConfig.getInputSpecs().put(topicName, consumerConfig);
        });
    }
    if (!StringUtils.isEmpty(newConfig.getOutputSerdeClassName()) && !newConfig.getOutputSerdeClassName()
            .equals(existingConfig.getOutputSerdeClassName())) {
        throw new IllegalArgumentException("Output Serde mismatch");
    }
    if (!StringUtils.isEmpty(newConfig.getOutputSchemaType()) && !newConfig.getOutputSchemaType()
            .equals(existingConfig.getOutputSchemaType())) {
        throw new IllegalArgumentException("Output Schema mismatch");
    }
    if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
        mergedConfig.setLogTopic(newConfig.getLogTopic());
    }
    if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
            .equals(existingConfig.getProcessingGuarantees())) {
        throw new IllegalArgumentException("Processing Guarantees cannot be altered");
    }
    if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
            .equals(existingConfig.getRetainOrdering())) {
        throw new IllegalArgumentException("Retain Ordering cannot be altered");
    }
    if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
            .equals(existingConfig.getRetainKeyOrdering())) {
        throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
    }
    if (!StringUtils.isEmpty(newConfig.getOutput())) {
        mergedConfig.setOutput(newConfig.getOutput());
    }
    if (newConfig.getUserConfig() != null) {
        mergedConfig.setUserConfig(newConfig.getUserConfig());
    }
    if (newConfig.getSecrets() != null) {
        mergedConfig.setSecrets(newConfig.getSecrets());
    }
    if (newConfig.getRuntime() != null && !newConfig.getRuntime().equals(existingConfig.getRuntime())) {
        throw new IllegalArgumentException("Runtime cannot be altered");
    }
    if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
        throw new IllegalArgumentException("AutoAck cannot be altered");
    }
    if (newConfig.getMaxMessageRetries() != null) {
        mergedConfig.setMaxMessageRetries(newConfig.getMaxMessageRetries());
    }
    if (!StringUtils.isEmpty(newConfig.getDeadLetterTopic())) {
        mergedConfig.setDeadLetterTopic(newConfig.getDeadLetterTopic());
    }
    if (!StringUtils.isEmpty(newConfig.getSubName()) && !newConfig.getSubName()
            .equals(existingConfig.getSubName())) {
        throw new IllegalArgumentException("Subscription Name cannot be altered");
    }
    if (newConfig.getParallelism() != null) {
        mergedConfig.setParallelism(newConfig.getParallelism());
    }
    if (newConfig.getResources() != null) {
        // Resources are merged field-by-field rather than replaced wholesale.
        mergedConfig
                .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
    }
    if (newConfig.getWindowConfig() != null) {
        mergedConfig.setWindowConfig(newConfig.getWindowConfig());
    }
    if (newConfig.getTimeoutMs() != null) {
        mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
    }
    if (newConfig.getCleanupSubscription() != null) {
        mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
    }
    if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
        mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
    }
    if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
        mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
    }
    if (newConfig.getProducerConfig() != null) {
        mergedConfig.setProducerConfig(newConfig.getProducerConfig());
    }
    return mergedConfig;
}
// timeoutMs is an updatable field: the merged config must pick up the new value
// while leaving every other field unchanged.
@Test
public void testMergeDifferentTimeout() {
    FunctionConfig existing = createFunctionConfig();
    FunctionConfig update = createUpdatedFunctionConfig("timeoutMs", 102L);
    FunctionConfig merged = FunctionConfigUtils.validateUpdate(existing, update);
    assertEquals(merged.getTimeoutMs(), Long.valueOf(102L));
    // Restoring the original timeout should make the merged config identical
    // (compared via JSON serialization) to the original config.
    merged.setTimeoutMs(existing.getTimeoutMs());
    assertEquals(new Gson().toJson(existing), new Gson().toJson(merged));
}
public static String extractAppIdFromMasterRoleName(String masterRoleName) { Iterator<String> parts = STRING_SPLITTER.split(masterRoleName).iterator(); // skip role type if (parts.hasNext() && parts.next().equals(RoleType.MASTER) && parts.hasNext()) { return parts.next(); } return null; }
// The app id is the second '+'-separated token after the master role type;
// extra trailing tokens are ignored and non-master role names yield null.
@Test
public void testExtractAppIdFromMasterRoleName() throws Exception {
    assertEquals("someApp", RoleUtils.extractAppIdFromMasterRoleName("Master+someApp"));
    assertEquals("someApp", RoleUtils.extractAppIdFromMasterRoleName("Master+someApp+xx"));
    assertNull(RoleUtils.extractAppIdFromMasterRoleName("ReleaseNamespace+app1+application"));
}
/**
 * Decides which distributed-transaction operation applies for the given
 * auto-commit flag: BEGIN when entering manual mode outside a transaction,
 * COMMIT when auto-commit is restored inside a transaction, IGNORE otherwise.
 *
 * @param autoCommit the connection's new auto-commit setting
 * @return the operation to perform
 */
public DistributedTransactionOperationType getDistributedTransactionOperationType(final boolean autoCommit) {
    // Read the transaction state once; the decision is a pure function of
    // (autoCommit, inTransaction).
    boolean inTransaction = distributionTransactionManager.isInTransaction();
    if (autoCommit) {
        return inTransaction
                ? DistributedTransactionOperationType.COMMIT
                : DistributedTransactionOperationType.IGNORE;
    }
    return inTransaction
            ? DistributedTransactionOperationType.IGNORE
            : DistributedTransactionOperationType.BEGIN;
}
// A fresh connection transaction with autoCommit=false should resolve to IGNORE.
@Test
void assertDistributedTransactionOperationTypeIgnore() {
    connectionTransaction = new ConnectionTransaction(getXATransactionRule(), new TransactionConnectionContext());
    assertThat(connectionTransaction.getDistributedTransactionOperationType(false),
            is(DistributedTransactionOperationType.IGNORE));
}
@GetMapping("/apps/search/by-appid-or-name") public PageDTO<App> search(@RequestParam(value = "query", required = false) String query, Pageable pageable) { if (StringUtils.isEmpty(query)) { return appService.findAll(pageable); } //search app PageDTO<App> appPageDTO = appService.searchByAppIdOrAppName(query, pageable); if (appPageDTO.hasContent()) { return appPageDTO; } if (!portalConfig.supportSearchByItem()) { return new PageDTO<>(Lists.newLinkedList(), pageable, 0); } //search item return searchByItem(query, pageable); }
// When item search is disabled in portal config, an empty app-search result
// must come back as an empty page without falling back to item search or findAll.
@Test
public void testSearchItemSwitch() {
    String query = "timeout";
    PageRequest request = PageRequest.of(0, 20);
    PageDTO<App> apps = new PageDTO<>(Lists.newLinkedList(), request, 0);
    when(appService.searchByAppIdOrAppName(query, request)).thenReturn(apps);
    when(portalConfig.supportSearchByItem()).thenReturn(false);
    PageDTO<App> result = searchController.search(query, request);
    Assert.assertFalse(result.hasContent());
    // findAll is only used for blank queries; it must not be touched here.
    verify(appService, times(0)).findAll(request);
    verify(appService, times(1)).searchByAppIdOrAppName(query, request);
}
/**
 * Finds the first registered runner capable of executing the given job.
 * Fails with a configuration exception when no runner supports it.
 */
BackgroundJobRunner getBackgroundJobRunner(Job job) {
    assertJobExists(job.getJobDetails());
    for (BackgroundJobRunner candidate : backgroundJobRunners) {
        if (candidate.supports(job)) {
            return candidate;
        }
    }
    throw problematicConfigurationException("Could not find a BackgroundJobRunner: either no JobActivator is registered, your Background Job Class is not registered within the IoC container or your Job does not have a default no-arg constructor.");
}
// A job targeting an IoC-managed service (no pre-built instance supplied) should
// be dispatched to the IoC-aware runner.
@Test
void getBackgroundJobRunnerForIoCJobWithoutInstance() {
    final Job job = anEnqueuedJob()
            .<TestService>withJobDetails(ts -> ts.doWork())
            .build();
    assertThat(backgroundJobServer.getBackgroundJobRunner(job))
            .isNotNull()
            .isInstanceOf(BackgroundJobWithIocRunner.class);
}
@Override public boolean tryFence(HAServiceTarget target, String args) { ProcessBuilder builder; String cmd = parseArgs(target.getTransitionTargetHAStatus(), args); if (!Shell.WINDOWS) { builder = new ProcessBuilder("bash", "-e", "-c", cmd); } else { builder = new ProcessBuilder("cmd.exe", "/c", cmd); } setConfAsEnvVars(builder.environment()); addTargetInfoAsEnvVars(target, builder.environment()); Process p; try { p = builder.start(); p.getOutputStream().close(); } catch (IOException e) { LOG.warn("Unable to execute " + cmd, e); return false; } String pid = tryGetPid(p); LOG.info("Launched fencing command '" + cmd + "' with " + ((pid != null) ? ("pid " + pid) : "unknown pid")); String logPrefix = abbreviate(cmd, ABBREV_LENGTH); if (pid != null) { logPrefix = "[PID " + pid + "] " + logPrefix; } // Pump logs to stderr StreamPumper errPumper = new StreamPumper( LOG, logPrefix, p.getErrorStream(), StreamPumper.StreamType.STDERR); errPumper.start(); StreamPumper outPumper = new StreamPumper( LOG, logPrefix, p.getInputStream(), StreamPumper.StreamType.STDOUT); outPumper.start(); int rc; try { rc = p.waitFor(); errPumper.join(); outPumper.join(); } catch (InterruptedException ie) { LOG.warn("Interrupted while waiting for fencing command: " + cmd); return false; } return rc == 0; }
// 'read' blocks on stdin; since the fencer closes the subprocess's stdin right
// after launch, the command must fail (non-zero exit) quickly instead of
// hanging, so tryFence returns false well within the 10s timeout.
@Test(timeout=10000)
public void testSubprocessInputIsClosed() {
    assertFalse(fencer.tryFence(TEST_TARGET, "read"));
}
/**
 * Copies all response-scoped baggage from the invoke context onto the response,
 * prefixing each key with the response-baggage namespace.
 * A null context or an empty baggage map is a no-op.
 *
 * @param context the RPC invoke context holding the baggage (may be null)
 * @param response the response to attach the baggage properties to
 */
public static void carryWithResponse(RpcInvokeContext context, SofaResponse response) {
    if (context == null) {
        return;
    }
    Map<String, String> responseBaggage = context.getAllResponseBaggage();
    if (!CommonUtils.isNotEmpty(responseBaggage)) {
        return;
    }
    String prefix = RemotingConstants.RPC_RESPONSE_BAGGAGE + ".";
    responseBaggage.forEach((key, value) -> response.addResponseProp(prefix + key, value));
}
// Baggage placed on the invoke context must surface on the response under the
// response-baggage key prefix.
@Test
public void testCarryWithResponse() {
    RpcInvokeContext invokeContext = RpcInvokeContext.getContext();
    invokeContext.putResponseBaggage(KEY, VALUE);
    SofaResponse response = new SofaResponse();
    BaggageResolver.carryWithResponse(invokeContext, response);
    Object prop = response.getResponseProp(RemotingConstants.RPC_RESPONSE_BAGGAGE + "." + KEY);
    String baggage = (String) prop;
    assertNotNull(baggage);
    assertEquals(VALUE, baggage);
}
/**
 * Registers the subscriber for the given service on behalf of the client,
 * routing straight to the ephemeral operation service.
 */
@Override
public void subscribeService(Service service, Subscriber subscriber, String clientId) {
    // Subscriber is an ephemeral type only, so call ephemeral client directly
    ephemeralClientOperationService.subscribeService(service, subscriber, clientId);
}
// Subscriptions must be routed only to the ephemeral operation service, never
// to the persistent one.
@Test
void testSubscribeService() {
    clientOperationServiceProxy.subscribeService(service, subscriber, ephemeralIpPortId);
    verify(ephemeralClientOperationServiceImpl).subscribeService(service, subscriber, ephemeralIpPortId);
    verify(persistentClientOperationServiceImpl, never()).subscribeService(service, subscriber, ephemeralIpPortId);
}
/**
 * Registers a compound stat with this sensor using a {@code null} (default)
 * metric config.
 *
 * @return {@code true} if the stat was registered, {@code false} if the sensor
 *         has already expired (see the overload's contract as exercised by tests)
 */
public boolean add(CompoundStat stat) {
    return add(stat, null);
}
// Once a sensor has been idle past its expiration window, further stat
// registrations (both single metrics and compound stats) must be rejected.
@Test
public void testExpiredSensor() {
    MetricConfig config = new MetricConfig();
    Time mockTime = new MockTime();
    try (Metrics metrics = new Metrics(config, Collections.singletonList(new JmxReporter()), mockTime, true)) {
        long inactiveSensorExpirationTimeSeconds = 60L;
        Sensor sensor = new Sensor(metrics, "sensor", null, config, mockTime,
                inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel.INFO);
        // Before expiration: registrations succeed.
        assertTrue(sensor.add(metrics.metricName("test1", "grp1"), new Avg()));
        Map<String, String> emptyTags = Collections.emptyMap();
        MetricName rateMetricName = new MetricName("rate", "test", "", emptyTags);
        MetricName totalMetricName = new MetricName("total", "test", "", emptyTags);
        Meter meter = new Meter(rateMetricName, totalMetricName);
        assertTrue(sensor.add(meter));
        // Advance mock time just past the inactivity window to expire the sensor.
        mockTime.sleep(TimeUnit.SECONDS.toMillis(inactiveSensorExpirationTimeSeconds + 1));
        // After expiration: both add() overloads must return false.
        assertFalse(sensor.add(metrics.metricName("test3", "grp1"), new Avg()));
        assertFalse(sensor.add(meter));
    }
}