focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Builds an {@link Ip4Prefix} from a 32-bit integer address and a prefix length.
 *
 * @param address      the IPv4 address packed into an int (big-endian)
 * @param prefixLength the network mask length in bits
 * @return the prefix covering {@code address/prefixLength}
 */
public static Ip4Prefix valueOf(int address, int prefixLength) {
    final Ip4Address networkAddress = Ip4Address.valueOf(address);
    return new Ip4Prefix(networkAddress, prefixLength);
}
// Verifies Ip4Prefix.valueOf(int, int): host bits beyond the prefix length
// are masked off in the canonical "a.b.c.d/len" string form.
@Test
public void testValueOfForIntegerIPv4() {
    Ip4Prefix ipPrefix;
    // /24 masks the last octet.
    ipPrefix = Ip4Prefix.valueOf(0x01020304, 24);
    assertThat(ipPrefix.toString(), is("1.2.3.0/24"));
    // /32 keeps the full address.
    ipPrefix = Ip4Prefix.valueOf(0x01020304, 32);
    assertThat(ipPrefix.toString(), is("1.2.3.4/32"));
    ipPrefix = Ip4Prefix.valueOf(0x01020305, 32);
    assertThat(ipPrefix.toString(), is("1.2.3.5/32"));
    // Boundary cases: all-zero address with min/max prefix lengths.
    ipPrefix = Ip4Prefix.valueOf(0, 0);
    assertThat(ipPrefix.toString(), is("0.0.0.0/0"));
    ipPrefix = Ip4Prefix.valueOf(0, 32);
    assertThat(ipPrefix.toString(), is("0.0.0.0/32"));
    // All-ones address: /0 masks everything, /16 and /32 keep prefix bits.
    ipPrefix = Ip4Prefix.valueOf(0xffffffff, 0);
    assertThat(ipPrefix.toString(), is("0.0.0.0/0"));
    ipPrefix = Ip4Prefix.valueOf(0xffffffff, 16);
    assertThat(ipPrefix.toString(), is("255.255.0.0/16"));
    ipPrefix = Ip4Prefix.valueOf(0xffffffff, 32);
    assertThat(ipPrefix.toString(), is("255.255.255.255/32"));
}
/**
 * Formats the elapsed time since {@code startTimeMs} as "<message> took N ms.".
 *
 * @param startTimeMs start timestamp in milliseconds (same clock as CommonUtils.getCurrentMs)
 * @param message     descriptive prefix for the formatted string
 * @return the formatted elapsed-time message
 */
public static String formatTimeTakenMs(long startTimeMs, String message) {
    final long elapsedMs = CommonUtils.getCurrentMs() - startTimeMs;
    return message + " took " + elapsedMs + " ms.";
}
// Checks that formatTimeTakenMs embeds the message and a plausible elapsed
// value: at least the simulated delta, and no more than twice it.
@Test
public void formatTimeTakenMs() {
    // Local struct pairing an expected-output regex with an input message.
    class TestCase {
        Pattern mExpected;
        String mInputMessage;
        public TestCase(String expectedRE, String inputMessage) {
            mExpected = Pattern.compile(expectedRE);
            mInputMessage = inputMessage;
        }
    }
    List<TestCase> testCases = new ArrayList<>();
    testCases.add(new TestCase("^Task A took (.*) ms.$", "Task A"));
    testCases.add(new TestCase("^Task B took (.*) ms.$", "Task B"));
    long delta = 100;
    for (TestCase testCase : testCases) {
        // Pretend the task started `delta` ms ago.
        String result = FormatUtils.formatTimeTakenMs(CommonUtils.getCurrentMs() - delta,
            testCase.mInputMessage);
        Matcher match = testCase.mExpected.matcher(result);
        assertTrue(match.matches());
        // The reported elapsed time must be in [delta, 2*delta].
        assertTrue(delta <= Long.parseLong(match.group(1)));
        assertTrue(Long.parseLong(match.group(1)) <= 2 * delta);
    }
}
// Asserts that the actual multimap contains at least every entry of
// expectedMultimap; returns an Ordered handle for optional in-order checks.
@CanIgnoreReturnValue
public final Ordered containsAtLeastEntriesIn(Multimap<?, ?> expectedMultimap) {
    checkNotNull(expectedMultimap, "expectedMultimap");
    checkNotNull(actual);
    // Entries present in expected but missing from actual.
    ListMultimap<?, ?> missing = difference(expectedMultimap, actual);
    // TODO(kak): Possible enhancement: Include "[1 copy]" if the element does appear in
    // the subject but not enough times. Similarly for unexpected extra items.
    if (!missing.isEmpty()) {
        failWithActual(
            fact("missing", countDuplicatesMultimap(annotateEmptyStringsMultimap(missing))),
            simpleFact("---"),
            fact("expected to contain at least", annotateEmptyStringsMultimap(expectedMultimap)));
        // Sentinel Ordered that no-ops, since the containment check already failed.
        return ALREADY_FAILED;
    }
    // allowUnexpected=true: extra entries in actual are fine for "at least".
    return new MultimapInOrder(/* allowUnexpected = */ true, expectedMultimap);
}
// Removing two expected entries (and adding an unrelated one) must fail
// containsAtLeastEntriesIn with a "missing" fact listing exactly those entries.
@Test
public void containsAtLeastFailureMissing() {
    ImmutableMultimap<Integer, String> expected =
        ImmutableMultimap.of(3, "one", 3, "six", 3, "two", 4, "five", 4, "four");
    ListMultimap<Integer, String> actual = LinkedListMultimap.create(expected);
    // Drop two expected entries; the extra put(50, ...) must NOT cause failure.
    actual.remove(3, "six");
    actual.remove(4, "five");
    actual.put(50, "hawaii");
    expectFailureWhenTestingThat(actual).containsAtLeastEntriesIn(expected);
    assertFailureKeys("missing", "---", "expected to contain at least", "but was");
    assertFailureValue("missing", "{3=[six], 4=[five]}");
}
// Netty decode hook: wraps the inbound buffer in a MySQL payload (using the
// charset negotiated for this channel) and delegates packet parsing.
@Override
protected void decode(final ChannelHandlerContext ctx, final ByteBuf in, final List<Object> out) {
    MySQLPacketPayload payload = new MySQLPacketPayload(in,
        ctx.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get());
    decodeCommandPacket(payload, out);
}
// Feeds a full text-protocol result-set sequence (column count, field
// definition, EOF, row, EOF) through the decoder and expects one
// InternalResultSet to be produced.
@Test
void assertDecodeQueryCommPacket() {
    MySQLCommandPacketDecoder commandPacketDecoder = new MySQLCommandPacketDecoder();
    List<Object> actual = new LinkedList<>();
    commandPacketDecoder.decode(channelHandlerContext, mockEmptyResultSetPacket(), actual);
    commandPacketDecoder.decode(channelHandlerContext, mockFieldDefinition41Packet(), actual);
    commandPacketDecoder.decode(channelHandlerContext, mockEofPacket(), actual);
    commandPacketDecoder.decode(channelHandlerContext, mockEmptyResultSetPacket(), actual);
    commandPacketDecoder.decode(channelHandlerContext, mockEofPacket(), actual);
    assertPacketByType(actual, InternalResultSet.class);
}
/**
 * Dispatches the health-check task to the processor registered for the
 * cluster's check type, falling back to the no-op processor when the type
 * has no registered implementation.
 */
@Override
public void process(HealthCheckTaskV2 task, Service service, ClusterMetadata metadata) {
    final String checkType = metadata.getHealthyCheckType();
    HealthCheckProcessorV2 delegate = healthCheckProcessorMap.get(checkType);
    if (null == delegate) {
        // Unknown type: use the registered NONE processor as the fallback.
        delegate = healthCheckProcessorMap.get(NoneHealthCheckProcessor.TYPE);
    }
    delegate.process(task, service, metadata);
}
// With a TCP processor registered, process() must consult the cluster's
// check type and hand the task to that processor (verified via the
// getClient() interaction on the task).
@Test
void testProcess() throws NoSuchFieldException, IllegalAccessException {
    // Registers the TCP processor into the delegate's internal map.
    testAddProcessor();
    when(clusterMetadata.getHealthyCheckType()).thenReturn(HealthCheckType.TCP.name());
    when(healthCheckTaskV2.getClient()).thenReturn(new IpPortBasedClient("127.0.0.1:80#true", true));
    healthCheckProcessorV2Delegate.process(healthCheckTaskV2, service, clusterMetadata);
    verify(clusterMetadata).getHealthyCheckType();
    verify(healthCheckTaskV2).getClient();
}
// Returns the fixed human-readable description of this computation step.
@Override
public String getDescription() {
    return DESCRIPTION;
}
// The step must expose the class's DESCRIPTION constant verbatim.
@Test
public void getDescription() {
    assertThat(underTest.getDescription()).isEqualTo(HandleUnanalyzedLanguagesStep.DESCRIPTION);
}
// Package-private accessor for the configured retriever; exists for tests.
AccessTokenRetriever getAccessTokenRetriever() {
    return accessTokenRetriever;
}
// Configuring the handler with a token endpoint URL plus client credentials
// in JAAS must yield an HTTP-based token retriever.
@Test
public void testConfigureWithAccessClientCredentials() {
    OAuthBearerLoginCallbackHandler handler = new OAuthBearerLoginCallbackHandler();
    Map<String, ?> configs = getSaslConfigs(SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL, "http://www.example.com");
    Map<String, Object> jaasConfigs = new HashMap<>();
    jaasConfigs.put(CLIENT_ID_CONFIG, "an ID");
    jaasConfigs.put(CLIENT_SECRET_CONFIG, "a secret");
    configureHandler(handler, configs, jaasConfigs);
    assertInstanceOf(HttpAccessTokenRetriever.class, handler.getAccessTokenRetriever());
}
// Convenience overload: fetches topic route data with topic-existence
// checking enabled (the trailing `true`).
public TopicRouteData getTopicRouteInfoFromNameServer(final String topic, final long timeoutMillis)
    throws RemotingException, MQClientException, InterruptedException {
    return getTopicRouteInfoFromNameServer(topic, timeoutMillis, true);
}
// Round-trips a fully populated TopicRouteData through the mocked remoting
// layer and checks every collection survives (de)serialization.
@Test
public void assertGetTopicRouteInfoFromNameServer() throws RemotingException, InterruptedException, MQClientException {
    mockInvokeSync();
    TopicRouteData responseBody = new TopicRouteData();
    responseBody.getQueueDatas().add(new QueueData());
    responseBody.getBrokerDatas().add(new BrokerData());
    responseBody.getFilterServerTable().put("key", Collections.emptyList());
    Map<String, TopicQueueMappingInfo> topicQueueMappingByBroker = new HashMap<>();
    topicQueueMappingByBroker.put("key", new TopicQueueMappingInfo());
    responseBody.setTopicQueueMappingByBroker(topicQueueMappingByBroker);
    setResponseBody(responseBody);
    TopicRouteData actual = mqClientAPI.getTopicRouteInfoFromNameServer(defaultTopic, defaultTimeout);
    assertNotNull(actual);
    // Each collection must contain exactly the single entry added above.
    assertEquals(1, actual.getQueueDatas().size());
    assertEquals(1, actual.getBrokerDatas().size());
    assertEquals(1, actual.getFilterServerTable().size());
    assertEquals(1, actual.getTopicQueueMappingByBroker().size());
}
// Sets the minimum number of bytes allocated per stream per round;
// must be strictly positive.
public void allocationQuantum(int allocationQuantum) {
    checkPositive(allocationQuantum, "allocationQuantum");
    this.allocationQuantum = allocationQuantum;
}
// With weights A=50 and B=200 (C/D inactive), bytes should be distributed
// roughly 1:4 between A and B until both are drained; the final write
// flushes A's remainder. Exact counts depend on the WFQ distributor's
// round-robin with quantum 1.
@Test
public void writeShouldFavorPriority() throws Http2Exception {
    // Root the streams at the connection and assign weights.
    setPriority(STREAM_A, 0, (short) 50, false);
    setPriority(STREAM_B, 0, (short) 200, false);
    setPriority(STREAM_C, 0, (short) 100, false);
    setPriority(STREAM_D, 0, (short) 100, false);
    // Only A and B have data pending (`true`).
    initState(STREAM_A, 1000, true);
    initState(STREAM_B, 1000, true);
    initState(STREAM_C, 1000, false);
    initState(STREAM_D, 1000, false);
    // Set allocation quantum to 1 so it is easier to see the ratio of total bytes written between each stream.
    distributor.allocationQuantum(1);
    // First 100 bytes: 20/80 split mirrors the 50:200 weight ratio.
    assertTrue(write(100));
    assertEquals(20, captureWrites(STREAM_A));
    verifyWrite(times(20), STREAM_A, 1);
    assertEquals(80, captureWrites(STREAM_B));
    verifyWrite(times(0), STREAM_B, 1);
    verifyNeverWrite(STREAM_C);
    verifyNeverWrite(STREAM_D);
    // Second 100 bytes: cumulative totals stay on the same ratio.
    assertTrue(write(100));
    assertEquals(40, captureWrites(STREAM_A));
    verifyWrite(times(40), STREAM_A, 1);
    assertEquals(160, captureWrites(STREAM_B));
    verifyWrite(atMost(1), STREAM_B, 1);
    verifyNeverWrite(STREAM_C);
    verifyNeverWrite(STREAM_D);
    // B drains completely (1000); A reaches 250.
    assertTrue(write(1050));
    assertEquals(250, captureWrites(STREAM_A));
    verifyWrite(times(250), STREAM_A, 1);
    assertEquals(1000, captureWrites(STREAM_B));
    verifyWrite(atMost(2), STREAM_B, 1);
    verifyNeverWrite(STREAM_C);
    verifyNeverWrite(STREAM_D);
    // Remaining 750 goes to A in one write; nothing left afterwards.
    assertFalse(write(750));
    assertEquals(1000, captureWrites(STREAM_A));
    verifyWrite(times(1), STREAM_A, 750);
    assertEquals(1000, captureWrites(STREAM_B));
    verifyWrite(times(0), STREAM_B, 0);
    verifyNeverWrite(STREAM_C);
    verifyNeverWrite(STREAM_D);
}
// Test-visible overload: starts evaluation with an empty visited set used
// for cycle detection in the recursive overload.
@VisibleForTesting
void parseWorkflowParameter(
    Map<String, Parameter> workflowParams, Parameter param, String workflowId) {
  parseWorkflowParameter(workflowParams, param, workflowId, new HashSet<>());
}
// A string-array parameter referencing a long[] producer should have the
// result implicitly converted to String[] and a conversion note recorded in
// meta; both a lazily-evaluated and a pre-evaluated source are covered.
@Test
public void testParseWorkflowParameterWithImplicitToStringArray() {
  StringArrayParameter bar = StringArrayParameter.builder().name("bar").expression("foo;").build();
  // Source param evaluated on demand via its expression.
  paramEvaluator.parseWorkflowParameter(
      Collections.singletonMap(
          "foo", LongArrayParameter.builder().expression("return new long[]{1, 2,3};").build()),
      bar,
      "test-workflow");
  assertArrayEquals(new String[] {"1", "2", "3"}, bar.getEvaluatedResult());
  assertEquals(
      "Implicitly converted the evaluated result to a string array for type class [J",
      bar.getMeta().get("info"));
  // Source param already evaluated (result + timestamp set).
  paramEvaluator.parseWorkflowParameter(
      Collections.singletonMap(
          "foo",
          LongArrayParameter.builder()
              .evaluatedResult(new long[] {1, 2, 3})
              .evaluatedTime(123L)
              .build()),
      bar,
      "test-workflow");
  assertArrayEquals(new String[] {"1", "2", "3"}, bar.getEvaluatedResult());
  assertEquals(
      "Implicitly converted the evaluated result to a string array for type class [J",
      bar.getMeta().get("info"));
}
/**
 * Appends a WHERE clause with one CONTAINS predicate per non-empty field of
 * the context, joined by AND, followed by an ORDER BY clause.
 *
 * <p>Fix: the original appended {@code AND_OPERATOR} unconditionally before
 * every predicate after the first field check, so a context whose leading
 * fields were empty produced malformed SQL like {@code WHERE AND contains(...)}.
 * AND is now only inserted between two actual predicates.
 *
 * @param context        source of the id/type filter values
 * @param collectionName collection name, used only for the error message
 * @param queryStrBuilder builder already holding the SELECT portion
 * @return the completed query string
 * @throws IllegalArgumentException if the context yields no predicate at all
 */
@VisibleForTesting
String addPredicates(TimelineReaderContext context, String collectionName,
    StringBuilder queryStrBuilder) {
  boolean hasPredicate = false;
  queryStrBuilder.append(WHERE_CLAUSE);
  if (!DocumentStoreUtils.isNullOrEmpty(context.getClusterId())) {
    hasPredicate = true;
    queryStrBuilder.append(String.format(CONTAINS_FUNC_FOR_ID,
        context.getClusterId()));
  }
  if (!DocumentStoreUtils.isNullOrEmpty(context.getUserId())) {
    if (hasPredicate) {
      queryStrBuilder.append(AND_OPERATOR);
    }
    hasPredicate = true;
    queryStrBuilder.append(String.format(CONTAINS_FUNC_FOR_ID,
        context.getUserId()));
  }
  if (!DocumentStoreUtils.isNullOrEmpty(context.getFlowName())) {
    if (hasPredicate) {
      queryStrBuilder.append(AND_OPERATOR);
    }
    hasPredicate = true;
    queryStrBuilder.append(String.format(CONTAINS_FUNC_FOR_ID,
        context.getFlowName()));
  }
  if (!DocumentStoreUtils.isNullOrEmpty(context.getAppId())) {
    if (hasPredicate) {
      queryStrBuilder.append(AND_OPERATOR);
    }
    hasPredicate = true;
    queryStrBuilder.append(String.format(CONTAINS_FUNC_FOR_ID,
        context.getAppId()));
  }
  if (!DocumentStoreUtils.isNullOrEmpty(context.getEntityId())) {
    if (hasPredicate) {
      queryStrBuilder.append(AND_OPERATOR);
    }
    hasPredicate = true;
    queryStrBuilder.append(String.format(CONTAINS_FUNC_FOR_ID,
        context.getEntityId()));
  }
  if (context.getFlowRunId() != null) {
    if (hasPredicate) {
      queryStrBuilder.append(AND_OPERATOR);
    }
    hasPredicate = true;
    queryStrBuilder.append(String.format(CONTAINS_FUNC_FOR_ID,
        context.getFlowRunId()));
  }
  if (!DocumentStoreUtils.isNullOrEmpty(context.getEntityType())) {
    if (hasPredicate) {
      queryStrBuilder.append(AND_OPERATOR);
    }
    hasPredicate = true;
    queryStrBuilder.append(String.format(CONTAINS_FUNC_FOR_TYPE,
        context.getEntityType()));
  }
  if (hasPredicate) {
    queryStrBuilder.append(ORDER_BY_CLAUSE);
    LOG.debug("CosmosDB Sql Query with predicates : {}", queryStrBuilder);
    return queryStrBuilder.toString();
  }
  throw new IllegalArgumentException("The TimelineReaderContext does not " +
      "have enough information to query documents for Collection : " +
      collectionName);
}
// When every context field is reported empty, addPredicates has no predicate
// to emit and must throw IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testFailureFOnEmptyPredicates() {
    // Force isNullOrEmpty to treat all fields as empty.
    PowerMockito.when(DocumentStoreUtils.isNullOrEmpty(
        ArgumentMatchers.any()))
        .thenReturn(Boolean.TRUE);
    CosmosDBDocumentStoreReader cosmosDBDocumentStoreReader =
        new CosmosDBDocumentStoreReader(null);
    cosmosDBDocumentStoreReader.addPredicates(
        new TimelineReaderContext(null, "", "", null, "", "", null),
        "DummyCollection", new StringBuilder());
}
// Consumes one line of an ARIN whois response, accumulating organization,
// country and redirect state. Order-sensitive: a more specific NetType
// resets previously captured fields so the most specific block wins.
@Override
public void readLine(String line) {
    // Skip comments and blank lines.
    if (line.startsWith("#") || line.isEmpty()) {
        return;
    }
    // In some cases, ARIN may have multiple results with different NetType values. When that happens,
    // we want to use the data from the entry with the data closest to the customer actually using the IP.
    if (line.startsWith("NetType:")) {
        prevNetworkType = currNetworkType;
        currNetworkType = NetworkType.getEnum(lineValue(line));
        if (null != currNetworkType && currNetworkType.isMoreSpecificThan(prevNetworkType)) {
            // More specific block: discard data from the less specific one.
            this.organization = null;
            this.countryCode = null;
        }
    }
    // First Organization/Customer value after a (re)set wins.
    if((line.startsWith("Organization:") || line.startsWith("Customer:")) && this.organization == null) {
        this.organization = lineValue(line);
    }
    if(line.startsWith("Country:") && this.countryCode == null) {
        this.countryCode = lineValue(line);
    }
    // A ResourceLink without an http URL names another registry's whois server.
    if(line.startsWith("ResourceLink") && !line.contains("http")) {
        this.isRedirect = true;
        registryRedirect = findRegistryFromWhoisServer(lineValue(line));
    }
}
// A response containing two network blocks must resolve to the more
// specific block's org/country, with no registry redirect.
@Test
public void testRunTwoMatches() throws Exception {
    ARINResponseParser parser = new ARINResponseParser();
    for (String line : TWO_MATCH.split("\n")) {
        parser.readLine(line);
    }
    assertFalse(parser.isRedirect());
    assertNull(parser.getRegistryRedirect());
    assertEquals("US", parser.getCountryCode());
    assertEquals("Cox Communications (C00898431)", parser.getOrganization());
}
// Moves a file by copy-then-delete. Same-container moves use the plain
// proxy copy; cross-container moves use segment-aware copy for large files.
// NOTE(review): the two branches call different delete(...) overloads (the
// first passes a trailing `false`) — looks intentional (segment handling?),
// but confirm against the Delete feature's overload semantics.
@Override
public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException {
    if(new DefaultPathPredicate(containerService.getContainer(file)).test(containerService.getContainer(renamed))) {
        // Either copy complete file contents (small file) or copy manifest (large file)
        final Path rename = proxy.copy(file, renamed, new TransferStatus().withLength(file.attributes().getSize()), connectionCallback, new DisabledStreamListener());
        delete.delete(Collections.singletonMap(file, status), connectionCallback, callback, false);
        return rename;
    }
    else {
        // Cross-container: segment copy service handles large-object manifests.
        final Path copy = new SwiftSegmentCopyService(session, regionService).copy(file, renamed, new TransferStatus().withLength(file.attributes().getSize()), connectionCallback, new DisabledStreamListener());
        delete.delete(Collections.singletonMap(file, status), connectionCallback, callback);
        return copy;
    }
}
// Integration test: moving a segmented (large) object within one container
// must preserve its segment set, remove the source, and be fully cleaned up
// by a recursive delete of the target.
@Test
public void testMoveLargeObjectToSameBucket() throws Exception {
    final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("IAD");
    final Path originFolder = new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory));
    final Path sourceFile = new Path(originFolder, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    final SwiftRegionService regionService = new SwiftRegionService(session);
    final SwiftSegmentService segmentService = new SwiftSegmentService(session, ".segments-test/");
    // Uploads a multi-segment file fixture.
    prepareFile(sourceFile, regionService, segmentService);
    final SwiftFindFeature findFeature = new SwiftFindFeature(session);
    assertTrue(findFeature.find(sourceFile));
    final List<Path> sourceSegments = segmentService.list(sourceFile);
    final Path targetFolder = new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory));
    final Path targetFile = new Path(targetFolder, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    final Path movedFile = new SwiftMoveFeature(session, regionService).move(sourceFile, targetFile,
        new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback());
    // source file does not exist anymore
    assertFalse(findFeature.find(sourceFile));
    // moved file exists
    assertTrue(findFeature.find(movedFile));
    // Same-container large-object move keeps the identical segment set.
    final List<Path> targetSegments = segmentService.list(targetFile);
    assertTrue(sourceSegments.containsAll(targetSegments) && targetSegments.containsAll(sourceSegments));
    new SwiftDeleteFeature(session, segmentService, regionService).delete(
        Collections.singletonMap(targetFile, new TransferStatus()),
        new DisabledPasswordCallback(), new Delete.DisabledCallback(), true);
    assertFalse(findFeature.find(movedFile));
    // After recursive delete, no segment should still resolve.
    assertArrayEquals(new PathAttributes[0], targetSegments.stream().filter(p -> {
        try {
            return findFeature.find(movedFile);
        }
        catch(BackgroundException e) {
            e.printStackTrace();
            return false;
        }
    }).toArray());
}
/**
 * Removes and returns the last element of the deque, or {@code null} when
 * the deque is empty.
 */
@Override
public T pollLast() {
    return (_tail == null) ? null : removeNode(_tail);
}
// pollLast on a freshly constructed (empty) deque must return null, not throw.
@Test
public void testEmptyPollLast() {
    LinkedDeque<Object> q = new LinkedDeque<>();
    Assert.assertNull(q.pollLast(), "pollLast on empty queue should return null");
}
// Serializes this Paimon table into a Thrift descriptor. The native table
// is string-encoded so backends can reconstruct it; `partitions` is unused
// for Paimon tables.
@Override
public TTableDescriptor toThrift(List<DescriptorTable.ReferencedPartitionInfo> partitions) {
    TPaimonTable tPaimonTable = new TPaimonTable();
    // Opaque serialized form of the underlying Paimon FileStoreTable.
    String encodedTable = PaimonScanNode.encodeObjectToString(paimonNativeTable);
    tPaimonTable.setPaimon_native_table(encodedTable);
    tPaimonTable.setTime_zone(TimeUtils.getSessionTimeZone());
    TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.PAIMON_TABLE, fullSchema.size(), 0,
            tableName, databaseName);
    tTableDescriptor.setPaimonTable(tPaimonTable);
    return tTableDescriptor;
}
// toThrift must carry the table and database names into the descriptor;
// the native table is mocked (JMockit) so no Paimon I/O occurs.
@Test
public void testToThrift(@Mocked FileStoreTable paimonNativeTable) {
    RowType rowType =
            RowType.builder().field("a", DataTypes.INT()).field("b", DataTypes.INT()).field("c", DataTypes.INT())
                    .build();
    List<DataField> fields = rowType.getFields();
    List<Column> fullSchema = new ArrayList<>(fields.size());
    ArrayList<String> partitions = Lists.newArrayList("b", "c");
    new Expectations() {
        {
            paimonNativeTable.rowType();
            result = rowType;
            paimonNativeTable.partitionKeys();
            result = partitions;
        }
    };
    String dbName = "testDB";
    String tableName = "testTable";
    PaimonTable paimonTable =
            new PaimonTable("testCatalog", dbName, tableName, fullSchema, paimonNativeTable, 100L);
    TTableDescriptor tTableDescriptor = paimonTable.toThrift(null);
    Assert.assertEquals(tTableDescriptor.getDbName(), dbName);
    Assert.assertEquals(tTableDescriptor.getTableName(), tableName);
}
// Returns an immutable snapshot of all routers known to the store.
@Override
public Set<KubevirtRouter> routers() {
    return ImmutableSet.copyOf(kubevirtRouterStore.routers());
}
// After seeding one router, routers() must report exactly one entry.
@Test
public void testGetRouters() {
    createBasicRouters();
    assertEquals("Number of router did not match", 1, target.routers().size());
}
// Delegates listener registration to the cluster service and returns the
// registration id used for later removal.
@Override
@Nonnull
public UUID addMembershipListener(@Nonnull MembershipListener listener) {
    return clusterService.addMembershipListener(listener);
}
// Registering a no-op listener must yield a non-null registration id.
@Test
public void addMembershipListener() {
    UUID regId = client().getCluster().addMembershipListener(new MembershipAdapter());
    assertNotNull(regId);
}
// Returns the config id component of this key (empty string when the key
// was built without one — see the constructor's null handling).
public String getConfigId() {
    return configId;
}
// Covers ConfigKey equality semantics plus getConfigId, including the
// null-configId case which must normalize to "".
@Test
public void testConfigId() {
    String namespace = "bar";
    ConfigKey<?> key1 = new ConfigKey<>("foo", "a/b/c", namespace);
    ConfigKey<?> key2 = new ConfigKey<>("foo", "a/b/c", namespace);
    assertEquals(key1, key2);
    ConfigKey<?> key3 = new ConfigKey<>("foo", "a/b/c/d", namespace);
    assertNotEquals(key1, key3);
    assertEquals("a/b/c", new ConfigKey<>("foo", "a/b/c", namespace).getConfigId());
    assertEquals("a", new ConfigKey<>("foo", "a", namespace).getConfigId());
    assertEquals("", new ConfigKey<>("foo", "", namespace).getConfigId());
    // Reflexivity, inequality, and type-mismatch checks for equals().
    assertEquals(key1, key1);
    assertNotEquals(key1, key3);
    assertNotEquals(key1, new Object());
    // A null configId is normalized to the empty string.
    ConfigKey<?> key4 = new ConfigKey<>("myConfig", null, namespace);
    assertEquals("", key4.getConfigId());
}
// JSON codec: serializes a MaintenanceAssociation, emitting optional fields
// (numeric id, CCM interval) only when present/meaningful.
@Override
public ObjectNode encode(MaintenanceAssociation ma, CodecContext context) {
    checkNotNull(ma, "Maintenance Association cannot be null");
    ObjectNode result = context.mapper().createObjectNode()
            .put(MA_NAME, ma.maId().toString())
            .put(MA_NAME_TYPE, ma.maId().nameType().name());
    // Numeric id is optional; 0 or negative means "not set".
    if (ma.maNumericId() > 0) {
        result = result.put(MA_NUMERIC_ID, ma.maNumericId());
    }
    if (ma.ccmInterval() != null) {
        result = result.put(CCM_INTERVAL, ma.ccmInterval().name());
    }
    result.set(COMPONENT_LIST, new ComponentCodec().encode(ma.componentList(), context));
    result.set(RMEP_LIST, new RMepCodec().encode(ma.remoteMepIdList(), context));
    return result;
}
// Encoding a VID-named MA with a numeric id must produce the exact JSON
// below, including empty component and remote-MEP lists.
@Test
public void testEncodeMa2() throws CfmConfigException {
    MaintenanceAssociation ma1 = DefaultMaintenanceAssociation.builder(MAID2_VID, 10)
            .maNumericId((short) 2)
            .build();
    ObjectNode node = mapper.createObjectNode();
    node.set("ma", context.codec(MaintenanceAssociation.class).encode(ma1, context));
    assertEquals("{\"ma\":{" +
            "\"maName\":\"1234\"," +
            "\"maNameType\":\"PRIMARYVID\"," +
            "\"maNumericId\":2," +
            "\"component-list\":[]," +
            "\"rmep-list\":[]}}", node.toString());
}
// Executes one queued command: deserializes it and hands it to the
// terminated-query-aware handler in EXECUTE mode. Requires prior
// configuration (throws otherwise).
void handleStatement(final QueuedCommand queuedCommand) {
    throwIfNotConfigured();
    handleStatementWithTerminatedQueries(
        queuedCommand.getAndDeserializeCommand(commandDeserializer),
        queuedCommand.getAndDeserializeCommandId(),
        queuedCommand.getStatus(),
        Mode.EXECUTE,
        queuedCommand.getOffset(),
        false
    );
}
// A CSAS command carrying a persisted (non-default) config must be executed
// with that config and the resulting query started.
@Test
public void shouldBuildQueriesWithPersistedConfig() {
    // Given:
    final KsqlConfig originalConfig = new KsqlConfig(
        Collections.singletonMap(
            KsqlConfig.KSQL_PERSISTENT_QUERY_NAME_PREFIX_CONFIG, "not-the-default"));
    // get a statement instance
    final String ddlText
        = "CREATE STREAM pageviews (viewtime bigint, pageid varchar) "
        + "WITH (kafka_topic='pageview_topic', KEY_FORMAT='kafka', VALUE_FORMAT='json');";
    final String statementText
        = "CREATE STREAM user1pv AS select * from pageviews WHERE userid = 'user1';";
    final PreparedStatement<?> ddlStatement = statementParser.parseSingleStatement(ddlText);
    final ConfiguredStatement<?> configuredStatement = ConfiguredStatement
        .of(ddlStatement, SessionConfig.of(originalConfig, emptyMap()));
    ksqlEngine.execute(serviceContext, configuredStatement);
    when(mockQueryMetadata.getQueryId()).thenReturn(mock(QueryId.class));
    final KsqlPlan plan = Mockito.mock(KsqlPlan.class);
    // Command persists the original config (secrets obfuscated).
    final Command csasCommand = new Command(
        statementText,
        emptyMap(),
        originalConfig.getAllConfigPropsWithSecretsObfuscated(),
        Optional.of(plan)
    );
    final CommandId csasCommandId = new CommandId(
        CommandId.Type.STREAM,
        "_CSASGen",
        CommandId.Action.CREATE);
    when(mockEngine.execute(eq(serviceContext), eqConfiguredPlan(plan), eq(false)))
        .thenReturn(ExecuteResult.of(mockQueryMetadata));
    // When:
    handleStatement(statementExecutorWithMocks, csasCommand, csasCommandId, Optional.empty(), 1);
    // Then:
    verify(mockQueryMetadata, times(1)).start();
}
// Entry point for the Bigtable write transform; configure via the returned
// builder's with* methods.
public static Write write() {
    return Write.create();
}
// withoutValidation() must make validate() a no-op even when the target
// table would normally be checked.
@Test
public void testWriteWithoutValidate() {
    final String table = "fooTable";
    BigtableIO.Write write =
        BigtableIO.write()
            .withBigtableOptions(BIGTABLE_OPTIONS)
            .withTableId(table)
            .withoutValidation();
    // validate() will throw if withoutValidation() isn't working
    write.validate(TestPipeline.testingPipelineOptions());
}
/**
 * Validates a consumer/producer group id: non-blank, and matching the
 * allowed pattern (letter-initial, then letters/digits/'-'/'_').
 *
 * @param groupId the id to validate
 * @throws IllegalArgumentException if the id is blank or malformed
 */
public static void verifyGroupId(final String groupId) {
    // Reject null/empty/whitespace-only ids up front.
    if (StringUtils.isBlank(groupId)) {
        throw new IllegalArgumentException("Blank groupId");
    }
    final boolean wellFormed = GROUP_ID_PATTER.matcher(groupId).matches();
    if (!wellFormed) {
        throw new IllegalArgumentException(
            "Invalid group id, it should be started with character 'a'-'z' or 'A'-'Z',"
                + " and followed with numbers, english alphabet, '-' or '_'. ");
    }
}
// Valid group ids must pass verifyGroupId without throwing: single letters,
// mixed case, digits, '-' and '_' after a leading letter.
// Fix: method name typo "tetsVerifyGroupId5" corrected to
// "testVerifyGroupId5" (JUnit discovers tests by @Test, so the rename is safe).
@Test
public void testVerifyGroupId5() {
    Utils.verifyGroupId("t");
    Utils.verifyGroupId("T");
    Utils.verifyGroupId("Test");
    Utils.verifyGroupId("test");
    Utils.verifyGroupId("test-hello");
    Utils.verifyGroupId("test123");
    Utils.verifyGroupId("t_hello");
}
/**
 * Fetches a drive item's content from Microsoft Graph, retrying with the
 * configured throttle delays on any failure.
 *
 * <p>Fix: the original slept a full throttle interval even after the final
 * failed attempt, delaying the terminal TikaException for no benefit. The
 * sleep now occurs only when another attempt will actually follow.
 *
 * <p>NOTE(review): Thread.sleep takes milliseconds but the field is named
 * throttleSeconds and the log says "seconds" — confirm the array's units.
 *
 * @param fetchKey "siteDriveId,driveItemId" pair
 * @return the item's content stream
 * @throws TikaException if all retries are exhausted
 */
@Override
public InputStream fetch(String fetchKey, Metadata metadata, ParseContext parseContext)
        throws TikaException, IOException {
    Exception ex = null;
    for (int tries = 0; tries < throttleSeconds.length; tries++) {
        try {
            long start = System.currentTimeMillis();
            // Key format: "<siteDriveId>,<driveItemId>".
            String[] fetchKeySplit = fetchKey.split(",");
            String siteDriveId = fetchKeySplit[0];
            String driveItemId = fetchKeySplit[1];
            InputStream is = graphClient.drives().byDriveId(siteDriveId).items()
                    .byDriveItemId(driveItemId).content().get();
            long elapsed = System.currentTimeMillis() - start;
            LOGGER.debug("Total to fetch {}", elapsed);
            return is;
        } catch (Exception e) {
            LOGGER.warn("Exception fetching on retry=" + tries, e);
            ex = e;
        }
        // Back off only if another attempt remains.
        if (tries + 1 < throttleSeconds.length) {
            LOGGER.warn("Sleeping for {} seconds before retry", throttleSeconds[tries]);
            try {
                Thread.sleep(throttleSeconds[tries]);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }
    throw new TikaException("Could not parse " + fetchKey, ex);
}
// Mocks the whole Graph request-builder chain and verifies fetch() returns
// the stubbed content stream for a "driveId,itemId" key.
@Test
void fetch() throws Exception {
    try (AutoCloseable ignored = MockitoAnnotations.openMocks(this)) {
        // Stub each step of graphClient.drives().byDriveId(...).items()...
        Mockito.when(graphClient.drives()).thenReturn(drivesRequestBuilder);
        Mockito.when(drivesRequestBuilder.byDriveId(siteDriveId))
                .thenReturn(driveItemRequestBuilder);
        Mockito.when(driveItemRequestBuilder.items()).thenReturn(itemsRequestBuilder);
        Mockito.when(itemsRequestBuilder.byDriveItemId(driveItemid))
                .thenReturn(driveItemItemRequestBuilder);
        Mockito.when(driveItemItemRequestBuilder.content()).thenReturn(contentRequestBuilder);
        String content = "content";
        Mockito.when(contentRequestBuilder.get())
                .thenReturn(new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)));
        InputStream resultingInputStream =
                microsoftGraphFetcher.fetch(siteDriveId + "," + driveItemid, new Metadata(),
                        new ParseContext());
        Assertions.assertEquals(content,
                IOUtils.toString(resultingInputStream, StandardCharsets.UTF_8));
    }
}
// Converts an array of Spark predicates to a single Iceberg expression by
// AND-ing each converted predicate; fails fast on any unconvertible one.
public static Expression convert(Predicate[] predicates) {
    // Identity element for the AND fold.
    Expression expression = Expressions.alwaysTrue();
    for (Predicate predicate : predicates) {
        Expression converted = convert(predicate);
        Preconditions.checkArgument(
            converted != null, "Cannot convert Spark predicate to Iceberg expression: %s", predicate);
        expression = Expressions.and(expression, converted);
    }
    return expression;
}
// "col <> NaN" must convert to Iceberg's notNaN(col) regardless of operand
// order (attr-first or value-first).
@Test
public void testNotEqualToNaN() {
    String col = "col";
    NamedReference namedReference = FieldReference.apply(col);
    LiteralValue value = new LiteralValue(Float.NaN, DataTypes.FloatType);
    org.apache.spark.sql.connector.expressions.Expression[] attrAndValue =
        new org.apache.spark.sql.connector.expressions.Expression[] {namedReference, value};
    org.apache.spark.sql.connector.expressions.Expression[] valueAndAttr =
        new org.apache.spark.sql.connector.expressions.Expression[] {value, namedReference};
    Predicate notEqNaN1 = new Predicate("<>", attrAndValue);
    Expression expectedNotEqNaN = Expressions.notNaN(col);
    Expression actualNotEqNaN1 = SparkV2Filters.convert(notEqNaN1);
    assertThat(actualNotEqNaN1.toString()).isEqualTo(expectedNotEqNaN.toString());
    Predicate notEqNaN2 = new Predicate("<>", valueAndAttr);
    Expression actualNotEqNaN2 = SparkV2Filters.convert(notEqNaN2);
    assertThat(actualNotEqNaN2.toString()).isEqualTo(expectedNotEqNaN.toString());
}
/**
 * Builds an {@link Ip4Address} from a 32-bit integer in network
 * (big-endian) byte order, matching ByteBuffer.putInt's layout.
 *
 * @param value the address packed into an int
 * @return the corresponding IPv4 address
 */
public static Ip4Address valueOf(int value) {
    final byte[] octets = new byte[INET_BYTE_LENGTH];
    octets[0] = (byte) (value >>> 24);
    octets[1] = (byte) (value >>> 16);
    octets[2] = (byte) (value >>> 8);
    octets[3] = (byte) value;
    return new Ip4Address(octets);
}
// valueOf must reject a null byte array with NullPointerException.
// Fix: removed the unused local variables; the explicit (byte[]) cast keeps
// overload resolution identical to the original declared-type dispatch.
@Test(expected = NullPointerException.class)
public void testInvalidValueOfNullArrayIPv4() {
    Ip4Address.valueOf((byte[]) null);
}
@Override public DictTypeDO getDictType(Long id) { return dictTypeMapper.selectById(id); }
// Inserts a random dict type and looks it up by its type string.
// NOTE(review): this exercises the getDictType(String) overload, not the
// getDictType(Long) variant — confirm both exist on the service.
@Test
public void testGetDictType_type() {
    // mock 数据 (seed data)
    DictTypeDO dbDictType = randomDictTypeDO();
    dictTypeMapper.insert(dbDictType);
    // 准备参数 (prepare argument)
    String type = dbDictType.getType();
    // 调用 (invoke)
    DictTypeDO dictType = dictTypeService.getDictType(type);
    // 断言 (assert)
    assertNotNull(dictType);
    assertPojoEquals(dbDictType, dictType);
}
// Bootstraps the default admin user (nacos auth mode only). Refuses with
// 409 if an admin already exists; generates a random password when none is
// supplied; returns the credentials so the caller can record them.
@PostMapping("/admin")
public Object createAdminUser(@RequestParam(required = false) String password) {
    if (AuthSystemTypes.NACOS.name().equalsIgnoreCase(authConfigs.getNacosAuthSystemType())) {
        // Only one global admin bootstrap is allowed.
        if (iAuthenticationManager.hasGlobalAdminRole()) {
            return RestResultUtils.failed(HttpStatus.CONFLICT.value(), "have admin user cannot use it");
        }
        if (StringUtils.isBlank(password)) {
            password = PasswordGeneratorUtil.generateRandomPassword();
        }
        String username = AuthConstants.DEFAULT_USER;
        // Password is stored encoded; the plaintext is only echoed back once.
        userDetailsService.createUser(username, PasswordEncoderUtil.encode(password));
        roleService.addAdminRole(username);
        ObjectNode result = JacksonUtils.createEmptyJsonNode();
        result.put(AuthConstants.PARAM_USERNAME, username);
        result.put(AuthConstants.PARAM_PASSWORD, password);
        return result;
    } else {
        return RestResultUtils.failed(HttpStatus.NOT_IMPLEMENTED.value(), "not support");
    }
}
// When no admin exists and an explicit password is supplied, the response
// must echo that password back.
@Test
void testCreateAdminUser3() {
    when(authConfigs.getNacosAuthSystemType()).thenReturn(AuthSystemTypes.NACOS.name());
    when(authenticationManager.hasGlobalAdminRole()).thenReturn(false);
    ObjectNode result = (ObjectNode) userController.createAdminUser("test");
    assertEquals("test", result.get(AuthConstants.PARAM_PASSWORD).asText());
}
// URL pattern for this handler: the scheme's extension path plus a {name}
// path variable for the target resource.
@Override
public String pathPattern() {
    return buildExtensionPathPattern(scheme) + "/{name}";
}
// The update handler's pattern must combine the scheme's API path with the
// {name} variable.
@Test
void shouldBuildPathPatternCorrectly() {
    var scheme = Scheme.buildFromType(FakeExtension.class);
    var updateHandler = new ExtensionUpdateHandler(scheme, client);
    var pathPattern = updateHandler.pathPattern();
    assertEquals("/apis/fake.halo.run/v1alpha1/fakes/{name}", pathPattern);
}
/**
 * Cumulative distribution function of the logistic distribution:
 * the sigmoid evaluated at the standardized value (x - mu) / scale.
 */
@Override
public double cdf(double x) {
    final double z = (x - mu) / scale;
    return 1.0 / (1.0 + Math.exp(-z));
}
// CDF values for Logistic(mu=2, scale=1) against reference values
// (e.g. from R's plogis); cdf(mu) must be exactly 0.5 by symmetry.
@Test
public void testCdf() {
    System.out.println("cdf");
    LogisticDistribution instance = new LogisticDistribution(2.0, 1.0);
    instance.rand();
    assertEquals(0.1193080, instance.cdf(0.001), 1E-7);
    assertEquals(0.1202569, instance.cdf(0.01), 1E-7);
    assertEquals(0.1301085, instance.cdf(0.1), 1E-7);
    assertEquals(0.1418511, instance.cdf(0.2), 1E-7);
    assertEquals(0.1824255, instance.cdf(0.5), 1E-7);
    assertEquals(0.2689414, instance.cdf(1.0), 1E-7);
    assertEquals(0.5      , instance.cdf(2.0), 1E-7);
    assertEquals(0.9525741, instance.cdf(5.0), 1E-7);
    assertEquals(0.9996646, instance.cdf(10.0), 1E-7);
}
// Returns the condition's measured value (normalized by the constructor —
// presumably null becomes ""; see the test at L68).
public String getActualValue() {
    return actualValue;
}
// A null actual value passed to the constructor must surface as "".
@Test
public void getValue_returns_empty_string_if_null_was_passed_in_constructor() {
    assertThat(new EvaluatedCondition(SOME_CONDITION, SOME_LEVEL, null).getActualValue()).isEmpty();
}
// Extracts a value from the current request body: paths starting with "/"
// are treated as XPath (XML bodies), otherwise as JsonPath. Returns null
// when the body is absent or the path does not resolve.
public Object bodyPath(String path) {
    Object body = LOCAL_REQUEST.get().getBodyConverted();
    if (body == null) {
        return null;
    }
    if (path.startsWith("/")) {
        // XPath branch for XML bodies.
        Variable v = ScenarioEngine.evalXmlPath(new Variable(body), path);
        if (v.isNotPresent()) {
            return null;
        } else {
            return JsValue.fromJava(v.getValue());
        }
    } else {
        // JsonPath branch; lookup failures are treated as "not present".
        Json json = Json.of(body);
        Object result;
        try {
            result = json.get(path);
        } catch (Exception e) {
            return null;
        }
        return JsValue.fromJava(result);
    }
}
// A scenario guarded by a bodyPath JsonPath match must fire when the posted
// JSON body satisfies the path expression.
@Test
void testBodyPath() {
    background().scenario(
        "pathMatches('/hello') && bodyPath('$.foo') == 'bar'",
        "def response = { success: true }"
    );
    request.path("/hello").bodyJson("{ foo: 'bar' }");
    handle();
    match(response.getBodyConverted(), "{ success: true }");
}
// Installed iff a trigger implementation was resolved at construction time.
public boolean isInstalled() {
    return mTrigger != null;
}
// With at least one activity resolving the voice intent, the intent-based
// trigger must report itself as installed.
@Test
public void testIntentInstalledWhenSomeActivity() {
    voiceActivities.add(new ResolveInfo());
    Assert.assertTrue(IntentApiTrigger.isInstalled(mMockInputMethodService));
}
// Emits source code for a getModels() override returning one instance of
// every distinct model class collected across all KBases.
void addGetModelsMethod(StringBuilder sb) {
    sb.append(
            "    @Override\n" +
            "    public java.util.List<Model> getModels() {\n" +
            "        return java.util.Arrays.asList(" );
    // "new X()" for each distinct model class, comma-joined.
    String collected = modelsByKBase.values().stream().flatMap( List::stream ).distinct()
            .map(element -> "new " + element + "()")
            .collect(Collectors.joining(","));
    sb.append(collected);
    sb.append(
            ");\n" +
            "    }\n" +
            "\n");
}
// With one model registered, the generated getModels() must instantiate it
// and must not fall back to the empty-list form.
@Test
public void addGetModelsMethodPopulatedModelsByKBaseValuesTest() {
    List<String> modelByKBaseValues = Collections.singletonList("ModelTest");
    Map<String, List<String>> modelsByKBase = new HashMap<>();
    modelsByKBase.put("default-kie", modelByKBaseValues);
    ModelSourceClass modelSourceClass = new ModelSourceClass(RELEASE_ID, new HashMap<>(), modelsByKBase);
    StringBuilder sb = new StringBuilder();
    modelSourceClass.addGetModelsMethod(sb);
    String retrieved = sb.toString();
    String expected = "return java.util.Arrays.asList(new ModelTest());";
    assertThat(retrieved.contains(expected)).isTrue();
    String unexpected = "return java.util.Arrays.asList();";
    assertThat(retrieved.contains(unexpected)).isFalse();
}
// Records a task's acknowledgement of this pending checkpoint under the shared lock.
// Outcomes: DISCARDED when already disposed, DUPLICATE/UNKNOWN for repeated or
// unexpected attempt ids, SUCCESS otherwise. On first ack the task's operator
// subtask state is merged, finished-on-restore / finished-operators are reported
// to the checkpoint plan, and per-subtask statistics are published.
// NOTE(review): a string literal below appears split across source lines in this
// extract; preserved byte-for-byte.
public TaskAcknowledgeResult acknowledgeTask( ExecutionAttemptID executionAttemptId, TaskStateSnapshot operatorSubtaskStates, CheckpointMetrics metrics) { synchronized (lock) { if (disposed) { return TaskAcknowledgeResult.DISCARDED; } final ExecutionVertex vertex = notYetAcknowledgedTasks.remove(executionAttemptId); if (vertex == null) { if (acknowledgedTasks.contains(executionAttemptId)) { return TaskAcknowledgeResult.DUPLICATE; } else { return TaskAcknowledgeResult.UNKNOWN; } } else { acknowledgedTasks.add(executionAttemptId); } long ackTimestamp = System.currentTimeMillis(); if (operatorSubtaskStates != null && operatorSubtaskStates.isTaskDeployedAsFinished()) { checkpointPlan.reportTaskFinishedOnRestore(vertex); } else { List<OperatorIDPair> operatorIDs = vertex.getJobVertex().getOperatorIDs(); for (OperatorIDPair operatorID : operatorIDs) { updateOperatorState(vertex, operatorSubtaskStates, operatorID); } if (operatorSubtaskStates != null && operatorSubtaskStates.isTaskFinished()) { checkpointPlan.reportTaskHasFinishedOperators(vertex); } } ++numAcknowledgedTasks; // publish the checkpoint statistics // to prevent null-pointers from concurrent modification, copy reference onto stack if (pendingCheckpointStats != null) { // Do this in millis because the web frontend works with them long alignmentDurationMillis = metrics.getAlignmentDurationNanos() / 1_000_000; long checkpointStartDelayMillis = metrics.getCheckpointStartDelayNanos() / 1_000_000; SubtaskStateStats subtaskStateStats = new SubtaskStateStats( vertex.getParallelSubtaskIndex(), ackTimestamp, metrics.getBytesPersistedOfThisCheckpoint(), metrics.getTotalBytesPersisted(), metrics.getSyncDurationMillis(), metrics.getAsyncDurationMillis(), metrics.getBytesProcessedDuringAlignment(), metrics.getBytesPersistedDuringAlignment(), alignmentDurationMillis, checkpointStartDelayMillis, metrics.getUnalignedCheckpoint(), true); LOG.trace( "Checkpoint {} stats for {}: size={}Kb, duration={}ms, sync part={}ms, async 
part={}ms", checkpointId, vertex.getTaskNameWithSubtaskIndex(), subtaskStateStats.getStateSize() == 0 ? 0 : subtaskStateStats.getStateSize() / 1024, subtaskStateStats.getEndToEndDuration( pendingCheckpointStats.getTriggerTimestamp()), subtaskStateStats.getSyncCheckpointDuration(), subtaskStateStats.getAsyncCheckpointDuration()); pendingCheckpointStats.reportSubtaskStats( vertex.getJobvertexId(), subtaskStateStats); } return TaskAcknowledgeResult.SUCCESS; } }
// Acknowledging with FINISHED_ON_RESTORE must report the vertex to the checkpoint plan.
@Test void testReportTaskFinishedOnRestore() throws IOException { RecordCheckpointPlan recordCheckpointPlan = new RecordCheckpointPlan(new ArrayList<>(ACK_TASKS)); PendingCheckpoint checkpoint = createPendingCheckpoint(recordCheckpointPlan); checkpoint.acknowledgeTask( ACK_TASKS.get(0).getAttemptId(), TaskStateSnapshot.FINISHED_ON_RESTORE, new CheckpointMetrics()); assertThat(recordCheckpointPlan.getReportedFinishedOnRestoreTasks()) .contains(ACK_TASKS.get(0).getVertex()); }
// Package-private accessor for the stop-with-savepoint operation future (test hook).
CompletableFuture<String> getOperationFuture() { return operationFuture; }
// A savepoint failure must propagate to the operation future as an exceptional completion.
@Test void testExceptionalSavepointCompletionLeadsToExceptionalOperationFutureCompletion() throws Exception { final StopWithSavepoint sws; try (MockStopWithSavepointContext ctx = new MockStopWithSavepointContext()) { CheckpointScheduling mockStopWithSavepointOperations = new MockCheckpointScheduling(); CompletableFuture<String> savepointFuture = new CompletableFuture<>(); sws = createStopWithSavepoint(ctx, mockStopWithSavepointOperations, savepointFuture); ctx.setStopWithSavepoint(sws); ctx.setExpectExecuting(assertNonNull()); savepointFuture.completeExceptionally(new RuntimeException("Test error")); } assertThat(sws.getOperationFuture()).isCompletedExceptionally(); }
// Asserts the failure under test was produced by Truth's fact-based failure API,
// then returns a subject over its fact keys; otherwise fails and returns a
// no-op subject so chained assertions do not double-report.
public IterableSubject factKeys() { if (!(actual instanceof ErrorWithFacts)) { failWithActual(simpleFact("expected a failure thrown by Truth's failure API")); return ignoreCheck().that(ImmutableList.of()); } ErrorWithFacts error = (ErrorWithFacts) actual; return check("factKeys()").that(getFactKeys(error)); }
// A wrong expected fact key should fail with a message naming failure.factKeys().
@Test public void factKeysFail() { expectFailureWhenTestingThat(fact("foo", "the foo")).factKeys().containsExactly("bar"); Truth.assertThat(expectFailure.getFailure()) .hasMessageThat() .contains("value of: failure.factKeys()"); // TODO(cpovirk): Switch to using fact-based assertions once IterableSubject uses them. }
// Removes SQL-style comments from a line by delegating to the internal Parser.
static String strip(final String line) { return new Parser(line).parse(); }
// Everything after the first "--" must be stripped, including nested dashes.
@Test public void shouldStripDoubleComment() { // Given: final String line = "some line -- this is a comment -- with other dashes"; // Then: assertThat(CommentStripper.strip(line), is("some line")); }
/**
 * Converts a native home-directory string (possibly Windows, backslash-separated)
 * into a directory {@link Path} using forward slashes.
 */
protected Path toPath(final String home) {
    // StringUtils.replace is null-safe, unlike String#replace
    final String unixStyle = StringUtils.replace(home, "\\", "/");
    return new Path(unixStyle, EnumSet.of(Path.Type.directory));
}
// A Windows-style home path must be normalized to a forward-slash absolute path.
@Test public void testWindowsHome() throws Exception { final LocalSession session = new LocalSession(new Host(new LocalProtocol(), new LocalProtocol().getDefaultHostname())); session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()); session.login(new DisabledLoginCallback(), new DisabledCancelCallback()); assertEquals("/C:/Users/Default", new LocalHomeFinderFeature().toPath("C:\\Users\\Default").getAbsolute()); session.close(); }
// Best-effort detection of compressed oops: returns null when Unsafe or the
// reference-size probe is unavailable; otherwise infers compression from the
// reference size differing from the native address size.
@SuppressFBWarnings("NP_BOOLEAN_RETURN_NULL") static Boolean isObjectLayoutCompressedOopsOrNull() { if (!UNSAFE_AVAILABLE) { return null; } Integer referenceSize = ReferenceSizeEstimator.getReferenceSizeOrNull(); if (referenceSize == null) { return null; } // when reference size does not equal address size then it's safe to assume references are compressed return referenceSize != UNSAFE.addressSize(); }
// Smoke test: the probe must not throw regardless of the JVM's layout.
@Test public void testIsObjectLayoutCompressedOopsOrNull() { JVMUtil.isObjectLayoutCompressedOopsOrNull(); }
/**
 * Creates (overwriting per Hadoop's default create semantics) a file at the
 * given resource id and returns a writable channel over its output stream.
 *
 * @throws IOException if the underlying filesystem cannot create the file
 */
@Override
protected WritableByteChannel create(HadoopResourceId resourceId, CreateOptions createOptions)
    throws IOException {
  // Resolve the Hadoop path once; the original converted the resource id twice.
  org.apache.hadoop.fs.Path path = resourceId.toPath();
  return Channels.newChannel(path.getFileSystem(configuration).create(path));
}
// Round-trip: bytes written via create() must be readable back unchanged.
@Test public void testCreateAndReadFile() throws Exception { byte[] bytes = "testData".getBytes(StandardCharsets.UTF_8); create("testFile", bytes); assertArrayEquals(bytes, read("testFile", 0)); }
// REST endpoint: applies a partial config patch to a connector via the herder,
// forwarding to the leader when necessary, and returns the updated connector info.
@PATCH @Path("/{connector}/config") public Response patchConnectorConfig(final @PathParam("connector") String connector, final @Context HttpHeaders headers, final @Parameter(hidden = true) @QueryParam("forward") Boolean forward, final Map<String, String> connectorConfigPatch) throws Throwable { FutureCallback<Herder.Created<ConnectorInfo>> cb = new FutureCallback<>(); herder.patchConnectorConfig(connector, connectorConfigPatch, cb); Herder.Created<ConnectorInfo> createdInfo = requestHandler.completeOrForwardRequest(cb, "/connectors/" + connector + "/config", "PATCH", headers, connectorConfigPatch, new TypeReference<ConnectorInfo>() { }, new CreatedConnectorInfoTranslator(), forward); return Response.ok().entity(createdInfo.result()).build(); }
// The resource must delegate the patch to Herder.patchConnectorConfig with the given args.
@Test public void testPatchConnectorConfig() throws Throwable { final ArgumentCaptor<Callback<Herder.Created<ConnectorInfo>>> cb = ArgumentCaptor.forClass(Callback.class); expectAndCallbackResult(cb, new Herder.Created<>(true, new ConnectorInfo(CONNECTOR_NAME, CONNECTOR_CONFIG_PATCHED, CONNECTOR_TASK_NAMES, ConnectorType.SINK)) ).when(herder).patchConnectorConfig(eq(CONNECTOR_NAME), eq(CONNECTOR_CONFIG_PATCH), cb.capture()); connectorsResource.patchConnectorConfig(CONNECTOR_NAME, NULL_HEADERS, FORWARD, CONNECTOR_CONFIG_PATCH); }
// Validates a write schema by recursively visiting it: any group with zero
// fields is illegal for writing and triggers an InvalidSchemaException.
public static void checkValidWriteSchema(GroupType schema) { schema.accept(new TypeVisitor() { @Override public void visit(GroupType groupType) { if (groupType.getFieldCount() <= 0) { throw new InvalidSchemaException("Cannot write a schema with an empty group: " + groupType); } for (Type type : groupType.getFields()) { type.accept(this); } } @Override public void visit(MessageType messageType) { visit((GroupType) messageType); } @Override public void visit(PrimitiveType primitiveType) {} }); }
// A populated group passes validation; an empty group must be rejected.
@Test public void testWriteCheckGroupType() { TypeUtil.checkValidWriteSchema(Types.repeatedGroup() .required(INT32) .named("a") .optional(BINARY) .as(UTF8) .named("b") .named("valid_group")); TestTypeBuilders.assertThrows( "Should complain about empty GroupType", InvalidSchemaException.class, (Callable<Void>) () -> { TypeUtil.checkValidWriteSchema(new GroupType(REPEATED, "invalid_group")); return null; }); }
// Telnet "cd" command: with "/" or ".." it clears the channel's default service;
// otherwise it searches the exported services (by simple name, FQN, URL path or
// service key) and, if found, stores it on the channel as the default.
@Override public String execute(CommandContext commandContext, String[] args) { Channel channel = commandContext.getRemote(); if (ArrayUtils.isEmpty(args)) { return "Please input service name, eg: \r\ncd XxxService\r\ncd com.xxx.XxxService"; } String message = args[0]; StringBuilder buf = new StringBuilder(); if ("/".equals(message) || "..".equals(message)) { String service = channel.attr(SERVICE_KEY).getAndRemove(); buf.append("Cancelled default service ").append(service).append('.'); } else { boolean found = false; for (Exporter<?> exporter : dubboProtocol.getExporters()) { if (message.equals(exporter.getInvoker().getInterface().getSimpleName()) || message.equals(exporter.getInvoker().getInterface().getName()) || message.equals(exporter.getInvoker().getUrl().getPath()) || message.equals(exporter.getInvoker().getUrl().getServiceKey())) { found = true; break; } } if (found) { channel.attr(SERVICE_KEY).set(message); buf.append("Used the ") .append(message) .append(" as default.\r\nYou can cancel default service by command: cd /"); } else { buf.append("No such service ").append(message); } } return buf.toString(); }
// "cd demo" against an exported service must confirm the default-service switch.
@Test void testChangePath() { ExtensionLoader.getExtensionLoader(Protocol.class) .getExtension(DubboProtocol.NAME) .export(mockInvoker); String result = change.execute(mockCommandContext, new String[] {"demo"}); assertEquals("Used the demo as default.\r\nYou can cancel default service by command: cd /", result); }
/**
 * Forwards the text event to the wrapped listener, then records the key in the
 * quick-text usage history — but only when both the key label and the emitted
 * text are non-empty.
 */
@Override
public void onText(Keyboard.Key key, CharSequence text) {
    // Always forward first; history recording is best-effort bookkeeping.
    mKeyboardActionListener.onText(key, text);
    if (!TextUtils.isEmpty(key.label) && !TextUtils.isEmpty(text)) {
        mHistoryQuickTextKey.recordUsedKey(String.valueOf(key.label), String.valueOf(text));
    }
}
// With a null label the event is forwarded but no history entry is recorded.
@Test public void onTextWithNoLabel() throws Exception { Keyboard.Key key = Mockito.mock(Keyboard.Key.class); key.label = null; mUnderTest.onText(key, "test"); Mockito.verify(mKeyboardListener).onText(key, "test"); Mockito.verifyZeroInteractions(mHistoryKey); }
// Parses one filter expression against the entity attributes and converts the
// resulting Filter into its MongoDB Bson representation.
public Bson parseSingleExpression(final String filterExpression, final List<EntityAttribute> attributes) { final Filter filter = singleFilterParser.parseSingleExpression(filterExpression, attributes); return filter.toBson(); }
// A DATE-typed attribute filter must yield an eq() on the parsed UTC timestamp.
@Test void parsesFilterExpressionCorrectlyForDateType() { assertEquals(Filters.eq("created_at", new DateTime(2012, 12, 12, 12, 12, 12, DateTimeZone.UTC).toDate()), toTest.parseSingleExpression("created_at:2012-12-12 12:12:12", List.of(EntityAttribute.builder() .id("created_at") .title("Creation Date") .type(SearchQueryField.Type.DATE) .filterable(true) .build()) )); }
/**
 * Accepts an incoming point, first giving it a chance to advance the current
 * time. Points that are still older than the current time are treated as late:
 * remembered as the last dropped point and routed to the rejection handler.
 * In-order points are forwarded to the target consumer.
 */
@Override
public void accept(T newPoint) {
    considerAdvancingCurrentTime(newPoint.time());
    boolean arrivedLate = newPoint.time().isBefore(currentTime);
    if (arrivedLate) {
        lastDroppedPoint = newPoint;
        rejectedPointHandler.accept(newPoint);
    } else {
        targetPointConsumer.accept(newPoint);
    }
}
// The test consumer must raise an AssertionError on an out-of-order point.
@Test public void testTestConsumerFailsOnBadInput() { List<Point> testData = testData(0, 1, -1); TestConsumer testConsumer = new TestConsumer(); try { for (Point point : testData) { testConsumer.accept(point); } fail("the 3rd point should cause an exception"); } catch (AssertionError ae) { //properly caught the exception (other would have failed) } }
// Resumes the subpartition view; when the consumer has no exclusive buffers
// (initialCredit == 0), the available credit is reset because all floating
// buffers must already have been released.
@Override public void resumeConsumption() { if (initialCredit == 0) { // reset available credit if no exclusive buffer is available at the // consumer side for all floating buffers must have been released numCreditsAvailable = 0; } subpartitionView.resumeConsumption(); }
// Readers with exclusive credit keep it across resume; zero-credit readers are reset.
@Test void testResumeConsumption() throws Exception { int numCredits = 2; CreditBasedSequenceNumberingViewReader reader1 = createNetworkSequenceViewReader(numCredits); reader1.resumeConsumption(); assertThat(reader1.getNumCreditsAvailable()).isEqualTo(numCredits); reader1.addCredit(numCredits); reader1.resumeConsumption(); assertThat(reader1.getNumCreditsAvailable()).isEqualTo(2 * numCredits); CreditBasedSequenceNumberingViewReader reader2 = createNetworkSequenceViewReader(0); reader2.addCredit(numCredits); assertThat(reader2.getNumCreditsAvailable()).isEqualTo(numCredits); reader2.resumeConsumption(); assertThat(reader2.getNumCreditsAvailable()).isZero(); }
/**
 * Joins the rendered form of each list element with the given delimiter.
 * Elements are rendered via the sibling {@code toString(Object, String)}
 * helper. Returns the empty string for an empty list.
 */
public static String join(List<?> list, String delim) {
    final StringBuilder joined = new StringBuilder();
    boolean first = true;
    for (Object element : list) {
        if (!first) {
            joined.append(delim);
        }
        joined.append(toString(element, delim));
        first = false;
    }
    return joined.toString();
}
// Two elements joined with "," produce "foo,bar".
@Test public void testTwoElementJoin() throws IOException { assertEquals("foo,bar", KeyNode.join(Arrays.asList("foo", "bar"), ",")); }
// Processes an NVD data file and loads its CVE entries into the database.
// File-name extension selects the item source: ".jsonarray.gz" -> gzipped JSON
// array, other ".gz" -> gzipped CVE API JSON 2.0, otherwise plain JSON array.
// Records the end time and returns this processor for chaining.
@Override public NvdApiProcessor call() throws Exception { if (jsonFile.getName().endsWith(".jsonarray.gz")) { try (InputStream fis = Files.newInputStream(jsonFile.toPath()); InputStream is = new BufferedInputStream(new GZIPInputStream(fis)); CveItemSource<DefCveItem> itemSource = new JsonArrayCveItemSource(is)) { updateCveDb(itemSource); } } else if (jsonFile.getName().endsWith(".gz")) { try (InputStream fis = Files.newInputStream(jsonFile.toPath()); InputStream is = new BufferedInputStream(new GZIPInputStream(fis)); CveItemSource<DefCveItem> itemSource = new CveApiJson20CveItemSource(is)) { updateCveDb(itemSource); } } else { try (InputStream fis = Files.newInputStream(jsonFile.toPath()); InputStream is = new BufferedInputStream(fis); CveItemSource<DefCveItem> itemSource = new JsonArrayCveItemSource(is)) { updateCveDb(itemSource); } } endTime = System.currentTimeMillis(); return this; }
// An empty JSON array file must be processed without error.
@Test public void processValidStructure() throws Exception { try (CveDB cve = new CveDB(getSettings())) { File file = File.createTempFile("test", "test.json"); writeFileString(file, "[]"); NvdApiProcessor processor = new NvdApiProcessor(null, file); processor.call(); } }
// Delegates timeline filtering (e.g. excluding instants after a pending
// compaction) to the shared input-format utility.
protected HoodieDefaultTimeline filterInstantsTimeline(HoodieDefaultTimeline timeline) { return HoodieInputFormatUtils.filterInstantsTimeline(timeline); }
// Filtering must exclude every instant at or after the earliest pending
// compaction; removing compactions one by one progressively re-admits the
// later delta commits.
@Test public void testPendingCompactionWithActiveCommits() throws IOException { // setup 4 sample instants in timeline List<HoodieInstant> instants = new ArrayList<>(); HoodieInstant t1 = new HoodieInstant(HoodieInstant.State.COMPLETED, HoodieTimeline.COMMIT_ACTION, "1"); HoodieInstant t2 = new HoodieInstant(HoodieInstant.State.INFLIGHT, HoodieTimeline.DELTA_COMMIT_ACTION, "2"); HoodieInstant t3 = new HoodieInstant(HoodieInstant.State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, "3"); HoodieInstant t4 = new HoodieInstant(HoodieInstant.State.COMPLETED, HoodieTimeline.DELTA_COMMIT_ACTION, "4"); HoodieInstant t5 = new HoodieInstant(HoodieInstant.State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, "5"); HoodieInstant t6 = new HoodieInstant(HoodieInstant.State.COMPLETED, HoodieTimeline.DELTA_COMMIT_ACTION, "6"); instants.add(t1); instants.add(t2); instants.add(t3); instants.add(t4); instants.add(t5); instants.add(t6); HoodieTableMetaClient metaClient = HoodieTestUtils.init(basePath.toString(), HoodieFileFormat.HFILE); HoodieActiveTimeline timeline = new HoodieActiveTimeline(metaClient); timeline.setInstants(instants); // Verify getCommitsTimelineBeforePendingCompaction does not return instants after first compaction instant HoodieTimeline filteredTimeline = inputFormat.filterInstantsTimeline(timeline); assertTrue(filteredTimeline.containsInstant(t1)); assertTrue(filteredTimeline.containsInstant(t2)); assertFalse(filteredTimeline.containsInstant(t3)); assertFalse(filteredTimeline.containsInstant(t4)); assertFalse(filteredTimeline.containsInstant(t5)); assertFalse(filteredTimeline.containsInstant(t6)); // remove compaction instant and setup timeline again instants.remove(t3); timeline = new HoodieActiveTimeline(metaClient); timeline.setInstants(instants); filteredTimeline = inputFormat.filterInstantsTimeline(timeline); // verify all remaining instants are returned. 
assertTrue(filteredTimeline.containsInstant(t1)); assertTrue(filteredTimeline.containsInstant(t2)); assertFalse(filteredTimeline.containsInstant(t3)); assertTrue(filteredTimeline.containsInstant(t4)); assertFalse(filteredTimeline.containsInstant(t5)); assertFalse(filteredTimeline.containsInstant(t6)); // remove remaining compaction instant and setup timeline again instants.remove(t5); timeline = new HoodieActiveTimeline(metaClient); timeline.setInstants(instants); filteredTimeline = inputFormat.filterInstantsTimeline(timeline); // verify all remaining instants are returned. assertTrue(filteredTimeline.containsInstant(t1)); assertTrue(filteredTimeline.containsInstant(t2)); assertFalse(filteredTimeline.containsInstant(t3)); assertTrue(filteredTimeline.containsInstant(t4)); assertFalse(filteredTimeline.containsInstant(t5)); assertTrue(filteredTimeline.containsInstant(t6)); }
// Downloads a Box file's content via GET /files/{id}/content. When the transfer
// status indicates a resume (append), a Range header is added — open-ended when
// the range end is unknown (-1). IOExceptions are mapped to backend exceptions.
@Override public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException { try { final BoxApiClient client = new BoxApiClient(session.getClient()); final HttpGet request = new HttpGet(String.format("%s/files/%s/content", client.getBasePath(), fileid.getFileId(file))); if(status.isAppend()) { final HttpRange range = HttpRange.withStatus(status); final String header; if(-1 == range.getEnd()) { header = String.format("bytes=%d-", range.getStart()); } else { header = String.format("bytes=%d-%d", range.getStart(), range.getEnd()); } if(log.isDebugEnabled()) { log.debug(String.format("Add range header %s for file %s", header, file)); } request.addHeader(new BasicHeader(HttpHeaders.RANGE, header)); } final CloseableHttpResponse response = session.getClient().execute(request); return new HttpMethodReleaseInputStream(response, status); } catch(IOException e) { throw new HttpExceptionMappingService().map("Download {0} failed", e, file); } }
// Reading a freshly-created zero-length file must yield a non-null stream.
@Test public void testReadZeroLength() throws Exception { final BoxFileidProvider fileid = new BoxFileidProvider(session); final Path test = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new BoxTouchFeature(session, fileid).touch(test, new TransferStatus()); final InputStream in = new BoxReadFeature(session, fileid).read(test, new TransferStatus().withLength(0L), new DisabledConnectionCallback()); assertNotNull(in); in.close(); new BoxDeleteFeature(session, fileid).delete(Collections.<Path>singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); session.close(); }
// Single-condition convenience overload delegating to the varargs variant.
public Result waitForConditionAndFinish(Config config, Supplier<Boolean> conditionCheck) throws IOException { return waitForConditionsAndFinish(config, conditionCheck); }
// A never-true condition must drain the job and report TIMEOUT.
@Test public void testFinishAfterConditionTimeout() throws IOException { when(client.getJobStatus(any(), any(), any())).thenReturn(JobState.RUNNING); Result result = new PipelineOperator(client).waitForConditionAndFinish(DEFAULT_CONFIG, () -> false); verify(client).drainJob(any(), any(), any()); assertThat(result).isEqualTo(Result.TIMEOUT); }
/**
 * Hash code derived from the channel field.
 * Null-safe: a null channel contributes 0, matching the conventional
 * generated-hashCode pattern (the original dereferenced channel and would
 * throw a NullPointerException when it is null).
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((channel == null) ? 0 : channel.hashCode());
    return result;
}
// The header's hash must equal the conventional 31-based, null-safe computation.
@Test void hashCodeTest() { final int prime = 31; int result = 1; result = prime * result + ((channel == null) ? 0 : channel.hashCode()); Assertions.assertEquals(header.hashCode(), result); }
/**
 * Two ErrorMessages are equal when code, message and (possibly null)
 * detailedMessage all match. Null detailed messages compare equal to each
 * other and unequal to any non-null value.
 */
@Override
public boolean equals(Object o) {
    if (!(o instanceof ErrorMessage)) return false;
    ErrorMessage other = (ErrorMessage) o;
    return this.code == other.code
            && this.message.equals(other.message)
            && java.util.Objects.equals(this.detailedMessage, other.detailedMessage);
}
// Equality depends on code, message and detail — but not on the attached cause.
@Test public void testErrorMessageEquality() { assertEquals(new ErrorMessage(17,"Message"),new ErrorMessage(17,"Message")); assertFalse(new ErrorMessage(16,"Message").equals(new ErrorMessage(17,"Message"))); assertFalse(new ErrorMessage(17,"Message").equals(new ErrorMessage(17,"Other message"))); assertFalse(new ErrorMessage(17,"Message").equals(new ErrorMessage(17,"Message","Detail"))); assertFalse(new ErrorMessage(17,"Message","Detail").equals(new ErrorMessage(17,"Message"))); assertEquals(new ErrorMessage(17,"Message","Detail"),new ErrorMessage(17,"Message","Detail",new Exception())); assertTrue(new ErrorMessage(17,"Message","Detail").equals(new ErrorMessage(17,"Message","Detail"))); assertFalse(new ErrorMessage(17,"Message","Detail").equals(new ErrorMessage(17,"Message","Other detail"))); }
// Marks the combiner finished with the given aggregate promise. Must run on the
// event loop and may only be called once; if all expected promises have already
// completed, the aggregate promise is resolved immediately.
public void finish(Promise<Void> aggregatePromise) { ObjectUtil.checkNotNull(aggregatePromise, "aggregatePromise"); checkInEventLoop(); if (this.aggregatePromise != null) { throw new IllegalStateException("Already finished"); } this.aggregatePromise = aggregatePromise; if (doneCount == expectedCount) { tryPromise(); } }
// Finishing with no pending promises must immediately try to succeed the aggregate.
@Test public void testNullAggregatePromise() { combiner.finish(p1); verify(p1).trySuccess(null); }
/**
 * Maps this user's raw permission strings to Shiro Permission objects:
 * the wildcard "*" becomes an AllPermission, anything else a case-sensitive
 * wildcard permission.
 */
@Override
public Set<Permission> getObjectPermissions() {
    return getPermissions().stream()
            .map(p -> p.equals("*")
                    ? (Permission) new AllPermission()
                    : new CaseSensitiveWildcardPermission(p))
            .collect(Collectors.toSet());
}
// Custom permissions plus "*" must yield wildcard permissions and exactly one AllPermission.
@Test public void getObjectPermissions() { final Permissions permissions = new Permissions(Collections.emptySet()); final List<String> customPermissions = ImmutableList.of("subject:action", "*"); final Map<String, Object> fields = ImmutableMap.of( UserImpl.USERNAME, "foobar", UserImpl.PERMISSIONS, customPermissions); user = createUserImpl(passwordAlgorithmFactory, permissions, fields); final Set<Permission> userSelfEditPermissions = permissions.userSelfEditPermissions("foobar").stream().map(CaseSensitiveWildcardPermission::new).collect(Collectors.toSet()); assertThat(user.getObjectPermissions()) .containsAll(userSelfEditPermissions) .contains(new CaseSensitiveWildcardPermission("subject:action")) .extracting("class").containsOnlyOnce(AllPermission.class); }
// Replays a ConsumerGroupMemberMetadata record. A non-null value creates or
// updates the member; a tombstone removes it, but only after asserting the
// expected tombstone ordering (current-member-assignment and target-assignment
// tombstones must arrive first). Finally refreshes the topic-subscription index.
public void replay( ConsumerGroupMemberMetadataKey key, ConsumerGroupMemberMetadataValue value ) { String groupId = key.groupId(); String memberId = key.memberId(); ConsumerGroup consumerGroup = getOrMaybeCreatePersistedConsumerGroup(groupId, value != null); Set<String> oldSubscribedTopicNames = new HashSet<>(consumerGroup.subscribedTopicNames().keySet()); if (value != null) { ConsumerGroupMember oldMember = consumerGroup.getOrMaybeCreateMember(memberId, true); consumerGroup.updateMember(new ConsumerGroupMember.Builder(oldMember) .updateWith(value) .build()); } else { ConsumerGroupMember oldMember = consumerGroup.getOrMaybeCreateMember(memberId, false); if (oldMember.memberEpoch() != LEAVE_GROUP_MEMBER_EPOCH) { throw new IllegalStateException("Received a tombstone record to delete member " + memberId + " but did not receive ConsumerGroupCurrentMemberAssignmentValue tombstone."); } if (consumerGroup.targetAssignment().containsKey(memberId)) { throw new IllegalStateException("Received a tombstone record to delete member " + memberId + " but did not receive ConsumerGroupTargetAssignmentMetadataValue tombstone."); } consumerGroup.removeMember(memberId); } updateGroupsByTopics(groupId, oldSubscribedTopicNames, consumerGroup.subscribedTopicNames().keySet()); }
// Repeated replays of the same group's records must collapse to a single
// state-transition metric in each direction — last record wins.
@Test public void testOnClassicGroupStateTransitionOnLoading() { GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder() .build(); ClassicGroup group = new ClassicGroup( new LogContext(), "group-id", EMPTY, context.time, context.metrics ); // Even if there are more group metadata records loaded than tombstone records, the last replayed record // (tombstone in this test) is the latest state of the group. Hence, the overall metric count should be 0. IntStream.range(0, 5).forEach(__ -> context.replay(GroupCoordinatorRecordHelpers.newGroupMetadataRecord(group, Collections.emptyMap(), MetadataVersion.LATEST_PRODUCTION)) ); IntStream.range(0, 4).forEach(__ -> context.replay(GroupCoordinatorRecordHelpers.newGroupMetadataTombstoneRecord("group-id")) ); verify(context.metrics, times(1)).onClassicGroupStateTransition(null, EMPTY); verify(context.metrics, times(1)).onClassicGroupStateTransition(EMPTY, null); }
// Returns a 200 response wrapping the current health-check payload.
public EndpointResponse checkHealth() { return EndpointResponse.ok(getResponse()); }
// The endpoint must consult the agent and return a 200 with a HealthCheckResponse body.
@Test public void shouldCheckHealth() { // When: final EndpointResponse response = healthCheckResource.checkHealth(); // Then: verify(healthCheckAgent).checkHealth(); assertThat(response.getStatus(), is(200)); assertThat(response.getEntity(), instanceOf(HealthCheckResponse.class)); }
// Determines the next redirect in the AD authentication flow: cancels back to
// AD when the SAML session was marked invalid, starts an AD authentication
// (with generated return URL) when no IdP assertion exists yet, and otherwise
// prepares the BVD session. Wraps lower-level failures in SamlParseException.
public String redirectWithCorrectAttributesForAd(HttpServletRequest httpRequest, AuthenticationRequest authenticationRequest) throws SamlParseException { try { String redirectUrl; SamlSession samlSession = authenticationRequest.getSamlSession(); if (samlSession.getValidationStatus() != null && samlSession.getValidationStatus().equals(STATUS_INVALID.label)) { return cancelAuthenticationToAd(authenticationRequest, samlSession.getArtifact()); } else if (authenticationRequest.getIdpAssertion() == null) { String returnUrl = generateReturnUrl(httpRequest, authenticationRequest.getSamlSession().getArtifact(), REDIRECT_WITH_ARTIFACT_URL); redirectUrl = prepareAuthenticationToAd(returnUrl, authenticationRequest); logger.info("Authentication sent to Ad: {}", redirectUrl); } else { redirectUrl = prepareBvdSession(authenticationRequest); logger.info("Redirected to BVD: {}", redirectUrl); } return redirectUrl; } catch (MetadataException | BvdException | DecryptionException | SamlSessionException e) { throw new SamlParseException("BVD exception starting session", e); } catch (UnsupportedEncodingException e) { throw new SamlParseException("Authentication cannot encode RelayState", e); } }
// With no IdP assertion the flow must redirect to the AD front channel without a transaction id.
@Test public void redirectWithCorrectAttributesForAdTest() throws SamlParseException { AuthenticationRequest authenticationRequest = new AuthenticationRequest(); authenticationRequest.setRequest(httpServletRequestMock); SamlSession samlSession = new SamlSession(1L); samlSession.setRequesterId("DvEntity"); samlSession.setArtifact("artifact"); authenticationRequest.setSamlSession(samlSession); String result = authenticationIdpService.redirectWithCorrectAttributesForAd(httpServletRequestMock, authenticationRequest); assertNotNull(result); assertNull(samlSession.getTransactionId()); assertEquals(result, frontChannel); }
// Registers the CORS filter for API, management and API-docs paths — but only
// when the JHipster properties declare allowed origins or origin patterns;
// otherwise the filter carries an empty configuration source.
@Bean public CorsFilter corsFilter() { UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource(); CorsConfiguration config = jHipsterProperties.getCors(); if (!CollectionUtils.isEmpty(config.getAllowedOrigins()) || !CollectionUtils.isEmpty(config.getAllowedOriginPatterns())) { log.debug("Registering CORS filter"); source.registerCorsConfiguration("/api/**", config); source.registerCorsConfiguration("/management/**", config); source.registerCorsConfiguration("/v3/api-docs", config); source.registerCorsConfiguration("/swagger-ui/**", config); } return new CorsFilter(source); }
// Paths outside the registered patterns must not receive CORS response headers.
@Test void shouldCorsFilterOnOtherPath() throws Exception { props.getCors().setAllowedOrigins(Collections.singletonList("*")); props.getCors().setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE")); props.getCors().setAllowedHeaders(Collections.singletonList("*")); props.getCors().setMaxAge(1800L); props.getCors().setAllowCredentials(true); MockMvc mockMvc = MockMvcBuilders.standaloneSetup(new WebConfigurerTestController()).addFilters(webConfigurer.corsFilter()).build(); mockMvc .perform(get("/test/test-cors").header(HttpHeaders.ORIGIN, "other.domain.com")) .andExpect(status().isOk()) .andExpect(header().doesNotExist(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)); }
/**
 * Resolves a (possibly relative) name against the current working directory:
 * "." yields the working directory itself, ".." its parent, an absolute name
 * is returned unchanged, and any other name is joined onto the working
 * directory with the platform's configured delimiter.
 */
public String normalize(final String name) {
    // Literal-first equals keeps the null-safety of the original StringUtils.equals
    if(".".equals(name)) {
        return finder.find().getAbsolute();
    }
    if("..".equals(name)) {
        return finder.find().getParent().getAbsolute();
    }
    if(this.isAbsolute(name)) {
        return name;
    }
    return String.format("%s%s%s", finder.find().getAbsolute(),
            PreferencesFactory.get().getProperty("local.delimiter"), name);
}
// A relative name must be prefixed with the current working directory.
@Test public void testNormalize() { assertEquals(System.getProperty("user.dir") + File.separator+ "n", new WorkdirPrefixer().normalize("n")); }
// Returns an item to the pool. Rejects items not created by this pool; a
// reference-equality scan (items.contains would use equals) makes repeated
// releases of the same item idempotent. The put must never block because the
// queue is sized to hold every created item.
@Override public void release(T item) { checkNotNull(item, "item"); synchronized (createdItems) { if (!createdItems.contains(item)) { throw new IllegalArgumentException("This item is not a part of this pool"); } } // Return if this item was released earlier. // We cannot use items.contains() because that check is not based on reference equality. for (T entry : items) { if (entry == item) { return; } } try { items.put(item); } catch (InterruptedException e) { throw new IllegalStateException("release() should never block", e); } }
// Constructor and release() must validate size, nullness and pool membership.
@Test public void testArgChecks() throws Exception { // Should not throw. BufferPool pool = new BufferPool(5); // Verify it throws correctly. intercept(IllegalArgumentException.class, "'size' must be a positive integer", () -> new BufferPool(-1)); intercept(IllegalArgumentException.class, "'size' must be a positive integer", () -> new BufferPool(0)); intercept(IllegalArgumentException.class, "'item' must not be null", () -> pool.release(null)); intercept(IllegalArgumentException.class, "This item is not a part of this pool", () -> pool.release(ByteBuffer.allocate(4))); }
// Actuator read operation: returns the parsed router rules for the given
// destination service under key "routerRules". A blank selector yields a map
// without that key; a service with no rules yields an empty list.
@ReadOperation public Map<String, Object> router(@Selector String dstService) { Map<String, Object> result = new HashMap<>(); if (StringUtils.hasText(dstService)) { List<RoutingProto.Route> routerRules = serviceRuleManager.getServiceRouterRule(MetadataContext.LOCAL_NAMESPACE, MetadataContext.LOCAL_SERVICE, dstService); List<Object> rules = new LinkedList<>(); result.put("routerRules", rules); if (CollectionUtils.isEmpty(routerRules)) { return result; } for (RoutingProto.Route route : routerRules) { rules.add(parseRouterRule(route)); } } return result; }
// One configured route (with valid and invalid label expressions) must surface
// as exactly one entry under "routerRules".
@Test public void testHasRouterRule() { Map<String, ModelProto.MatchString> labels = new HashMap<>(); ModelProto.MatchString matchString = ModelProto.MatchString.getDefaultInstance(); String validKey1 = "${http.header.uid}"; String validKey2 = "${http.query.name}"; String validKey3 = "${http.method}"; String validKey4 = "${http.uri}"; String validKey5 = "${http.body.customkey}"; String invalidKey = "$http.expression.wrong}"; labels.put(validKey1, matchString); labels.put(validKey2, matchString); labels.put(validKey3, matchString); labels.put(validKey4, matchString); labels.put(validKey5, matchString); labels.put(invalidKey, matchString); RoutingProto.Source source1 = RoutingProto.Source.newBuilder().putAllMetadata(labels).build(); RoutingProto.Source source2 = RoutingProto.Source.newBuilder().putAllMetadata(labels).build(); RoutingProto.Source source3 = RoutingProto.Source.newBuilder().putAllMetadata(new HashMap<>()).build(); List<RoutingProto.Route> routes = new LinkedList<>(); RoutingProto.Route route = RoutingProto.Route.newBuilder() .addAllSources(Lists.list(source1, source2, source3)) .build(); routes.add(route); when(serviceRuleManager.getServiceRouterRule(anyString(), anyString(), anyString())).thenReturn(routes); Map<String, Object> actuator = polarisRouterEndpoint.router(testDestService); assertThat(actuator.get("routerRules")).isNotNull(); assertThat(((List<?>) actuator.get("routerRules")).size()).isEqualTo(1); }
// Returns the pending span for the context, lazily creating one. A new span
// inherits its parent's clock when the parent is in progress (avoids re-reading
// wall time); otherwise a fresh tick clock is anchored now. Creation races are
// resolved via putIfProbablyAbsent — the loser returns the winner's span and
// does NOT invoke the span handler.
public PendingSpan getOrCreate( @Nullable TraceContext parent, TraceContext context, boolean start) { PendingSpan result = get(context); if (result != null) return result; MutableSpan span = new MutableSpan(context, defaultSpan); PendingSpan parentSpan = parent != null ? get(parent) : null; // save overhead calculating time if the parent is in-progress (usually is) TickClock clock; if (parentSpan != null) { TraceContext parentContext = parentSpan.context(); if (parentContext != null) parent = parentContext; clock = parentSpan.clock; if (start) span.startTimestamp(clock.currentTimeMicroseconds()); } else { long currentTimeMicroseconds = this.clock.currentTimeMicroseconds(); clock = new TickClock(platform, currentTimeMicroseconds, platform.nanoTime()); if (start) span.startTimestamp(currentTimeMicroseconds); } PendingSpan newSpan = new PendingSpan(context, span, clock); // Probably absent because we already checked with get() at the entrance of this method PendingSpan previousSpan = putIfProbablyAbsent(context, newSpan); if (previousSpan != null) return previousSpan; // lost race // We've now allocated a new trace context. assert parent != null || context.isLocalRoot() : "Bug (or unexpected call to internal code): parent can only be null in a local root!"; spanHandler.begin(newSpan.handlerContext, newSpan.span, parentSpan != null ? parentSpan.handlerContext : null); return newSpan; }
// An unseen context should get a pending span created on demand (start=false: no timestamp yet).
@Test
void getOrCreate_lazyCreatesASpan() {
    PendingSpan span = pendingSpans.getOrCreate(null, context, false);
    assertThat(span).isNotNull();
}
/**
 * Creates a {@link WriteRecords} sink transform seeded with safe defaults: default producer
 * properties, exactly-once disabled, no sharding, the default consumer factory, and a
 * bad-record router/handler that throws. Callers customize via the fluent setters.
 */
public static <K, V> WriteRecords<K, V> writeRecords() {
    return new AutoValue_KafkaIO_WriteRecords.Builder<K, V>()
        .setProducerConfig(WriteRecords.DEFAULT_PRODUCER_PROPERTIES)
        .setEOS(false)
        .setNumShards(0)
        .setConsumerFactoryFn(KafkaIOUtils.KAFKA_CONSUMER_FACTORY_FN)
        .setBadRecordRouter(BadRecordRouter.THROWING_ROUTER)
        .setBadRecordErrorHandler(new DefaultErrorHandler<>())
        .build();
}
// Writes records through the mock producer and verifies the single configured header is
// attached to every produced record.
@Test
public void testKafkaWriteHeaders() throws Exception {
    // Set different output topic names
    int numElements = 1;
    SimpleEntry<String, String> header = new SimpleEntry<>("header_key", "header_value");
    try (MockProducerWrapper producerWrapper = new MockProducerWrapper(new LongSerializer())) {
        ProducerSendCompletionThread completionThread =
            new ProducerSendCompletionThread(producerWrapper.mockProducer).start();
        String defaultTopic = "test";
        p.apply(mkKafkaReadTransform(numElements, new ValueAsTimestampFn()).withoutMetadata())
            .apply(ParDo.of(
                new KV2ProducerRecord(defaultTopic, true, System.currentTimeMillis(), header)))
            .setCoder(ProducerRecordCoder.of(VarIntCoder.of(), VarLongCoder.of()))
            .apply(KafkaIO.<Integer, Long>writeRecords()
                .withBootstrapServers("none")
                .withKeySerializer(IntegerSerializer.class)
                .withValueSerializer(LongSerializer.class)
                .withInputTimestamp()
                .withProducerFactoryFn(new ProducerFactoryFn(producerWrapper.producerKey)));
        p.run();
        completionThread.shutdown();
        // Verify that appropriate header is written with producer record
        List<ProducerRecord<Integer, Long>> sent = producerWrapper.mockProducer.history();
        for (int i = 0; i < numElements; i++) {
            ProducerRecord<Integer, Long> record = sent.get(i);
            Headers headers = record.headers();
            assertNotNull(headers);
            Header[] headersArray = headers.toArray();
            assertEquals(1, headersArray.length);
            assertEquals(header.getKey(), headersArray[0].key());
            assertEquals(
                header.getValue(), new String(headersArray[0].value(), StandardCharsets.UTF_8));
        }
    }
}
/** Reads the file at {@code path} as a ByteBufFlux, delegating to the chunked overload with the default maximum chunk size. */
public static ByteBufFlux fromPath(Path path) {
    return fromPath(path, MAX_CHUNK_SIZE);
}
@Test void testFromPath() throws Exception { // Create a temporary file with some binary data that will be read in chunks using the ByteBufFlux final int chunkSize = 3; final Path tmpFile = new File(temporaryDirectory, "content.in").toPath(); final byte[] data = new byte[]{0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9}; Files.write(tmpFile, data); // Make sure the file is 10 bytes (i.e. the same as the data length) assertThat(data.length).isEqualTo(Files.size(tmpFile)); // Use the ByteBufFlux to read the file in chunks of 3 bytes max and write them into a ByteArrayOutputStream for verification final Iterator<ByteBuf> it = ByteBufFlux.fromPath(tmpFile, chunkSize) .toIterable() .iterator(); final ByteArrayOutputStream out = new ByteArrayOutputStream(); while (it.hasNext()) { ByteBuf bb = it.next(); byte[] read = new byte[bb.readableBytes()]; bb.readBytes(read); bb.release(); assertThat(bb.readableBytes()).isEqualTo(0); out.write(read); } // Verify that we read the file. assertThat(data).isEqualTo(out.toByteArray()); System.out.println(Files.exists(tmpFile)); }
/**
 * Executes the KV update: optionally loads the value from a file, wraps plain literals or
 * STRING-typed values as JSON literals, then PUTs the value to the namespace's KV endpoint,
 * propagating an optional TTL via the "ttl" header. Returns 0 on success.
 */
@Override
public Integer call() throws Exception {
    super.call();

    // A file value takes precedence over the positional value argument.
    if (fileValue != null) {
        value = Files.readString(Path.of(fileValue.toString().trim()));
    }

    if (isLiteral(value) || type == Type.STRING) {
        value = wrapAsJsonLiteral(value);
    }

    // expiration is an ISO-8601 duration string, e.g. "PT5M".
    Duration ttl = expiration == null ? null : Duration.parse(expiration);

    MutableHttpRequest<String> request = HttpRequest
        .PUT(apiUri("/namespaces/") + namespace + "/kv/" + key, value)
        .contentType(MediaType.APPLICATION_JSON_TYPE);
    if (ttl != null) {
        request.header("ttl", ttl.toString());
    }

    try (DefaultHttpClient client = client()) {
        client.toBlocking().exchange(this.requestOptions(request));
    }
    return 0;
}
// End-to-end: updating a KV entry from a JSON file via the CLI should store both the typed
// value and the expected raw serialized form.
@Test
void fromFile() throws IOException, ResourceExpiredException {
    try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
        EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
        embeddedServer.start();

        File file = File.createTempFile("objectFromFile", ".json");
        file.createNewFile();
        file.deleteOnExit();
        Files.write(file.toPath(), "{\"some\":\"json\",\"from\":\"file\"}".getBytes());

        String[] args = {
            "--server", embeddedServer.getURL().toString(),
            "--user", "myuser:pass:word",
            "io.kestra.cli",
            "objectFromFile",
            // Positional value is overridden by the -f file content.
            "valueThatWillGetOverriden",
            "-f " + file.getAbsolutePath()
        };
        PicocliRunner.call(KvUpdateCommand.class, ctx, args);

        KVStoreService kvStoreService = ctx.getBean(KVStoreService.class);
        KVStore kvStore = kvStoreService.get(null, "io.kestra.cli", null);
        assertThat(kvStore.getValue("objectFromFile").get(), is(new KVValue(Map.of("some", "json", "from", "file"))));
        assertThat(((InternalKVStore)kvStore).getRawValue("objectFromFile").get(), is("{some:\"json\",from:\"file\"}"));
    }
}
/**
 * Resolves the client IP for a request by scanning the known forwarding headers in order and
 * returning the first usable entry; falls back to the resolved remote socket address, or
 * {@code UNKNOWN} when no address can be determined.
 */
public static String getClientIp(ServerHttpRequest request) {
    for (String headerName : IP_HEADER_NAMES) {
        String rawValue = request.getHeaders().getFirst(headerName);
        // Skip empty headers and the literal "unknown" placeholder some proxies insert.
        if (!StringUtils.hasText(rawValue) || UNKNOWN.equalsIgnoreCase(rawValue)) {
            continue;
        }
        // Header may carry a chain of addresses separated by ',' or ';'.
        for (String candidate : rawValue.trim().split("[,;]")) {
            if (StringUtils.hasText(candidate) && !UNKNOWN.equalsIgnoreCase(candidate)) {
                return candidate;
            }
        }
    }
    var address = request.getRemoteAddress();
    if (address == null || address.isUnresolved()) {
        return UNKNOWN;
    }
    return address.getAddress().getHostAddress();
}
// A Cloudflare-proxied request should resolve the client IP from the CF-Connecting-IP header.
@Test
void testGetIPAddressFromCloudflareProxy() {
    var request = MockServerHttpRequest.get("/")
        .header("CF-Connecting-IP", "127.0.0.1")
        .build();
    var expected = "127.0.0.1";
    var actual = IpAddressUtils.getClientIp(request);
    assertEquals(expected, actual);
}
/** Parses {@code txt} as an HTTP date by delegating to the range-based overload over the full sequence. */
public static Date parseHttpDate(CharSequence txt) {
    return parseHttpDate(txt, 0, txt.length());
}
// A single-digit hour ("8:49:37") must still parse to the expected date.
@Test
public void testParseWithSingleDigitHourMinutesAndSecond() {
    assertEquals(DATE, parseHttpDate("Sunday, 06-Nov-94 8:49:37 GMT"));
}
/** Exposes the invoker wrapper held by this context. */
@Override
public InvokerWrapper getInvokerWrapper() {
    return invokerWrapper;
}
// Invoking on a null target address must be rejected eagerly with a NullPointerException.
@Test(expected = NullPointerException.class)
public void testInvokerWrapper_invokeOnTarget_whenAddressIsNull_thenThrowException() {
    context.getInvokerWrapper().invokeOnTarget(new Object(), null);
}
/**
 * Maps each service index to the socket of its state REST API, considering only ports whose
 * tags include both "http" and "state" (first qualifying port per service wins). Warns if no
 * cluster controller is present in the model config; logs the mapping at FINE otherwise.
 */
static Map<Integer, ClusterControllerStateRestAPI.Socket> getClusterControllerSockets(ClusterInfoConfig config) {
    Map<Integer, ClusterControllerStateRestAPI.Socket> result = new TreeMap<>();
    for (ClusterInfoConfig.Services service : config.services()) {
        for (ClusterInfoConfig.Services.Ports port : service.ports()) {
            Set<String> tags = parseTags(port.tags());
            if (tags.contains("http") && tags.contains("state")) {
                result.put(service.index(), new ClusterControllerStateRestAPI.Socket(service.hostname(), port.number()));
                // First qualifying port per service wins.
                break;
            }
        }
    }
    if (result.isEmpty()) {
        log.warning("Found no cluster controller in model config");
    } else if (log.isLoggable(Level.FINE)) {
        StringBuilder sb = new StringBuilder();
        sb.append("Found ").append(result.size()).append(" cluster controllers in model config:");
        for (Map.Entry<Integer, ClusterControllerStateRestAPI.Socket> e : result.entrySet()) {
            sb.append("\n ").append(e.getKey()).append(" -> ").append(e.getValue());
        }
        log.fine(sb.toString());
    }
    return result;
}
// Only ports tagged with both "state" and "http" qualify; the first such port per service is
// selected (port 86 on host-3 also qualifies but 85 comes first).
@Test
void testMappingOfIndexToClusterControllers() {
    ClusterInfoConfig.Builder builder = new ClusterInfoConfig.Builder()
        .clusterId("cluster-id")
        .nodeCount(1)
        .services(new ClusterInfoConfig.Services.Builder()
            .index(1)
            .hostname("host-1")
            .ports(new ClusterInfoConfig.Services.Ports.Builder().number(80).tags("state http"))
            .ports(new ClusterInfoConfig.Services.Ports.Builder().number(81).tags("ignored port http")))
        .services(new ClusterInfoConfig.Services.Builder()
            .index(3)
            .hostname("host-3")
            .ports(new ClusterInfoConfig.Services.Ports.Builder().number(85).tags("state http"))
            .ports(new ClusterInfoConfig.Services.Ports.Builder().number(86).tags("foo http bar state")));
    ClusterInfoConfig config = new ClusterInfoConfig(builder);

    Map<Integer, ClusterControllerStateRestAPI.Socket> mapping = StateRestApiV2Handler.getClusterControllerSockets(config);

    Map<Integer, ClusterControllerStateRestAPI.Socket> expected = new TreeMap<>();
    expected.put(1, new ClusterControllerStateRestAPI.Socket("host-1", 80));
    expected.put(3, new ClusterControllerStateRestAPI.Socket("host-3", 85));
    assertEquals(expected, mapping);
}
/**
 * Returns true if the given choices text contains at least one non-whitespace character.
 *
 * <p>The previous implementation also checked {@code strippedChoices != null} (impossible —
 * {@code trim()} never returns null) and {@code split(...).length > 0} (always true for a
 * non-empty string, since split returns at least one element). Both redundant conditions are
 * removed; the observable behavior is unchanged.
 *
 * @param choices newline-delimited choices text, must not be null
 */
public static boolean areValidChoices(@NonNull String choices) {
    return !choices.trim().isEmpty();
}
// Blank-only input is invalid; any non-blank text — including multi-line with \n or \r\n — is valid.
@Test
public void shouldValidateChoices() {
    assertFalse(ChoiceParameterDefinition.areValidChoices(""));
    assertFalse(ChoiceParameterDefinition.areValidChoices(" "));
    assertTrue(ChoiceParameterDefinition.areValidChoices("abc"));
    assertTrue(ChoiceParameterDefinition.areValidChoices("abc\ndef"));
    assertTrue(ChoiceParameterDefinition.areValidChoices("abc\r\ndef"));
}
/**
 * Checks whether the given path exists on the local filesystem without following symlinks.
 * Symlinks are reported as existing regardless of their target. For regular paths, the
 * canonical filename is compared case-sensitively against the requested name.
 * NOTE(review): the canonical-name comparison presumably rejects differently-cased matches
 * on case-insensitive filesystems — confirm against callers.
 */
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
    final boolean exists = Files.exists(session.toPath(file), LinkOption.NOFOLLOW_LINKS);
    if(exists) {
        if(Files.isSymbolicLink(session.toPath(file))) {
            return true;
        }
        if(!file.isRoot()) {
            try {
                if(!StringUtils.equals(session.toPath(file).toFile().getCanonicalFile().getName(), file.getName())) {
                    return false;
                }
            }
            catch(IOException e) {
                // Best effort: fall through and report plain existence.
                log.warn(String.format("Failure obtaining canonical file reference for %s", file));
            }
        }
    }
    return exists;
}
// The filesystem root must always be found on a freshly opened local session.
@Test
public void testFindRoot() throws Exception {
    final LocalSession session = new LocalSession(new Host(new LocalProtocol(), new LocalProtocol().getDefaultHostname()));
    session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback());
    session.login(new DisabledLoginCallback(), new DisabledCancelCallback());
    assertTrue(new LocalFindFeature(session).find(new Path("/", EnumSet.of(Path.Type.directory))));
}
/**
 * Deletes all users matching the given username, removes their access tokens, and posts a
 * {@link UserDeletedEvent} per removed user on the server event bus.
 *
 * @param username name to delete
 * @return number of user documents removed (0 if none matched)
 */
@Override
public int delete(final String username) {
    DBObject query = new BasicDBObject();
    query.put(UserImpl.USERNAME, username);
    // Fetch matches first so deletion events can carry each user's id.
    final List<DBObject> result = query(UserImpl.class, query);
    if (result == null || result.isEmpty()) {
        return 0;
    }
    final ImmutableList.Builder<UserDeletedEvent> deletedUsersBuilder = ImmutableList.builder();
    result.forEach(userObject -> {
        final ObjectId userId = (ObjectId) userObject.get("_id");
        deletedUsersBuilder.add(UserDeletedEvent.create(userId.toHexString(), username));
    });

    LOG.debug("Deleting user(s) with username \"{}\"", username);
    query = BasicDBObjectBuilder.start(UserImpl.USERNAME, username).get();
    final int deleteCount = destroy(query, UserImpl.COLLECTION_NAME);
    if (deleteCount > 1) {
        // Duplicate usernames should not normally exist; flag it.
        LOG.warn("Removed {} users matching username \"{}\".", deleteCount, username);
    }
    accesstokenService.deleteAllForUser(username); //TODO: probably should go through listener subscribing to delete event
    final ImmutableList<UserDeletedEvent> deletedUsers = deletedUsersBuilder.build();
    deletedUsers.forEach(serverEventBus::post);
    return deleteCount;
}
// delete() removes every document matching the name and returns the count; unknown names yield 0.
@Test
@MongoDBFixtures("UserServiceImplTest.json")
public void testDeleteByName() throws Exception {
    assertThat(userService.delete("user1")).isEqualTo(1);
    assertThat(userService.delete("user-duplicate")).isEqualTo(2);
    assertThat(userService.delete("user-does-not-exist")).isEqualTo(0);
}
/**
 * Executes a CREATE CONNECTOR statement against the Connect cluster. Handles IF NOT EXISTS
 * short-circuiting, returns the created connector entity on success, and translates Connect
 * errors into a KsqlRestException carrying the upstream HTTP status.
 */
public static StatementExecutorResponse execute(
    final ConfiguredStatement<CreateConnector> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext
) {
    final CreateConnector createConnector = statement.getStatement();
    final ConnectClient client = serviceContext.getConnectClient();

    // IF NOT EXISTS: when the connector already exists, a pre-built response is returned
    // and no create call is issued.
    final Optional<KsqlEntity> connectorsResponse = handleIfNotExists(
        statement, createConnector, client);
    if (connectorsResponse.isPresent()) {
        return StatementExecutorResponse.handled(connectorsResponse);
    }

    final ConnectResponse<ConnectorInfo> response = client.create(
        createConnector.getName(),
        buildConnectorConfig(createConnector));

    if (response.datum().isPresent()) {
        return StatementExecutorResponse.handled(Optional.of(
            new CreateConnectorEntity(
                statement.getMaskedStatementText(),
                response.datum().get()
            )
        ));
    }

    if (response.error().isPresent()) {
        final String errorMsg = "Failed to create connector: " + response.error().get();
        throw new KsqlRestException(EndpointResponse.create()
            .status(response.httpCode())
            .entity(new KsqlErrorMessage(Errors.toErrorCode(response.httpCode()), errorMsg))
            .build()
        );
    }

    // A ConnectResponse is expected to carry either a payload or an error.
    throw new IllegalStateException("Either response.datum() or response.error() must be present");
}
// The executor must forward the connector name and the full config map (class + name) to the
// Connect client unchanged.
@SuppressWarnings("unchecked")
@Test
public void shouldPassInCorrectArgsToConnectClientOnExecute() {
    // Given:
    givenCreationSuccess();

    // When:
    ConnectExecutor
        .execute(CREATE_CONNECTOR_CONFIGURED, mock(SessionProperties.class), null, serviceContext);

    // Then:
    verify(connectClient).create(eq("foo"),
        (Map<String, String>) and(
            argThat(hasEntry("connector.class", "FileStreamSource")),
            argThat(hasEntry("name", "foo"))));
}
/** Returns the fixed token for this expression type. */
@Override
public String toString() {
    return "SQL_METHOD";
}
// The expression renders to a fixed token, so assert it exactly. The previous version
// trimmed both sides of the comparison, which silently tolerated leading/trailing
// whitespace regressions in toString().
@Test
public void testToString() {
    assertEquals("SQL_METHOD", SqlMethodExpr.get().toString());
}
/**
 * Reads one byte and interprets it as a boolean: any non-zero value is {@code true}.
 *
 * @throws EOFException if the end of the stream has been reached
 */
@Override
public final boolean readBoolean() throws EOFException {
    final int unsignedByte = read();
    if (unsignedByte >= 0) {
        return unsignedByte != 0;
    }
    // read() signals end-of-stream with a negative value.
    throw new EOFException();
}
// Reading a boolean past the end of the backing data must surface EOFException.
@Test(expected = EOFException.class)
public void testReadBooleanPosition_EOF() throws Exception {
    in.readBoolean(INIT_DATA.length + 1);
}
/** Materializes this scan as a SparkBatch over the planned task groups and expected schema. */
@Override
public Batch toBatch() {
    return new SparkBatch(
        sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode());
}
// Pushing IS_NULL / NOT(IS_NULL) over a truncate(4, data) system-function predicate should
// prune partitions: 0 input partitions for IS_NULL and 10 for its negation on this fixture.
@Test
public void testPartitionedIsNull() throws Exception {
    createPartitionedTable(spark, tableName, "truncate(4, data)");

    SparkScanBuilder builder = scanBuilder();

    TruncateFunction.TruncateString function = new TruncateFunction.TruncateString();
    UserDefinedScalarFunc udf = toUDF(function, expressions(intLit(4), fieldRef("data")));
    Predicate predicate = new Predicate("IS_NULL", expressions(udf));
    pushFilters(builder, predicate);
    Batch scan = builder.build().toBatch();
    assertThat(scan.planInputPartitions().length).isEqualTo(0);

    // NOT IsNULL
    builder = scanBuilder();
    predicate = new Not(predicate);
    pushFilters(builder, predicate);
    scan = builder.build().toBatch();
    assertThat(scan.planInputPartitions().length).isEqualTo(10);
}
/**
 * Asserts map equality. Equal values pass immediately; when the actual value is a map and the
 * expected value is also a map, failure is reported entry-by-entry for a more descriptive
 * message; otherwise the generic equality failure is used.
 */
@Override
public final void isEqualTo(@Nullable Object other) {
    if (Objects.equal(actual, other)) {
        return;
    }
    // Fail but with a more descriptive message:
    if (actual == null || !(other instanceof Map)) {
        super.isEqualTo(other);
        return;
    }
    containsEntriesInAnyOrder((Map<?, ?>) other, /* allowUnexpected= */ false);
}
// Comparing a non-null (empty) map against null must fail via the generic equality path.
@Test
public void isEqualToActualMapOtherNull() {
    expectFailureWhenTestingThat(ImmutableMap.of()).isEqualTo(null);
}
/**
 * Reads the file at {@code path} into a string as UTF-8, prefixing every line (including the
 * first) with '\n'. Throws {@link BusException} when the path does not name a regular file.
 * NOTE(review): read errors are only printed via printStackTrace and an empty/partial string
 * is returned — consider logging or propagating instead.
 */
public static String readFile(String path) {
    StringBuilder builder = new StringBuilder();
    File file = new File(path);
    if (!file.isFile()) {
        throw new BusException(StrUtil.format("File path {} is not a file.", path));
    }
    try (InputStreamReader inputStreamReader =
            new InputStreamReader(Files.newInputStream(file.toPath()), StandardCharsets.UTF_8);
        BufferedReader bufferedReader = new BufferedReader(inputStreamReader)) {
        String content;
        while ((content = bufferedReader.readLine()) != null) {
            // Intentionally prepends '\n' before every line, including the first.
            builder.append("\n");
            builder.append(content);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return builder.toString();
}
// Smoke test reading the dinky log file; ignored because the log may not exist in CI.
@Ignore
@Test
public void testReadFile() {
    String result = DirUtil.readFile(DirConstant.LOG_DIR_PATH + "/dinky.log");
    Assertions.assertThat(result).isNotNull();
}
/**
 * Converts a pixel Y coordinate at a given map size to a latitude (inverse Web Mercator).
 *
 * @param pixelY  pixel Y coordinate, must be within [0, mapSize]
 * @param mapSize total map size in pixels at the current zoom level
 * @return latitude in degrees; 0 at the vertical midpoint, positive toward the top
 * @throws IllegalArgumentException if {@code pixelY} is outside [0, mapSize]
 */
public static double pixelYToLatitude(double pixelY, long mapSize) {
    if (pixelY < 0 || pixelY > mapSize) {
        // Fixed message: the old text read "invalid pixelY coordinate <mapSize>: <pixelY>",
        // which made the map size look like the offending coordinate.
        throw new IllegalArgumentException("invalid pixelY coordinate on map size " + mapSize + ": " + pixelY);
    }
    double y = 0.5 - (pixelY / mapSize);
    return 90 - 360 * Math.atan(Math.exp(-y * (2 * Math.PI))) / Math.PI;
}
// For every tile size and zoom level: pixel 0 maps to LATITUDE_MAX, the vertical midpoint to
// the equator, and mapSize to LATITUDE_MIN — for both the map-size and scale-factor variants.
// Out-of-range pixels must be rejected.
@Test
public void pixelYToLatitudeTest() {
    for (int tileSize : TILE_SIZES) {
        for (byte zoomLevel = ZOOM_LEVEL_MIN; zoomLevel <= ZOOM_LEVEL_MAX; ++zoomLevel) {
            long mapSize = MercatorProjection.getMapSize(zoomLevel, tileSize);
            double latitude = MercatorProjection.pixelYToLatitude(0, mapSize);
            Assert.assertEquals(MercatorProjection.LATITUDE_MAX, latitude, 0);
            latitude = MercatorProjection.pixelYToLatitudeWithScaleFactor(0, MercatorProjection.zoomLevelToScaleFactor(zoomLevel), tileSize);
            Assert.assertEquals(MercatorProjection.LATITUDE_MAX, latitude, 0);

            latitude = MercatorProjection.pixelYToLatitude((float) mapSize / 2, mapSize);
            Assert.assertEquals(0, latitude, 0);
            mapSize = MercatorProjection.getMapSizeWithScaleFactor(MercatorProjection.zoomLevelToScaleFactor(zoomLevel), tileSize);
            latitude = MercatorProjection.pixelYToLatitudeWithScaleFactor((float) mapSize / 2, MercatorProjection.zoomLevelToScaleFactor(zoomLevel), tileSize);
            Assert.assertEquals(0, latitude, 0);

            latitude = MercatorProjection.pixelYToLatitude(mapSize, mapSize);
            Assert.assertEquals(MercatorProjection.LATITUDE_MIN, latitude, 0);
            latitude = MercatorProjection.pixelYToLatitudeWithScaleFactor(mapSize, MercatorProjection.zoomLevelToScaleFactor(zoomLevel), tileSize);
            Assert.assertEquals(MercatorProjection.LATITUDE_MIN, latitude, 0);
        }
        verifyInvalidPixelYToLatitude(-1, (byte) 0, tileSize);
        verifyInvalidPixelYToLatitude(tileSize + 1, (byte) 0, tileSize);
    }
}
/** Returns the recorded cluster state history entries (newest first, per the history's ordering). */
public List<ClusterStateHistoryEntry> getClusterStateHistory() {
    return clusterStateHistory.getHistory();
}
// Each promoted state version must be appended to the history, newest entry first, with the
// previous state recorded alongside it.
@Test
void applying_state_adds_to_cluster_state_history() {
    final StateVersionTracker versionTracker = createWithMockedMetrics();
    updateAndPromote(versionTracker, stateWithoutAnnotations("distributor:2 storage:2"), 100);
    updateAndPromote(versionTracker, stateWithoutAnnotations("distributor:3 storage:3"), 200);
    updateAndPromote(versionTracker, stateWithoutAnnotations("distributor:4 storage:4"), 300);

    String s4 = "version:4 distributor:4 storage:4";
    String s3 = "version:3 distributor:3 storage:3";
    String s2 = "version:2 distributor:2 storage:2";
    // Note: newest entry first
    assertEquals(List.of(historyEntry(s4, s3, 300), historyEntry(s3, s2, 200), historyEntry(s2, 100)),
        versionTracker.getClusterStateHistory());
}
/**
 * Reconciles a Reason: skips deleted resources; on first sight (when the trigger finalizer is
 * not yet present) invokes {@code onNewReasonReceived} and persists the finalizer so each
 * reason is processed exactly once. Never schedules a retry.
 */
@Override
public Result reconcile(Request request) {
    client.fetch(Reason.class, request.name()).ifPresent(reason -> {
        if (ExtensionUtil.isDeleted(reason)) {
            return;
        }
        // addFinalizers returns true only when the finalizer was newly added.
        if (ExtensionUtil.addFinalizers(reason.getMetadata(), Set.of(TRIGGERED_FINALIZER))) {
            // notifier
            onNewReasonReceived(reason);
            client.update(reason);
        }
    });
    return Result.doNotRetry();
}
// A fresh (non-deleted, finalizer-free) reason must be dispatched to the notification center,
// get the trigger finalizer set, and be persisted.
@Test
void reconcile() {
    var reason = mock(Reason.class);
    var metadata = mock(Metadata.class);
    when(reason.getMetadata()).thenReturn(metadata);
    when(metadata.getDeletionTimestamp()).thenReturn(null);
    when(metadata.getFinalizers()).thenReturn(Set.of());
    when(client.fetch(eq(Reason.class), eq("fake-reason")))
        .thenReturn(Optional.of(reason));
    when(notificationCenter.notify(eq(reason))).thenReturn(Mono.empty());

    notificationTrigger.reconcile(new Reconciler.Request("fake-reason"));

    verify(notificationCenter).notify(eq(reason));
    verify(metadata).setFinalizers(eq(Set.of(NotificationTrigger.TRIGGERED_FINALIZER)));
    verify(client).update(any(Reason.class));
}
/**
 * Lists all buckets of the Spectra service as volume directories, carrying each bucket's
 * creation date. Request failures are mapped to backend-specific exceptions.
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    try {
        final AttributedList<Path> buckets = new AttributedList<Path>();
        final Ds3Client client = new SpectraClientBuilder().wrap(session.getClient(), session.getHost());
        final GetServiceResponse response = client.getService(new GetServiceRequest());
        for(final BucketDetails b : response.getListAllMyBucketsResult().getBuckets()) {
            final Path bucket = new Path(PathNormalizer.normalize(b.getName()), EnumSet.of(Path.Type.volume, Path.Type.directory));
            bucket.attributes().setCreationDate(b.getCreationDate().getTime());
            buckets.add(bucket);
        }
        return buckets;
    }
    catch(FailedRequestException e) {
        throw new SpectraExceptionMappingService().map("Listing directory {0} failed", e, directory);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map("Listing directory {0} failed", e, directory);
    }
}
// Listing the root must enumerate buckets without raising an exception.
@Test
public void testList() throws Exception {
    new SpectraBucketListService(session).list(
        new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory)),
        new DisabledListProgressListener());
}
/** Stores the wrapped timestamp, or null when the incoming Ruby value is null. */
@Override
public void set(RubyTimestamp value) {
    this.value = value == null ? null : value.getTimestamp();
}
// Setting a Ruby timestamp must expose its unwrapped value and the gauge's timestamp type.
@Test
public void set() {
    RubyTimeStampGauge gauge = new RubyTimeStampGauge("bar");
    gauge.set(RUBY_TIMESTAMP);
    assertThat(gauge.getValue()).isEqualTo(RUBY_TIMESTAMP.getTimestamp());
    assertThat(gauge.getType()).isEqualTo(MetricType.GAUGE_RUBYTIMESTAMP);
}
/**
 * SQL starts_with(x, y): true if x begins with y, compared byte-wise on the underlying
 * slices. Null inputs propagate as SQL NULL via @SqlNullable.
 */
@SqlNullable
@Description("Returns whether the first string starts with the second")
@ScalarFunction("starts_with")
@LiteralParameters({"x", "y"})
@SqlType(StandardTypes.BOOLEAN)
public static Boolean startsWith(@SqlType("varchar(x)") Slice x, @SqlType("varchar(y)") Slice y) {
    // A prefix longer than the string can never match.
    if (x.length() < y.length()) {
        return false;
    }

    return x.equals(0, y.length(), y, 0, y.length());
}
// Covers prefix matches, empty prefix (always true), non-matches, and NULL propagation.
@Test
public void testStartsWith() {
    assertFunction("starts_with('abcd', 'ab')", BOOLEAN, true);
    assertFunction("starts_with('abcd', '')", BOOLEAN, true);
    assertFunction("starts_with('abcd', 'ba')", BOOLEAN, false);
    assertFunction("starts_with('', 'ba')", BOOLEAN, false);
    assertFunction("starts_with(NULL, 'ba')", BOOLEAN, null);
    assertFunction("starts_with('abcd', NULL)", BOOLEAN, null);
    assertFunction("starts_with('', NULL)", BOOLEAN, null);
    assertFunction("starts_with(NULL, '')", BOOLEAN, null);
    assertFunction("starts_with(NULL, NULL)", BOOLEAN, null);
}
@Override public boolean retryRequest( HttpRequest request, IOException exception, int execCount, HttpContext context) { if (execCount > maxRetries) { // Do not retry if over max retries return false; } if (nonRetriableExceptions.contains(exception.getClass())) { return false; } else { for (Class<? extends IOException> rejectException : nonRetriableExceptions) { if (rejectException.isInstance(exception)) { return false; } } } if (request instanceof CancellableDependency && ((CancellableDependency) request).isCancelled()) { return false; } // Retry if the request is considered idempotent return Method.isIdempotent(request.getMethod()); }
// NOTE(review): this exercises the HttpResponse-based retry overload (a 504 should be retried),
// not the IOException-based overload shown elsewhere.
@Test
public void testRetryGatewayTimeout() {
    BasicHttpResponse response504 = new BasicHttpResponse(504, "Gateway timeout");
    assertThat(retryStrategy.retryRequest(response504, 3, null)).isTrue();
}
/**
 * Builds hardware-assisted BLE scan filter entries for the given beacon parser, one per
 * hardware-assist manufacturer. For iBeacon (type code 0x0215) with at least a first
 * identifier, the filter also encodes the identifiers (UUID, and optionally major/minor).
 * Otherwise the filter matches only the beacon type code at its layout offset.
 * NOTE(review): the identifier branch returns after the FIRST manufacturer — confirm that
 * emitting a single filter there is intentional.
 */
public List<ScanFilterData> createScanFilterDataForBeaconParser(BeaconParser beaconParser, List<Identifier> identifiers) {
    ArrayList<ScanFilterData> scanFilters = new ArrayList<ScanFilterData>();
    long typeCode = beaconParser.getMatchingBeaconTypeCode();
    int startOffset = beaconParser.getMatchingBeaconTypeCodeStartOffset();
    int endOffset = beaconParser.getMatchingBeaconTypeCodeEndOffset();
    byte[] typeCodeBytes = BeaconParser.longToByteArray(typeCode, endOffset-startOffset+1);
    if (identifiers != null && identifiers.size() > 0 && identifiers.get(0) != null && beaconParser.getMatchingBeaconTypeCode() == 0x0215) {
        // If type code 0215 ibeacon, we allow also adding identifiers to the filter
        for (int manufacturer : beaconParser.getHardwareAssistManufacturers()) {
            ScanFilterData sfd = new ScanFilterData();
            sfd.manufacturer = manufacturer;
            // 18 bytes = 2 (type code) + 16 (UUID); +2 per present major/minor identifier.
            int length = 18;
            if (identifiers.size() == 2) {
                length = 20;
            }
            if (identifiers.size() == 3) {
                length = 22;
            }
            sfd.filter = new byte[length];
            sfd.filter[0] = typeCodeBytes[0];
            sfd.filter[1] = typeCodeBytes[1];
            byte[] idBytes = identifiers.get(0).toByteArray();
            for (int i = 0; i < idBytes.length; i++) {
                sfd.filter[i+2] = idBytes[i];
            }
            if (identifiers.size() > 1 && identifiers.get(1) != null) {
                idBytes = identifiers.get(1).toByteArray();
                for (int i = 0; i < idBytes.length; i++) {
                    sfd.filter[i+18] = idBytes[i];
                }
            }
            if (identifiers.size() > 2 && identifiers.get(2) != null) {
                idBytes = identifiers.get(2).toByteArray();
                for (int i = 0; i < idBytes.length; i++) {
                    sfd.filter[i+20] = idBytes[i];
                }
            }
            // Every filter byte is significant: full 0xff mask.
            sfd.mask = new byte[length];
            for (int i = 0 ; i < length; i++) {
                sfd.mask[i] = (byte) 0xff;
            }
            sfd.serviceUuid = null;
            sfd.serviceUuid128Bit = new byte[0];
            scanFilters.add(sfd);
            return scanFilters;
        }
    }
    for (int manufacturer : beaconParser.getHardwareAssistManufacturers()) {
        ScanFilterData sfd = new ScanFilterData();
        Long serviceUuid = beaconParser.getServiceUuid();
        // Note: the -2 here is because we want the filter and mask to start after the
        // two-byte manufacturer code, and the beacon parser expression is based on offsets
        // from the start of the two byte code
        int length = endOffset + 1 - 2;
        byte[] filter = new byte[0];
        byte[] mask = new byte[0];
        if (length > 0) {
            filter = new byte[length];
            mask = new byte[length];
            for (int layoutIndex = 2; layoutIndex <= endOffset; layoutIndex++) {
                int filterIndex = layoutIndex-2;
                if (layoutIndex < startOffset) {
                    // Bytes before the type code are wildcards.
                    filter[filterIndex] = 0;
                    mask[filterIndex] = 0;
                } else {
                    filter[filterIndex] = typeCodeBytes[layoutIndex-startOffset];
                    mask[filterIndex] = (byte) 0xff;
                }
            }
        }
        sfd.manufacturer = manufacturer;
        sfd.filter = filter;
        sfd.mask = mask;
        sfd.serviceUuid = serviceUuid;
        sfd.serviceUuid128Bit = beaconParser.getServiceUuid128Bit();
        scanFilters.add(sfd);
    }
    return scanFilters;
}
// A layout whose type code starts at offset 0 should yield a two-byte filter covering the
// bytes after the manufacturer code, fully masked.
@Test
public void testZeroOffsetScanFilter() throws Exception {
    org.robolectric.shadows.ShadowLog.stream = System.err;
    BeaconParser parser = new BeaconParser();
    parser.setBeaconLayout("m:0-3=11223344,i:4-6,p:24-24");
    BeaconManager.setManifestCheckingDisabled(true); // no manifest available in robolectric
    List<ScanFilterUtils.ScanFilterData> scanFilterDatas = new ScanFilterUtils().createScanFilterDataForBeaconParser(parser, null);
    assertEquals("scanFilters should be of correct size", 1, scanFilterDatas.size());
    ScanFilterUtils.ScanFilterData sfd = scanFilterDatas.get(0);
    assertEquals("manufacturer should be right", 0x004c, sfd.manufacturer);
    assertEquals("mask length should be right", 2, sfd.mask.length);
    assertArrayEquals("mask should be right", new byte[] {(byte)0xff, (byte)0xff}, sfd.mask);
    assertArrayEquals("filter should be right", new byte[] {(byte)0x33, (byte)0x44}, sfd.filter);
}
/** Issues CONFIG SET {@code param} {@code value} on the given cluster node and blocks until it completes. */
@Override
public void setConfig(RedisClusterNode node, String param, String value) {
    RedisClient entry = getEntry(node);
    RFuture<Void> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_SET, param, value);
    syncFuture(f);
}
// Setting a config parameter on the first master must succeed without throwing.
@Test
public void testSetConfig() {
    RedisClusterNode master = getFirstMaster();
    connection.setConfig(master, "timeout", "10");
}
/**
 * Returns an id generator that yields the decimal string of a counter starting at "1",
 * incrementing atomically on each call.
 */
public static IdGenerator incrementingLongs() {
    final AtomicLong counter = new AtomicLong();
    return () -> String.valueOf(counter.incrementAndGet());
}
// Generated ids must be the decimal string of a counter starting at 1.
@Test
public void incrementing() {
    IdGenerator gen = IdGenerators.incrementingLongs();
    assertThat(gen.getId(), equalTo("1"));
    assertThat(gen.getId(), equalTo("2"));
}
/**
 * Pumps the wrapped input stream line by line, prefixing each line with the configured type
 * and logging it at the configured level until end of stream. Read errors are logged and end
 * the loop. NOTE(review): the readers are never closed — presumably the underlying stream's
 * owner closes it; confirm.
 */
public void run() {
    try {
        InputStreamReader isr = new InputStreamReader( this.is );
        BufferedReader br = new BufferedReader( isr );
        String line = null;
        while ( ( line = br.readLine() ) != null ) {
            String logEntry = this.type + " " + line;
            // Dispatch to the log channel matching the configured level.
            switch ( this.logLevel ) {
                case MINIMAL:
                    log.logMinimal( logEntry );
                    break;
                case BASIC:
                    log.logBasic( logEntry );
                    break;
                case DETAILED:
                    log.logDetailed( logEntry );
                    break;
                case DEBUG:
                    log.logDebug( logEntry );
                    break;
                case ROWLEVEL:
                    log.logRowlevel( logEntry );
                    break;
                case ERROR:
                    log.logError( logEntry );
                    break;
                default: // NONE
                    break;
            }
        }
    } catch ( IOException ioe ) {
        if ( log.isError() ) {
            log.logError( this.type + " " + Const.getStackTracker( ioe ) );
        }
    }
}
// With level DETAILED, every stream line must be forwarded to logDetailed.
@Test
public void testLogDetailed() {
    streamLogger = new ConfigurableStreamLogger( log, is, LogLevel.DETAILED, PREFIX );
    streamLogger.run();
    Mockito.verify( log ).logDetailed( OUT1 );
    Mockito.verify( log ).logDetailed( OUT2 );
}
/** Returns the shared Gson instance held by the SingletonHolder. */
public static Gson instance() {
    return SingletonHolder.INSTANCE;
}
// Serializing a DESEncrypter must be refused with an explicit message to avoid leaking
// security-sensitive internals.
@Test
void rejectsSerializationOfDESEncrypter() {
    final IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
        () -> Serialization.instance().toJson(new DESEncrypter(mock(DESCipherProvider.class))));
    assertEquals(format("Refusing to serialize a %s instance and leak security details!",
        DESEncrypter.class.getName()), e.getMessage());
}
/**
 * Removes a replica that is present in the volume map but apparently missing on disk, and
 * schedules its invalidation so the namenode is notified.
 *
 * @param bpid       block pool id
 * @param block      block whose replica appears to be missing
 * @param checkFiles when true, the block/meta files are re-checked and the replica is only
 *                   removed if at least one of them is actually absent
 */
public void invalidateMissingBlock(String bpid, Block block, boolean checkFiles) {

    // The replica is in the volume map but not on disk. We cannot tell here whether the
    // block file was lost or the disk failed.
    // If the block was lost: deleting the local block file would be unnecessary anyway.
    // If the disk failed: deleting the local block file here could cause a missing block
    // when only one replica is left.
    // So removing it from the volume map and notifying the namenode is the safe choice.
    try (AutoCloseableLock lock = lockManager.writeLock(LockLevel.BLOCK_POOl, bpid)) {
        // Check if this block is on the volume map.
        ReplicaInfo replica = volumeMap.get(bpid, block);
        // Double-check block or meta file existence when checkFiles as true.
        if (replica != null && (!checkFiles ||
            (!replica.blockDataExists() || !replica.metadataExists()))) {
            volumeMap.remove(bpid, block);
            invalidate(bpid, replica);
        }
    }
}
// Simulates a disk hiccup: invalidating a "missing" block must drop it from the replica map
// without deleting the on-disk file, raise low-redundancy, and recover once the file is
// rediscovered via checkAndUpdate.
@Test
public void testInvalidateMissingBlock() throws Exception {
    long blockSize = 1024;
    int heartbeatInterval = 1;
    HdfsConfiguration c = new HdfsConfiguration();
    c.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, heartbeatInterval);
    c.setLong(DFS_BLOCK_SIZE_KEY, blockSize);
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(c).
        numDataNodes(1).build();
    try {
        cluster.waitActive();
        DFSTestUtil.createFile(cluster.getFileSystem(), new Path("/a"),
            blockSize, (short)1, 0);

        String bpid = cluster.getNameNode().getNamesystem().getBlockPoolId();
        DataNode dn = cluster.getDataNodes().get(0);
        FsDatasetImpl fsdataset = (FsDatasetImpl) dn.getFSDataset();
        List<ReplicaInfo> replicaInfos = fsdataset.getFinalizedBlocks(bpid);
        assertEquals(1, replicaInfos.size());

        ReplicaInfo replicaInfo = replicaInfos.get(0);
        String blockPath = replicaInfo.getBlockURI().getPath();
        String metaPath = replicaInfo.getMetadataURI().getPath();
        File blockFile = new File(blockPath);
        File metaFile = new File(metaPath);

        // Mock local block file not found when disk with some exception.
        fsdataset.invalidateMissingBlock(bpid, replicaInfo, false);

        // Assert local block file wouldn't be deleted from disk.
        assertTrue(blockFile.exists());
        // Assert block info would be removed from ReplicaMap.
        assertEquals("null",
            fsdataset.getReplicaString(bpid, replicaInfo.getBlockId()));
        BlockManager blockManager = cluster.getNameNode().
            getNamesystem().getBlockManager();
        GenericTestUtils.waitFor(() ->
            blockManager.getLowRedundancyBlocksCount() == 1, 100, 5000);

        // Mock local block file found when disk back to normal.
        FsVolumeSpi.ScanInfo info = new FsVolumeSpi.ScanInfo(
            replicaInfo.getBlockId(),
            blockFile.getParentFile().getAbsoluteFile(),
            blockFile.getName(), metaFile.getName(),
            replicaInfo.getVolume());
        fsdataset.checkAndUpdate(bpid, info);
        GenericTestUtils.waitFor(() ->
            blockManager.getLowRedundancyBlocksCount() == 0, 100, 5000);
    } finally {
        cluster.shutdown();
    }
}
/**
 * Copies the internal service-id dimension value to the public service-id dimension when it
 * is present; packets without it are left untouched.
 */
@Override
public void process(MetricsPacket.Builder builder) {
    String serviceIdValue = builder.getDimensionValue(toDimensionId(INTERNAL_SERVICE_ID));
    if (serviceIdValue != null)
        builder.putDimension(toDimensionId(SERVICE_ID), serviceIdValue);
}
// Without an internal service-id dimension, no public service-id dimension may be added.
@Test
public void new_service_id_is_not_added_when_internal_service_id_is_null() {
    var builder = new MetricsPacket.Builder(toServiceId("foo"));
    var processor = new ServiceIdDimensionProcessor();
    processor.process(builder);
    assertFalse(builder.getDimensionIds().contains(NEW_ID_DIMENSION));
}
/** Loads this step's metadata from the given XML node; databases and metastore are unused here. */
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
    readData( stepnode );
}
// Loading step metadata from an XML node must populate the expected fields (verified via
// assertXmlOutputMeta); the parent trans/cluster manager plumbing is mocked.
@Test
public void testLoadAndGetXml() throws Exception {
    ZipFileMeta zipFileMeta = new ZipFileMeta();
    Node stepnode = getTestNode();
    DatabaseMeta dbMeta = mock( DatabaseMeta.class );
    IMetaStore metaStore = mock( IMetaStore.class );
    StepMeta mockParentStepMeta = mock( StepMeta.class );
    zipFileMeta.setParentStepMeta( mockParentStepMeta );
    TransMeta mockTransMeta = mock( TransMeta.class );
    NamedClusterEmbedManager embedManager = mock( NamedClusterEmbedManager.class );
    when( mockParentStepMeta.getParentTransMeta() ).thenReturn( mockTransMeta );
    when( mockTransMeta.getNamedClusterEmbedManager() ).thenReturn( embedManager );
    zipFileMeta.loadXML( stepnode, Collections.singletonList( dbMeta ), metaStore );
    assertXmlOutputMeta( zipFileMeta );
}
/**
 * Removes the backup log entry for the given transaction id.
 * A no-op when no backup log exists for {@code txnId} (Map.remove
 * tolerates absent keys).
 */
public void purgeBackupLog(UUID txnId) {
    txBackupLogs.remove(txnId);
}
@Test
public void purgeBackupLog_whenNotExist_thenIgnored() {
    // Purging a transaction id with no backup log must complete without throwing.
    txService.purgeBackupLog(TXN);
}
/**
 * Validates an argument precondition.
 *
 * @param isValid result of the caller's validity check
 * @param message exception message used when the check fails
 * @throws IllegalArgumentException if {@code isValid} is false
 */
public static void checkArgument(boolean isValid, String message) throws IllegalArgumentException {
    if (isValid) {
        return;
    }
    throw new IllegalArgumentException(message);
}
@Test
public void testCheckArgumentWithTwoParams() {
    // A true flag must pass straight through without throwing.
    try {
        Preconditions.checkArgument(true, "Test message %s %s", 12, null);
    } catch (IllegalArgumentException e) {
        Assert.fail("Should not throw exception when isValid is true");
    }
    // A false flag must throw, carrying the formatted message.
    IllegalArgumentException caught = null;
    try {
        Preconditions.checkArgument(false, "Test message %s %s", 12, null);
    } catch (IllegalArgumentException e) {
        caught = e;
    }
    if (caught == null) {
        Assert.fail("Should throw exception when isValid is false");
    }
    Assert.assertEquals("Should format message", "Test message 12 null", caught.getMessage());
}
/**
 * Attempts to acquire the named lock using the default lock duration.
 * Delegates to {@code tryLock(name, DEFAULT_LOCK_DURATION_SECONDS)}.
 *
 * @return whatever the two-argument overload returns for that duration
 */
@Override
public boolean tryLock(String name) {
    return tryLock(name, DEFAULT_LOCK_DURATION_SECONDS);
}
@Test
@UseDataProvider("randomValidDuration")
public void tryLock_with_duration_fails_with_IAE_if_name_is_empty(int randomValidDuration) {
    // An empty lock name must be rejected regardless of the (valid) duration.
    expectBadLockNameIAE(() -> underTest.tryLock("", randomValidDuration), "");
}
public static void onUMengNotificationClick(Object UMessage) {
    // Guard: nothing to track without a message object.
    if (UMessage == null) {
        SALog.i(TAG, "UMessage is null");
        return;
    }
    // Respect the global push-tracking switch.
    if (!isTrackPushEnabled()) {
        return;
    }
    try {
        // The UMeng message class is not a compile-time dependency, so read
        // its raw payload reflectively.
        JSONObject raw = ReflectUtil.callMethod(UMessage, "getRaw");
        if (raw == null) {
            SALog.i(TAG, "onUMengNotificationClick:raw is null");
            return;
        }
        JSONObject body = raw.optJSONObject("body");
        if (body == null) {
            return;
        }
        String extra = raw.optString("extra");
        String title = body.optString("title");
        String content = body.optString("text");
        trackNotificationOpenedEvent(getSFData(extra), title, content, "UMeng", null);
        SALog.i(TAG, String.format("onUMengNotificationClick is called, title is %s, content is %s," +
                " extras is %s", title, content, extra));
    } catch (Exception e) {
        SALog.printStackTrace(e);
    }
}
@Test
public void onUMengNotificationClick() {
    // A null UMessage must be handled gracefully (logged, no exception thrown).
    PushAutoTrackHelper.onUMengNotificationClick(null);
}
public static String toString(InetAddress inetAddress) { if (inetAddress instanceof Inet6Address) { String address = InetAddresses.toAddrString(inetAddress); // toAddrString() returns any interface/scope as a %-suffix, // see https://github.com/google/guava/commit/3f61870ac6e5b18dbb74ce6f6cb2930ad8750a43 int percentIndex = address.indexOf('%'); return percentIndex < 0 ? address : address.substring(0, percentIndex); } else { return inetAddress.getHostAddress(); } }
@Test
void testToStringWithInterface() throws SocketException {
    // Every address on every local interface must render without a %scope suffix.
    NetworkInterface.networkInterfaces()
            .flatMap(NetworkInterface::inetAddresses)
            .map(InetAddressUtil::toString)
            .forEach(rendered ->
                    assertEquals(-1, rendered.indexOf('%'), "No interface in " + rendered));
}