focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override public void process(HttpResponse response, HttpContext context) throws HttpException, IOException { List<Header> warnings = Arrays.stream(response.getHeaders("Warning")).filter(header -> !this.isDeprecationMessage(header.getValue())).collect(Collectors.toList()); response.removeHeaders("Warning"); warnings.stream().forEach(header -> response.addHeader(header)); }
@Test public void testInterceptorSingleHeader() throws IOException, HttpException { ElasticsearchFilterDeprecationWarningsInterceptor interceptor = new ElasticsearchFilterDeprecationWarningsInterceptor(); HttpResponse response = new BasicHttpResponse(new BasicStatusLine(new ProtocolVersion("HTTP", 0, 0), 0, null)); response.addHeader("Test", "This header should not trigger the interceptor."); interceptor.process(response, null); assertThat(response.getAllHeaders()) .as("Number of Headers should be unchanged.") .hasSize(1); assertThat(response.getAllHeaders()[0].getName()) .as("Remaining Header should be same as the given.") .isEqualTo("Test"); }
@Override public Map<String, ScannerPlugin> installRequiredPlugins() { LOG.info("Loading required plugins"); InstallResult result = installPlugins(p -> p.getRequiredForLanguages() == null || p.getRequiredForLanguages().isEmpty()); LOG.debug("Plugins not loaded because they are optional: {}", result.skippedPlugins); return result.installedPluginsByKey; }
@Test public void fail_if_plugin_not_found_two_times() throws IOException { WsTestUtil.mockReader(wsClient, "api/plugins/installed", new InputStreamReader(getClass().getResourceAsStream("ScannerPluginInstallerTest/blue-installed.json")), new InputStreamReader(getClass().getResourceAsStream("ScannerPluginInstallerTest/green-installed.json"))); enqueueDownload("scmgit", "abc"); enqueueDownload("cobol", "ghi"); enqueueNotFoundDownload("java", "def"); assertThatThrownBy(() -> underTest.installRequiredPlugins()) .isInstanceOf(IllegalStateException.class) .hasMessage("Fail to download plugin [java]. Not found."); }
public String stringify(boolean value) { throw new UnsupportedOperationException( "stringify(boolean) was called on a non-boolean stringifier: " + toString()); }
@Test public void testTimeNanoStringifier() { for (PrimitiveStringifier stringifier : asList(TIME_NANOS_STRINGIFIER, TIME_NANOS_UTC_STRINGIFIER)) { String timezoneAmendment = (stringifier == TIME_NANOS_STRINGIFIER ? "" : "+0000"); assertEquals(withZoneString("00:00:00.000000000", timezoneAmendment), stringifier.stringify(0l)); assertEquals( withZoneString("12:34:56.789012987", timezoneAmendment), stringifier.stringify(convert(NANOSECONDS, 12, 34, 56, 789012987))); assertEquals( withZoneString("-12:34:56.000789012", timezoneAmendment), stringifier.stringify(convert(NANOSECONDS, -12, -34, -56, -789012))); assertEquals( withZoneString("12345:12:34.000056789", timezoneAmendment), stringifier.stringify(convert(NANOSECONDS, 12345, 12, 34, 56789))); assertEquals( withZoneString("-12345:12:34.000056789", timezoneAmendment), stringifier.stringify(convert(NANOSECONDS, -12345, -12, -34, -56789))); checkThrowingUnsupportedException(stringifier, Integer.TYPE, Long.TYPE); } }
@SuppressWarnings("unchecked") public <X> TypeDescriptor<T> where( TypeParameter<X> typeParameter, TypeDescriptor<X> typeDescriptor) { return where(typeParameter.typeVariable, typeDescriptor.getType()); }
@Test public void testWhere() throws Exception { useWhereMethodToDefineTypeParam(new TypeDescriptor<String>() {}); }
public static SubscriptionPath subscriptionPathFromName( String projectId, String subscriptionName) { return new SubscriptionPath( String.format("projects/%s/subscriptions/%s", projectId, subscriptionName)); }
@Test public void subscriptionPathFromNameWellFormed() { SubscriptionPath path = PubsubClient.subscriptionPathFromName("test", "something"); assertEquals("projects/test/subscriptions/something", path.getPath()); assertEquals("/subscriptions/test/something", path.getFullPath()); assertEquals(ImmutableList.of("test", "something"), path.getDataCatalogSegments()); }
public AfnemersberichtAanDGL createAfnemersberichtAanDGL() { AfnemersberichtAanDGL result = new AfnemersberichtAanDGL(); BerichtHeaderType berichtHeader = new BerichtHeaderType(); berichtHeader.setOntvangerId(digileveringOIN); berichtHeader.setVerstrekkerId(digidOIN); berichtHeader.setDatumtijdstempelVerstrekker(getCurrentTime()); berichtHeader.setKenmerkVerstrekker(UUID.randomUUID().toString() + "@digid.nl"); berichtHeader.setBerichtversie("1.0"); result.setBerichtHeader(berichtHeader); AfnemersberichtAanDGL.Stuurgegevens stuurgegevens = new AfnemersberichtAanDGL.Stuurgegevens(); VersiebeheerType berichtsoort = new VersiebeheerType(); stuurgegevens.setBerichtsoort(berichtsoort); stuurgegevens.setVersieBerichttype("3.10"); result.setStuurgegevens(stuurgegevens); result.setInhoud(new AfnemersInhoudType()); return result; }
@Test public void testcreateAfnemersberichtAanDGLAp01(){ Ap01 testAp01 = dglMessageFactory.createAp01("SSSSSSSSS"); AfnemersberichtAanDGL result = classUnderTest.createAfnemersberichtAanDGL(testAp01); assertEquals("Ap01", result.getStuurgegevens().getBerichtsoort().getNaam()); assertEquals("1.0", result.getStuurgegevens().getBerichtsoort().getVersie()); assertEquals(testAp01, result.getInhoud().getAp01()); }
@Override public Validation validate(Validation val) { if (StringUtils.isBlank(systemEnvironment.getPropertyImpl("jetty.home"))) { systemEnvironment.setProperty("jetty.home", systemEnvironment.getPropertyImpl("user.dir")); } systemEnvironment.setProperty("jetty.base", systemEnvironment.getPropertyImpl("jetty.home")); File home = new File(systemEnvironment.getPropertyImpl("jetty.home")); File work = new File(systemEnvironment.getPropertyImpl("jetty.home"), "work"); if (home.exists()) { if (work.exists()) { try { FileUtils.deleteDirectory(work); } catch (IOException e) { String message = format("Error trying to remove Jetty working directory {0}: {1}", work.getAbsolutePath(), e); return val.addError(new RuntimeException(message)); } } work.mkdir(); } return Validation.SUCCESS; }
@Test public void shouldNotCreateTheJettyHomeDirIfItDoesNotExist() { String jettyHome = "home_dir"; when(systemEnvironment.getPropertyImpl("jetty.home")).thenReturn(jettyHome); Validation val = new Validation(); jettyWorkDirValidator.validate(val); assertThat(val.isSuccessful(), is(true)); assertThat(new File(jettyHome).exists(), is(false)); }
@Override public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer) { return aggregate(initializer, Materialized.with(null, null)); }
@Test public void timeWindowAggregateTestStreamsTest() { final KTable<Windowed<String>, String> customers = windowedCogroupedStream.aggregate( MockInitializer.STRING_INIT, Materialized.with(Serdes.String(), Serdes.String())); customers.toStream().to(OUTPUT); try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) { final TestInputTopic<String, String> testInputTopic = driver.createInputTopic( TOPIC, new StringSerializer(), new StringSerializer()); final TestOutputTopic<Windowed<String>, String> testOutputTopic = driver.createOutputTopic( OUTPUT, new TimeWindowedDeserializer<>(new StringDeserializer(), WINDOW_SIZE), new StringDeserializer()); testInputTopic.pipeInput("k1", "A", 0); testInputTopic.pipeInput("k2", "A", 0); testInputTopic.pipeInput("k2", "A", 1); testInputTopic.pipeInput("k1", "A", 2); testInputTopic.pipeInput("k1", "B", 3); testInputTopic.pipeInput("k2", "B", 3); testInputTopic.pipeInput("k2", "B", 4); testInputTopic.pipeInput("k1", "B", 4); assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0+A", 0); assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0+A", 0); assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0+A+A", 1); assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0+A+A", 2); assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0+A+A+B", 3); assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0+A+A+B", 3); assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0+A+A+B+B", 4); assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0+A+A+B+B", 4); } }
@Override @Description("The specification of the Kafka and ZooKeeper clusters, and Topic Operator.") public KafkaSpec getSpec() { return super.getSpec(); }
@Test public void testListeners() { Kafka model = TestUtils.fromYaml("Kafka" + ".yaml", Kafka.class); assertThat(model.getSpec().getKafka().getListeners(), is(notNullValue())); assertThat(model.getSpec().getKafka().getListeners().size(), is(2)); List<GenericKafkaListener> listeners = model.getSpec().getKafka().getListeners(); assertThat(listeners.get(0).getAuth().getType(), is("scram-sha-512")); assertThat(listeners.get(1).getAuth().getType(), is("tls")); }
@Override public boolean betterThan(Num criterionValue1, Num criterionValue2) { return criterionValue1.isLessThan(criterionValue2); }
@Test public void betterThan() { AnalysisCriterion criterion = new LinearTransactionCostCriterion(1000, 0.5); assertTrue(criterion.betterThan(numOf(3.1), numOf(4.2))); assertFalse(criterion.betterThan(numOf(2.1), numOf(1.9))); }
@Override public NoticeDO getNotice(Long id) { return noticeMapper.selectById(id); }
@Test public void testGetNotice_success() { // 插入前置数据 NoticeDO dbNotice = randomPojo(NoticeDO.class); noticeMapper.insert(dbNotice); // 查询 NoticeDO notice = noticeService.getNotice(dbNotice.getId()); // 验证插入与读取对象是否一致 assertNotNull(notice); assertPojoEquals(dbNotice, notice); }
@Override public void write(int b) { ensureAvailable(1); buffer[pos++] = (byte) (b); }
@Test public void testWriteForPositionB() { out.write(1, 5); assertEquals(5, out.buffer[1]); }
@Override public V random() { return get(randomAsync()); }
@Test public void testRandom() { RScoredSortedSet<Integer> set = redisson.getScoredSortedSet("test"); set.add(1, 10); set.add(2, 20); set.add(3, 30); assertThat(set.random()).isIn(10, 20, 30); assertThat(set.random(2)).containsAnyOf(10, 20, 30).hasSize(2); Map<Integer, Double> map = set.randomEntries(2); assertThat(map).containsAnyOf(entry(10, 1D), entry(20, 2D), entry(30, 3D)).hasSize(2); }
public static Collection<Object> accumulateValues(DataIterator it, Collection<Object> accumulator) { for(DataElement element = it.next(); element !=null; element = it.next()) { accumulator.add(element.getValue()); } return accumulator; }
@Test public void testAccumulateByPathAndFilter() throws Exception { SimpleTestData data = IteratorTestData.createSimpleTestData(); List<Object> ids = new ArrayList<>( Builder.create(data.getDataElement(), IterationOrder.PRE_ORDER) .filterBy(Predicates.and(Predicates.pathMatchesPathSpec(IteratorTestData.PATH_TO_ID), IteratorTestData.LESS_THAN_3_CONDITION)) .accumulateValues()); assertEquals(2, ids.size()); assertTrue(ids.contains(1)); assertTrue(ids.contains(2)); }
static Optional<Object> coerceParam(Class<?> currentIdxActualParameterType, Class<?> expectedParameterType, Object actualObject) { /* 10.3.2.9.4 Type conversions from singleton list: When the type of the expression is List<T>, the value of the expression is a singleton list and the target type is T, the expression is converted by unwrapping the first element. */ if (Collection.class.isAssignableFrom(currentIdxActualParameterType)) { Collection<?> valueCollection = (Collection<?>) actualObject; if (valueCollection.size() == 1) { Object singletonValue = valueCollection.iterator().next(); // re-perform the assignable-from check, this time using the element itself the singleton value from // the original parameter list if (singletonValue != null) { return expectedParameterType.isAssignableFrom(singletonValue.getClass()) ? Optional.of(singletonValue) : coerceParam(singletonValue.getClass(), expectedParameterType, singletonValue); } } } /* to singleton list: When the type of the expression is T and the target type is List<T> the expression is converted to a singleton list. */ if (!Collection.class.isAssignableFrom(currentIdxActualParameterType) && Collection.class.isAssignableFrom(expectedParameterType)) { Object singletonValue = coerceParam(currentIdxActualParameterType, currentIdxActualParameterType, actualObject) .orElse(actualObject); return Optional.of(Collections.singletonList(singletonValue)); } /* from date to date and time When the type of the expression is date and the target type is date and time, the expression is converted to a date time value in which the time of day is UTC midnight (00:00:00) */ if (actualObject instanceof LocalDate localDate && ZonedDateTime.class.isAssignableFrom(expectedParameterType)) { Object coercedObject = DateTimeEvalHelper.coerceDateTime(localDate); return Optional.of(coercedObject); } return Optional.empty(); }
@Test void coerceParam() { // Coerce List to singleton Class<?> currentIdxActualParameterType = List.class; Class<?> expectedParameterType = Number.class; Object valueObject = 34; Object actualObject = List.of(valueObject); Optional<Object> retrieved = CoerceUtil.coerceParam(currentIdxActualParameterType, expectedParameterType, actualObject); assertNotNull(retrieved); assertTrue(retrieved.isPresent()); assertEquals(valueObject, retrieved.get()); // Coerce single element to singleton list currentIdxActualParameterType = Number.class; expectedParameterType = List.class; actualObject = 34; retrieved = CoerceUtil.coerceParam(currentIdxActualParameterType, expectedParameterType, actualObject); assertNotNull(retrieved); assertTrue(retrieved.isPresent()); assertTrue(retrieved.get() instanceof List); List lstRetrieved = (List) retrieved.get(); assertEquals(1, lstRetrieved.size()); assertEquals(actualObject, lstRetrieved.get(0)); // Coerce date to date and time actualObject = LocalDate.now(); currentIdxActualParameterType = LocalDate.class; expectedParameterType = ZonedDateTime.class; retrieved = CoerceUtil.coerceParam(currentIdxActualParameterType, expectedParameterType, actualObject); assertNotNull(retrieved); assertTrue(retrieved.isPresent()); assertTrue(retrieved.get() instanceof ZonedDateTime); ZonedDateTime zdtRetrieved = (ZonedDateTime) retrieved.get(); assertEquals(actualObject, zdtRetrieved.toLocalDate()); assertEquals(ZoneOffset.UTC, zdtRetrieved.getOffset()); assertEquals(0, zdtRetrieved.getHour()); assertEquals(0, zdtRetrieved.getMinute()); assertEquals(0, zdtRetrieved.getSecond()); }
public int configuredMetadataVersion(final int priorVersion) { final String upgradeFrom = streamsConfig.getString(StreamsConfig.UPGRADE_FROM_CONFIG); if (upgradeFrom != null) { switch (UpgradeFromValues.fromString(upgradeFrom)) { case UPGRADE_FROM_0100: log.info( "Downgrading metadata.version from {} to 1 for upgrade from 0.10.0.x.", LATEST_SUPPORTED_VERSION ); return 1; case UPGRADE_FROM_0101: case UPGRADE_FROM_0102: case UPGRADE_FROM_0110: case UPGRADE_FROM_10: case UPGRADE_FROM_11: log.info( "Downgrading metadata.version from {} to 2 for upgrade from {}.x.", LATEST_SUPPORTED_VERSION, upgradeFrom ); return 2; case UPGRADE_FROM_20: case UPGRADE_FROM_21: case UPGRADE_FROM_22: case UPGRADE_FROM_23: // These configs are for cooperative rebalancing and should not affect the metadata version break; case UPGRADE_FROM_24: case UPGRADE_FROM_25: case UPGRADE_FROM_26: case UPGRADE_FROM_27: case UPGRADE_FROM_28: case UPGRADE_FROM_30: case UPGRADE_FROM_31: case UPGRADE_FROM_32: case UPGRADE_FROM_33: case UPGRADE_FROM_34: case UPGRADE_FROM_35: case UPGRADE_FROM_36: case UPGRADE_FROM_37: case UPGRADE_FROM_38: // we need to add new version when new "upgrade.from" values become available // This config is for explicitly sending FK response to a requested partition // and should not affect the metadata version break; default: throw new IllegalArgumentException( "Unknown configuration value for parameter 'upgrade.from': " + upgradeFrom ); } } return priorVersion; }
@Test public void configuredMetadataVersionShouldSupportAllUpgradeFromVersions() { for (final UpgradeFromValues upgradeFrom : UpgradeFromValues.values()) { config.put(StreamsConfig.UPGRADE_FROM_CONFIG, upgradeFrom.toString()); final AssignorConfiguration assignorConfiguration = new AssignorConfiguration(config); try { assignorConfiguration.configuredMetadataVersion(0); } catch (final Exception error) { throw new AssertionError("Upgrade from " + upgradeFrom + " failed with " + error.getMessage() + "!"); } } }
public static byte[] ecdh(XECPrivateKey privateKey, XECPublicKey publicKey) { try { var keyAgreement = KeyAgreement.getInstance("XDH"); keyAgreement.init(privateKey); keyAgreement.doPhase(publicKey, true); byte[] sharedSecret = keyAgreement.generateSecret(); // RFC 7748 recommends checking that the shared secret is not all zero bytes. // Furthermore, RFC 9180 states "For X25519 and X448, public keys and Diffie-Hellman // outputs MUST be validated as described in [RFC7748]". // Usually we won't get here at all since Java will throw an InvalidKeyException // from detecting a key with a low order point. But in case we _do_ get here, fail fast. if (SideChannelSafe.allZeros(sharedSecret)) { throw new IllegalArgumentException("Computed shared secret is all zeroes"); } return sharedSecret; } catch (NoSuchAlgorithmException | InvalidKeyException e) { throw new RuntimeException(e); } }
@Test void x25519_ecdh_matches_rfc_7748_test_vector() { var alice_priv = xecPrivFromHex("77076d0a7318a57d3c16c17251b26645df4c2f87ebc0992ab177fba51db92c2a"); var alice_pub = xecPubFromHex( "8520f0098930a754748b7ddcb43ef75a0dbf3a0d26381af4eba4a98eaa9b4e6a"); var bob_priv = xecPrivFromHex("5dab087e624a8a4b79e17f8b83800ee66f3bb1292618b6fd1c2f8b27ff88e0eb"); var bob_public = xecPubFromHex( "de9edb7d7b7dc1b4d35b61c2ece435373f8343c85b78674dadfc7e146f882b4f"); var expectedShared = "4a5d9d5ba4ce2de1728e3bf480350f25e07e21c947d19e3376f09b3c1e161742"; byte[] sharedAliceToBob = KeyUtils.ecdh(alice_priv, bob_public); assertEquals(expectedShared, hex(sharedAliceToBob)); byte[] sharedBobToAlice = KeyUtils.ecdh(bob_priv, alice_pub); assertEquals(expectedShared, hex(sharedBobToAlice)); }
public FEELFnResult<BigDecimal> invoke(@ParameterName( "list" ) List list) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } FEELFnResult<BigDecimal> s = sum.invoke( list ); Function<FEELEvent, FEELFnResult<BigDecimal>> ifLeft = event -> FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "unable to sum the elements which is required to calculate the mean")); Function<BigDecimal, FEELFnResult<BigDecimal>> ifRight = (sum) -> { try { return FEELFnResult.ofResult( sum.divide( BigDecimal.valueOf( list.size() ), MathContext.DECIMAL128 ) ); } catch (Exception e) { return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to perform division to calculate the mean", e) ); } }; return s.cata(ifLeft, ifRight); }
@Test void invokeArrayNull() { FunctionTestUtil.assertResultError(meanFunction.invoke((Object[]) null), InvalidParametersEvent.class); }
public static String generateApply( final String inputCode, final String mapperCode, final Class<?> returnType ) { return "((" + returnType.getSimpleName() + ")" + NullSafe.class.getSimpleName() + ".apply(" + inputCode + "," + mapperCode + "))"; }
@Test public void shouldGenerateApply() { // Given: final String mapperCode = LambdaUtil .toJavaCode("val", Long.class, "val.longValue() + 1"); // When: final String javaCode = NullSafe .generateApply("arguments.get(\"input\")", mapperCode, Long.class); // Then: final Evaluator evaluator = CodeGenTestUtil.cookCode(javaCode, Long.class); assertThat(evaluator.evaluate("input", 10L), is(11L)); assertThat(evaluator.evaluate("input", null), is(nullValue())); }
@Override protected void validateDataImpl(TenantId tenantId, Dashboard dashboard) { validateString("Dashboard title", dashboard.getTitle()); if (dashboard.getTenantId() == null) { throw new DataValidationException("Dashboard should be assigned to tenant!"); } else { if (!tenantService.tenantExists(dashboard.getTenantId())) { throw new DataValidationException("Dashboard is referencing to non-existent tenant!"); } } }
@Test void testValidateNameInvocation() { Dashboard dashboard = new Dashboard(); dashboard.setTitle("flight control"); dashboard.setTenantId(tenantId); validator.validateDataImpl(tenantId, dashboard); verify(validator).validateString("Dashboard title", dashboard.getTitle()); }
@HighFrequencyInvocation(canBeCached = true) @Override public Connection getConnection() { return DriverStateContext.getConnection(databaseName, contextManager); }
@Test void assertGetConnectionWithUsernameAndPassword() throws Exception { Connection connection = mock(Connection.class, RETURNS_DEEP_STUBS); when(connection.getMetaData().getURL()).thenReturn("jdbc:mock://127.0.0.1/foo_ds"); try (ShardingSphereDataSource actual = createShardingSphereDataSource(new MockedDataSource(connection))) { assertThat(((ShardingSphereConnection) actual.getConnection("", "")).getDatabaseConnectionManager().getConnections(DefaultDatabase.LOGIC_NAME, "ds", 0, 1, ConnectionMode.MEMORY_STRICTLY) .get(0), is(connection)); } }
@Override public List<String> listTables(String databaseName) throws CatalogException, DatabaseNotExistException { List<String> tables = catalog.listTables(Namespace.of(databaseName)).stream() .map(tableIdentifier -> toTablePath(tableIdentifier).getTableName()) .collect(Collectors.toList()); log.info("Fetched {} tables.", tables.size()); return tables; }
@Test @Order(5) void listTables() { Assertions.assertTrue(icebergCatalog.listTables(databaseName).contains(tableName)); }
@Override public void processElement(StreamRecord<RowData> element) throws Exception { RowData inputRow = element.getValue(); long timestamp; if (windowAssigner.isEventTime()) { if (inputRow.isNullAt(rowtimeIndex)) { // null timestamp would be dropped numNullRowTimeRecordsDropped.inc(); return; } timestamp = inputRow.getTimestamp(rowtimeIndex, 3).getMillisecond(); } else { timestamp = getProcessingTimeService().getCurrentProcessingTime(); } timestamp = toUtcTimestampMills(timestamp, shiftTimeZone); Collection<TimeWindow> elementWindows = windowAssigner.assignWindows(inputRow, timestamp); collect(inputRow, elementWindows); }
@Test public void testProcessingTimeTumblingWindows() throws Exception { final TumblingWindowAssigner assigner = TumblingWindowAssigner.of(Duration.ofSeconds(3)).withProcessingTime(); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(assigner, shiftTimeZone); testHarness.setup(OUT_SERIALIZER); testHarness.open(); // process elements ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>(); // timestamp is ignored in processing time testHarness.setProcessingTime(20L); testHarness.processElement(insertRecord("key1", 1, Long.MAX_VALUE)); testHarness.setProcessingTime(3999L); testHarness.processElement(insertRecord("key2", 1, Long.MAX_VALUE)); // append 3 fields: window_start, window_end, window_time expectedOutput.add( insertRecord("key1", 1, Long.MAX_VALUE, localMills(0L), localMills(3000L), 2999L)); expectedOutput.add( insertRecord( "key2", 1, Long.MAX_VALUE, localMills(3000L), localMills(6000L), 5999L)); ASSERTER.assertOutputEqualsSorted( "Output was not correct.", expectedOutput, testHarness.getOutput()); testHarness.close(); }
public void incBrokerGetNumsWithoutSystemTopic(final String topic, final int incValue) { if (TopicValidator.isSystemTopic(topic)) { return; } this.statsTable.get(BROKER_GET_NUMS_WITHOUT_SYSTEM_TOPIC).getAndCreateStatsItem(this.clusterName).getValue().add(incValue); }
@Test public void testIncBrokerGetNumsWithoutSystemTopic() { brokerStatsManager.incBrokerGetNumsWithoutSystemTopic(TOPIC, 1); assertThat(brokerStatsManager.getStatsItem(BrokerStatsManager.BROKER_GET_NUMS_WITHOUT_SYSTEM_TOPIC, CLUSTER_NAME) .getValue().doubleValue()).isEqualTo(1L); assertThat(brokerStatsManager.getBrokerGetNumsWithoutSystemTopic()).isEqualTo(1L); brokerStatsManager.incBrokerGetNumsWithoutSystemTopic(TopicValidator.RMQ_SYS_TRACE_TOPIC, 1); assertThat(brokerStatsManager.getStatsItem(BrokerStatsManager.BROKER_GET_NUMS_WITHOUT_SYSTEM_TOPIC, CLUSTER_NAME) .getValue().doubleValue()).isEqualTo(1L); assertThat(brokerStatsManager.getBrokerGetNumsWithoutSystemTopic()).isEqualTo(1L); }
public Builder toBuilder() { Builder result = new Builder(); result.flags = flags; result.traceIdHigh = traceIdHigh; result.traceId = traceId; return result; }
@Test void canUsePrimitiveOverloads_true() { TraceIdContext primitives = base.toBuilder() .sampled(true) .debug(true) .build(); TraceIdContext objects = base.toBuilder() .sampled(Boolean.TRUE) .debug(Boolean.TRUE) .build(); assertThat(primitives) .isEqualToComparingFieldByField(objects); assertThat(primitives.debug()) .isTrue(); assertThat(primitives.sampled()) .isTrue(); }
@Override public Set<KubevirtFloatingIp> floatingIps() { return ImmutableSet.copyOf(kubevirtRouterStore.floatingIps()); }
@Test public void testGetFloatingIps() { createBasicFloatingIpDisassociated(); assertEquals("Number of floating IPs did not match", 1, target.floatingIps().size()); }
@Override public CRParseResult responseMessageForParseDirectory(String responseBody) { ErrorCollection errors = new ErrorCollection(); try { ResponseScratch responseMap = parseResponseForMigration(responseBody); ParseDirectoryResponseMessage parseDirectoryResponseMessage; if (responseMap.target_version == null) { errors.addError("Plugin response message", "missing 'target_version' field"); return new CRParseResult(errors); } else if (responseMap.target_version > CURRENT_CONTRACT_VERSION) { String message = String.format("'target_version' is %s but the GoCD Server supports %s", responseMap.target_version, CURRENT_CONTRACT_VERSION); errors.addError("Plugin response message", message); return new CRParseResult(errors); } else { int version = responseMap.target_version; while (version < CURRENT_CONTRACT_VERSION) { version++; responseBody = migrate(responseBody, version); } // after migration, json should match contract parseDirectoryResponseMessage = codec.getGson().fromJson(responseBody, ParseDirectoryResponseMessage.class); parseDirectoryResponseMessage.validateResponse(errors); errors.addErrors(parseDirectoryResponseMessage.getPluginErrors()); return new CRParseResult(parseDirectoryResponseMessage.getEnvironments(), parseDirectoryResponseMessage.getPipelines(), errors); } } catch (Exception ex) { StringBuilder builder = new StringBuilder(); builder.append("Unexpected error when handling plugin response").append('\n'); builder.append(ex); // "location" of error is runtime. This is what user will see in config repo errors list. errors.addError("runtime", builder.toString()); LOGGER.error(builder.toString(), ex); return new CRParseResult(errors); } }
@Test public void shouldNotErrorWhenTargetVersionInResponse() { String json = """ { "target_version" : 1, "pipelines" : [], "errors" : [] }"""; makeMigratorReturnSameJSON(); CRParseResult result = handler.responseMessageForParseDirectory(json); assertThat(result.hasErrors()).isFalse(); }
static void setTableInputInformation( TableInput.Builder tableInputBuilder, TableMetadata metadata) { setTableInputInformation(tableInputBuilder, metadata, null); }
@Test public void testSetTableInputInformation() { // Actual TableInput TableInput.Builder actualTableInputBuilder = TableInput.builder(); Schema schema = new Schema( Types.NestedField.required(1, "x", Types.StringType.get(), "comment1"), Types.NestedField.required( 2, "y", Types.StructType.of(Types.NestedField.required(3, "z", Types.IntegerType.get())), "comment2")); PartitionSpec partitionSpec = PartitionSpec.builderFor(schema).identity("x").withSpecId(1000).build(); TableMetadata tableMetadata = TableMetadata.newTableMetadata(schema, partitionSpec, "s3://test", tableLocationProperties); IcebergToGlueConverter.setTableInputInformation(actualTableInputBuilder, tableMetadata); TableInput actualTableInput = actualTableInputBuilder.build(); // Expected TableInput TableInput expectedTableInput = TableInput.builder() .storageDescriptor( StorageDescriptor.builder() .location("s3://test") .additionalLocations(Sets.newHashSet(tableLocationProperties.values())) .columns( ImmutableList.of( Column.builder() .name("x") .type("string") .comment("comment1") .parameters( ImmutableMap.of( IcebergToGlueConverter.ICEBERG_FIELD_ID, "1", IcebergToGlueConverter.ICEBERG_FIELD_OPTIONAL, "false", IcebergToGlueConverter.ICEBERG_FIELD_CURRENT, "true")) .build(), Column.builder() .name("y") .type("struct<z:int>") .comment("comment2") .parameters( ImmutableMap.of( IcebergToGlueConverter.ICEBERG_FIELD_ID, "2", IcebergToGlueConverter.ICEBERG_FIELD_OPTIONAL, "false", IcebergToGlueConverter.ICEBERG_FIELD_CURRENT, "true")) .build())) .build()) .build(); assertThat(actualTableInput.storageDescriptor().additionalLocations()) .as("additionalLocations should match") .isEqualTo(expectedTableInput.storageDescriptor().additionalLocations()); assertThat(actualTableInput.storageDescriptor().location()) .as("Location should match") .isEqualTo(expectedTableInput.storageDescriptor().location()); assertThat(actualTableInput.storageDescriptor().columns()) .as("Columns should match") 
.isEqualTo(expectedTableInput.storageDescriptor().columns()); }
public SimulationRunMetadata build() { int available = 0; Map<String, Integer> outputCounter = new HashMap<>(); Map<ScenarioWithIndex, Map<String, Integer>> scenarioCounter = new HashMap<>(); AuditLog auditLog = new AuditLog(); for (ScenarioResultMetadata scenarioResultMetadatum : scenarioResultMetadata) { // this value is the same for all the scenarios available = scenarioResultMetadatum.getAvailable().size(); scenarioResultMetadatum.getExecutedWithCounter() .forEach((name, counter) -> outputCounter.compute(name, (key, number) -> number == null ? counter : number + counter)); } for (ScenarioResultMetadata scenarioResultMetadatum : scenarioResultMetadata) { scenarioCounter.put(scenarioResultMetadatum.getScenarioWithIndex(), scenarioResultMetadatum.getExecutedWithCounter()); auditLog.addAuditLogLines(scenarioResultMetadatum.getAuditLogLines()); } return new SimulationRunMetadata(available, outputCounter.keySet().size(), outputCounter, scenarioCounter, auditLog); }
// Builds two scenario results (sharing decisions d1..d3), feeds them to the builder and
// checks the aggregated availability, execution counts, coverage, per-scenario counters
// and the merged audit log size/content.
// NOTE(review): fillAuditMessagesForDecision is invoked with result1 even for the
// result2 message lists — presumably intentional, since the total audit-line count is
// asserted regardless of which result holds the lines; confirm against the helper.
@Test
public void build() {
    ScenarioWithIndex scenarioWithIndex1 = new ScenarioWithIndex(1, new Scenario());
    ScenarioResultMetadata result1 = new ScenarioResultMetadata(scenarioWithIndex1);
    result1.addExecuted("d1");
    result1.addExecuted("d2");
    result1.addAvailable("d1");
    result1.addAvailable("d2");
    result1.addAvailable("d3");
    List<DMNMessage> messagesResult1decision1 = getRandomlyGeneratedDMNMessageList();
    List<DMNMessage> messagesResult1decision2 = getRandomlyGeneratedDMNMessageList();
    Map<Integer, List<String>> expectedResult1Decision1 =
            fillAuditMessagesForDecision(result1, messagesResult1decision1, "d1");
    Map<Integer, List<String>> expectedResult1Decision2 =
            fillAuditMessagesForDecision(result1, messagesResult1decision2, "d2");
    ScenarioResultMetadata result2 = new ScenarioResultMetadata(new ScenarioWithIndex(2, new Scenario()));
    List<String> expectedDecisionsResult2 = List.of("d1", "d3");
    result2.addExecuted(expectedDecisionsResult2.get(0));
    result2.addExecuted(expectedDecisionsResult2.get(1));
    result2.addAvailable("d1");
    result2.addAvailable("d2");
    result2.addAvailable("d3");
    List<DMNMessage> messagesResult2decision1 = getRandomlyGeneratedDMNMessageList();
    List<DMNMessage> messagesResult2decision3 = getRandomlyGeneratedDMNMessageList();
    Map<Integer, List<String>> expectedResult2Decision1 =
            fillAuditMessagesForDecision(result1, messagesResult2decision1, "d1");
    Map<Integer, List<String>> expectedResult2Decision3 =
            fillAuditMessagesForDecision(result1, messagesResult2decision3, "d3");
    SimulationRunMetadataBuilder builder = SimulationRunMetadataBuilder.create();
    builder.addScenarioResultMetadata(result1);
    builder.addScenarioResultMetadata(result2);
    SimulationRunMetadata build = builder.build();
    assertThat(build.getAvailable()).isEqualTo(3);
    assertThat(build.getExecuted()).isEqualTo(3);
    assertThat(build.getCoveragePercentage()).isCloseTo(100, within(0.1));
    // d1 executed in both scenarios, d2 only in the first.
    assertThat(build.getOutputCounter()).containsEntry("d1", 2).containsEntry("d2", 1);
    assertThat(build.getScenarioCounter().get(scenarioWithIndex1)).hasSize(2);
    AuditLog retrieved = build.getAuditLog();
    assertThat(retrieved).isNotNull();
    final List<AuditLogLine> auditLogLines = retrieved.getAuditLogLines();
    // All four generated message lists must appear in the merged audit log.
    assertThat(auditLogLines).isNotNull().hasSize(messagesResult1decision1.size() +
            messagesResult1decision2.size() +
            messagesResult2decision1.size() +
            messagesResult2decision3.size());
    checkAuditLogLine(auditLogLines, expectedResult1Decision1, expectedResult1Decision2,
            expectedResult2Decision1, expectedResult2Decision3);
}
@Override protected void copy(List<S3ResourceId> sourcePaths, List<S3ResourceId> destinationPaths) throws IOException { checkArgument( sourcePaths.size() == destinationPaths.size(), "sizes of sourcePaths and destinationPaths do not match"); Stream.Builder<Callable<Void>> tasks = Stream.builder(); Iterator<S3ResourceId> sourcePathsIterator = sourcePaths.iterator(); Iterator<S3ResourceId> destinationPathsIterator = destinationPaths.iterator(); while (sourcePathsIterator.hasNext()) { final S3ResourceId sourcePath = sourcePathsIterator.next(); final S3ResourceId destinationPath = destinationPathsIterator.next(); tasks.add( () -> { copy(sourcePath, destinationPath); return null; }); } callTasks(tasks.build()); }
@Test public void testCopy() throws IOException { testCopy(s3Config("s3")); testCopy(s3Config("other")); testCopy(s3ConfigWithSSECustomerKey("s3")); testCopy(s3ConfigWithSSECustomerKey("other")); }
public Account updatePniKeys(final Account account, final IdentityKey pniIdentityKey, final Map<Byte, ECSignedPreKey> deviceSignedPreKeys, @Nullable final Map<Byte, KEMSignedPreKey> devicePqLastResortPreKeys, final List<IncomingMessage> deviceMessages, final Map<Byte, Integer> pniRegistrationIds) throws MismatchedDevicesException, StaleDevicesException { validateDeviceMessages(account, deviceMessages); // Don't try to be smart about ignoring unnecessary retries. If we make literally no change we will skip the ddb // write anyway. Linked devices can handle some wasted extra key rotations. final Account updatedAccount = accountsManager.updatePniKeys( account, pniIdentityKey, deviceSignedPreKeys, devicePqLastResortPreKeys, pniRegistrationIds); sendDeviceMessages(updatedAccount, deviceMessages); return updatedAccount; }
@Test void updatePniKeysSetPrimaryDevicePrekeyAndSendMessages() throws Exception { final UUID aci = UUID.randomUUID(); final UUID pni = UUID.randomUUID(); final Account account = mock(Account.class); when(account.getUuid()).thenReturn(aci); when(account.getPhoneNumberIdentifier()).thenReturn(pni); final Device d2 = mock(Device.class); final byte deviceId2 = 2; when(d2.getId()).thenReturn(deviceId2); when(account.getDevice(deviceId2)).thenReturn(Optional.of(d2)); when(account.getDevices()).thenReturn(List.of(d2)); final ECKeyPair pniIdentityKeyPair = Curve.generateKeyPair(); final IdentityKey pniIdentityKey = new IdentityKey(pniIdentityKeyPair.getPublicKey()); final Map<Byte, ECSignedPreKey> prekeys = Map.of(Device.PRIMARY_ID, KeysHelper.signedECPreKey(1, pniIdentityKeyPair), deviceId2, KeysHelper.signedECPreKey(2, pniIdentityKeyPair)); final Map<Byte, Integer> registrationIds = Map.of(Device.PRIMARY_ID, 17, deviceId2, 19); final IncomingMessage msg = mock(IncomingMessage.class); when(msg.destinationDeviceId()).thenReturn(deviceId2); when(msg.content()).thenReturn(Base64.getEncoder().encodeToString(new byte[]{1})); changeNumberManager.updatePniKeys(account, pniIdentityKey, prekeys, null, List.of(msg), registrationIds); verify(accountsManager).updatePniKeys(account, pniIdentityKey, prekeys, null, registrationIds); final ArgumentCaptor<MessageProtos.Envelope> envelopeCaptor = ArgumentCaptor.forClass(MessageProtos.Envelope.class); verify(messageSender).sendMessage(any(), eq(d2), envelopeCaptor.capture(), eq(false)); final MessageProtos.Envelope envelope = envelopeCaptor.getValue(); assertEquals(aci, UUID.fromString(envelope.getDestinationUuid())); assertEquals(aci, UUID.fromString(envelope.getSourceUuid())); assertEquals(Device.PRIMARY_ID, envelope.getSourceDevice()); assertFalse(updatedPhoneNumberIdentifiersByAccount.containsKey(account)); }
public boolean isFound() { return found; }
@Test public void testCalcAverageSpeedDetailsWithShortDistances_issue1848() { Weighting weighting = new SpeedWeighting(carAvSpeedEnc); Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 6); assertTrue(p.isFound()); Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting, List.of(AVERAGE_SPEED), new PathDetailsBuilderFactory(), 0, pathDetailGraph); assertEquals(1, details.size()); List<PathDetail> averageSpeedDetails = details.get(AVERAGE_SPEED); assertEquals(4, averageSpeedDetails.size()); // reverse path includes 'null' value as first p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(6, 1); assertTrue(p.isFound()); details = PathDetailsFromEdges.calcDetails(p, carManager, weighting, List.of(AVERAGE_SPEED), new PathDetailsBuilderFactory(), 0, pathDetailGraph); assertEquals(1, details.size()); averageSpeedDetails = details.get(AVERAGE_SPEED); assertEquals(5, averageSpeedDetails.size()); assertNull(averageSpeedDetails.get(0).getValue()); }
@Override public PrefsRoot getPrefsRoot() { final PrefsRoot root = new PrefsRoot(1); for (String locale : mLocaleToStore) { final PrefItem localeChild = root.createChild().addValue("locale", locale); NextWordsStorage storage = new NextWordsStorage(mContext, locale); for (NextWordsContainer nextWordsContainer : storage.loadStoredNextWords()) { final PrefItem word = localeChild.createChild().addValue("word", nextWordsContainer.word.toString()); for (NextWord nextWord : nextWordsContainer.getNextWordSuggestions()) { word.createChild() .addValue("nextWord", nextWord.nextWord) .addValue("usedCount", Integer.toString(nextWord.getUsedCount())); } } } return root; }
@Test public void testEmptyLoad() { final NextWordPrefsProvider underTest = new NextWordPrefsProvider(getApplicationContext(), asList("en", "fr")); final PrefsRoot emptyRoot = underTest.getPrefsRoot(); Assert.assertEquals(1, emptyRoot.getVersion()); Assert.assertEquals(0, TestUtils.convertToList(emptyRoot.getValues()).size()); Assert.assertEquals(2, TestUtils.convertToList(emptyRoot.getChildren()).size()); final PrefItem emptyEn = TestUtils.convertToList(emptyRoot.getChildren()).get(0); final PrefItem emptyFr = TestUtils.convertToList(emptyRoot.getChildren()).get(1); Assert.assertEquals("en", emptyEn.getValue("locale")); Assert.assertEquals(0, TestUtils.convertToList(emptyEn.getChildren()).size()); Assert.assertEquals("fr", emptyFr.getValue("locale")); Assert.assertEquals(0, TestUtils.convertToList(emptyFr.getChildren()).size()); }
@Benchmark @Threads(1) public void testBundleProcessingThreadCounterMutation( BundleProcessingThreadCounterState counterState) throws Exception { counterState.bundleCounter.inc(); }
@Test public void testBundleProcessingThreadCounterMutation() throws Exception { BundleProcessingThreadCounterState state = new BundleProcessingThreadCounterState(); new MetricsBenchmark().testBundleProcessingThreadCounterMutation(state); state.check(); }
@VisibleForTesting public static void addUserAgentEnvironments(List<String> info) { info.add(String.format(OS_FORMAT, OSUtils.OS_NAME)); if (EnvironmentUtils.isDocker()) { info.add(DOCKER_KEY); } if (EnvironmentUtils.isKubernetes()) { info.add(KUBERNETES_KEY); } if (EnvironmentUtils.isGoogleComputeEngine()) { info.add(GCE_KEY); } else { addEC2Info(info); } }
@Test public void userAgentEnvironmentStringEC2EMR() { String randomProductCode = "random123code"; Mockito.when(EnvironmentUtils.isEC2()).thenReturn(true); Mockito.when(EnvironmentUtils.getEC2ProductCode()).thenReturn(randomProductCode); Mockito.when(EnvironmentUtils.isEMR(Mockito.anyString())).thenReturn(true); Mockito.when(EC2MetadataUtils.getUserData()).thenReturn("emr_apps"); List<String> info = new ArrayList<>(); UpdateCheckUtils.addUserAgentEnvironments(info); Assert.assertEquals(4, info.size()); Assert.assertEquals(String.format(UpdateCheckUtils.PRODUCT_CODE_FORMAT, randomProductCode), info.get(1)); Assert.assertEquals(UpdateCheckUtils.EMR_KEY, info.get(2)); Assert.assertEquals(UpdateCheckUtils.EC2_KEY, info.get(3)); }
public static CoordinatorRecord newGroupMetadataTombstoneRecord( String groupId ) { return new CoordinatorRecord( new ApiMessageAndVersion( new GroupMetadataKey() .setGroup(groupId), (short) 2 ), null // Tombstone ); }
@Test public void testNewGroupMetadataTombstoneRecord() { CoordinatorRecord expectedRecord = new CoordinatorRecord( new ApiMessageAndVersion( new GroupMetadataKey() .setGroup("group-id"), (short) 2), null); CoordinatorRecord groupMetadataRecord = GroupCoordinatorRecordHelpers.newGroupMetadataTombstoneRecord("group-id"); assertEquals(expectedRecord, groupMetadataRecord); }
@Override public OUT nextRecord(OUT record) throws IOException { OUT returnRecord = null; do { returnRecord = super.nextRecord(record); } while (returnRecord == null && !reachedEnd()); return returnRecord; }
@Test void testReadSparseWithPositionSetter() { try { final String fileContent = "111|222|333|444|555|666|777|888|999|000|\n000|999|888|777|666|555|444|333|222|111|"; final FileInputSplit split = createTempFile(fileContent); final TupleTypeInfo<Tuple3<Integer, Integer, Integer>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo( Integer.class, Integer.class, Integer.class); final CsvInputFormat<Tuple3<Integer, Integer, Integer>> format = new TupleCsvInputFormat<Tuple3<Integer, Integer, Integer>>( PATH, typeInfo, new int[] {0, 3, 7}); format.setFieldDelimiter("|"); format.configure(new Configuration()); format.open(split); Tuple3<Integer, Integer, Integer> result = new Tuple3<>(); result = format.nextRecord(result); assertThat(result.f0).isEqualTo(Integer.valueOf(111)); assertThat(result.f1).isEqualTo(Integer.valueOf(444)); assertThat(result.f2).isEqualTo(Integer.valueOf(888)); result = format.nextRecord(result); assertThat(result.f0).isEqualTo(Integer.valueOf(000)); assertThat(result.f1).isEqualTo(Integer.valueOf(777)); assertThat(result.f2).isEqualTo(Integer.valueOf(333)); result = format.nextRecord(result); assertThat(result).isNull(); assertThat(format.reachedEnd()).isTrue(); } catch (Exception ex) { fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage()); } }
@Nonnull public static Map<String, String> hideSensitiveValues(Map<String, String> keyValuePairs) { final HashMap<String, String> result = new HashMap<>(); for (Map.Entry<String, String> keyValuePair : keyValuePairs.entrySet()) { if (GlobalConfiguration.isSensitive(keyValuePair.getKey())) { result.put(keyValuePair.getKey(), GlobalConfiguration.HIDDEN_CONTENT); } else { result.put(keyValuePair.getKey(), keyValuePair.getValue()); } } return result; }
@TestTemplate void testHideSensitiveValues() { final Map<String, String> keyValuePairs = new HashMap<>(); keyValuePairs.put("foobar", "barfoo"); final String secretKey1 = "secret.key"; keyValuePairs.put(secretKey1, "12345"); final String secretKey2 = "my.password"; keyValuePairs.put(secretKey2, "12345"); final Map<String, String> expectedKeyValuePairs = new HashMap<>(keyValuePairs); for (String secretKey : Arrays.asList(secretKey1, secretKey2)) { expectedKeyValuePairs.put(secretKey, GlobalConfiguration.HIDDEN_CONTENT); } final Map<String, String> hiddenSensitiveValues = ConfigurationUtils.hideSensitiveValues(keyValuePairs); assertThat(hiddenSensitiveValues).isEqualTo(expectedKeyValuePairs); }
public FileDownloader downloader() { return downloader; }
@Test public void requireThatDifferentNumberOfConfigServersWork() throws IOException { // Empty connection pool in tests etc. ConfigserverConfig.Builder builder = new ConfigserverConfig.Builder() .configServerDBDir(temporaryFolder.newFolder("serverdb").getAbsolutePath()) .configDefinitionsDir(temporaryFolder.newFolder("configdefinitions").getAbsolutePath()); FileServer fileServer = createFileServer(builder); assertEquals(0, fileServer.downloader().connectionPool().getSize()); // Empty connection pool when only one server, no use in downloading from yourself List<ConfigserverConfig.Zookeeperserver.Builder> servers = new ArrayList<>(); ConfigserverConfig.Zookeeperserver.Builder serverBuilder = new ConfigserverConfig.Zookeeperserver.Builder(); serverBuilder.hostname(HostName.getLocalhost()); serverBuilder.port(123456); servers.add(serverBuilder); builder.zookeeperserver(servers); fileServer = createFileServer(builder); assertEquals(0, fileServer.downloader().connectionPool().getSize()); // connection pool of size 1 when 2 servers ConfigserverConfig.Zookeeperserver.Builder serverBuilder2 = new ConfigserverConfig.Zookeeperserver.Builder(); serverBuilder2.hostname("bar"); serverBuilder2.port(123456); servers.add(serverBuilder2); builder.zookeeperserver(servers); fileServer = createFileServer(builder); assertEquals(1, fileServer.downloader().connectionPool().getSize()); }
public static Set<Result> anaylze(String log) { Set<Result> results = new HashSet<>(); for (Rule rule : Rule.values()) { Matcher matcher = rule.pattern.matcher(log); if (matcher.find()) { results.add(new Result(rule, log, matcher)); } } return results; }
@Test public void forgeRepeatInstallation() throws IOException { CrashReportAnalyzer.Result result = findResultByRule( CrashReportAnalyzer.anaylze(loadLog("/logs/forge_repeat_installation.txt")), CrashReportAnalyzer.Rule.FORGE_REPEAT_INSTALLATION); }
@Override public Collection<LocalDataQueryResultRow> getRows(final ShowAuthorityRuleStatement sqlStatement, final ContextManager contextManager) { String users = rule.getConfiguration().getUsers().stream().map(each -> each.getGrantee().toString()).collect(Collectors.joining("; ")); String provider = rule.getConfiguration().getPrivilegeProvider().getType(); Properties props = rule.getConfiguration().getPrivilegeProvider().getProps().isEmpty() ? new Properties() : rule.getConfiguration().getPrivilegeProvider().getProps(); return Collections.singleton(new LocalDataQueryResultRow(users, provider, props)); }
@Test void assertGetRows() throws SQLException { engine.executeQuery(); Collection<LocalDataQueryResultRow> actual = engine.getRows(); assertThat(actual.size(), is(1)); Iterator<LocalDataQueryResultRow> iterator = actual.iterator(); LocalDataQueryResultRow row = iterator.next(); assertThat(row.getCell(1), is("root@localhost")); assertThat(row.getCell(2), is("ALL_PERMITTED")); assertThat(row.getCell(3), is("")); }
public String getUserID() { return userID; }
@Test public void testUserID() { assertEquals("USER", jt400Configuration.getUserID()); }
@Override public Rule getByKey(RuleKey key) { verifyKeyArgument(key); ensureInitialized(); Rule rule = rulesByKey.get(key); checkArgument(rule != null, "Can not find rule for key %s. This rule does not exist in DB", key); return rule; }
@Test public void getByKey_throws_NPE_if_key_argument_is_null() { expectNullRuleKeyNPE(() -> underTest.getByKey(null)); }
@Override public SelType binaryOps(SelOp op, SelType rhs) { if (rhs.type() != SelTypes.NULL && (op == SelOp.EQUAL || op == SelOp.NOT_EQUAL)) { SelTypeUtil.checkTypeMatch(this.type(), rhs.type()); } switch (op) { case ADD: return new SelString(this.val + rhs.getInternalVal()); case EQUAL: return SelBoolean.of(Objects.equals(this.val, rhs.getInternalVal())); case NOT_EQUAL: return SelBoolean.of(!Objects.equals(this.val, rhs.getInternalVal())); default: throw new UnsupportedOperationException( type() + " DO NOT support expression operation " + op); } }
@Test public void testBinaryOps() { SelString obj = SelString.of("foo"); SelType res = obj.binaryOps(SelOp.ADD, SelString.of("bar")); assertEquals("foobar", res.toString()); res = obj.binaryOps(SelOp.NOT_EQUAL, SelString.of("foobar")); assertEquals("BOOLEAN: true", res.type() + ": " + res.toString()); res = obj.binaryOps(SelOp.EQUAL, SelString.of("foo")); assertEquals("BOOLEAN: true", res.type() + ": " + res.toString()); }
public String getFactoryClassName() { return factoryClassName; }
@Test public void getFactoryClassName() { assertNull(new MapStoreConfig().getFactoryClassName()); }
@Override public String execute(CommandContext commandContext, String[] args) { int sleepMilliseconds = 0; if (args != null && args.length > 0) { if (args.length == 2 && "-t".equals(args[0]) && StringUtils.isNumber(args[1])) { sleepMilliseconds = Integer.parseInt(args[1]); } else { return "Invalid parameter,please input like shutdown -t 10000"; } } long start = System.currentTimeMillis(); if (sleepMilliseconds > 0) { try { Thread.sleep(sleepMilliseconds); } catch (InterruptedException e) { return "Failed to invoke shutdown command, cause: " + e.getMessage(); } } StringBuilder buf = new StringBuilder(); List<ApplicationModel> applicationModels = frameworkModel.getApplicationModels(); for (ApplicationModel applicationModel : new ArrayList<>(applicationModels)) { applicationModel.destroy(); } // TODO change to ApplicationDeployer.destroy() or ApplicationModel.destroy() // DubboShutdownHook.getDubboShutdownHook().unregister(); // DubboShutdownHook.getDubboShutdownHook().doDestroy(); long end = System.currentTimeMillis(); buf.append("Application has shutdown successfully"); buf.append("\r\nelapsed: "); buf.append(end - start); buf.append(" ms."); return buf.toString(); }
@Test void testInvokeWithTimeParameter() throws RemotingException { int sleepTime = 2000; long start = System.currentTimeMillis(); String result = shutdown.execute(mockCommandContext, new String[] {"-t", "" + sleepTime}); long end = System.currentTimeMillis(); assertTrue(result.contains("Application has shutdown successfully"), result); assertTrue((end - start) >= sleepTime, "sleepTime: " + sleepTime + ", execTime: " + (end - start)); }
public static boolean acceptEndpoint(String endpointUrl) { return endpointUrl != null && endpointUrl.matches(ENDPOINT_PATTERN_STRING); }
@Test public void testAcceptEndpoint() { AsyncTestSpecification specification = new AsyncTestSpecification(); MQTTMessageConsumptionTask task = new MQTTMessageConsumptionTask(specification); assertTrue(MQTTMessageConsumptionTask.acceptEndpoint("mqtt://localhost/testTopic")); assertTrue(MQTTMessageConsumptionTask.acceptEndpoint("mqtt://localhost:1883/testTopic")); assertTrue(MQTTMessageConsumptionTask.acceptEndpoint("mqtt://localhost:1883/topic/with/path/elements")); assertTrue( MQTTMessageConsumptionTask.acceptEndpoint("mqtt://localhost:1883/topic/with/path/elements?option1=value1")); assertTrue(MQTTMessageConsumptionTask.acceptEndpoint("mqtt://localhost/testTopic/option1=value1")); assertTrue(MQTTMessageConsumptionTask.acceptEndpoint("mqtt://localhost:1883/testTopic/option1=value1")); assertTrue(MQTTMessageConsumptionTask.acceptEndpoint( "mqtt://my-cluster-activemq.apps.cluster-943b.943b.example.com:1883/testTopic/option1=value1")); }
@Override public synchronized Snapshot record(long duration, TimeUnit durationUnit, Outcome outcome) { totalAggregation.record(duration, durationUnit, outcome); moveWindowByOne().record(duration, durationUnit, outcome); return new SnapshotImpl(totalAggregation); }
@Test public void testRecordError() { Metrics metrics = new FixedSizeSlidingWindowMetrics(5); Snapshot snapshot = metrics.record(100, TimeUnit.MILLISECONDS, Metrics.Outcome.ERROR); assertThat(snapshot.getTotalNumberOfCalls()).isEqualTo(1); assertThat(snapshot.getNumberOfSuccessfulCalls()).isZero(); assertThat(snapshot.getNumberOfFailedCalls()).isEqualTo(1); assertThat(snapshot.getTotalNumberOfSlowCalls()).isZero(); assertThat(snapshot.getNumberOfSlowSuccessfulCalls()).isZero(); assertThat(snapshot.getNumberOfSlowFailedCalls()).isZero(); assertThat(snapshot.getTotalDuration().toMillis()).isEqualTo(100); assertThat(snapshot.getAverageDuration().toMillis()).isEqualTo(100); assertThat(snapshot.getFailureRate()).isEqualTo(100); }
@Override public String getDescription() { return "Load changed issues for indexing"; }
@Test public void getDescription_shouldReturnDescription() { assertThat(underTest.getDescription()).isEqualTo("Load changed issues for indexing"); }
public void addData(String key, String value) throws InvalidSCMRevisionDataException { validateDataKey(key); data.put(key, value); }
@Test public void shouldThrowExceptionWhenDataKeyIsNullOrEmpty() throws Exception { SCMRevision scmRevision = new SCMRevision("rev123", new Date(), "loser", null, null, null); try { scmRevision.addData(null, "value"); } catch (InvalidSCMRevisionDataException e) { assertThat(e.getMessage(), is("Key names cannot be null or empty.")); } try { scmRevision.addData("", "value"); } catch (InvalidSCMRevisionDataException e) { assertThat(e.getMessage(), is("Key names cannot be null or empty.")); } }
protected final void ensureOpen() { if (!checkOpen(true)) { checkException(); } }
@Test public void testEnsureOpen() throws InterruptedException { EmbeddedChannel channel = new EmbeddedChannel(); channel.close().syncUninterruptibly(); try { channel.writeOutbound("Hello, Netty!"); fail("This should have failed with a ClosedChannelException"); } catch (Exception expected) { assertTrue(expected instanceof ClosedChannelException); } try { channel.writeInbound("Hello, Netty!"); fail("This should have failed with a ClosedChannelException"); } catch (Exception expected) { assertTrue(expected instanceof ClosedChannelException); } }
public double calculateElevationBasedOnThreePoints(double lat, double lon, double lat0, double lon0, double ele0, double lat1, double lon1, double ele1, double lat2, double lon2, double ele2) { double dlat10 = lat1 - lat0; double dlon10 = lon1 - lon0; double dele10 = ele1 - ele0; double dlat20 = lat2 - lat0; double dlon20 = lon2 - lon0; double dele20 = ele2 - ele0; double a = dlon10 * dele20 - dele10 * dlon20; double b = dele10 * dlat20 - dlat10 * dele20; double c = dlat10 * dlon20 - dlon10 * dlat20; if (Math.abs(c) < EPSILON) { double dlat21 = lat2 - lat1; double dlon21 = lon2 - lon1; double dele21 = ele2 - ele1; double l10 = dlat10 * dlat10 + dlon10 * dlon10 + dele10 * dele10; double l20 = dlat20 * dlat20 + dlon20 * dlon20 + dele20 * dele20; double l21 = dlat21 * dlat21 + dlon21 * dlon21 + dele21 * dele21; if (l21 > l10 && l21 > l20) { return calculateElevationBasedOnTwoPoints(lat, lon, lat1, lon1, ele1, lat2, lon2, ele2); } else if (l20 > l10 && l20 > l21) { return calculateElevationBasedOnTwoPoints(lat, lon, lat0, lon0, ele0, lat2, lon2, ele2); } else { return calculateElevationBasedOnTwoPoints(lat, lon, lat0, lon0, ele0, lat1, lon1, ele1); } } else { double d = a * lat0 + b * lon0 + c * ele0; double ele = (d - a * lat - b * lon) / c; return round2(ele); } }
@Test public void calculatesElevationOnThreePoints() { assertEquals(-0.88, elevationInterpolator.calculateElevationBasedOnThreePoints(0, 0, 1, 2, 3, 4, 6, 9, 12, 11, 9), PRECISION); assertEquals(15, elevationInterpolator.calculateElevationBasedOnThreePoints(10, 0, 0, 0, 0, 10, 10, 10, 10, -10, 20), PRECISION); assertEquals(5, elevationInterpolator.calculateElevationBasedOnThreePoints(5, 5, 0, 0, 0, 10, 10, 10, 20, 20, 20), PRECISION); }
@VisibleForTesting static boolean shouldVerifySslHostname(final Map<String, Object> config) { final Object endpointIdentificationAlgoConfig = config.get(KsqlConfig.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG); if (endpointIdentificationAlgoConfig == null) { return false; } final String endpointIdentificationAlgo = endpointIdentificationAlgoConfig.toString(); if (endpointIdentificationAlgo.isEmpty() || endpointIdentificationAlgo .equalsIgnoreCase(KsqlConfig.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_NONE)) { return false; } else if (endpointIdentificationAlgo .equalsIgnoreCase(KsqlConfig.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_HTTPS)) { return true; } else { throw new ConfigException("Endpoint identification algorithm not supported: " + endpointIdentificationAlgo); } }
@Test public void shouldEnableHostnameVerification() { // When / Then: assertThat(DefaultConnectClientFactory.shouldVerifySslHostname(CONFIGS_WITH_HOSTNAME_VERIFICATION_ENABLED), is(true)); }
public static <T> String listToString(List<T> list) { StringBuilder sb = new StringBuilder(); for (T s : list) { if (sb.length() != 0) { sb.append(" "); } sb.append(s); } return sb.toString(); }
@Test public void listToString() { class TestCase { List<Object> mInput; String mExpected; public TestCase(String expected, Object... objs) { mExpected = expected; mInput = Arrays.asList(objs); } } List<TestCase> testCases = new ArrayList<>(); testCases.add(new TestCase("")); testCases.add(new TestCase("foo", "foo")); testCases.add(new TestCase("foo bar", "foo", "bar")); testCases.add(new TestCase("1", 1)); testCases.add(new TestCase("1 2 3", 1, 2, 3)); for (TestCase testCase : testCases) { assertEquals(testCase.mExpected, CommonUtils.listToString(testCase.mInput)); } }
@Asn1Property(tagNo = 0x30, converter = DigestsConverter.class) public Map<Integer, byte[]> getDigests() { return digests; }
@Test public void readDl2Cms() throws Exception { final LdsSecurityObject ldsSecurityObject = mapper.read( readFromCms("dl2"), LdsSecurityObject.class); assertEquals(ImmutableSet.of(1, 5, 6, 11, 12, 13, 14), ldsSecurityObject.getDigests().keySet()); }
@VisibleForTesting List<WorkflowRollupOverview> getForeachAndSubworkflowStepRollups( String workflowId, long workflowInstanceId, Map<String, Long> stepIdRunIdForeachSubworkflowPrevious) { List<WorkflowRollupOverview> rollupOverviewsForForeachAndSubworkflow = new ArrayList<>(); if (stepIdRunIdForeachSubworkflowPrevious == null || stepIdRunIdForeachSubworkflowPrevious.isEmpty()) { return rollupOverviewsForForeachAndSubworkflow; } List<Map<String, Artifact>> artifacts = stepInstanceDao.getBatchStepInstancesArtifactsFromList( workflowId, workflowInstanceId, stepIdRunIdForeachSubworkflowPrevious); for (Map<String, Artifact> artifact : artifacts) { if (artifact.containsKey(Artifact.Type.SUBWORKFLOW.key())) { SubworkflowArtifact subworkflowArtifact = artifact.get(Artifact.Type.SUBWORKFLOW.key()).asSubworkflow(); if (subworkflowArtifact.getSubworkflowOverview() != null) { rollupOverviewsForForeachAndSubworkflow.add( subworkflowArtifact.getSubworkflowOverview().getRollupOverview()); } } if (artifact.containsKey(Artifact.Type.FOREACH.key())) { ForeachArtifact foreachArtifact = artifact.get(Artifact.Type.FOREACH.key()).asForeach(); if (foreachArtifact.getForeachOverview() != null && foreachArtifact.getForeachOverview().getCheckpoint() > 0) { rollupOverviewsForForeachAndSubworkflow.add( foreachArtifact.getForeachOverview().getOverallRollup()); } } } return rollupOverviewsForForeachAndSubworkflow; }
@Test public void testGetForeachAndSubworkflowStepRollups() throws IOException { ArtifactMap artifacts = loadObject("fixtures/artifact/sample-artifacts.json", ArtifactMap.class); artifacts.getArtifacts().remove("artifact1"); artifacts.getArtifacts().remove("artifact2"); String workflowId = "test_workflow_id"; long workflowInstanceId = 2L; Map<String, Long> stepIdToRunId = new HashMap<>(); stepIdToRunId.put("maestro_subworkflow", 1L); stepIdToRunId.put("maestro_foreach", 1L); doReturn(Collections.singletonList(artifacts.getArtifacts())) .when(stepInstanceDao) .getBatchStepInstancesArtifactsFromList(workflowId, workflowInstanceId, stepIdToRunId); RollupAggregationHelper rollupAggregationHelper = new RollupAggregationHelper(stepInstanceDao); List<WorkflowRollupOverview> rollups = rollupAggregationHelper.getForeachAndSubworkflowStepRollups( workflowId, workflowInstanceId, stepIdToRunId); assertEquals(2, rollups.size()); assertEquals(29, rollups.get(0).getTotalLeafCount()); assertEquals(2, rollups.get(0).getOverview().size()); assertEquals(14, rollups.get(1).getTotalLeafCount()); assertEquals(2, rollups.get(1).getOverview().size()); // passing null stepIdToRunId rollups = this.rollupAggregationHelper.getForeachAndSubworkflowStepRollups( workflowId, workflowInstanceId, null); assertTrue(rollups.isEmpty()); // passing empty stepIdToRunId rollups = this.rollupAggregationHelper.getForeachAndSubworkflowStepRollups( workflowId, workflowInstanceId, new HashMap<>()); assertTrue(rollups.isEmpty()); }
@ConstantFunction(name = "mod", argTypes = {BIGINT, BIGINT}, returnType = BIGINT) public static ConstantOperator modBigInt(ConstantOperator first, ConstantOperator second) { if (second.getBigint() == 0) { return ConstantOperator.createNull(Type.BIGINT); } return ConstantOperator.createBigint(first.getBigint() % second.getBigint()); }
@Test public void modBigInt() { assertEquals(0, ScalarOperatorFunctions.modBigInt(O_BI_100, O_BI_100).getBigint()); }
public static long parseLongAscii(final CharSequence cs, final int index, final int length) { if (length <= 0) { throw new AsciiNumberFormatException("empty string: index=" + index + " length=" + length); } final boolean negative = MINUS_SIGN == cs.charAt(index); int i = index; if (negative) { i++; if (1 == length) { throwParseLongError(cs, index, length); } } final int end = index + length; if (end - i < LONG_MAX_DIGITS) { final long tally = parsePositiveLongAscii(cs, index, length, i, end); return negative ? -tally : tally; } else if (negative) { return -parseLongAsciiOverflowCheck(cs, index, length, LONG_MIN_VALUE_DIGITS, i, end); } else { return parseLongAsciiOverflowCheck(cs, index, length, LONG_MAX_VALUE_DIGITS, i, end); } }
@Test void shouldThrowExceptionWhenParsingLongContainingLonePlusSign() { assertThrows(AsciiNumberFormatException.class, () -> parseLongAscii("+", 0, 1)); }
public static BundleDistribution bundleProcessingThreadDistribution( String shortId, MetricName name) { return new BundleProcessingThreadDistribution(shortId, name); }
// Exercises the bundle distribution from multiple threads and checks that the
// encoded snapshots, when decoded and sorted by count, match the observed order
// (i.e. updates are monotonically accumulated without loss).
@Test public void testAccurateBundleDistributionUsingMultipleThreads() throws Exception { BundleDistribution bundleDistribution = Metrics.bundleProcessingThreadDistribution(TEST_ID, TEST_NAME); List<ByteString> values = testAccurateBundleMetricUsingMultipleThreads( bundleDistribution, () -> bundleDistribution.update(1)); assertTrue(values.size() >= 10); List<DistributionData> sortedValues = new ArrayList<>(); for (ByteString value : values) { sortedValues.add(MonitoringInfoEncodings.decodeInt64Distribution(value)); } Collections.sort(sortedValues, Comparator.comparingLong(DistributionData::count)); List<ByteString> sortedEncodedValues = new ArrayList<>(); for (DistributionData value : sortedValues) { sortedEncodedValues.add(MonitoringInfoEncodings.encodeInt64Distribution(value)); } assertThat(values, contains(sortedEncodedValues.toArray())); }
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
// Overload resolution should prefer the candidate with the typed (INT) varargs
// over the Object varargs when both could match the INT arguments.
@Test public void shouldFindFewerGenericsWithoutObjVariadic() { // Given: givenFunctions( function(EXPECTED, 3, INT, GenericType.of("A"), INT, INT_VARARGS), function(OTHER, 3, INT, GenericType.of("B"), INT, OBJ_VARARGS) ); // When: final KsqlScalarFunction fun = udfIndex.getFunction(ImmutableList.of( SqlArgument.of(INTEGER), SqlArgument.of(INTEGER), SqlArgument.of(INTEGER), SqlArgument.of(INTEGER) )); // Then: assertThat(fun.name(), equalTo(EXPECTED)); }
// REST endpoint: paged query of audit logs, optionally filtered by resource type,
// operation type, date range and user name. Page parameters are validated before
// delegating to the audit service.
@Operation(summary = "queryAuditLogListPaging", description = "QUERY_AUDIT_LOG") @Parameters({ @Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)), @Parameter(name = "endDate", description = "END_DATE", schema = @Schema(implementation = String.class)), @Parameter(name = "resourceType", description = "RESOURCE_TYPE", schema = @Schema(implementation = AuditResourceType.class)), @Parameter(name = "operationType", description = "OPERATION_TYPE", schema = @Schema(implementation = AuditOperationType.class)), @Parameter(name = "userName", description = "USER_NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")), @Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "20")) }) @GetMapping(value = "/audit-log-list") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_AUDIT_LOG_LIST_PAGING) public Result<PageInfo<AuditDto>> queryAuditLogListPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize, @RequestParam(value = "resourceType", required = false) AuditResourceType resourceType, @RequestParam(value = "operationType", required = false) AuditOperationType operationType, @RequestParam(value = "startDate", required = false) String startDate, @RequestParam(value = "endDate", required = false) String endDate, @RequestParam(value = "userName", required = false) String userName) { checkPageParams(pageNo, pageSize); PageInfo<AuditDto> auditDtoPageInfo = auditService.queryLogListPaging( loginUser, resourceType, operationType, startDate, endDate, userName, pageNo, pageSize); return Result.success(auditDtoPageInfo); }
// Smoke test: GET /projects/audit/audit-log-list with only page params returns HTTP 200
// and a SUCCESS result code.
@Test public void testQueryAuditLogListPaging() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("pageNo", "1"); paramsMap.add("pageSize", "10"); MvcResult mvcResult = mockMvc.perform(get("/projects/audit/audit-log-list") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); }
// Updates a security group after validating it is non-null and carries a non-empty id;
// the store is expected to reject updates for unregistered groups.
@Override public void updateSecurityGroup(SecurityGroup sg) { checkNotNull(sg, ERR_NULL_SG); checkArgument(!Strings.isNullOrEmpty(sg.getId()), ERR_NULL_SG_ID); osSecurityGroupStore.updateSecurityGroup(sg); }
// Updating a security group that was never registered must raise IllegalArgumentException.
@Test(expected = IllegalArgumentException.class) public void testUpdateUnregisteredSecurityGroup() { target.updateSecurityGroup(securityGroup1); }
// Writes the IEEE-754 raw bit pattern of the double as a long; returns this for chaining.
@Override public ByteBuf writeDouble(double value) { writeLong(Double.doubleToRawLongBits(value)); return this; }
// Writing to a buffer whose reference count has dropped to zero must fail fast.
@Test public void testWriteDoubleAfterRelease() { assertThrows(IllegalReferenceCountException.class, new Executable() { @Override public void execute() { releasedBuffer().writeDouble(1); } }); }
// Creates an empty object at the given path (creating the parent directory if needed)
// and returns the path with attributes taken from the PUT response. Manta, HTTP and
// generic I/O failures are each mapped through their dedicated exception mappers.
@Override public Path touch(final Path file, final TransferStatus status) throws BackgroundException { try { if(!session.getClient().existsAndIsAccessible(file.getParent().getAbsolute())) { session.getClient().putDirectory(file.getParent().getAbsolute()); } final MantaObjectResponse response = session.getClient().put(file.getAbsolute(), new byte[0]); return file.withAttributes(new MantaObjectAttributeAdapter(session).toAttributes(response)); } catch(MantaException e) { throw new MantaExceptionMappingService().map("Cannot create {0}", e, file); } catch(MantaClientHttpResponseException e) { throw new MantaHttpExceptionMappingService().map("Cannot create {0}", e, file); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map("Cannot create {0}", e, file); } }
// Round-trip: touch a random file with a custom MIME type, confirm it is findable, then delete it.
@Test public void testTouch() throws Exception { final Path file = new Path( testPathPrefix, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new MantaTouchFeature(session).touch(file, new TransferStatus().withMime("x-application/cyberduck")); assertNotNull(new MantaAttributesFinderFeature(session).find(file)); new MantaDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
// Subscribes the listener to a Nacos service. The (service, group, listener) tuple is cached
// in subscribeStatus so that the same subscription reuses a single NamingService connection.
public void subscribe(String serviceName, String group, EventListener eventListener) throws NacosException { String nacosServiceName = handleInnerSymbol(serviceName); SubscribeInfo subscribeInfo = new SubscribeInfo(nacosServiceName, group, eventListener); NamingService namingService = ConcurrentHashMapUtils.computeIfAbsent( subscribeStatus, subscribeInfo, info -> nacosConnectionManager.getNamingService()); accept(() -> namingService.subscribe(nacosServiceName, group, eventListener)); }
// Subscribing twice forwards both calls, but unsubscribing only forwards the first call
// for a known subscription and never forwards for an unknown (service, group) pair.
@Test void testSubscribe() throws NacosException { NacosConnectionManager connectionManager = Mockito.mock(NacosConnectionManager.class); NamingService namingService = Mockito.mock(NamingService.class); Mockito.when(connectionManager.getNamingService()).thenReturn(namingService); NacosNamingServiceWrapper nacosNamingServiceWrapper = new NacosNamingServiceWrapper(connectionManager, 0, 0); EventListener eventListener = Mockito.mock(EventListener.class); nacosNamingServiceWrapper.subscribe("service_name", "test", eventListener); Mockito.verify(namingService, Mockito.times(1)).subscribe("service_name", "test", eventListener); nacosNamingServiceWrapper.subscribe("service_name", "test", eventListener); Mockito.verify(namingService, Mockito.times(2)).subscribe("service_name", "test", eventListener); nacosNamingServiceWrapper.unsubscribe("service_name", "test", eventListener); Mockito.verify(namingService, Mockito.times(1)).unsubscribe("service_name", "test", eventListener); nacosNamingServiceWrapper.unsubscribe("service_name", "test", eventListener); Mockito.verify(namingService, Mockito.times(1)).unsubscribe("service_name", "test", eventListener); nacosNamingServiceWrapper.unsubscribe("service_name", "mock", eventListener); Mockito.verify(namingService, Mockito.times(0)).unsubscribe("service_name", "mock", eventListener); }
// Resolves the blob name: exchange header takes precedence, configuration is the fallback.
public String getBlobName(final Exchange exchange) { return getOption(BlobExchangeHeaders::getBlobNameFromHeaders, configuration::getBlobName, exchange); }
// Covers the three resolution cases for the blob name: header set, header absent
// (falls back to configuration), and neither set (null).
@Test void testIfCorrectOptionsReturnedCorrectly() { final BlobConfiguration configuration = new BlobConfiguration(); // first case: when exchange is set final Exchange exchange = new DefaultExchange(context); final BlobConfigurationOptionsProxy configurationOptionsProxy = new BlobConfigurationOptionsProxy(configuration); exchange.getIn().setHeader(BlobConstants.BLOB_NAME, "testBlobExchange"); configuration.setBlobName("testBlobConfig"); assertEquals("testBlobExchange", configurationOptionsProxy.getBlobName(exchange)); // second class: exchange is empty exchange.getIn().setHeader(BlobConstants.BLOB_NAME, null); assertEquals("testBlobConfig", configurationOptionsProxy.getBlobName(exchange)); // third class: if no option at all configuration.setBlobName(null); assertNull(configurationOptionsProxy.getBlobName(exchange)); }
// Looks up the key in the Dubbo config source; returns null when absent.
@Override public String getInternalProperty(String key) { return dubboConfig.getProperty(key, null); }
// Verifies property resolution order: remote rule value, then a system-property override,
// and null for a key that exists nowhere. The sleep lets the pushed rule propagate.
@Test void testGetInternalProperty() throws InterruptedException { String mockKey = "mockKey2"; String mockValue = String.valueOf(new Random().nextInt()); putMockRuleData(mockKey, mockValue, DEFAULT_NAMESPACE); TimeUnit.MILLISECONDS.sleep(1000); apolloDynamicConfiguration = new ApolloDynamicConfiguration(url, applicationModel); assertEquals(mockValue, apolloDynamicConfiguration.getInternalProperty(mockKey)); mockValue = "mockValue2"; System.setProperty(mockKey, mockValue); assertEquals(mockValue, apolloDynamicConfiguration.getInternalProperty(mockKey)); mockKey = "notExistKey"; assertNull(apolloDynamicConfiguration.getInternalProperty(mockKey)); }
// Loads a pod template and splits it into the FlinkPod parts: the container whose name
// matches mainContainerName becomes the main container (an empty one is substituted if
// absent), all other containers stay on the pod spec. A missing spec is replaced by an
// empty PodSpec so downstream code can assume a non-null spec.
public static FlinkPod loadPodFromTemplateFile( FlinkKubeClient kubeClient, File podTemplateFile, String mainContainerName) { final KubernetesPod pod = kubeClient.loadPodFromTemplateFile(podTemplateFile); final List<Container> otherContainers = new ArrayList<>(); Container mainContainer = null; if (null != pod.getInternalResource().getSpec()) { for (Container container : pod.getInternalResource().getSpec().getContainers()) { if (mainContainerName.equals(container.getName())) { mainContainer = container; } else { otherContainers.add(container); } } pod.getInternalResource().getSpec().setContainers(otherContainers); } else { // Set an empty spec for pod template pod.getInternalResource().setSpec(new PodSpecBuilder().build()); } if (mainContainer == null) { LOG.info( "Could not find main container {} in pod template, using empty one to initialize.", mainContainerName); mainContainer = new ContainerBuilder().build(); } return new FlinkPod(pod.getInternalResource(), mainContainer); }
// After splitting out the main container, exactly the sidecar container should remain on the pod.
@Test void testLoadPodFromTemplateAndCheckSideCarContainer() { final FlinkPod flinkPod = KubernetesUtils.loadPodFromTemplateFile( flinkKubeClient, KubernetesPodTemplateTestUtils.getPodTemplateFile(), KubernetesPodTemplateTestUtils.TESTING_MAIN_CONTAINER_NAME); assertThat(flinkPod.getPodWithoutMainContainer().getSpec().getContainers()).hasSize(1); assertThat(flinkPod.getPodWithoutMainContainer().getSpec().getContainers().get(0)) .isEqualTo(KubernetesPodTemplateTestUtils.createSideCarContainer()); }
/**
 * Emits a single document's metadata list under the given emit key.
 * A null or empty metadata list is silently ignored. The single entry is
 * wrapped in the batch form and delegated to {@code emit(List)}.
 */
@Override
public void emit(String emitKey, List<Metadata> metadataList, ParseContext parseContext)
        throws IOException, TikaEmitterException {
    // isEmpty() instead of size() < 1: clearer intent, same behavior.
    if (metadataList == null || metadataList.isEmpty()) {
        return;
    }
    // Exactly one element will be added; presize accordingly.
    List<EmitData> emitDataList = new ArrayList<>(1);
    emitDataList.add(new EmitData(new EmitKey("", emitKey), metadataList));
    emit(emitDataList);
}
// End-to-end JDBC emitter test: emits three metadata "attachments" under one id into an
// H2 database, then reads the table back and checks path, attachment number and the
// boolean/string/int/long column values row by row.
@Test public void testAttachments(@TempDir Path tmpDir) throws Exception { Files.createDirectories(tmpDir.resolve("db")); Path dbDir = tmpDir.resolve("db/h2"); Path config = tmpDir.resolve("tika-config.xml"); String connectionString = "jdbc:h2:file:" + dbDir.toAbsolutePath(); writeConfig("/configs/tika-config-jdbc-emitter-attachments.xml", connectionString, config); EmitterManager emitterManager = EmitterManager.load(config); Emitter emitter = emitterManager.getEmitter(); List<Metadata> data = new ArrayList<>(); data.add(m("k1", "true", "k2", "some string1", "k3", "4", "k4", "100")); data.add(m("k1", "false", "k2", "some string2", "k3", "5", "k4", "101")); data.add(m("k1", "true", "k2", "some string3", "k3", "6", "k4", "102")); emitter.emit("id0", data, new ParseContext()); try (Connection connection = DriverManager.getConnection(connectionString)) { try (Statement st = connection.createStatement()) { try (ResultSet rs = st.executeQuery("select * from test")) { int rows = 0; assertEquals("path", rs.getMetaData().getColumnName(1).toLowerCase(Locale.US)); assertEquals("attachment_num", rs.getMetaData().getColumnName(2).toLowerCase(Locale.US)); while (rs.next()) { assertEquals("id0", rs.getString(1)); assertEquals(rows, rs.getInt(2)); assertEquals(rows % 2 == 0, rs.getBoolean(3)); assertEquals("some string" + (rows + 1), rs.getString(4)); assertEquals(rows + 4, rs.getInt(5)); assertEquals(100 + rows, rs.getLong(6)); rows++; } } } } }
// Polls the app-application result. When the account already has too many apps and the
// caller did not ask to remove the oldest one, returns a TooManyAppsResponse describing
// the least-recently-used app. Otherwise the status becomes OK (if removal was requested)
// or stays at the session's activation status; any non-OK status marks the flow invalid.
@Override public AppResponse process(Flow flow, RsPollAppApplicationResultRequest request) throws SharedServiceClientException { checkSwitchesEnabled(); final String activationStatus = appSession.getActivationStatus(); final Long accountId = appSession.getAccountId(); final String userAppId = appSession.getUserAppId(); final boolean removeOldApp = request.getRemoveOldApp().equals("true"); String status; int maxAppsPerUser = sharedServiceClient.getSSConfigInt("Maximum_aantal_DigiD_apps_eindgebruiker"); appSession.setRemoveOldApp(removeOldApp); if (TOO_MANY_APPS.equals(activationStatus) && !removeOldApp) { AppAuthenticator leastRecentApp = appAuthenticatorService.findLeastRecentApp(accountId); return new TooManyAppsResponse("too_many_active", maxAppsPerUser, leastRecentApp.getDeviceName(), leastRecentApp.getLastSignInOrActivatedAtOrCreatedAt().toLocalDate().format(DateTimeFormatter.ofPattern("dd-MM-yyyy"))); } status = TOO_MANY_APPS.equals(activationStatus) && removeOldApp ? OK : activationStatus; if (!status.equals(OK)) { setValid(false); } return new RsPollAppApplicationResultResponse(status, userAppId); }
// A PENDING activation status must be passed through unchanged in the poll response,
// with the removeOldApp flag recorded on the session.
@Test public void processRsPollAppApplicationResultPendingTest() throws SharedServiceClientException { when(sharedServiceClient.getSSConfigInt("Maximum_aantal_DigiD_apps_eindgebruiker")).thenReturn(5); when(switchService.digidAppSwitchEnabled()).thenReturn(true); when(switchService.digidRequestStationEnabled()).thenReturn(true); rsPollAppApplicationResult.setAppSession(createAppSession(ApplyForAppAtRequestStationFlow.NAME, State.RS_APP_APPLICATION_STARTED, "PENDING")); mockedRsPollAppApplicationResultRequest = new RsPollAppApplicationResultRequest(); mockedRsPollAppApplicationResultRequest.setActivationCode(APP_ACTIVATION_CODE); mockedRsPollAppApplicationResultRequest.setRemoveOldApp("true"); AppResponse appResponse = rsPollAppApplicationResult.process(mockedApplyForAppAtRequestStationFlow, mockedRsPollAppApplicationResultRequest); assertEquals(true, rsPollAppApplicationResult.getAppSession().isRemoveOldApp()); assertEquals(PENDING, rsPollAppApplicationResult.getAppSession().getActivationStatus()); assertTrue(appResponse instanceof RsPollAppApplicationResultResponse); assertEquals(PENDING,((RsPollAppApplicationResultResponse) appResponse).getStatus()); assertEquals(USER_APP_ID,((RsPollAppApplicationResultResponse) appResponse).getUserAppId()); }
// Loads a pod template and splits it into the FlinkPod parts: the container whose name
// matches mainContainerName becomes the main container (an empty one is substituted if
// absent), all other containers stay on the pod spec. A missing spec is replaced by an
// empty PodSpec so downstream code can assume a non-null spec.
public static FlinkPod loadPodFromTemplateFile( FlinkKubeClient kubeClient, File podTemplateFile, String mainContainerName) { final KubernetesPod pod = kubeClient.loadPodFromTemplateFile(podTemplateFile); final List<Container> otherContainers = new ArrayList<>(); Container mainContainer = null; if (null != pod.getInternalResource().getSpec()) { for (Container container : pod.getInternalResource().getSpec().getContainers()) { if (mainContainerName.equals(container.getName())) { mainContainer = container; } else { otherContainers.add(container); } } pod.getInternalResource().getSpec().setContainers(otherContainers); } else { // Set an empty spec for pod template pod.getInternalResource().setSpec(new PodSpecBuilder().build()); } if (mainContainer == null) { LOG.info( "Could not find main container {} in pod template, using empty one to initialize.", mainContainerName); mainContainer = new ContainerBuilder().build(); } return new FlinkPod(pod.getInternalResource(), mainContainer); }
// The pod metadata from the template file (name "pod-template") must survive the split.
@Test void testLoadPodFromTemplateAndCheckMetaData() { final FlinkPod flinkPod = KubernetesUtils.loadPodFromTemplateFile( flinkKubeClient, KubernetesPodTemplateTestUtils.getPodTemplateFile(), KubernetesPodTemplateTestUtils.TESTING_MAIN_CONTAINER_NAME); // The pod name is defined in the test/resources/testing-pod-template.yaml. final String expectedPodName = "pod-template"; assertThat(flinkPod.getPodWithoutMainContainer().getMetadata().getName()) .isEqualTo(expectedPodName); }
@SuppressWarnings("unchecked") void sort(String[] filenames) { Arrays.sort(filenames, new Comparator<String>() { @Override public int compare(String f1, String f2) { int result = 0; for (FilenameParser p : parsers) { Comparable c2 = p.parseFilename(f2); Comparable c1 = p.parseFilename(f1); if (c2 != null && c1 != null) { result += c2.compareTo(c1); } } // fallback to raw filename comparison if (result == 0) { result = f2.compareTo(f1); } return result; } }); }
// With two date tokens in the pattern (month directory + day file), sorting must still
// produce strictly descending date order across both components.
@Test public void sortsDescendingByDateWithMultipleDatesInPattern() { final String[] FILENAMES = new String[] { "/var/logs/my-app/2018-10/2018-10-31.log", "/var/logs/my-app/2019-01/2019-01-01.log", "/var/logs/my-app/1999-03/1999-03-17.log", "/var/logs/my-app/2019-02/2019-02-14.log", "/var/logs/my-app/2016-12/2016-12-31.log", "/var/logs/my-app/2016-12/2016-12-25.log", }; final String[] EXPECTED_RESULT = new String[] { "/var/logs/my-app/2019-02/2019-02-14.log", "/var/logs/my-app/2019-01/2019-01-01.log", "/var/logs/my-app/2018-10/2018-10-31.log", "/var/logs/my-app/2016-12/2016-12-31.log", "/var/logs/my-app/2016-12/2016-12-25.log", "/var/logs/my-app/1999-03/1999-03-17.log", }; assertThat(sort("/var/logs/my-app/%d{yyyy-MM,aux}/%d{yyyy-MM-dd}.log", FILENAMES), contains(EXPECTED_RESULT)); }
// Asks the secrets plugin for its config view template and converts the response body
// through the v1 message converter.
@Override public String getSecretsConfigView(String pluginId) { return pluginRequestHelper.submitRequest(pluginId, REQUEST_GET_SECRETS_CONFIG_VIEW, new DefaultPluginInteractionCallback<>() { @Override public String onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) { return secretsMessageConverterV1.getSecretsConfigViewFromResponse(responseBody); } }); }
// The extension must send the get-config-view request to the plugin and unwrap the
// "template" field from the JSON response.
@Test void shouldTalkToPlugin_toFetchSecretsConfigView() { String responseBody = "{ \"template\": \"<div>This is secrets config view snippet</div>\" }"; when(pluginManager.submitTo(eq(PLUGIN_ID), eq(SECRETS_EXTENSION), requestArgumentCaptor.capture())).thenReturn(DefaultGoPluginApiResponse.success(responseBody)); final String view = secretsExtensionV1.getSecretsConfigView(PLUGIN_ID); assertThat(view).isEqualTo("<div>This is secrets config view snippet</div>"); assertExtensionRequest(REQUEST_GET_SECRETS_CONFIG_VIEW, null); }
// Returns all commands from the command topic for restore. If the backup detects
// command-topic corruption, returns an empty list so restoration happens from backup instead.
public List<QueuedCommand> getRestoreCommands(final Duration duration) { if (commandTopicBackup.commandTopicCorruption()) { log.warn("Corruption detected. " + "Use backup to restore command topic."); return Collections.emptyList(); } return getAllCommandsInCommandTopic( commandConsumer, commandTopicPartition, Optional.of(commandTopicBackup), duration ); }
// Restored QueuedCommands must carry the Kafka record offsets (0, 1, 2) they were read at,
// and the consumer must have been rewound to the beginning of the command topic partition.
@Test public void shouldHaveOffsetsInQueuedCommands() { // Given: when(commandConsumer.poll(any(Duration.class))) .thenReturn(someConsumerRecords( new ConsumerRecord<>("topic", 0, 0, commandId1, command1), new ConsumerRecord<>("topic", 0, 1, commandId2, command2))) .thenReturn(someConsumerRecords( new ConsumerRecord<>("topic", 0, 2, commandId3, command3))); when(commandConsumer.endOffsets(any())).thenReturn(ImmutableMap.of(TOPIC_PARTITION, 3L)); when(commandConsumer.position(TOPIC_PARTITION)).thenReturn(0L, 2L, 3L); // When: final List<QueuedCommand> queuedCommandList = commandTopic .getRestoreCommands(Duration.ofMillis(1)); // Then: verify(commandConsumer).seekToBeginning(topicPartitionsCaptor.capture()); assertThat(topicPartitionsCaptor.getValue(), equalTo(Collections.singletonList(new TopicPartition(COMMAND_TOPIC_NAME, 0)))); assertThat(queuedCommandList, equalTo(ImmutableList.of( new QueuedCommand(commandId1, command1, Optional.empty(), 0L), new QueuedCommand(commandId2, command2, Optional.empty(),1L), new QueuedCommand(commandId3, command3, Optional.empty(), 2L)))); }
// Aggregates fine-grained bars into coarser bars of `timePeriod`. The target period must be
// an exact multiple of the input bars' period. For each window: open comes from the first
// bar, high/low are the extremes, close from the last bar, and volume/amount/trades are
// summed. Note the inner loop advances `i` and `sumDur` even past the end of the input, so
// `i <= bars.size()` afterwards distinguishes a completed window from a truncated final one;
// `onlyFinalBars` drops that incomplete trailing bar.
@Override public List<Bar> aggregate(List<Bar> bars) { final List<Bar> aggregated = new ArrayList<>(); if (bars.isEmpty()) { return aggregated; } final Bar firstBar = bars.get(0); // get the actual time period final Duration actualDur = firstBar.getTimePeriod(); // check if new timePeriod is a multiplication of actual time period final boolean isMultiplication = timePeriod.getSeconds() % actualDur.getSeconds() == 0; if (!isMultiplication) { throw new IllegalArgumentException( "Cannot aggregate bars: the new timePeriod must be a multiplication of the actual timePeriod."); } int i = 0; final Num zero = firstBar.getOpenPrice().zero(); while (i < bars.size()) { Bar bar = bars.get(i); final ZonedDateTime beginTime = bar.getBeginTime(); final Num open = bar.getOpenPrice(); Num high = bar.getHighPrice(); Num low = bar.getLowPrice(); Num close = null; Num volume = zero; Num amount = zero; long trades = 0; Duration sumDur = Duration.ZERO; while (isInDuration(sumDur)) { if (i < bars.size()) { if (!beginTimesInDuration(beginTime, bars.get(i).getBeginTime())) { break; } bar = bars.get(i); if (high == null || bar.getHighPrice().isGreaterThan(high)) { high = bar.getHighPrice(); } if (low == null || bar.getLowPrice().isLessThan(low)) { low = bar.getLowPrice(); } close = bar.getClosePrice(); if (bar.getVolume() != null) { volume = volume.plus(bar.getVolume()); } if (bar.getAmount() != null) { amount = amount.plus(bar.getAmount()); } if (bar.getTrades() != 0) { trades = trades + bar.getTrades(); } } sumDur = sumDur.plus(actualDur); i++; } if (!onlyFinalBars || i <= bars.size()) { final Bar aggregatedBar = new BaseBar(timePeriod, beginTime.plus(timePeriod), open, high, low, close, volume, amount, trades); aggregated.add(aggregatedBar); } } return aggregated; }
// With onlyFinalBars == false, the incomplete trailing window is kept, yielding 2 bars.
@Test public void upscaledTo10DayBarsNotOnlyFinalBars() { final DurationBarAggregator barAggregator = new DurationBarAggregator(Duration.ofDays(10), false); final List<Bar> bars = barAggregator.aggregate(getOneDayBars()); // must be 2 bars assertEquals(2, bars.size()); }
// Applies environment-variable overrides to the graceful online/offline switches (these
// take highest priority) and, when Spring grace shutdown is enabled, registers a JVM
// shutdown hook for graceful shutdown.
private void fixGrace() { // Modify the elegant online and offline switches based on environment variables, // and this configuration has the highest priority final GraceConfig graceConfig = PluginConfigManager.getPluginConfig(GraceConfig.class); graceConfig.fixGraceSwitch(); if (graceConfig.isEnableSpring() && graceConfig.isEnableGraceShutdown()) { Runtime.getRuntime().addShutdownHook(new GraceShutdownHook()); } }
// Setting the grace-enable env var and invoking fixGrace (via reflection) must switch on
// graceful shutdown, offline notification and warm-up on the plugin config.
@Test public void testFixGrace() { final RegistryConfigSubscribeServiceImpl service = new RegistryConfigSubscribeServiceImpl(); try (final MockedStatic<PluginConfigManager> pluginConfigManagerMockedStatic = Mockito.mockStatic(PluginConfigManager.class);){ final GraceConfig graceConfig = new GraceConfig(); pluginConfigManagerMockedStatic.when(() -> PluginConfigManager.getPluginConfig(GraceConfig.class)) .thenReturn(graceConfig); final Map<String, String> env = Collections.singletonMap(GraceConstants.ENV_GRACE_ENABLE, "true"); EnvUtils.addEnv(env); ReflectUtils.invokeMethod(service, "fixGrace", null, null); Assert.assertTrue(graceConfig.isEnableGraceShutdown()); Assert.assertTrue(graceConfig.isEnableOfflineNotify()); Assert.assertTrue(graceConfig.isEnableWarmUp()); EnvUtils.delEnv(env); } catch (Exception exception) { // ignored } }
// Creates a fresh cursor positioned before the first assigned slot.
@Override public HashSlotCursor8byteKey cursor() { return new Cursor(); }
// Advancing a cursor after it has already been exhausted must trip an assertion
// (requires -ea, hence @RequireAssertEnabled).
@Test @RequireAssertEnabled public void testCursor_advance_afterAdvanceReturnsFalse() { insert(random.nextLong()); HashSlotCursor8byteKey cursor = hsa.cursor(); cursor.advance(); cursor.advance(); assertThrows(AssertionError.class, cursor::advance); }
// Advances the shadow looper's clock to endTime. Returns false when endTime is in the past;
// otherwise returns whether any tasks were scheduled to run before endTime, after idling
// the looper for the remaining duration.
@Override @SuppressWarnings("AndroidJdkLibsChecker") public boolean advanceTo(long endTime) { if (endTime < SystemClock.uptimeMillis()) { return false; } boolean hasQueueTasks = hasTasksScheduledBefore(endTime); shadowOf(looper).idleFor(Duration.ofMillis(endTime - SystemClock.uptimeMillis())); return hasQueueTasks; }
// advanceTo returns false with nothing queued, true once a task is pending, and running
// to the current uptime executes the posted runnable exactly once.
@Test public void advanceTo() { assertThat(scheduler.advanceTo(0)).isFalse(); assertThat(scheduler.advanceTo(SystemClock.uptimeMillis())).isFalse(); Runnable runnable = mock(Runnable.class); new Handler(getMainLooper()).post(runnable); verify(runnable, times(0)).run(); assertThat(scheduler.advanceTo(SystemClock.uptimeMillis())).isTrue(); verify(runnable, times(1)).run(); }
// Simple accessor for the stored path.
@Override public String getPath() { return path; }
// Opening an output stream on a path that is a directory must fail with FileNotFoundException.
@Test(expectedExceptions = FileNotFoundException.class) public void testWritingToDirectoryThrowsException2() throws IOException { File dir = createDir(); fs.getOutput(dir.getPath()); // should throw exception }
// Constructs a DirectoryScanner from configuration: scan interval, per-second throttle
// (values >= 1000 ms/sec are rejected and reset to the default), report-compile thread
// pool size, and reconcile batch size/interval (non-positive values fall back to defaults
// with a warning). The master thread drives periodic scans.
public DirectoryScanner(FsDatasetSpi<?> dataset, Configuration conf) { this.dataset = dataset; this.stats = new HashMap<>(DEFAULT_MAP_SIZE); int interval = (int) conf.getTimeDuration( DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_DEFAULT, TimeUnit.SECONDS); scanPeriodMsecs = TimeUnit.SECONDS.toMillis(interval); int throttle = conf.getInt( DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_KEY, DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_DEFAULT); if (throttle >= TimeUnit.SECONDS.toMillis(1)) { LOG.warn( "{} set to value above 1000 ms/sec. Assuming default value of {}", DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_KEY, DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_DEFAULT); throttle = DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THROTTLE_LIMIT_MS_PER_SEC_DEFAULT; } throttleLimitMsPerSec = throttle; int threads = conf.getInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THREADS_KEY, DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_THREADS_DEFAULT); reportCompileThreadPool = Executors.newFixedThreadPool(threads, new Daemon.DaemonFactory()); masterThread = new ScheduledThreadPoolExecutor(1, new Daemon.DaemonFactory()); int reconcileBatchSize = conf.getInt(DFSConfigKeys. DFS_DATANODE_RECONCILE_BLOCKS_BATCH_SIZE, DFSConfigKeys. DFS_DATANODE_RECONCILE_BLOCKS_BATCH_SIZE_DEFAULT); if (reconcileBatchSize <= 0) { LOG.warn("Invalid value configured for " + "dfs.datanode.reconcile.blocks.batch.size, " + "should be greater than 0, Using default."); reconcileBatchSize = DFSConfigKeys. DFS_DATANODE_RECONCILE_BLOCKS_BATCH_SIZE_DEFAULT; } reconcileBlocksBatchSize = reconcileBatchSize; long reconcileBatchInterval = conf.getTimeDuration(DFSConfigKeys. DFS_DATANODE_RECONCILE_BLOCKS_BATCH_INTERVAL, DFSConfigKeys. 
DFS_DATANODE_RECONCILE_BLOCKS_BATCH_INTERVAL_DEFAULT, TimeUnit.MILLISECONDS); if (reconcileBatchInterval <= 0) { LOG.warn("Invalid value configured for " + "dfs.datanode.reconcile.blocks.batch.interval, " + "should be greater than 0, Using default."); reconcileBatchInterval = DFSConfigKeys. DFS_DATANODE_RECONCILE_BLOCKS_BATCH_INTERVAL_DEFAULT; } reconcileBlocksBatchInterval = reconcileBatchInterval; }
// Runs the directory-scanner test suite with parallelism 1 and 2 to cover both the
// serial and parallel scanning paths.
@Test(timeout = 600000) public void testDirectoryScanner() throws Exception { // Run the test with and without parallel scanning for (int parallelism = 1; parallelism < 3; parallelism++) { runTest(parallelism); } }
/**
 * Propagates the runtime context to this sink and, when the wrapped output
 * format is a {@link RichOutputFormat}, to the format as well.
 */
@Override
public void setRuntimeContext(RuntimeContext context) {
    super.setRuntimeContext(context);
    // Bounded wildcard instead of a raw-type cast: avoids the raw-type /
    // unchecked warning without changing runtime behavior.
    if (format instanceof RichOutputFormat) {
        ((RichOutputFormat<?>) format).setRuntimeContext(context);
    }
}
// The runtime context must be forwarded to RichOutputFormat instances exactly once,
// and setting it must be a no-op (no crash) for plain OutputFormats.
@Test void setRuntimeContext() { RuntimeContext mockRuntimeContext = Mockito.mock(RuntimeContext.class); // Make sure setRuntimeContext of the rich output format is called RichOutputFormat<?> mockRichOutputFormat = Mockito.mock(RichOutputFormat.class); new OutputFormatSinkFunction<>(mockRichOutputFormat).setRuntimeContext(mockRuntimeContext); Mockito.verify(mockRichOutputFormat, Mockito.times(1)) .setRuntimeContext(Mockito.eq(mockRuntimeContext)); // Make sure setRuntimeContext work well when output format is not RichOutputFormat OutputFormat<?> mockOutputFormat = Mockito.mock(OutputFormat.class); new OutputFormatSinkFunction<>(mockOutputFormat).setRuntimeContext(mockRuntimeContext); }
// Extracts the inserted ObjectId from an InsertOneResult. Throws IllegalArgumentException
// when the driver reports no inserted id (which happens for RawBsonDocument inserts).
public static ObjectId insertedId(@Nonnull InsertOneResult result) { final BsonValue insertedId = result.getInsertedId(); if (insertedId == null) { // this should only happen when inserting RawBsonDocuments throw new IllegalArgumentException("Inserted ID is null. Make sure that you are inserting documents of " + "type <? extends MongoEntity>."); } return insertedId.asObjectId().getValue(); }
// An explicit id round-trips exactly; a generated id must be ordered after the earlier one
// (ObjectIds are monotonically increasing).
@Test void testInsertedId() { final var id = "6627add0ee216425dd6df37c"; final var a = new DTO(id, "a"); assertThat(insertedId(collection.insertOne(a))).isEqualTo(new ObjectId(id)); assertThat(insertedId(collection.insertOne(new DTO(null, "b")))).isGreaterThan(new ObjectId(id)); }
// Finds the single public method annotated with @ApplyMethod on the ScalarFn's class.
// Multiple matches are allowed only when they are overrides of the same signature
// (collected from parent classes); otherwise, or when none exists or the method is not
// public, an IllegalArgumentException is thrown.
public static Method getApplyMethod(ScalarFn scalarFn) { Class<? extends ScalarFn> clazz = scalarFn.getClass(); Collection<Method> matches = ReflectHelpers.declaredMethodsWithAnnotation( ScalarFn.ApplyMethod.class, clazz, ScalarFn.class); if (matches.isEmpty()) { throw new IllegalArgumentException( String.format( "No method annotated with @%s found in class %s.", ScalarFn.ApplyMethod.class.getSimpleName(), clazz.getName())); } // If we have at least one match, then either it should be the only match // or it should be an extension of the other matches (which came from parent // classes). Method first = matches.iterator().next(); for (Method other : matches) { if (!first.getName().equals(other.getName()) || !Arrays.equals(first.getParameterTypes(), other.getParameterTypes())) { throw new IllegalArgumentException( String.format( "Found multiple methods annotated with @%s. [%s] and [%s]", ScalarFn.ApplyMethod.class.getSimpleName(), ReflectHelpers.formatMethod(first), ReflectHelpers.formatMethod(other))); } } // Method must be public. if ((first.getModifiers() & Modifier.PUBLIC) == 0) { throw new IllegalArgumentException( String.format("Method %s is not public.", ReflectHelpers.formatMethod(first))); } return first; }
// Resolving the apply method of IncrementFn and invoking it reflectively must compute 24 + 1.
@Test @SuppressWarnings("nullness") // If result is null, test will fail as expected. public void testGetApplyMethod() throws InvocationTargetException, IllegalAccessException { IncrementFn incrementFn = new IncrementFn(); Method method = ScalarFnReflector.getApplyMethod(incrementFn); @Nullable Object result = method.invoke(incrementFn, Long.valueOf(24L)); assertEquals(Long.valueOf(25L), result); }
// Dispatches treatment-to-PiAction mapping by control table group (forwarding, pre-next,
// ACL, next, egress-next). Unknown tables are rejected with a PiInterpreterException.
@Override public PiAction mapTreatment(TrafficTreatment treatment, PiTableId piTableId) throws PiInterpreterException { if (FORWARDING_CTRL_TBLS.contains(piTableId)) { return treatmentInterpreter.mapForwardingTreatment(treatment, piTableId); } else if (PRE_NEXT_CTRL_TBLS.contains(piTableId)) { return treatmentInterpreter.mapPreNextTreatment(treatment, piTableId); } else if (ACL_CTRL_TBLS.contains(piTableId)) { return treatmentInterpreter.mapAclTreatment(treatment, piTableId); } else if (NEXT_CTRL_TBLS.contains(piTableId)) { return treatmentInterpreter.mapNextTreatment(treatment, piTableId); } else if (E_NEXT_CTRL_TBLS.contains(piTableId)) { return treatmentInterpreter.mapEgressNextTreatment(treatment, piTableId); } else { throw new PiInterpreterException(format( "Treatment mapping not supported for table '%s'", piTableId)); } }
// An MPLS-set treatment on the pre-next table must map to the set_mpls_label action
// carrying the label as its parameter.
@Test public void testNextMplsTreatment() throws Exception { TrafficTreatment treatment = DefaultTrafficTreatment.builder() .setMpls(MPLS_10) .build(); PiAction mappedAction = interpreter.mapTreatment( treatment, FabricConstants.FABRIC_INGRESS_PRE_NEXT_NEXT_MPLS); PiActionParam mplsParam = new PiActionParam( FabricConstants.LABEL, MPLS_10.toInt()); PiAction expectedAction = PiAction.builder() .withId(FabricConstants.FABRIC_INGRESS_PRE_NEXT_SET_MPLS_LABEL) .withParameter(mplsParam) .build(); assertEquals(expectedAction, mappedAction); }
/**
 * Returns the number of columns reported by the wrapped JDBC metadata.
 *
 * @return column count of the underlying result set
 * @throws SQLException if the underlying metadata access fails
 */
@Override
public int getColumnCount() throws SQLException {
    // Straight delegation; no caching or adjustment.
    final int columnCount = resultSetMetaData.getColumnCount();
    return columnCount;
}
/** The mocked underlying metadata reports exactly one column. */
@Test
void assertGetColumnCount() throws SQLException {
    final int columnCount = queryResultMetaData.getColumnCount();
    assertThat(columnCount, is(1));
}
/**
 * Initializes the SAML session state for an incoming authentication request.
 *
 * <p>If the request names a federation, the federation session is found or created
 * first; the SAML session itself is then found or initialized. Both are keyed by
 * the current HTTP session id.
 *
 * @param authenticationRequest the incoming authentication request
 * @param bindingContext        SAML binding context of the request
 * @throws SamlSessionException         if session initialization fails
 * @throws SharedServiceClientException if a shared-service lookup fails
 */
public void initializeSession(AuthenticationRequest authenticationRequest,
        SAMLBindingContext bindingContext)
        throws SamlSessionException, SharedServiceClientException {
    final String sessionId = authenticationRequest.getRequest().getSession().getId();
    // Federation session must be in place before the SAML session is resolved.
    if (authenticationRequest.getFederationName() != null) {
        findOrInitializeFederationSession(authenticationRequest, sessionId);
    }
    findOrInitializeSamlSession(authenticationRequest, sessionId, bindingContext);
}
// Verifies that a RequesterID carried inside the AuthnRequest's Scoping element
// is copied onto the SAML session when the session is initialized.
@Test
public void validRequesterIdIsPresent() throws SamlSessionException, SharedServiceClientException {
    RequesterID requesterID = OpenSAMLUtils.buildSAMLObject(RequesterID.class);
    requesterID.setRequesterID("urn:nl-eid-gdi:1.0:BVD:00000004003214345001:entities:9000");
    Scoping scoping = OpenSAMLUtils.buildSAMLObject(Scoping.class);
    scoping.getRequesterIDs().add(requesterID);
    authnRequest.setScoping(scoping);
    samlSessionService.initializeSession(authenticationRequest, bindingContext);
    // The requester id must round-trip unchanged onto the stored SAML session.
    assertEquals("urn:nl-eid-gdi:1.0:BVD:00000004003214345001:entities:9000",
            authenticationRequest.getSamlSession().getRequesterId());
}
/**
 * Parses a string into the most specific schema-and-value pair it represents.
 *
 * @param value the string to parse; may be null
 * @return the canonical null pair for null input, a plain STRING for the empty
 *         string, otherwise whatever the value parser infers
 */
public static SchemaAndValue parseString(String value) {
    // Null maps to the shared null schema/value constant.
    if (value == null) {
        return NULL_SCHEMA_AND_VALUE;
    }
    // The empty string is a valid STRING value; nothing to infer.
    if (value.isEmpty()) {
        return new SchemaAndValue(Schema.STRING_SCHEMA, value);
    }
    return new ValueParser(new Parser(value)).parse(false);
}
/** A brace-wrapped string whose entries lack comma separators must stay a plain STRING. */
@Test
public void shouldNotParseAsMapWithoutCommas() {
    final String malformedMap = "{6:9 4:20}";
    SchemaAndValue parsed = Values.parseString(malformedMap);
    assertEquals(Type.STRING, parsed.schema().type());
    assertEquals(malformedMap, parsed.value());
}
/**
 * Opens a ranged input stream over the underlying OBS object.
 *
 * @param position    byte offset to start reading at (inclusive)
 * @param bytesToRead number of bytes the range should cover
 * @return a stream positioned over [position, position + bytesToRead)
 * @throws IOException if the OBS client fails to fetch the object
 */
@Override
protected InputStream openObjectInputStream(
    long position, int bytesToRead) throws IOException {
  ObsObject obsObject;
  try {
    GetObjectRequest request = new GetObjectRequest(mBucketName, mPath);
    // Range end is inclusive, hence the -1.
    request.setRangeStart(position);
    request.setRangeEnd(position + bytesToRead - 1);
    obsObject = mClient.getObject(request);
  } catch (ObsException e) {
    throw new IOException(
        String.format("Failed to get object: %s bucket: %s", mPath, mBucketName), e);
  }
  return obsObject.getObjectContent();
}
@Test public void openObjectInputStream() throws Exception { ObsObject object = Mockito.mock(ObsObject.class); InputStream inputStream = Mockito.mock(InputStream.class); Mockito.when(mClient.getObject(ArgumentMatchers.any( GetObjectRequest.class))).thenReturn(object); Mockito.when(object.getObjectContent()).thenReturn(inputStream); // test successful open object input stream long position = 0L; int bytesToRead = 10; Object objectInputStream = mOBSPositionReader.openObjectInputStream(position, bytesToRead); Assert.assertTrue(objectInputStream instanceof InputStream); // test open object input stream with exception Mockito.when(mClient.getObject(ArgumentMatchers.any(GetObjectRequest.class))) .thenThrow(ObsException.class); try { mOBSPositionReader.openObjectInputStream(position, bytesToRead); } catch (Exception e) { Assert.assertTrue(e instanceof IOException); String errorMessage = String .format("Failed to get object: %s bucket: %s", mPath, mBucketName); Assert.assertEquals(errorMessage, e.getMessage()); } }
/**
 * Resets the consumer group's committed offsets to the earliest available
 * offsets for the given partitions of a topic.
 *
 * @param cluster    target Kafka cluster
 * @param group      consumer group id
 * @param topic      topic whose offsets are reset
 * @param partitions partitions to reset; semantics of a null/empty collection
 *                   are delegated to {@code offsets(...)}
 * @return a Mono completing when the reset has been applied
 */
public Mono<Void> resetToEarliest(
    KafkaCluster cluster, String group, String topic, Collection<Integer> partitions) {
  return checkGroupCondition(cluster, group)
      .flatMap(adminClient ->
          offsets(adminClient, topic, partitions, OffsetSpec.earliest())
              .flatMap(earliestOffsets -> resetOffsets(adminClient, group, earliestOffsets)));
}
// End-to-end check of resetToEarliest: an explicit partition list rewinds only
// those partitions, while a null collection rewinds every partition of the topic.
@Test
void resetToEarliest() {
  // 10 messages into partitions 0..2, group committed at offset 5 in each.
  sendMsgsToPartition(Map.of(0, 10, 1, 10, 2, 10));
  commit(Map.of(0, 5L, 1, 5L, 2, 5L));
  // Subset reset: partitions 0 and 1 go to 0; partition 2 keeps its commit.
  offsetsResetService.resetToEarliest(cluster, groupId, topic, List.of(0, 1)).block();
  assertOffsets(Map.of(0, 0L, 1, 0L, 2, 5L));
  commit(Map.of(0, 5L, 1, 5L, 2, 5L));
  // Null partition collection means "all partitions of the topic".
  offsetsResetService.resetToEarliest(cluster, groupId, topic, null).block();
  assertOffsets(Map.of(0, 0L, 1, 0L, 2, 0L, 3, 0L, 4, 0L));
}
/**
 * Runs the configured formulas over the whole component tree, computing and
 * storing the resulting measures for every component.
 */
public void execute() {
  // Build the formula-executing visitor once, then crawl the report tree with it.
  FormulaExecutorComponentVisitor formulaVisitor =
      FormulaExecutorComponentVisitor.newBuilder(metricRepository, measureRepository)
          .buildFor(formulas);
  new PathAwareCrawler<>(formulaVisitor).visit(treeRootHolder.getReportTreeRoot());
}
// Documents (by construction) that the duplicated-blocks measure counts every
// block occurrence in the duplication data, even when the same originals/
// duplicates overlap across entries: two duplications sharing a duplicate block
// yield 4 counted blocks, not 3. Consistency of the data is assumed upstream.
@Test
public void compute_duplicated_blocks_does_not_count_blocks_only_once_it_assumes_consistency_from_duplication_data() {
  duplicationRepository.addDuplication(FILE_1_REF, new TextBlock(1, 1), new TextBlock(3, 3));
  duplicationRepository.addDuplication(FILE_1_REF, new TextBlock(2, 2), new TextBlock(3, 3));
  underTest.execute();
  assertRawMeasureValue(FILE_1_REF, DUPLICATED_BLOCKS_KEY, 4);
}
/**
 * Creates a new empty row, appends it to this table, and returns it so the
 * caller can populate its cells.
 *
 * @return the freshly added row
 */
public Row addRow() {
    Row newRow = new Row();
    rows.add(newRow);
    return newRow;
}
// Writing a cell under a column id that was never declared on the TableModel
// must be rejected with an IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void rowBadColumn() {
    tm = new TableModel(FOO, BAR);
    // ZOO is not one of the model's declared columns.
    tm.addRow().cell(ZOO, 2);
}
/**
 * Builds a {@link Locale} from a code such as "en", "en_US" or "en_US_WIN".
 *
 * @param localeCode underscore-separated locale code; may be null or empty
 * @return the corresponding Locale, or null for an empty/null code; codes with
 *         a token count other than 2 or 3 are passed verbatim as the language
 */
public static Locale createLocale( String localeCode ) {
  if ( Utils.isEmpty( localeCode ) ) {
    return null;
  }
  StringTokenizer tokens = new StringTokenizer( localeCode, "_" );
  switch ( tokens.countTokens() ) {
    case 2:
      // language_COUNTRY
      return new Locale( tokens.nextToken(), tokens.nextToken() );
    case 3:
      // language_COUNTRY_variant
      return new Locale( tokens.nextToken(), tokens.nextToken(), tokens.nextToken() );
    default:
      return new Locale( localeCode );
  }
}
/** An empty locale code has no Locale representation and must yield null. */
@Test
public void createLocale_Empty() throws Exception {
  assertNull( EnvUtil.createLocale( "" ) );
}
/**
 * Writes one sample as a CSV row, quoting any cell that contains the separator,
 * the quoting char, CR or LF, and returns the running count of written samples.
 *
 * @param sample the sample whose columns are written
 * @return total number of samples written so far (including this one)
 * @throws IllegalStateException if no writer has been set via setWriter()
 */
@Override
public long write(Sample sample) {
    Validate.validState(writer != null, "No writer set! Call setWriter() first!");
    StringBuilder row = new StringBuilder();
    // Characters that force a cell to be quoted.
    char[] specials = new char[] {
            separator, CSVSaveService.QUOTING_CHAR, CharUtils.CR, CharUtils.LF };
    for (int i = 0; i < columnCount; i++) {
        String data = sample.getData(i);
        row.append(CSVSaveService.quoteDelimiters(data, specials))
                .append(separator);
    }
    // BUG FIX: guard the trailing-separator trim. With zero columns the old
    // unconditional setLength(row.length() - 1) called setLength(-1) and threw
    // StringIndexOutOfBoundsException.
    if (row.length() > 0) {
        row.setLength(row.length() - 1);
    }
    writer.println(row.toString());
    sampleCount++;
    return sampleCount;
}
@Test public void testWriteWithoutWriter() throws Exception { try (CsvSampleWriter csvWriter = new CsvSampleWriter(metadata)) { Sample sample = new SampleBuilder(metadata).add("a1").add("b1").build(); try { csvWriter.write(sample); fail("ISE expected"); } catch (IllegalStateException e) { // OK, we should land here } } }
/**
 * Builds a column index without per-page size/value statistics.
 *
 * <p>Convenience overload delegating to the full builder with null statistics.
 *
 * @param type          primitive type of the column
 * @param boundaryOrder ordering of the page boundaries
 * @param nullPages     per-page flag: true if the page contains only nulls
 * @param nullCounts    per-page null counts
 * @param minValues     per-page minimum values (empty buffer for null pages)
 * @param maxValues     per-page maximum values (empty buffer for null pages)
 * @return the assembled column index
 */
public static ColumnIndex build(
    PrimitiveType type,
    BoundaryOrder boundaryOrder,
    List<Boolean> nullPages,
    List<Long> nullCounts,
    List<ByteBuffer> minValues,
    List<ByteBuffer> maxValues) {
  return build(type, boundaryOrder, nullPages, nullCounts, minValues, maxValues, null, null);
}
// Exercises the statistics-free static build() overload with a BOOLEAN column:
// the builder must preserve boundary order, null counts, null-page flags and
// min/max values exactly as supplied (null entries correspond to null-only pages).
@Test
public void testStaticBuildBoolean() {
  ColumnIndex columnIndex = ColumnIndexBuilder.build(
      Types.required(BOOLEAN).named("test_boolean"),
      BoundaryOrder.DESCENDING,
      asList(false, true, false, true, false, true),
      asList(9l, 8l, 7l, 6l, 5l, 0l),
      toBBList(false, null, false, null, true, null),
      toBBList(true, null, false, null, true, null));
  assertEquals(BoundaryOrder.DESCENDING, columnIndex.getBoundaryOrder());
  assertCorrectNullCounts(columnIndex, 9, 8, 7, 6, 5, 0);
  assertCorrectNullPages(columnIndex, false, true, false, true, false, true);
  assertCorrectValues(columnIndex.getMaxValues(), true, null, false, null, true, null);
  assertCorrectValues(columnIndex.getMinValues(), false, null, false, null, true, null);
}
// Entry point of one preemption-policy editing pass. Synchronized so that
// config reloads and scheduling passes do not interleave.
@Override
public synchronized void editSchedule() {
  // Pick up any capacity-scheduler configuration changes before computing preemption.
  updateConfigIfNeeded();
  long startTs = clock.getTime();
  CSQueue root = scheduler.getRootQueue();
  // Snapshot the cluster resource so the computation works on a stable view.
  Resource clusterResources = Resources.clone(scheduler.getClusterResource());
  containerBasedPreemptOrKill(root, clusterResources);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Total time used=" + (clock.getTime() - startTs) + " ms.");
  }
}
// Verifies that the preemption-disabled flag is inherited: QueueE (child of
// QueueD) is over its absolute max capacity, but because preemption is disabled
// on QueueD, no containers of the app running on QueueE may be preempted.
@Test
public void testPerQueueDisablePreemptionOverAbsMaxCapacity() {
  int[][] qData = new int[][] {
    //  /    A              D
    //            B    C         E    F
    {1000, 725, 360, 365, 275,  17, 258 },  // absCap
    {1000,1000,1000,1000, 550, 109,1000 },  // absMaxCap
    {1000, 741, 396, 345, 259, 110, 149 },  // used
    {  40,  20,   0,  20,  20,  20,   0 },  // pending
    {   0,   0,   0,   0,   0,   0,   0 },  // reserved
    //          appA appB      appC appD
    {   4,   2,   1,   1,   2,   1,   1 },  // apps
    {  -1,  -1,   1,   1,  -1,   1,   1 },  // req granulrity
    {   2,   2,   0,   0,   2,   0,   0 },  // subqueues
  };
  // QueueE inherits non-preemption from QueueD
  conf.setPreemptionDisabled(QUEUE_D, true);
  ProportionalCapacityPreemptionPolicy policy = buildPolicy(qData);
  policy.editSchedule();
  // appC is running on QueueE. QueueE is over absMaxCap, but is not
  // preemptable. Therefore, appC resources should not be preempted.
  verify(mDisp, never()).handle(argThat(new IsPreemptionRequestFor(appC)));
}
/**
 * Processes a page through this processor's filter/projections, returning a
 * yielding iterator of output pages (empty Optionals signal a yield point).
 *
 * @param properties    SQL function properties for expression evaluation
 * @param yieldSignal   cooperative yield signal from the driver
 * @param memoryContext local memory accounting for intermediate state
 * @param page          input page to process
 * @return iterator over produced pages, honoring the yield signal
 */
public Iterator<Optional<Page>> process(
        SqlFunctionProperties properties,
        DriverYieldSignal yieldSignal,
        LocalMemoryContext memoryContext,
        Page page) {
    // Build the work processor lazily and expose it through its yielding iterator.
    return createWorkProcessor(properties, yieldSignal, memoryContext, page).yieldingIterator();
}
// A processor with a filter but no projections must emit a zero-channel page
// whose position count equals the number of positions the filter kept, while
// retaining no local memory afterwards.
@Test
public void testFilterNoColumns() {
  PageProcessor pageProcessor = new PageProcessor(
      Optional.of(new TestingPageFilter(positionsRange(0, 50))),
      ImmutableList.of());
  Page inputPage = new Page(createLongSequenceBlock(0, 100));
  LocalMemoryContext memoryContext = newSimpleAggregatedMemoryContext()
      .newLocalMemoryContext(PageProcessor.class.getSimpleName());
  Iterator<Optional<Page>> output = pageProcessor.process(
      SESSION.getSqlFunctionProperties(), new DriverYieldSignal(), memoryContext, inputPage);
  // No retained memory once processing is set up.
  assertEquals(memoryContext.getBytes(), 0);
  List<Optional<Page>> outputPages = ImmutableList.copyOf(output);
  assertEquals(outputPages.size(), 1);
  Page outputPage = outputPages.get(0).orElse(null);
  // Filter kept positions [0, 50); no projections means no channels.
  assertEquals(outputPage.getChannelCount(), 0);
  assertEquals(outputPage.getPositionCount(), 50);
}
@ApiOperation(value = "Get a user’s picture", produces = "application/octet-stream", tags = { "Users" },
        notes = "The response body contains the raw picture data, representing the user’s picture. The Content-type of the response corresponds to the mimeType that was set when creating the picture.")
@ApiResponses(value = {
        @ApiResponse(code = 200, message = "Indicates the user was found and has a picture, which is returned in the body."),
        @ApiResponse(code = 404, message = "Indicates the requested user was not found or the user does not have a profile picture. Status-description contains additional information about the error.")
})
@GetMapping(value = "/identity/users/{userId}/picture")
public ResponseEntity<byte[]> getUserPicture(@ApiParam(name = "userId") @PathVariable String userId) {
    User user = getUserFromRequest(userId);
    Picture userPicture = identityService.getUserPicture(user.getId());
    if (userPicture == null) {
        throw new FlowableObjectNotFoundException("The user with id '" + user.getId() + "' does not have a picture.", Picture.class);
    }
    HttpHeaders responseHeaders = new HttpHeaders();
    // Fall back to image/jpeg when the stored picture carries no mime type.
    if (userPicture.getMimeType() != null) {
        responseHeaders.set("Content-Type", userPicture.getMimeType());
    } else {
        responseHeaders.set("Content-Type", "image/jpeg");
    }
    // BUG FIX: the picture's InputStream was never closed, leaking the
    // underlying resource on every request. try-with-resources guarantees
    // the stream is closed after the bytes are copied.
    try (InputStream pictureStream = userPicture.getInputStream()) {
        return new ResponseEntity<>(IOUtils.toByteArray(pictureStream), responseHeaders, HttpStatus.OK);
    } catch (Exception e) {
        throw new FlowableException("Error exporting picture: " + e.getMessage(), e);
    }
}
// REST round-trip: stores a picture for a fresh user, fetches it via the
// /identity/users/{id}/picture endpoint, and checks both the raw bytes and the
// Content-Type are returned unchanged. The user is always cleaned up afterwards.
@Test
public void testGetUserPicture() throws Exception {
    User savedUser = null;
    try {
        User newUser = identityService.newUser("testuser");
        newUser.setFirstName("Fred");
        newUser.setLastName("McDonald");
        newUser.setEmail("no-reply@activiti.org");
        identityService.saveUser(newUser);
        savedUser = newUser;
        // Create picture for user
        Picture thePicture = new Picture("this is the picture raw byte stream".getBytes(), "image/png");
        identityService.setUserPicture(newUser.getId(), thePicture);
        CloseableHttpResponse response = executeRequest(
                new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_USER_PICTURE, newUser.getId())),
                HttpStatus.SC_OK);
        try (InputStream contentStream = response.getEntity().getContent()) {
            assertThat(contentStream).hasContent("this is the picture raw byte stream");
        }
        // Check if media-type is correct
        assertThat(response.getEntity().getContentType().getValue()).isEqualTo("image/png");
        closeResponse(response);
    } finally {
        // Delete user after test passes or fails
        if (savedUser != null) {
            identityService.deleteUser(savedUser.getId());
        }
    }
}
/**
 * Copies the value held by {@code inspector} into {@code inserter}.
 * Invalid inspectors represent "no value" and are silently ignored.
 *
 * @param inspector source of the value; may be invalid
 * @param inserter  destination the value is written into
 */
public void inject(Inspector inspector, Inserter inserter) {
    if (!inspector.valid()) {
        return;
    }
    injectValue(inserter, inspector, null);
}
// Injects every slime value kind (empty, nix, bool, long, double, string, data,
// array, object) into fields of a fresh object and verifies each field renders
// identically to its source; the nested object is compared structurally.
@Test
public void injectIntoObject() {
    f2.slime1.setObject();
    inject(f1.empty.get(), new ObjectInserter(f2.slime1.get(), "a"));
    inject(f1.nixValue.get(), new ObjectInserter(f2.slime1.get(), "b"));
    inject(f1.boolValue.get(), new ObjectInserter(f2.slime1.get(), "c"));
    inject(f1.longValue.get(), new ObjectInserter(f2.slime1.get(), "d"));
    inject(f1.doubleValue.get(), new ObjectInserter(f2.slime1.get(), "e"));
    inject(f1.stringValue.get(), new ObjectInserter(f2.slime1.get(), "f"));
    inject(f1.dataValue.get(), new ObjectInserter(f2.slime1.get(), "g"));
    inject(f1.arrayValue.get(), new ObjectInserter(f2.slime1.get(), "h"));
    inject(f1.objectValue.get(), new ObjectInserter(f2.slime1.get(), "i"));
    assertEquals(f1.empty.get().toString(), f2.slime1.get().field("a").toString());
    assertEquals(f1.nixValue.get().toString(), f2.slime1.get().field("b").toString());
    assertEquals(f1.boolValue.get().toString(), f2.slime1.get().field("c").toString());
    assertEquals(f1.longValue.get().toString(), f2.slime1.get().field("d").toString());
    assertEquals(f1.doubleValue.get().toString(), f2.slime1.get().field("e").toString());
    assertEquals(f1.stringValue.get().toString(), f2.slime1.get().field("f").toString());
    assertEquals(f1.dataValue.get().toString(), f2.slime1.get().field("g").toString());
    assertEquals(f1.arrayValue.get().toString(), f2.slime1.get().field("h").toString());
    // Objects are compared with structural equality rather than string rendering.
    assertEqualTo(f1.objectValue.get(), f2.slime1.get().field("i"));
}
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
@Test public void shouldFindGenericMethodWithIntParam() { // Given: givenFunctions( function(EXPECTED, -1, GENERIC_LIST) ); // When: final KsqlScalarFunction fun = udfIndex.getFunction(Collections.singletonList(SqlArgument.of(SqlArray.of(SqlTypes.INTEGER)))); // Then: assertThat(fun.name(), equalTo(EXPECTED)); }
/**
 * Deletes the node at {@code key} together with all of its children,
 * using Curator's guaranteed-delete semantics.
 *
 * @param key the ZooKeeper path to delete
 * @throws ShenyuException wrapping any failure raised by the Curator client
 */
public void delete(final String key) {
    try {
        client.delete().guaranteed().deletingChildrenIfNeeded().forPath(key);
    } catch (Exception cause) {
        throw new ShenyuException(cause);
    }
}
// delete() must wrap Curator failures in ShenyuException (unmocked chain throws),
// and must complete quietly when the whole delete builder chain is mocked out.
@Test
void delete() throws Exception {
    // Unstubbed curatorFramework.delete() fails -> wrapped as ShenyuException.
    assertThrows(ShenyuException.class, () -> client.delete("/test"));
    DeleteBuilder deleteBuilder = mock(DeleteBuilder.class);
    when(curatorFramework.delete()).thenReturn(deleteBuilder);
    ChildrenDeletable childrenDeletable = mock(ChildrenDeletable.class);
    when(deleteBuilder.guaranteed()).thenReturn(childrenDeletable);
    BackgroundVersionable backgroundVersionable = mock(BackgroundVersionable.class);
    when(childrenDeletable.deletingChildrenIfNeeded()).thenReturn(backgroundVersionable);
    doNothing().when(backgroundVersionable).forPath(anyString());
    // Fully mocked chain: no exception expected.
    assertDoesNotThrow(() -> client.delete("/test"));
}