Dataset schema: two string columns per record —
  focal_method (length 13 to 60.9k characters),
  test_case (length 25 to 109k characters).
Each subsequent line pair below is one (focal method, test case) Java sample.
@Override @SuppressWarnings("rawtypes") public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) { final long timestamp = clock.getTime() / 1000; // oh it'd be lovely to use Java 7 here try { graphite.connect(); for (Map.Entry<String, Gauge> entry : gauges.entrySet()) { reportGauge(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Counter> entry : counters.entrySet()) { reportCounter(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Histogram> entry : histograms.entrySet()) { reportHistogram(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Meter> entry : meters.entrySet()) { reportMetered(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Timer> entry : timers.entrySet()) { reportTimer(entry.getKey(), entry.getValue(), timestamp); } graphite.flush(); } catch (IOException e) { LOGGER.warn("Unable to report to Graphite", graphite, e); } finally { try { graphite.close(); } catch (IOException e1) { LOGGER.warn("Error closing Graphite", graphite, e1); } } }
// Verifies a double gauge is formatted with two decimals ("1.10") and that the
// reporter performs the exact connect -> send -> flush -> close sequence, with
// no other interaction on the Graphite client.
@Test public void reportsDoubleGaugeValues() throws Exception { reporter.report(map("gauge", gauge(1.1)), map(), map(), map(), map()); final InOrder inOrder = inOrder(graphite); inOrder.verify(graphite).connect(); inOrder.verify(graphite).send("prefix.gauge", "1.10", timestamp); inOrder.verify(graphite).flush(); inOrder.verify(graphite).close(); verifyNoMoreInteractions(graphite); }
/** A header set is empty only when it holds neither pseudo-headers nor regular headers. */
@Override
public boolean isEmpty() {
    return otherHeaders.length == 0 && pseudoHeaders.length == 0;
}
/** Trailers produced by the fixture must report themselves as non-empty. */
@Test
public void testIsNotEmpty() {
    Http2Headers trailers = newTrailers();
    assertFalse(trailers.isEmpty());
}
/**
 * Replaces a leading local home-directory prefix in {@code name} with {@code Local.HOME}.
 *
 * @param name absolute local path
 * @return abbreviated path, or the input unchanged when it is not under the home directory
 */
public String abbreviate(final String name) {
    // Hoisted: the original resolved the preference twice per call.
    final String home = preferences.getProperty("local.user.home");
    if (StringUtils.startsWith(name, home)) {
        return Local.HOME + StringUtils.removeStart(name, home);
    }
    return name;
}
/** A path directly under the user's home directory abbreviates to a '~'-prefixed path. */
@Test
public void testAbbreviate() {
    final String home = System.getProperty("user.home");
    assertEquals("~/f", new TildeExpander().abbreviate(home + "/f"));
}
/** Optional override for the metadata log directory, if one was configured. */
public Optional<String> metadataLogDir() {
    return this.metadataLogDir;
}
/** The EMPTY instance must carry no metadata log directory override. */
@Test
public void testMetadataLogDirForEmpty() {
    Optional<String> dir = EMPTY.metadataLogDir();
    assertEquals(Optional.empty(), dir);
}
// Persists a step-instance state change (INSERT/UPSERT via the DAO's
// insert-or-upsert path, UPDATE via updateStepInstance; any other DbOperation
// is an internal error) and then publishes pending change records, if any.
// Any RuntimeException is captured and returned as a retryable Details rather
// than propagated, so callers receive Optional.empty() only on full success
// with nothing to publish.
public Optional<Details> sync( @NotNull StepInstance instance, @NotNull WorkflowSummary workflowSummary, @NotNull StepRuntimeSummary stepSummary) { try { switch (stepSummary.getDbOperation()) { case INSERT: case UPSERT: instanceDao.insertOrUpsertStepInstance( instance, stepSummary.getDbOperation() == DbOperation.UPSERT); break; case UPDATE: instanceDao.updateStepInstance(workflowSummary, stepSummary); break; default: throw new MaestroInternalError( "Invalid DB operation: %s for step instance [%s][%s]", stepSummary.getDbOperation(), stepSummary.getStepId(), stepSummary.getStepAttemptId()); } if (!stepSummary.getPendingRecords().isEmpty()) { return jobEventPublisher.publish( StepInstanceUpdateJobEvent.create(instance, stepSummary.getPendingRecords())); } return Optional.empty(); } catch (RuntimeException e) { return Optional.of(Details.create(e, true, "Failed to sync a Maestro step state change")); } }
/** An UPSERT sync must call the DAO with upsert=true and publish nothing. */
@Test
public void testUpsertSync() {
    StepRuntimeSummary summary = StepRuntimeSummary.builder()
            .stepId("test-summary")
            .stepAttemptId(2)
            .stepInstanceId(1)
            .dbOperation(DbOperation.UPSERT)
            .build();
    Optional<Details> result = syncManager.sync(instance, workflowSummary, summary);
    assertFalse(result.isPresent());
    verify(instanceDao, times(1)).insertOrUpsertStepInstance(instance, true);
    verify(publisher, times(0)).publish(any());
}
public ProjectDto validateProjectKey(String projectKey) { try (DbSession dbSession = dbClient.openSession(false)) { return componentFinder.getProjectByKey(dbSession, projectKey); } catch (NotFoundException e) { // To hide information about the existence or not of the project. throw insufficientPrivilegesException(); } }
@Test public void givenInvalidProjectKey_whenValidateProjectKey_thenThrowForbiddenException() { // given String projectKey = "invalidProjectKey"; DbSession dbSession = mockDbSession(); doThrow(NotFoundException.class).when(componentFinder).getProjectByKey(dbSession, projectKey); // when then assertThatThrownBy(() -> underTest.validateProjectKey(projectKey)) .withFailMessage("Insufficient privileges") .isInstanceOf(ForbiddenException.class); }
/** Blocking variant: delegates to the async call and waits for its result. */
@Override
public boolean trySetCapacity(int capacity) {
    return get(trySetCapacityAsync(capacity));
}
// End-to-end check that a NameMapper suffix is applied transparently: the
// bounded queue is created, used, and deleted under the mapped name, and
// afterwards no keys (mapped or not) remain in Redis.
@Test public void testNameMapper() { Config config = createConfig(); config.useSingleServer() .setNameMapper(new NameMapper() { @Override public String map(String name) { return name + ":suffix:"; } @Override public String unmap(String name) { return name.replace(":suffix:", ""); } }); RedissonClient redisson = Redisson.create(config); RBoundedBlockingQueue<Integer> queue = redisson.getBoundedBlockingQueue("bounded-queue"); queue.trySetCapacity(5); queue.add(1); queue.delete(); assertThat(redisson.getKeys().count()).isZero(); }
/**
 * Adds the given component handler's model object to this container's model.
 *
 * @return this handler, for call chaining
 */
public OpenConfigComponentsHandler addComponent(OpenConfigComponentHandler component) {
    this.modelObject.addToComponent(component.getModelObject());
    return this;
}
@Test public void testAddComponent() { // test Handler OpenConfigComponentsHandler components = new OpenConfigComponentsHandler(); // call addComponent OpenConfigComponentHandler component = new OpenConfigComponentHandler("name", components); // expected ModelObject DefaultComponents modelObject = new DefaultComponents(); DefaultComponent comp = new DefaultComponent(); comp.name("name"); modelObject.addToComponent(comp); assertEquals("[NG]addComponent:ModelObject(Component added) is not an expected one.\n", modelObject, components.getModelObject()); }
/**
 * Builds the effective topic properties by combining this config with the
 * supplied defaults.
 *
 * @param defaultProperties     baseline properties to overlay
 * @param additionalRetentionMs extra retention in milliseconds
 *                              (exact semantics are subclass-specific — confirm there)
 * @return the resolved topic properties
 */
public abstract Map<String, String> properties(final Map<String, String> defaultProperties, final long additionalRetentionMs);
/** Caller-supplied configs must flow through into the windowed changelog properties. */
@Test
public void shouldUseSuppliedConfigsForWindowedChangelogConfig() {
    final Map<String, String> supplied = new HashMap<>();
    supplied.put("message.timestamp.type", "LogAppendTime");
    final WindowedChangelogTopicConfig config =
            new WindowedChangelogTopicConfig("name", supplied, 10);
    final Map<String, String> effective = config.properties(Collections.emptyMap(), 0);
    assertEquals("LogAppendTime", effective.get(TopicConfig.MESSAGE_TIMESTAMP_TYPE_CONFIG));
}
// Attaches this logical type to the schema and returns the same schema.
// Order matters: validation runs before any mutation, and the logicalType
// property is written before the in-memory link is set.
public Schema addToSchema(Schema schema) { validate(schema); schema.addProp(LOGICAL_TYPE_PROP, name); schema.setLogicalType(this); return schema; }
// Attaching a second, conflicting decimal logical type must fail (property
// overwrite is rejected) and must leave the first logical type intact on the
// schema.
@Test void schemaRejectsSecondLogicalType() { final Schema schema = Schema.createFixed("aDecimal", null, null, 4); LogicalTypes.decimal(9).addToSchema(schema); assertThrows("Should reject second logical type", AvroRuntimeException.class, "Can't overwrite property: scale", () -> { LogicalTypes.decimal(9, 2).addToSchema(schema); return null; }); assertEquals(LogicalTypes.decimal(9), LogicalTypes.fromSchemaIgnoreInvalid(schema), "First logical type should still be set on schema"); }
/**
 * Tests whether {@code actual} corresponds to {@code expected} under this
 * correspondence (the relation is defined by each concrete implementation).
 *
 * @param actual   the value under test
 * @param expected the reference value
 * @return true when the pair satisfies the correspondence
 */
public abstract boolean compare(A actual, E expected);
/** The prefix correspondence holds exactly when 'expected' is a prefix of 'actual'. */
@Test
public void testFrom_compare() {
    assertThat(STRING_PREFIX_EQUALITY.compare("foot", "foo")).isTrue();   // proper prefix
    assertThat(STRING_PREFIX_EQUALITY.compare("foot", "foot")).isTrue();  // identical strings
    assertThat(STRING_PREFIX_EQUALITY.compare("foo", "foot")).isFalse(); // expected longer than actual
}
/** Looks up the value schema; delegates to the shared key/value lookup. */
@Override
public SchemaResult getValueSchema(
    final Optional<String> topicName,
    final Optional<Integer> schemaId,
    final FormatInfo expectedFormat,
    final SerdeFeatures serdeFeatures
) {
    // 'false' selects the value (not key) schema in the shared lookup.
    final boolean isKey = false;
    return getSchema(topicName, schemaId, expectedFormat, serdeFeatures, isKey);
}
@Test public void shouldRequestCorrectSchemaOnGetValueSchemaWithId() throws Exception { // When: supplier.getValueSchema(Optional.of(TOPIC_NAME), Optional.of(42), expectedFormat, SerdeFeatures.of()); // Then: verify(srClient).getSchemaBySubjectAndId(TOPIC_NAME + "-value", 42); }
// Maps a Redshift column definition to a SeaTunnel Column.
// Every native Redshift type is handled explicitly: boolean/integer/floating
// types, NUMERIC (precision defaulted when absent and clamped to MAX_PRECISION
// with a matching scale adjustment), CHARACTER/CHARACTER VARYING (length
// defaulted when absent), HLLSKETCH and SUPER as strings, VARBYTE/BINARY
// VARYING as byte arrays, and TIME/TIMESTAMP with and without time zone.
// Unrecognized types are delegated to the parent converter; a failure there is
// rethrown as a SeaTunnel conversion error naming database, type and column.
// NOTE(review): in the NUMERIC branch, if precision > MAX_PRECISION while
// getScale() returned null, the scale arithmetic would NPE — confirm scale is
// always non-null whenever precision is set.
@Override public Column convert(BasicTypeDefine typeDefine) { PhysicalColumn.PhysicalColumnBuilder builder = PhysicalColumn.builder() .name(typeDefine.getName()) .sourceType(typeDefine.getColumnType()) .nullable(typeDefine.isNullable()) .defaultValue(typeDefine.getDefaultValue()) .comment(typeDefine.getComment()); String dataType = typeDefine.getDataType().toUpperCase(); switch (dataType) { case REDSHIFT_BOOLEAN: builder.sourceType(REDSHIFT_BOOLEAN); builder.dataType(BasicType.BOOLEAN_TYPE); break; case REDSHIFT_SMALLINT: builder.sourceType(REDSHIFT_SMALLINT); builder.dataType(BasicType.SHORT_TYPE); break; case REDSHIFT_INTEGER: builder.sourceType(REDSHIFT_INTEGER); builder.dataType(BasicType.INT_TYPE); break; case REDSHIFT_BIGINT: builder.sourceType(REDSHIFT_BIGINT); builder.dataType(BasicType.LONG_TYPE); break; case REDSHIFT_REAL: builder.sourceType(REDSHIFT_REAL); builder.dataType(BasicType.FLOAT_TYPE); break; case REDSHIFT_DOUBLE_PRECISION: builder.sourceType(REDSHIFT_DOUBLE_PRECISION); builder.dataType(BasicType.DOUBLE_TYPE); break; case REDSHIFT_NUMERIC: Long precision = typeDefine.getPrecision(); Integer scale = typeDefine.getScale(); if (precision == null || precision <= 0) { precision = Long.valueOf(DEFAULT_PRECISION); scale = DEFAULT_SCALE; } else if (precision > MAX_PRECISION) { scale = scale - (int) (precision - MAX_PRECISION); precision = Long.valueOf(MAX_PRECISION); } builder.sourceType(String.format("%s(%d,%d)", REDSHIFT_NUMERIC, precision, scale)); builder.dataType(new DecimalType(Math.toIntExact(precision), scale)); break; case REDSHIFT_CHARACTER: Long characterLength = typeDefine.getLength(); if (characterLength == null || characterLength <= 0) { characterLength = Long.valueOf(MAX_CHARACTER_LENGTH); } builder.sourceType(String.format("%s(%d)", REDSHIFT_CHARACTER, characterLength)); builder.dataType(BasicType.STRING_TYPE); builder.columnLength(characterLength); break; case REDSHIFT_CHARACTER_VARYING: Long characterVaryingLength = 
typeDefine.getLength(); if (characterVaryingLength == null || characterVaryingLength <= 0) { characterVaryingLength = Long.valueOf(MAX_CHARACTER_VARYING_LENGTH); } builder.sourceType( String.format( "%s(%d)", REDSHIFT_CHARACTER_VARYING, characterVaryingLength)); builder.dataType(BasicType.STRING_TYPE); builder.columnLength(characterVaryingLength); break; case REDSHIFT_HLLSKETCH: builder.sourceType(REDSHIFT_HLLSKETCH); builder.dataType(BasicType.STRING_TYPE); builder.columnLength(MAX_HLLSKETCH_LENGTH); break; case REDSHIFT_SUPER: builder.sourceType(REDSHIFT_SUPER); builder.dataType(BasicType.STRING_TYPE); builder.columnLength(MAX_SUPER_LENGTH); break; case REDSHIFT_VARBYTE: case REDSHIFT_BINARY_VARYING: builder.sourceType( String.format( "%s(%d)", typeDefine.getDataType(), MAX_BINARY_VARYING_LENGTH)); builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength(MAX_BINARY_VARYING_LENGTH); break; case REDSHIFT_TIME: builder.sourceType(REDSHIFT_TIME); builder.dataType(LocalTimeType.LOCAL_TIME_TYPE); builder.scale(MAX_TIME_SCALE); break; case REDSHIFT_TIMETZ: builder.sourceType(REDSHIFT_TIMETZ); builder.dataType(LocalTimeType.LOCAL_TIME_TYPE); builder.scale(MAX_TIME_SCALE); break; case REDSHIFT_TIMESTAMP: builder.sourceType(REDSHIFT_TIMESTAMP); builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE); builder.scale(MAX_TIMESTAMP_SCALE); break; case REDSHIFT_TIMESTAMPTZ: builder.sourceType(REDSHIFT_TIMESTAMPTZ); builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE); builder.scale(MAX_TIMESTAMP_SCALE); break; default: try { return super.convert(typeDefine); } catch (SeaTunnelRuntimeException e) { throw CommonError.convertToSeaTunnelTypeError( DatabaseIdentifier.REDSHIFT, typeDefine.getDataType(), typeDefine.getName()); } } return builder.build(); }
// Both VARBYTE and BINARY VARYING must convert to a primitive byte-array
// column with the maximum binary-varying length baked into the source type.
@Test public void testConvertBinary() { BasicTypeDefine<Object> typeDefine = BasicTypeDefine.builder() .name("test") .columnType("VARBYTE") .dataType("VARBYTE") .build(); Column column = RedshiftTypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(PrimitiveByteArrayType.INSTANCE, column.getDataType()); Assertions.assertEquals( RedshiftTypeConverter.MAX_BINARY_VARYING_LENGTH, column.getColumnLength()); Assertions.assertEquals( String.format( "%s(%s)", RedshiftTypeConverter.REDSHIFT_VARBYTE, RedshiftTypeConverter.MAX_BINARY_VARYING_LENGTH), column.getSourceType()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("BINARY VARYING") .dataType("BINARY VARYING") .build(); column = RedshiftTypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(PrimitiveByteArrayType.INSTANCE, column.getDataType()); Assertions.assertEquals( RedshiftTypeConverter.MAX_BINARY_VARYING_LENGTH, column.getColumnLength()); Assertions.assertEquals( String.format( "%s(%s)", RedshiftTypeConverter.REDSHIFT_BINARY_VARYING, RedshiftTypeConverter.MAX_BINARY_VARYING_LENGTH), column.getSourceType()); }
/**
 * Converts a persisted live measure into a domain Measure for the given metric.
 * Returns empty when no DTO is supplied; otherwise dispatches on the metric's
 * value type.
 */
public Optional<Measure> toMeasure(@Nullable LiveMeasureDto measureDto, Metric metric) {
    requireNonNull(metric);
    if (measureDto == null) {
        return Optional.empty();
    }
    final Double value = measureDto.getValue();
    final String data = measureDto.getDataAsString();
    switch (metric.getType().getValueType()) {
        case INT:
            return toIntegerMeasure(value, data);
        case LONG:
            return toLongMeasure(value, data);
        case DOUBLE:
            return toDoubleMeasure(value, data);
        case BOOLEAN:
            return toBooleanMeasure(value, data);
        case STRING:
            return toStringMeasure(data);
        case LEVEL:
            return toLevelMeasure(data);
        case NO_VALUE:
            return toNoValueMeasure();
        default:
            // Should be unreachable for known metric types.
            throw new IllegalArgumentException(
                "Unsupported Measure.ValueType " + metric.getType().getValueType());
    }
}
/** Level data that differs only in letter case must not parse into a Level measure. */
@Test
public void toMeasure_returns_no_value_if_dta_has_data_in_wrong_case_for_Level_Metric() {
    Optional<Measure> result =
            underTest.toMeasure(new LiveMeasureDto().setData("waRn"), SOME_LEVEL_METRIC);
    assertThat(result).isPresent();
    assertThat(result.get().getValueType()).isEqualTo(Measure.ValueType.NO_VALUE);
}
@Override public CheckpointStreamFactory resolveCheckpointStorageLocation( long checkpointId, CheckpointStorageLocationReference reference) throws IOException { if (reference.isDefaultReference()) { // default reference, construct the default location for that particular checkpoint final Path checkpointDir = createCheckpointDirectory(checkpointsDirectory, checkpointId); return new FsCheckpointStorageLocation( fileSystem, checkpointDir, sharedStateDirectory, taskOwnedStateDirectory, reference, fileSizeThreshold, writeBufferSize); } else { // location encoded in the reference final Path path = decodePathFromReference(reference); return new FsCheckpointStorageLocation( path.getFileSystem(), path, path, path, reference, fileSizeThreshold, writeBufferSize); } }
// A default reference must resolve against the storage's own (mocked) file
// system, while a reference encoding a local savepoint path must resolve to a
// LocalFileSystem-backed location.
@Test void testResolveCheckpointStorageLocation() throws Exception { final FileSystem checkpointFileSystem = mock(FileSystem.class); final FsCheckpointStorageAccess storage = new FsCheckpointStorageAccess( new TestingPath("hdfs:///checkpoint/", checkpointFileSystem), null, true, new JobID(), FILE_SIZE_THRESHOLD, WRITE_BUFFER_SIZE); final FsCheckpointStorageLocation checkpointStreamFactory = (FsCheckpointStorageLocation) storage.resolveCheckpointStorageLocation( 1L, CheckpointStorageLocationReference.getDefault()); assertThat(checkpointStreamFactory.getFileSystem()).isEqualTo(checkpointFileSystem); final CheckpointStorageLocationReference savepointLocationReference = AbstractFsCheckpointStorageAccess.encodePathAsReference( new Path("file:///savepoint/")); final FsCheckpointStorageLocation savepointStreamFactory = (FsCheckpointStorageLocation) storage.resolveCheckpointStorageLocation(2L, savepointLocationReference); final FileSystem fileSystem = savepointStreamFactory.getFileSystem(); assertThat(fileSystem).isInstanceOf(LocalFileSystem.class); }
@Override public void execute(Runnable command) { if (command == null) { throw new NullPointerException(); } try { super.execute(command); } catch (RejectedExecutionException rx) { // retry to offer the task into queue. final TaskQueue queue = (TaskQueue) super.getQueue(); try { if (!queue.retryOffer(command, 0, TimeUnit.MILLISECONDS)) { throw new RejectedExecutionException("Queue capacity is full.", rx); } } catch (InterruptedException x) { throw new RejectedExecutionException(x); } } }
// Saturates the pool (all threads busy, queue full), asserts the next submit
// is rejected, then arranges for the retry-offer path to free capacity mid-call
// and verifies the final submit succeeds. The semaphore gates worker completion
// so each phase is deterministic.
@Test void testEagerThreadPool_rejectExecution2() { String name = "eager-tf"; int cores = 1; int threads = 3; int queues = 2; long alive = 1000; // init queue and executor AtomicReference<Runnable> runnableWhenRetryOffer = new AtomicReference<>(); TaskQueue<Runnable> taskQueue = new TaskQueue<Runnable>(queues) { @Override public boolean retryOffer(Runnable o, long timeout, TimeUnit unit) throws InterruptedException { if (runnableWhenRetryOffer.get() != null) { runnableWhenRetryOffer.get().run(); } return super.retryOffer(o, timeout, unit); } }; final EagerThreadPoolExecutor executor = new EagerThreadPoolExecutor( cores, threads, alive, TimeUnit.MILLISECONDS, taskQueue, new NamedThreadFactory(name, true), new AbortPolicyWithReport(name, URL)); taskQueue.setExecutor(executor); Semaphore semaphore = new Semaphore(0); Runnable runnable = () -> { try { semaphore.acquire(); } catch (InterruptedException e) { throw new RuntimeException(e); } }; for (int i = 0; i < 5; i++) { executor.execute(runnable); } await().until(() -> executor.getPoolSize() == threads); await().until(() -> executor.getQueue().size() == queues); Assertions.assertThrows(RejectedExecutionException.class, () -> executor.execute(runnable)); runnableWhenRetryOffer.set(() -> { semaphore.release(); await().until(() -> executor.getCompletedTaskCount() == 1); }); executor.execute(runnable); semaphore.release(5); await().until(() -> executor.getActiveCount() == 0); }
/**
 * Computes the hash of the serialized access token for the given signing
 * algorithm (used for the OIDC at_hash claim).
 *
 * @param signingAlg the ID token's signing algorithm, which selects the digest
 * @param token      the access token whose serialized JWT is hashed
 * @return the base64url-encoded hash
 */
public static Base64URL getAccessTokenHash(JWSAlgorithm signingAlg, OAuth2AccessTokenEntity token) {
    // FIX: use an explicit charset. The serialized JWT is base64url ASCII, so
    // UTF-8 is always correct, whereas the no-arg getBytes() depends on the
    // platform default charset.
    byte[] tokenBytes = token.getJwt().serialize()
            .getBytes(java.nio.charset.StandardCharsets.UTF_8);
    return getHash(signingAlg, tokenBytes);
}
/** RS512 token hash must match the independently computed base64url value. */
@Test
public void getAccessTokenHash512() {
    // Independently generated from the serialized mock token
    // (eyJhbGciOiJub25lIn0.eyJhbGciOiJSUzUxMiIsInN1YiI6ImRpZmZlcmVudF91c2VyIiwiaXNzIjoid3d3LmRpZmZlcmVudC5jb20iLCJ0eXAiOiJKV1QifQ.):
    // base64url of hash = vGH3QMY-knpACkLgzdkTqu3C9jtvbf2Wk_RSu2vAx8k
    mockToken512.getJwt().serialize();
    Base64URL expectedHash = new Base64URL("vGH3QMY-knpACkLgzdkTqu3C9jtvbf2Wk_RSu2vAx8k");
    Base64URL actualHash = IdTokenHashUtils.getAccessTokenHash(JWSAlgorithm.RS512, mockToken512);
    assertEquals(expectedHash, actualHash);
}
/**
 * Parses a resource configuration value with no cap on the cluster resource
 * (delegates to the bounded overload with Long.MAX_VALUE).
 */
public static ConfigurableResource parseResourceConfigValue(String value)
        throws AllocationConfigurationException {
    return parseResourceConfigValue(value, Long.MAX_VALUE);
}
/** A negative vcores value (with extra whitespace) must be rejected. */
@Test
public void testAbsoluteVcoresNegativeWithMoreSpaces() throws Exception {
    expectNegativeValueOfResource("vcores");
    parseResourceConfigValue("5120mb mb, -2 vcores");
}
/** Single-URL convenience wrapper around the collection-based overload. */
@PublicAPI(usage = ACCESS)
public JavaClasses importUrl(URL url) {
    return importUrls(singletonList(url));
}
/** URLs pointing at non-existent class/jar files must import as an empty set, not fail. */
@Test
public void is_resilient_against_broken_ClassFileSources() throws MalformedURLException {
    JavaClasses fromMissingClass =
            new ClassFileImporter().importUrl(new File("/broken.class").toURI().toURL());
    assertThat(fromMissingClass).isEmpty();
    JavaClasses fromMissingJar =
            new ClassFileImporter().importUrl(new File("/broken.jar").toURI().toURL());
    assertThat(fromMissingJar).isEmpty();
}
/**
 * Computes staged predictions over the ensemble: entry [t][j] is the mean
 * prediction of the first t+1 trees for sample j.
 */
public double[][] test(DataFrame data) {
    DataFrame x = formula.x(data);
    final int rows = x.nrow();
    final int ntrees = models.length;
    double[][] staged = new double[ntrees][rows];
    for (int j = 0; j < rows; j++) {
        Tuple sample = x.get(j);
        double runningSum = 0;
        for (int t = 0; t < ntrees; t++) {
            runningSum += models[t].tree.predict(sample);
            // Running mean over the first t+1 trees.
            staged[t][j] = runningSum / (t + 1);
        }
    }
    return staged;
}
/** Regression benchmark: California housing error must match the recorded value. */
@Test
public void testCalHousing() {
    test("cal_housing", CalHousing.formula, CalHousing.data, 58605.0710);
}
/**
 * Convenience overload for archiving a job that has no task/vertex information
 * yet; delegates with empty task and vertex collections.
 */
public static ArchivedExecutionGraph createSparseArchivedExecutionGraph(
        JobID jobId,
        String jobName,
        JobStatus jobStatus,
        @Nullable JobType jobType,
        @Nullable Throwable throwable,
        @Nullable JobCheckpointingSettings checkpointingSettings,
        long initializationTimestamp) {
    return createSparseArchivedExecutionGraph(
            jobId,
            jobName,
            jobStatus,
            jobType,
            Collections.emptyMap(),
            Collections.emptyList(),
            throwable,
            checkpointingSettings,
            initializationTimestamp);
}
/** Checkpoint settings supplied at archiving time must survive into the archived graph. */
@Test
void testCheckpointSettingsArchiving() {
    final CheckpointCoordinatorConfiguration coordinatorConfig =
            CheckpointCoordinatorConfiguration.builder().build();
    final ArchivedExecutionGraph archived =
            ArchivedExecutionGraph.createSparseArchivedExecutionGraph(
                    new JobID(),
                    "TestJob",
                    JobStatus.INITIALIZING,
                    JobType.STREAMING,
                    null,
                    new JobCheckpointingSettings(coordinatorConfig, null),
                    System.currentTimeMillis());
    assertContainsCheckpointSettings(archived);
}
/**
 * Attempts to translate the given plan into a Pinot query.
 * Returns empty when the plan cannot be pushed down (PinotException), which is
 * treated as a benign "no pushdown" outcome rather than an error.
 */
public Optional<PinotQueryGeneratorResult> generate(PlanNode plan, ConnectorSession session) {
    try {
        final PinotQueryGeneratorContext context = requireNonNull(
                plan.accept(new PinotQueryPlanVisitor(session), new PinotQueryGeneratorContext()),
                "Resulting context is null");
        return Optional.of(
                new PinotQueryGeneratorResult(context.toQuery(pinotConfig, session), context));
    } catch (PinotException e) {
        log.debug(e, "Possibly benign error when pushing plan into scan node %s", plan);
        return Optional.empty();
    }
}
// With broker TopN pushdown enabled, a TopN over a grouped aggregation must
// generate an ORDER BY ... LIMIT query, while the bare aggregation still gets
// the default LIMIT. Exact generated SQL strings are asserted.
@Test public void testDefaultTopNPushdown() { PlanBuilder planBuilder = createPlanBuilder(defaultSessionHolder); TableScanNode tableScanNode = tableScan(planBuilder, pinotTable, city, fare); AggregationNode aggregationNode = planBuilder.aggregation( aggregationNodeBuilder -> aggregationNodeBuilder .source(tableScanNode) .singleGroupingSet(variable("city")) .addAggregation(planBuilder.variable("sum_fare"), getRowExpression("sum(fare)", defaultSessionHolder))); pinotConfig.setPushdownTopNBrokerQueries(true); TopNNode topN = new TopNNode(Optional.empty(), planBuilder.getIdAllocator().getNextId(), aggregationNode, 1000, new OrderingScheme(ImmutableList.of(new Ordering(variable("sum_fare"), SortOrder.ASC_NULLS_FIRST))), TopNNode.Step.SINGLE); Optional<PinotQueryGenerator.PinotQueryGeneratorResult> generatedQuery = new PinotQueryGenerator(pinotConfig, functionAndTypeManager, functionAndTypeManager, standardFunctionResolution) .generate(topN, defaultSessionHolder.getConnectorSession()); assertTrue(generatedQuery.isPresent()); SessionHolder sessionHolder = new SessionHolder(pinotConfig); testPinotQuery( pinotConfig, aggregationNode, "SELECT \"city\", sum(\"fare\") FROM realtimeOnly GROUP BY \"city\" LIMIT 10000", sessionHolder, ImmutableMap.of()); testPinotQuery( pinotConfig, topN, "SELECT \"city\", sum(\"fare\") FROM realtimeOnly GROUP BY \"city\" ORDER BY sum(\"fare\") LIMIT 1000", sessionHolder, ImmutableMap.of()); }
// Builds the DAG vertex for an IMap join, picking one of three strategies:
//  1) equi-join on __key  -> point lookups (map.get) per left row, with a
//     distributed, key-partitioned input edge;
//  2) other equi-join     -> predicate-based entrySet scan; inner joins fan
//     out to all members so each queries only its local partitions;
//  3) no equi-join        -> full-scan join (cross join / arbitrary condition)
//     with the default local-unicast edge.
static VertexWithInputConfig join( DAG dag, String mapName, String tableName, JetJoinInfo joinInfo, KvRowProjector.Supplier rightRowProjectorSupplier ) { int leftEquiJoinPrimitiveKeyIndex = leftEquiJoinPrimitiveKeyIndex(joinInfo, rightRowProjectorSupplier.paths()); if (leftEquiJoinPrimitiveKeyIndex > -1) { // This branch handles the case when there's an equi-join condition for the __key field. // For example: SELECT * FROM left [LEFT] JOIN right ON left.field1=right.__key // In this case we'll use map.get() for the right map to get the matching entry by key and evaluate the // remaining conditions on the returned row. return new VertexWithInputConfig( dag.newUniqueVertex( "Join(Lookup-" + tableName + ")", new JoinByPrimitiveKeyProcessorSupplier( joinInfo.isInner(), leftEquiJoinPrimitiveKeyIndex, joinInfo.condition(), mapName, rightRowProjectorSupplier ) ), edge -> edge.distributed().partitioned(extractPrimitiveKeyFn(leftEquiJoinPrimitiveKeyIndex)) ); } else if (joinInfo.isEquiJoin()) { // This branch handles the case when there's an equi-join, but not for __key (that was handled above) // For example: SELECT * FROM left JOIN right ON left.field1=right.field1 // In this case we'll construct a com.hazelcast.query.Predicate that will find matching rows using // the `map.entrySet(predicate)` method. assert joinInfo.isLeftOuter() || joinInfo.isInner(); return new VertexWithInputConfig( dag.newUniqueVertex( "Join(Predicate-" + tableName + ")", JoinByEquiJoinProcessorSupplier.supplier(joinInfo, mapName, rightRowProjectorSupplier) ), edge -> { // In case of an inner join we'll use `entrySet(predicate, partitionIdSet)` - we'll fan-out each // left item to all members and each member will query a subset of partitions (the local ones). // Otherwise, a default edge is used (local unicast) if (joinInfo.isInner()) { edge.distributed().fanout(); } }); } else { // This is the fallback case when there's not an equi-join: it can be a cross-join or join on // another condition. 
For example: // SELECT * FROM houses h JOIN renters r WHERE h.rent BETWEEN r.min_rent AND r.max_rent return new VertexWithInputConfig( dag.newUniqueVertex( "Join(Scan-" + tableName + ")", new JoinScanProcessorSupplier(joinInfo, mapName, rightRowProjectorSupplier) ) ); } // TODO: detect and handle always-false condition ? }
@Test @Parameters(method = "joinTypes") public void test_joinByScan(JoinRelType joinType) { // given given(rightRowProjectorSupplier.paths()).willReturn(new QueryPath[]{VALUE_PATH}); given(dag.newUniqueVertex(contains("Scan"), isA(JoinScanProcessorSupplier.class))).willReturn(vertex); // when VertexWithInputConfig vertexWithConfig = Joiner.join( dag, "imap-name", "table-name", joinInfo(joinType, new int[0], new int[0]), rightRowProjectorSupplier ); // then assertThat(vertexWithConfig.vertex()).isEqualTo(vertex); assertThat(vertexWithConfig.configureEdgeFn()).isNull(); }
// Resolves the executor service for a dynamic router with a strict precedence:
// (1) an explicitly configured executor bean, (2) a registry reference (which,
// if present but unresolvable, is an IllegalArgumentException), (3) a fresh
// default thread pool when 'useDefault' allows it, otherwise null.
static ExecutorService getConfiguredExecutorService( CamelContext camelContext, String name, DynamicRouterConfiguration cfg, boolean useDefault) throws IllegalArgumentException { ExecutorServiceManager manager = camelContext.getExecutorServiceManager(); ObjectHelper.notNull(manager, ESM_NAME, camelContext); String exSvcRef = cfg.getExecutorService(); ExecutorService exSvcBean = cfg.getExecutorServiceBean(); String errorMessage = "ExecutorServiceRef '" + exSvcRef + "' not found in registry as an ExecutorService " + "instance or as a thread pool profile"; // The first (preferred) option is to use an explicitly-configured executor if the configuration has it return Optional.ofNullable(exSvcBean) // The second preference is to check for an executor service reference .or(() -> Optional.ofNullable(exSvcRef) // Try to get the referenced executor service .map(r -> lookupExecutorServiceRef(camelContext, name, cfg, r) // But, if the reference is specified in the config, // and could not be obtained, this is an error .orElseThrow(() -> new IllegalArgumentException(errorMessage)))) // The third and final option is to create a new "default" thread pool if the parameter // specifies to that the default thread pool should be used as a fallback .or(() -> useDefault ? Optional.of(manager.newDefaultThreadPool(cfg, name)) : Optional.empty()) // failing the above options, then no executor service is configured .orElse(null); }
/** An explicitly configured executor bean takes precedence over every fallback. */
@Test
void testGetConfiguredExecutorServiceWithExecutorServiceBean() {
    when(mockConfig.getExecutorServiceBean()).thenReturn(existingThreadPool);
    when(camelContext.getExecutorServiceManager()).thenReturn(manager);
    ExecutorService configured = DynamicRouterRecipientListHelper.getConfiguredExecutorService(
            camelContext, "someName", mockConfig, true);
    assertEquals(existingThreadPool, configured);
}
/** Renders the object by concatenating the output of every converter in the chain. */
public String convert(Object o) {
    final StringBuilder out = new StringBuilder();
    for (Converter<Object> converter = headTokenConverter;
            converter != null;
            converter = converter.getNext()) {
        out.append(converter.convert(o));
    }
    return out.toString();
}
/** %d tokens must format the supplied date with their embedded pattern, in any position. */
@Test
public void date() {
    Calendar cal = Calendar.getInstance();
    cal.set(2003, 4, 20, 17, 55); // Calendar months are 0-based: 4 == May
    FileNamePattern pattern = new FileNamePattern("foo%d{yyyy.MM.dd}", context);
    assertEquals("foo2003.05.20", pattern.convert(cal.getTime()));
    pattern = new FileNamePattern("foo%d{yyyy.MM.dd HH:mm}", context);
    assertEquals("foo2003.05.20 17:55", pattern.convert(cal.getTime()));
    pattern = new FileNamePattern("%d{yyyy.MM.dd HH:mm} foo", context);
    assertEquals("2003.05.20 17:55 foo", pattern.convert(cal.getTime()));
}
/** Creates a trigger state machine that fires once the watermark passes the end of the window. */
public static FromEndOfWindow pastEndOfWindow() {
    return new FromEndOfWindow();
}
// After the end-of-window portion has fired in two session windows, merging
// them must keep the merged window on the *late* firing path: it does not fire
// spontaneously, but a single late element makes it fire again.
@Test public void testEarlyAndLateOnMergeAlreadyFinished() throws Exception { tester = TriggerStateMachineTester.forTrigger( AfterWatermarkStateMachine.pastEndOfWindow() .withEarlyFirings(AfterPaneStateMachine.elementCountAtLeast(100)) .withLateFirings(AfterPaneStateMachine.elementCountAtLeast(1)), Sessions.withGapDuration(Duration.millis(10))); tester.injectElements(1); tester.injectElements(5); IntervalWindow firstWindow = new IntervalWindow(new Instant(1), new Instant(11)); IntervalWindow secondWindow = new IntervalWindow(new Instant(5), new Instant(15)); IntervalWindow mergedWindow = new IntervalWindow(new Instant(1), new Instant(15)); // Finish the AfterWatermark.pastEndOfWindow() bit of the trigger in both windows tester.advanceInputWatermark(new Instant(15)); assertTrue(tester.shouldFire(firstWindow)); assertTrue(tester.shouldFire(secondWindow)); tester.fireIfShouldFire(firstWindow); tester.fireIfShouldFire(secondWindow); // Confirm that we are on the late trigger by probing assertFalse(tester.shouldFire(firstWindow)); assertFalse(tester.shouldFire(secondWindow)); tester.injectElements(1); tester.injectElements(5); assertTrue(tester.shouldFire(firstWindow)); assertTrue(tester.shouldFire(secondWindow)); tester.fireIfShouldFire(firstWindow); tester.fireIfShouldFire(secondWindow); // Merging should leave it on the late trigger tester.mergeWindows(); // Confirm that we are on the late trigger by probing assertFalse(tester.shouldFire(mergedWindow)); tester.injectElements(1); assertTrue(tester.shouldFire(mergedWindow)); }
/**
 * Creates the single-rule metadata validator matching the statement type.
 * Only DROP SCHEMA and DROP TABLE require validation; anything else gets empty.
 */
public static Optional<SingleMetaDataValidator> newInstance(final SQLStatement sqlStatement) {
    if (sqlStatement instanceof DropSchemaStatement) {
        return Optional.of(new SingleDropSchemaMetaDataValidator());
    }
    if (sqlStatement instanceof DropTableStatement) {
        return Optional.of(new SingleDropTableValidator());
    }
    return Optional.empty();
}
/** Statements other than DROP SCHEMA / DROP TABLE must get no validator. */
@Test
void assertNewInstanceForNotDropSchemaStatement() {
    Optional<SingleMetaDataValidator> validator =
            SingleMetaDataValidatorFactory.newInstance(mock(SQLStatement.class));
    assertFalse(validator.isPresent());
}
/**
 * Inventory is finished only when every shard has reported progress and all
 * inventory tasks have completed.
 */
public static boolean isInventoryFinished(final int jobShardingCount, final Collection<TransmissionJobItemProgress> jobItemProgresses) {
    if (!isAllProgressesFilled(jobShardingCount, jobItemProgresses)) {
        return false;
    }
    return isAllInventoryTasksCompleted(jobItemProgresses);
}
/** A single progress report cannot satisfy a sharding count of two. */
@Test
void assertIsInventoryFinishedWhenJobCountDoesNotMatchJobItemProgresses() {
    TransmissionJobItemProgress progress = new TransmissionJobItemProgress();
    assertFalse(PipelineJobProgressDetector.isInventoryFinished(2, Collections.singleton(progress)));
}
/**
 * Handles DESCRIBE FUNCTION: dispatches to the aggregate, table-function or
 * scalar description depending on how the metastore classifies the name.
 */
public static StatementExecutorResponse execute(
        final ConfiguredStatement<DescribeFunction> statement,
        final SessionProperties sessionProperties,
        final KsqlExecutionContext executionContext,
        final ServiceContext serviceContext
) {
    final FunctionName functionName =
            FunctionName.of(statement.getStatement().getFunctionName());
    if (executionContext.getMetaStore().isAggregate(functionName)) {
        return StatementExecutorResponse.handled(Optional.of(
                describeAggregateFunction(
                        executionContext, functionName, statement.getMaskedStatementText())));
    }
    if (executionContext.getMetaStore().isTableFunction(functionName)) {
        return StatementExecutorResponse.handled(Optional.of(
                describeTableFunction(
                        executionContext, functionName, statement.getMaskedStatementText())));
    }
    // Everything else is described as a plain (non-aggregate) function.
    return StatementExecutorResponse.handled(Optional.of(
            describeNonAggregateFunction(
                    executionContext, functionName, statement.getMaskedStatementText())));
}
// DESCRIBE FUNCTION on LATEST_BY_OFFSET must produce a FunctionDescriptionList
// whose name matches and whose type is AGGREGATE (asserted via a custom
// matcher so failures print the actual function name).
@Test public void shouldDescribeUDAFWithInitialArgs() { // When: final FunctionDescriptionList functionList = (FunctionDescriptionList) CustomExecutors.DESCRIBE_FUNCTION.execute( engine.configure("DESCRIBE FUNCTION LATEST_BY_OFFSET;"), mock(SessionProperties.class), engine.getEngine(), engine.getServiceContext() ).getEntity().orElseThrow(IllegalStateException::new); // Then: assertThat(functionList, new TypeSafeMatcher<FunctionDescriptionList>() { @Override protected boolean matchesSafely(final FunctionDescriptionList item) { return functionList.getName().equals("LATEST_BY_OFFSET") && functionList.getType().equals(FunctionType.AGGREGATE); } @Override public void describeTo(final Description description) { description.appendText(functionList.getName()); } }); }
@VisibleForTesting
String buildBody(Stream stream, AlertCondition.CheckResult checkResult, List<Message> backlog) {
    // Prefer an operator-supplied template from the plugin configuration,
    // falling back to the built-in default body template.
    String template = bodyTemplate;
    if (pluginConfig != null && pluginConfig.getString("body") != null) {
        template = pluginConfig.getString("body");
    }
    return this.templateEngine.transform(template, getModel(stream, checkResult, backlog));
}
@Test
public void buildBodyContainsURLIfWebInterfaceURLIsSet() throws Exception {
    // Configure a web-interface base URL so the rendered body can link to the stream.
    final EmailConfiguration configuration = new EmailConfiguration() {
        @Override
        public URI getWebInterfaceUri() {
            return URI.create("https://localhost");
        }
    };
    this.emailAlertSender = new FormattedEmailAlertSender(configuration, mockNotificationService, nodeId, templateEngine, emailFactory);

    Stream stream = mock(Stream.class);
    when(stream.getId()).thenReturn("123456");
    when(stream.getTitle()).thenReturn("Stream Title");

    AlertCondition alertCondition = mock(AlertCondition.class);

    AlertCondition.CheckResult checkResult = mock(AbstractAlertCondition.CheckResult.class);
    when(checkResult.getTriggeredAt()).thenReturn(new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC));
    when(checkResult.getTriggeredCondition()).thenReturn(alertCondition);

    String body = emailAlertSender.buildBody(stream, checkResult, Collections.<Message>emptyList());

    // The body must contain the fully-qualified stream URL built from base URL + stream id.
    assertThat(body).contains("Stream URL: https://localhost/streams/123456/");
}
@Override
public String pathPattern() {
    // Route pattern for this handler, derived from the scheme by the shared helper
    // (e.g. "/apis/fake.halo.run/v1alpha1/fakes") so all extension endpoints are uniform.
    return buildExtensionPathPattern(scheme);
}
@Test
void shouldBuildPathPatternCorrectly() {
    // The pattern is derived from the FakeExtension scheme's group, version and plural.
    var handler = new ExtensionCreateHandler(Scheme.buildFromType(FakeExtension.class), client);
    assertEquals("/apis/fake.halo.run/v1alpha1/fakes", handler.pathPattern());
}
public static boolean withinDateRange(long date, long compareTo, int dayRange) { // ms = dayRange x 24 hours/day x 60 min/hour x 60 sec/min x 1000 ms/sec final long msRange = dayRange * 24L * 60L * 60L; return (compareTo - date) < msRange; }
@Test public void testWithinDateRange() { Calendar c = Calendar.getInstance(); long current = c.getTimeInMillis() / 1000; long lastRun = current - (3 * (60 * 60 * 24)); int range = 7; // 7 days boolean expResult = true; boolean result = DateUtil.withinDateRange(lastRun, current, range); assertEquals(expResult, result); lastRun = c.getTimeInMillis() / 1000 - (8 * (60 * 60 * 24)); expResult = false; result = DateUtil.withinDateRange(lastRun, current, range); assertEquals(expResult, result); }
/**
 * Recursively merges {@code paramsToMerge} into {@code params} in place.
 *
 * <p>Literal MAP params are merged key-by-key (recursing with the parent's mode), literal
 * STRING_MAP params are shallow-merged with incoming entries winning, and all other params
 * are replaced wholesale by the incoming definition. Merge-permission checks happen inside
 * {@code buildMergedParamDefinition}.
 *
 * @param params        the base params; mutated to hold the merge result
 * @param paramsToMerge the incoming params; may be null (no-op)
 * @param context       carries the merge source/mode rules
 */
public static void mergeParams(
    Map<String, ParamDefinition> params,
    Map<String, ParamDefinition> paramsToMerge,
    MergeContext context) {
  if (paramsToMerge == null) {
    return;
  }
  // Visit the union of keys; keys present only in the base are left untouched.
  Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream())
      .forEach(
          name -> {
            ParamDefinition paramToMerge = paramsToMerge.get(name);
            if (paramToMerge == null) {
              return;
            }
            if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) {
              // Literal maps merge recursively; the child context inherits the base
              // param's mode (or the incoming one's mode if the base has no such param).
              Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name);
              Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name);
              mergeParams(
                  baseMap,
                  toMergeMap,
                  MergeContext.copyWithParentMode(
                      context, params.getOrDefault(name, paramToMerge).getMode()));
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, baseMap));
            } else if (paramToMerge.getType() == ParamType.STRING_MAP
                && paramToMerge.isLiteral()) {
              // Literal string maps are shallow-merged; incoming entries overwrite base ones.
              Map<String, String> baseMap = stringMapValueOrEmpty(params, name);
              Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name);
              baseMap.putAll(toMergeMap);
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, baseMap));
            } else {
              // Every other type: the incoming value replaces the base value.
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, paramToMerge.getValue()));
            }
          });
}
@Test
public void testMergeDisallowInvalidInternalMode() {
    // A RESERVED internal mode on the base param must block any merge attempt,
    // whether the merge originates from a definition or from an upstream source.
    InternalParamMode mode = InternalParamMode.RESERVED;
    for (ParamsMergeHelper.MergeContext context :
        Arrays.asList(definitionContext, upstreamMergeContext)) {
      AssertHelper.assertThrows(
          String.format("Should not allow modifying reserved modes, mode [%s]", mode),
          MaestroValidationException.class,
          "Cannot modify param with mode [CONSTANT] for parameter [tomerge]",
          new Runnable() {
            @SneakyThrows
            @Override
            public void run() {
              Map<String, ParamDefinition> allParams =
                  parseParamDefMap(
                      String.format(
                          "{'tomerge': {'type': 'STRING','value': 'hello', 'internal_mode': '%s'}}",
                          mode));
              Map<String, ParamDefinition> paramsToMerge =
                  parseParamDefMap("{'tomerge': {'type': 'STRING', 'value': 'goodbye'}}");
              ParamsMergeHelper.mergeParams(allParams, paramsToMerge, context);
            }
          });
    }
}
/**
 * Handles a keyboard event for terminal type-ahead: records typing-time statistics,
 * creates a prediction of the expected terminal echo, and draws it immediately.
 *
 * <p>All state is read and mutated under the terminal model's lock. No-op when
 * type-ahead is disabled; resets all predictions while the alternate buffer is active.
 */
public void onKeyEvent(@NotNull TypeAheadEvent keyEvent) {
  if (!myTerminalModel.isTypeAheadEnabled()) return;
  myTerminalModel.lock();
  try {
    if (myTerminalModel.isUsingAlternateBuffer()) {
      // Alternate-buffer output (e.g. full-screen apps) is not predictable — drop all state.
      resetState();
      return;
    }

    TypeAheadTerminalModel.LineWithCursorX lineWithCursorX = myTerminalModel.getCurrentLineWithCursor();

    long prevTypedTime = myLastTypedTime;
    myLastTypedTime = System.nanoTime();

    // Sync delay: once enough latency samples exist, use the observed worst case
    // capped at MAX_TERMINAL_DELAY; before that, fall back to the cap itself.
    long autoSyncDelay;
    if (myLatencyStatistics.getSampleSize() >= LATENCY_MIN_SAMPLES_TO_TURN_ON) {
      autoSyncDelay = Math.min(myLatencyStatistics.getMaxLatency(), MAX_TERMINAL_DELAY);
    } else {
      autoSyncDelay = MAX_TERMINAL_DELAY;
    }

    boolean hasTypedRecently = System.nanoTime() - prevTypedTime < autoSyncDelay;
    if (hasTypedRecently) {
      // While typing rapidly, stay quiet once an out-of-sync condition was detected.
      if (myOutOfSyncDetected) {
        return;
      }
    } else {
      // A pause in typing gives the terminal time to catch up — clear the flag.
      myOutOfSyncDetected = false;
    }
    reevaluatePredictorState(hasTypedRecently);

    updateLeftMostCursorPosition(lineWithCursorX.myCursorX);

    if (myPredictions.isEmpty() && myClearPredictionsDebouncer != null) {
      myClearPredictionsDebouncer.call(); // start a timer that will clear predictions
    }
    TypeAheadPrediction prediction = createPrediction(lineWithCursorX, keyEvent);
    myPredictions.add(prediction);
    applyPredictions();

    logger.debug("Created " + keyEvent.myEventType + " prediction");
  } finally {
    myTerminalModel.unlock();
  }
}
@Test
public void testTentativeCursorMovePrediction() throws Exception {
    // A lone left-arrow after typing should produce only a tentative prediction,
    // so nothing is drawn to the terminal yet.
    new TestRunner() {
        @Override
        void run() {
            model.insertString("a");
            manager.onKeyEvent(new TypeAheadEvent(TypeAheadEvent.EventType.LeftArrow));

            assertFalse(didDrawPredictions());
        }
    }.fillLatencyStats().setIsNotPasswordPrompt().run();
}
public void addAppender(Appender<E> appender) {
    // Fail fast on null input; duplicates are silently ignored by addIfAbsent.
    if (appender == null) {
        throw new IllegalArgumentException("Null argument disallowed");
    }
    appenderList.addIfAbsent(appender);
}
@Test
public void testAddAppender() throws Exception {
    // Attach two started appenders (one anonymous, one named); both must receive the event.
    NOPAppender<TestEvent> first = new NOPAppender<TestEvent>();
    first.start();
    aai.addAppender(first);

    NOPAppender<TestEvent> second = new NOPAppender<TestEvent>();
    second.setName("test");
    second.start();
    aai.addAppender(second);

    int size = aai.appendLoopOnAppenders(new TestEvent());
    Assertions.assertTrue(size == 2, "Incorrect number of appenders");
}
// Deserializes the plugin's should-assign-work JSON response body ("true"/"false")
// into a Boolean via the shared Gson instance.
Boolean shouldAssignWorkResponseFromBody(String responseBody) {
    return DEFAULT_GSON.fromJson(responseBody, Boolean.class);
}
@Test
public void shouldUnJSONizeShouldAssignWorkResponseFromBody() {
    // JSON booleans map directly onto Java booleans.
    ElasticAgentExtensionConverterV4 converter = new ElasticAgentExtensionConverterV4();
    assertTrue(converter.shouldAssignWorkResponseFromBody("true"));
    assertFalse(converter.shouldAssignWorkResponseFromBody("false"));
}
@Operation(summary = "getAlertPluginInstance", description = "GET_ALERT_PLUGIN_INSTANCE_NOTES")
@GetMapping(value = "/{id}")
@ResponseStatus(HttpStatus.OK)
@ApiException(GET_ALERT_PLUGIN_INSTANCE_ERROR)
public Result<AlertPluginInstance> getAlertPluginInstance(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                                          @PathVariable(value = "id") int id) {
    // Fetch the instance on behalf of the logged-in user and wrap it in a success envelope.
    return Result.success(alertPluginInstanceService.getById(loginUser, id));
}
@Test
public void testGetAlertPluginInstance() throws Exception {
    // Given: the service returns no instance for the id; the endpoint should still
    // respond 200 with a success envelope (compared against expectResponseContent).
    final MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
    paramsMap.add("pluginDefineId", String.valueOf(pluginDefineId));
    when(alertPluginInstanceService.getById(any(User.class), eq(pluginDefineId))).thenReturn(null);

    // When: GET /alert-plugin-instances/{id} with a valid session.
    final MvcResult mvcResult = mockMvc.perform(get("/alert-plugin-instances/{id}", pluginDefineId)
            .header(SESSION_ID, sessionId)
            .params(paramsMap))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
            .andReturn();

    // Then: response code and message match the prepared expectation.
    final Result actualResponseContent =
            JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
    assertThat(actualResponseContent.getMsg()).isEqualTo(expectResponseContent.getMsg());
    assertThat(actualResponseContent.getCode()).isEqualTo(expectResponseContent.getCode());
}
@Override
public Map<String, Set<Integer>> brokerIdsByBrokerSetId(ClusterModel clusterModel) throws BrokerSetResolutionException {
    Map<String, Set<Integer>> brokerIdsByBrokerSetId;
    try {
        // Load the static brokerSetId -> brokerIds mapping from the configured source.
        brokerIdsByBrokerSetId = loadBrokerSetData();
    } catch (IOException e) {
        // NOTE(review): only the message is propagated — the IOException cause and its
        // stack trace are dropped; consider chaining `e` if the exception type allows it.
        throw new BrokerSetResolutionException(e.getMessage());
    }
    // Brokers absent from the loaded mapping are assigned by the configured policy.
    return _brokerSetAssignmentPolicy.assignBrokerSetsForUnresolvedBrokers(clusterModel, brokerIdsByBrokerSetId);
}
@Test
public void testParseBrokerSetFile() throws BrokerSetResolutionException {
    BrokerSetResolver resolver = getBrokerSetResolver("testBrokerSets.json", this.getClass());
    Map<String, Set<Integer>> brokerSets = resolver.brokerIdsByBrokerSetId(
        BrokerSetResolutionHelper.getRackIdByBrokerIdMapping(DeterministicCluster.brokerSetSatisfiable1()));

    assertNotNull(brokerSets);
    assertTrue(brokerSets.containsKey("Blue"));
    assertTrue(brokerSets.containsKey("Green"));
    // Two broker sets with three brokers each, as defined in the JSON fixture.
    assertEquals(Set.of(0, 1, 2), brokerSets.get("Blue"));
    assertEquals(Set.of(3, 4, 5), brokerSets.get("Green"));
}
/**
 * Copies the dynamic-router configuration onto the given {@code RecipientList}.
 *
 * <p>Validates the configuration up front — a positive timeout requires parallel
 * processing — so an invalid configuration leaves the recipient list untouched
 * instead of half-configured (previously the check ran after most setters).
 *
 * @param recipientList the processor to configure
 * @param camelContext  used to resolve the aggregation strategy and onPrepare processor
 * @param cfg           the dynamic-router configuration to apply
 * @throws IllegalArgumentException if a timeout is set without parallel processing
 */
static void setPropertiesForRecipientList(
        RecipientList recipientList, CamelContext camelContext, DynamicRouterConfiguration cfg) {
    // Fail fast before mutating the recipient list.
    if (cfg.getTimeout() > 0 && !cfg.isParallelProcessing()) {
        throw new IllegalArgumentException("Timeout is used but ParallelProcessing has not been enabled.");
    }
    recipientList.setAggregationStrategy(createAggregationStrategy(camelContext, cfg));
    recipientList.setParallelProcessing(cfg.isParallelProcessing());
    recipientList.setParallelAggregate(cfg.isParallelAggregate());
    recipientList.setSynchronous(cfg.isSynchronous());
    recipientList.setStreaming(cfg.isStreaming());
    recipientList.setShareUnitOfWork(cfg.isShareUnitOfWork());
    recipientList.setStopOnException(cfg.isStopOnException());
    recipientList.setIgnoreInvalidEndpoints(cfg.isIgnoreInvalidEndpoints());
    recipientList.setCacheSize(cfg.getCacheSize());
    if (cfg.getOnPrepare() != null) {
        // onPrepare is a bean name; resolution failure should surface immediately.
        recipientList.setOnPrepare(mandatoryLookup(camelContext, cfg.getOnPrepare(), Processor.class));
    }
    recipientList.setTimeout(cfg.getTimeout());
}
@Test void testSetPropertiesForRecipientListWithTimeoutAndNotParallelProcessing() { // Set up mocking when(mockConfig.isParallelProcessing()).thenReturn(false); when(mockConfig.getTimeout()).thenReturn(1000L); // Invoke the method under test Exception ex = assertThrows(IllegalArgumentException.class, () -> DynamicRouterRecipientListHelper.setPropertiesForRecipientList(recipientList, camelContext, mockConfig)); assertEquals("Timeout is used but ParallelProcessing has not been enabled.", ex.getMessage()); }
/**
 * Runs one execution round of the step's runtime and translates the result state into
 * status updates on {@code runtimeSummary}.
 *
 * @return true to keep polling (CONTINUE); for DONE, whether the result asks to be
 *     persisted; false for every terminal error/stop/timeout state
 * @throws MaestroInternalError on an unrecognized result state
 */
public boolean execute(
    WorkflowSummary workflowSummary, Step step, StepRuntimeSummary runtimeSummary) {
  // Execute against a clone so the runtime cannot mutate the tracked summary directly;
  // its timeline/artifact updates are merged back explicitly below.
  StepRuntime.Result result =
      getStepRuntime(runtimeSummary.getType())
          .execute(workflowSummary, step, cloneSummary(runtimeSummary));
  runtimeSummary.mergeRuntimeUpdate(result.getTimeline(), result.getArtifacts());
  switch (result.getState()) {
    case CONTINUE:
      return true;
    case DONE:
      runtimeSummary.markFinishing(tracingManager);
      return result.shouldPersist();
    case USER_ERROR:
      // User errors may map to different statuses (e.g. retryable) — resolved per summary.
      markTerminatedWithMetric(
          runtimeSummary, result.getState(), getUserErrorStatus(runtimeSummary));
      return false;
    case PLATFORM_ERROR:
      markTerminatedWithMetric(
          runtimeSummary, result.getState(), getPlatformErrorStatus(runtimeSummary));
      return false;
    case FATAL_ERROR:
      markTerminatedWithMetric(
          runtimeSummary, result.getState(), StepInstance.Status.FATALLY_FAILED);
      return false;
    case STOPPED:
      markTerminatedWithMetric(runtimeSummary, result.getState(), StepInstance.Status.STOPPED);
      return false;
    case TIMED_OUT:
      markTerminatedWithMetric(runtimeSummary, result.getState(), StepInstance.Status.TIMED_OUT);
      return false;
    default:
      throw new MaestroInternalError(
          "Entered an unexpected result state [%s] for step %s when executing",
          result.getState(), runtimeSummary.getIdentity());
  }
}
@Test
public void testExecute() {
    // A NOOP step should complete in one round: DONE -> FINISHING, with a pending
    // status-change record, merged artifacts, and an empty timeline.
    StepRuntimeSummary summary =
        StepRuntimeSummary.builder()
            .type(StepType.NOOP)
            .stepRetry(StepInstance.StepRetry.from(Defaults.DEFAULT_RETRY_POLICY))
            .build();
    boolean ret = runtimeManager.execute(workflowSummary, null, summary);
    assertTrue(ret);
    assertEquals(StepInstance.Status.FINISHING, summary.getRuntimeState().getStatus());
    assertNotNull(summary.getRuntimeState().getFinishTime());
    assertNotNull(summary.getRuntimeState().getModifyTime());
    assertEquals(1, summary.getPendingRecords().size());
    assertEquals(
        StepInstance.Status.NOT_CREATED, summary.getPendingRecords().get(0).getOldStatus());
    assertEquals(StepInstance.Status.FINISHING, summary.getPendingRecords().get(0).getNewStatus());
    assertEquals(artifact, summary.getArtifacts().get("test-artifact"));
    assertTrue(summary.getTimeline().isEmpty());
}
@Override
public Set<Long> calculateUsers(DelegateExecution execution, String param) {
    // param is a comma-separated list of post ids; resolve every user holding one of them.
    List<AdminUserRespDTO> users =
            adminUserApi.getUserListByPostIds(StrUtils.splitToLongSet(param)).getCheckedData();
    return convertSet(users, AdminUserRespDTO::getId);
}
@Test
public void testCalculateUsers() {
    // Prepare parameters: two post ids, comma-separated.
    String param = "1,2";
    // Mock the API: posts {1, 2} resolve to users {11, 22}.
    List<AdminUserRespDTO> users = convertList(asSet(11L, 22L),
            id -> new AdminUserRespDTO().setId(id));
    when(adminUserApi.getUserListByPostIds(eq(asSet(1L, 2L)))).thenReturn(success(users));

    // Invoke the strategy.
    Set<Long> results = strategy.calculateUsers(null, param);
    // Assert the resolved user ids.
    assertEquals(asSet(11L, 22L), results);
}
/**
 * Looks up a single key in the materialized state store, optionally bounded by a
 * {@code Position} so the read observes at least that point in the changelog.
 *
 * @return an iterator with zero rows (key absent) or exactly one row, plus the store
 *     position the read was served at
 * @throws MaterializationException if the query fails or throws
 */
@Override
public KsMaterializedQueryResult<Row> get(
    final GenericKey key,
    final int partition,
    final Optional<Position> position
) {
  try {
    final KeyQuery<GenericKey, ValueAndTimestamp<GenericRow>> query = KeyQuery.withKey(key);
    // Target only the partition that owns the key.
    StateQueryRequest<ValueAndTimestamp<GenericRow>> request =
        inStore(stateStore.getStateStoreName())
        .withQuery(query)
        .withPartitions(ImmutableSet.of(partition));
    if (position.isPresent()) {
      request = request.withPositionBound(PositionBound.at(position.get()));
    }
    final StateQueryResult<ValueAndTimestamp<GenericRow>> result =
        stateStore.getKafkaStreams().query(request);
    final QueryResult<ValueAndTimestamp<GenericRow>> queryResult =
        result.getPartitionResults().get(partition);

    // Some of these failures are retriable, and in the future, we may want to retry
    // locally before throwing.
    if (queryResult.isFailure()) {
      throw failedQueryException(queryResult);
    } else if (queryResult.getResult() == null) {
      // Key not present: empty iterator, but still report the read position.
      return KsMaterializedQueryResult.rowIteratorWithPosition(
          Collections.emptyIterator(), queryResult.getPosition());
    } else {
      final ValueAndTimestamp<GenericRow> row = queryResult.getResult();
      return KsMaterializedQueryResult.rowIteratorWithPosition(
          ImmutableList.of(Row.of(stateStore.schema(), key, row.value(), row.timestamp()))
              .iterator(),
          queryResult.getPosition());
    }
  } catch (final NotUpToBoundException | MaterializationException e) {
    // Already meaningful to callers — rethrow unchanged.
    throw e;
  } catch (final Exception e) {
    throw new MaterializationException("Failed to get value from materialized table", e);
  }
}
@Test public void shouldThrowIfTableScanQueryResultIsError() { // Given: when(kafkaStreams.query(any())).thenReturn(getErrorResult()); // When: final Exception e = assertThrows( MaterializationException.class, () -> table.get(PARTITION) ); // Then: assertThat(e.getMessage(), containsString("Error!")); assertThat(e, (instanceOf(MaterializationException.class))); }
public boolean isEnabled() {
    // Scanning is active only when both a scan period and a byte-rate budget are configured.
    return conf.scanPeriodMs > 0 && conf.targetBytesPerSec > 0;
}
@Test(timeout=60000)
public void testDisableVolumeScanner2() throws Exception {
    // A negative bytes-per-second budget must disable the volume scanner entirely.
    Configuration conf = new Configuration();
    conf.setLong(DFS_BLOCK_SCANNER_VOLUME_BYTES_PER_SECOND, -1L);
    try (TestContext ctx = new TestContext(conf, 1)) {
        assertFalse(ctx.datanode.getBlockScanner().isEnabled());
    }
}
@Override
public <KR, VR> KStream<KR, VR> flatMap(final KeyValueMapper<? super K, ? super V, ? extends Iterable<? extends KeyValue<? extends KR, ? extends VR>>> mapper) {
    // Delegate to the named overload with an empty name so both variants share one
    // implementation (including its mapper null-check).
    return flatMap(mapper, NamedInternal.empty());
}
@Test
public void shouldNotAllowNullMapperOnFlatMapWithNamed() {
    // The named flatMap overload must still null-check the mapper itself.
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.flatMap(null, Named.as("flatMapper")));
    assertThat(exception.getMessage(), equalTo("mapper can't be null"));
}
/**
 * Returns the underlying byte array of this data object, or null if none is set.
 */
@Nullable
public byte[] getValue() {
    // NOTE(review): returns the internal array without copying — callers can mutate it.
    return mValue;
}
@Test
public void setValue_UINT32_BE() {
    // Big-endian layout: most significant byte first.
    final MutableData data = new MutableData(new byte[4]);
    data.setValue(0x04030201, Data.FORMAT_UINT32_BE, 0);
    assertArrayEquals(new byte[] { 0x04, 0x03, 0x02, 0x01 }, data.getValue());
}
public static KeyValueBytesStoreSupplier persistentTimestampedKeyValueStore(final String name) {
    // requireNonNull returns its argument, so validation and construction fold into one expression.
    return new RocksDBKeyValueBytesStoreSupplier(
        Objects.requireNonNull(name, "name cannot be null"), true);
}
@Test
public void shouldThrowIfPersistentTimestampedKeyValueStoreStoreNameIsNull() {
    // A null store name must be rejected with a descriptive NPE.
    final NullPointerException e = assertThrows(
        NullPointerException.class,
        () -> Stores.persistentTimestampedKeyValueStore(null));
    assertEquals("name cannot be null", e.getMessage());
}
// Sets the config-center configuration and returns this builder for chaining.
public B configCenter(ConfigCenterConfig configCenter) {
    this.configCenter = configCenter;
    return getThis();
}
@Test
void configCenter() {
    // The builder must carry the config-center through to the built ReferenceConfig.
    ConfigCenterConfig configCenter = new ConfigCenterConfig();
    InterfaceBuilder builder = new InterfaceBuilder();
    builder.configCenter(configCenter);
    Assertions.assertEquals(configCenter, builder.build().getConfigCenter());
}
@Restricted(NoExternalUse.class)
/*package*/ static Set<String> getIllegalPersistedUsernames() {
    // Return a fresh mutable copy so callers cannot alter the canonical array.
    Set<String> illegal = new HashSet<>();
    for (String name : ILLEGAL_PERSISTED_USERNAMES) {
        illegal.add(name);
    }
    return illegal;
}
@Test
@Issue("JENKINS-35967")
public void shouldNotAllowIllegalRestrictedNamesEvenIfTrimmed() {
    for (String username : User.getIllegalPersistedUsernames()) {
        // The bare name and every whitespace-padded variant must all be rejected.
        String[] variants = {
            username,
            " " + username,
            username + " ",
            " " + username + " ",
            "\t" + username + "\t",
        };
        for (String variant : variants) {
            assertIdOrFullNameNotAllowed(variant);
        }
    }
}
/**
 * Maps each request URI (with its keys) to the host that should serve it, using
 * partition-aware consistent hashing (scatter-gather).
 *
 * <p>Assumes all requests target the same d2 service; the first request is used as a
 * sample to resolve the service name, partition accessor, rings and hash function.
 * Keys that cannot be partitioned or ring-resolved end up in the "unmapped" result.
 */
@Override
public <KEY> URIMappingResult<KEY> mapUris(List<URIKeyPair<KEY>> requestUriKeyPairs)
    throws ServiceUnavailableException {
  if (requestUriKeyPairs == null || requestUriKeyPairs.isEmpty()) {
    return new URIMappingResult<>(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
  }

  // API assumes that all requests will be made to the same service, just use the first request to get the service name and act as sample uri
  URI sampleURI = requestUriKeyPairs.get(0).getRequestUri();
  String serviceName = LoadBalancerUtil.getServiceNameFromUri(sampleURI);

  // To achieve scatter-gather, we require the following information
  PartitionAccessor accessor = _partitionInfoProvider.getPartitionAccessor(serviceName);
  Map<Integer, Ring<URI>> rings = _hashRingProvider.getRings(sampleURI);
  HashFunction<Request> hashFunction = _hashRingProvider.getRequestHashFunction(serviceName);

  // Collects keys that could not be mapped in either pass.
  Map<Integer, Set<KEY>> unmapped = new HashMap<>();

  // Pass One: group requests by the partition that owns them.
  Map<Integer, List<URIKeyPair<KEY>>> requestsByPartition =
      distributeToPartitions(requestUriKeyPairs, accessor, unmapped);

  // Pass Two: within each partition, hash onto the ring to pick a concrete host.
  // NOTE(review): "Parition" is a pre-existing typo in the local name, kept as-is here.
  Map<URI, Integer> hostToParitionId = new HashMap<>();
  Map<URI, Set<KEY>> hostToKeySet =
      distributeToHosts(requestsByPartition, rings, hashFunction, hostToParitionId, unmapped);

  return new URIMappingResult<>(hostToKeySet, unmapped, hostToParitionId);
}
@Test(dataProvider = "stickyPartitionPermutation")
public void testPartitionIdOverride(boolean sticky, boolean partitioned) throws Exception {
    // 100 hosts spread across 1 or 10 partitions, with sticky/non-sticky hashing.
    int partitionCount = partitioned ? 10 : 1;
    int totalHostCount = 100;
    HashRingProvider ringProvider = createStaticHashRingProvider(totalHostCount, partitionCount, getHashFunction(sticky));
    PartitionInfoProvider infoProvider = createRangeBasedPartitionInfoProvider(partitionCount);
    URIMapper mapper = new RingBasedUriMapper(ringProvider, infoProvider);

    // A request whose key set explicitly names every partition id (partition override).
    URIKeyPair<Integer> request = new URIKeyPair<>(new URI("d2://testService/1"),
        IntStream.range(0, partitionCount).boxed().collect(Collectors.toSet()));

    // Duplicated override requests are rejected when actually partitioned.
    if (partitioned) {
      Assert.assertThrows(() -> mapper.mapUris(Arrays.asList(request, request)));
    }

    URIMappingResult<Integer> uriMapperResult = mapper.mapUris(Collections.singletonList(request));
    Map<URI, Set<Integer>> mappedKeys = uriMapperResult.getMappedKeys();
    assertTrue(uriMapperResult.getUnmappedKeys().isEmpty());
    // One distinct host per partition, and no keys attached to any host (pure fan-out).
    Assert.assertEquals(mappedKeys.size(), partitionCount);
    Assert.assertEquals(
        mappedKeys.keySet().stream().map(URIMapperTestUtil::getPartitionIdForURI).collect(Collectors.toSet()).size(),
        partitionCount);
    for (Set<Integer> keys : mappedKeys.values()) {
      assertTrue(keys.isEmpty());
    }
}
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    // Containers are listed from the root; anything deeper is scoped by its key prefix.
    final String prefix;
    if (containerService.isContainer(directory)) {
        prefix = StringUtils.EMPTY;
    } else {
        prefix = containerService.getKey(directory) + Path.DELIMITER;
    }
    return this.list(directory, listener, prefix);
}
@Test
public void testListPlaceholderParent() throws Exception {
    // Create a directory placeholder and verify listing its parent returns the same
    // entry whether looked up as a plain directory or as a placeholder path.
    final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("IAD");
    final String name = new AlphanumericRandomStringService().random();
    final Path placeholder = new Path(container, name, EnumSet.of(Path.Type.directory));
    new SwiftDirectoryFeature(session).mkdir(placeholder, new TransferStatus());
    final AttributedList<Path> list = new SwiftObjectListService(session).list(placeholder.getParent(), new DisabledListProgressListener());
    assertTrue(list.contains(placeholder));
    assertTrue(list.contains(new Path(container, name, EnumSet.of(Path.Type.directory, Path.Type.placeholder))));
    // Both lookups must resolve to the identical list entry.
    assertSame(list.get(placeholder), list.get(new Path(container, name, EnumSet.of(Path.Type.directory, Path.Type.placeholder))));
    new SwiftDeleteFeature(session).delete(Collections.singletonList(placeholder), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
public void recordLatency(String node, long requestLatencyMs) {
    if (node.isEmpty()) {
        return; // no per-node sensor for an unnamed node
    }
    // Per-node sensors are registered lazily elsewhere; skip recording when absent.
    Sensor nodeRequestTime = this.metrics.getSensor("node-" + node + ".latency");
    if (nodeRequestTime != null) {
        nodeRequestTime.record(requestLatencyMs);
    }
}
@Test
public void testSingleNodeLatency() {
    // Verifies per-node latency avg/max metrics, including sample expiry as the
    // mocked clock advances past the metric time window.
    String connectionId = "0";
    MetricName nodeLatencyAvg = metrics.metricName("request-latency-avg", group);
    MetricName nodeLatencyMax = metrics.metricName("request-latency-max", group);
    registerNodeLatencyMetric(connectionId, nodeLatencyAvg, nodeLatencyMax);

    adminFetchMetricsManager.recordLatency(connectionId, 333);
    mockSleepTimeWindow();
    adminFetchMetricsManager.recordLatency(connectionId, 444);

    // avg of {333, 444}; max is 444.
    assertEquals(388.5, metricValue(nodeLatencyAvg), EPSILON);
    assertEquals(444, metricValue(nodeLatencyMax), EPSILON);

    adminFetchMetricsManager.recordLatency(connectionId, 666);
    // avg of {333, 444, 666}; max is 666.
    assertEquals(481, metricValue(nodeLatencyAvg), EPSILON);
    assertEquals(666, metricValue(nodeLatencyMax), EPSILON);

    // first record(333) expired
    mockSleepTimeWindow();
    assertEquals(555, metricValue(nodeLatencyAvg), EPSILON);
    assertEquals(666, metricValue(nodeLatencyMax), EPSILON);

    // all records expired
    mockSleepTimeWindow();
    assertTrue(Double.isNaN(metricValue(nodeLatencyAvg)));
    assertTrue(Double.isNaN(metricValue(nodeLatencyMax)));
}
/**
 * Asynchronously downloads and processes SAML metadata for the given connection.
 *
 * <p>If the metadata's signature hash matches an already-processed run for this
 * connection, processing is skipped and an unsaved empty result is returned. Otherwise
 * the descriptors are applied via {@code updateMetadata} and the result (raw metadata;
 * plus the hash only when every entry succeeded, so partial failures get retried) is
 * persisted.
 *
 * @param con the connection whose metadata endpoint is fetched
 * @param map status flags for the caller; "status" is set to "failed" on error.
 *            NOTE(review): dereferenced only in the failure path — callers may pass
 *            null (see tests), which would NPE if an error occurs. Confirm intent.
 * @return the processing result
 */
@Async
@Transactional
public SamlMetadataProcessResult startCollectMetadata(Connection con, Map<String, String> map) {
    SamlMetadataProcessResult result = new SamlMetadataProcessResult(con.getId());
    EntitiesDescriptor descriptor;
    try {
        String metadataXML = getMetadataFromConnection(con);
        descriptor = convertMetadataXMLtoEntitiesDescriptor(metadataXML);
        String hash = getSignatureValue(descriptor.getSignature());
        // Skip re-processing when this exact (connection, signature) pair was already handled.
        Optional<SamlMetadataProcessResult> process = samlMetadataProcessResultRepository.findByConnectionIdAndHash(con.getId(), hash);
        if (process.isPresent()) return result;
        updateMetadata(descriptor, con, map, result);
        result.setMetadata(metadataXML);
        // Record the hash only on a fully successful run so failures are retried next time.
        if (result.allEntriesSuccessful()) {
            result.setHash(hash);
        }
    } catch (InitializationException | ComponentInitializationException | UnmarshallingException | IOException | MetadataParseException e) {
        map.put("status", "failed");
        LOGGER.error("Failed to collect/parse metadata: {}", e.getMessage());
        result.addProcessError(e.getMessage(), "");
    }
    samlMetadataProcessResultRepository.saveAndFlush(result);
    return result;
}
@Test
public void startCollectMetadataValidTest() throws IOException {
    // Connection, service and certificate expected values from metadata/valid-valid-metadata.xml
    Connection connection = newConnection();
    Service service = newService();
    List<Certificate> certificates = getServiceCertificates();

    // Serve the fixture metadata through the mocked HTTP stack.
    when(httpClientMock.execute(any(HttpGet.class))).thenReturn(httpResponseMock);
    when(httpResponseMock.getEntity()).thenReturn(httpEntityMock);
    when(httpEntityMock.getContent()).thenReturn(getClass().getClassLoader().getResourceAsStream("metadata/valid-metadata.xml"));
    when(serviceServiceMock.findAllowedServiceById(anyLong(), anyString())).thenReturn(service);

    // Map can be null if no errors occur
    SamlMetadataProcessResult result = metadataProcessorServiceMock.startCollectMetadata(connection, null);

    // Result: all three entries processed without errors.
    assertEquals(3, result.getTotalUpdated());
    assertEquals(0, result.getTotalErrors());
    assertEquals(3, result.getTotalProcessed());
    assertEquals(0, result.getSamlMetadataProcessErrors().size());
    assertEquals(3, service.getCertificates().size());

    // Connection: raw metadata captured and the signing certificate attached.
    assertNotNull(result.getMetadata());
    assertEquals(CertificateType.SIGNING, connection.getCertificates().get(0).getCertType());

    // Service: every certificate field matches the fixture expectations.
    for (int i = 0; i<certificates.size(); i++) {
        assertEquals(certificates.get(i).getCachedCertificate(), service.getCertificates().get(i).getCachedCertificate());
        assertEquals(certificates.get(i).getFingerprint(), service.getCertificates().get(i).getFingerprint());
        assertEquals(certificates.get(i).getDistinguishedName(), service.getCertificates().get(i).getDistinguishedName());
        assertEquals(certificates.get(i).getActiveFrom(), service.getCertificates().get(i).getActiveFrom());
        assertEquals(certificates.get(i).getActiveUntil(), service.getCertificates().get(i).getActiveUntil());
        assertEquals(certificates.get(i).getCertType(), service.getCertificates().get(i).getCertType());
    }
}
/**
 * Returns the smoothing bandwidth {@code h} of this kernel density estimator.
 */
public double bandwidth() {
    return h;
}
@Test
public void testBandwidth() {
    System.out.println("bandwidth");
    // Expected value is the bandwidth computed for the fixture data x.
    KernelDensity instance = new KernelDensity(x);
    assertEquals(1.1933, instance.bandwidth(), 1E-4);
}
/**
 * Returns the custom settings for the config-storage topic: every worker property
 * carrying the config-storage prefix, with the prefix stripped from the key.
 */
public Map<String, Object> configStorageTopicSettings() {
    return topicSettings(CONFIG_STORAGE_PREFIX);
}
@Test
public void shouldAllowSettingConfigTopicSettings() {
    // Any key under the config-storage prefix must surface, stripped, in the topic settings.
    Map<String, String> topicSettings = new HashMap<>();
    topicSettings.put("foo", "foo value");
    topicSettings.put("bar", "bar value");
    topicSettings.put("baz.bim", "100");

    Map<String, String> settings = configs();
    topicSettings.forEach((k, v) -> settings.put(DistributedConfig.CONFIG_STORAGE_PREFIX + k, v));

    assertEquals(topicSettings, new DistributedConfig(settings).configStorageTopicSettings());
}
/**
 * Opens a writable channel at {@code path} with the given content type.
 *
 * @deprecated use the {@code CreateOptions}-based overload instead; this method only
 *     wraps the content type into a {@code CreateOptions} and delegates.
 */
@Deprecated
public WritableByteChannel create(GcsPath path, String type) throws IOException {
    CreateOptions.Builder builder = CreateOptions.builder().setContentType(type);
    return create(path, builder.build());
}
@Test
public void testCreate() throws IOException {
    // GcsUtil.create should return whatever channel the underlying storage client opens.
    GcsOptions gcsOptions = gcsOptionsWithTestCredential();
    GcsUtilMock gcsUtil = GcsUtilMock.createMock(gcsOptions);
    GoogleCloudStorage mockStorage = Mockito.mock(GoogleCloudStorage.class);
    WritableByteChannel mockChannel = Mockito.mock(WritableByteChannel.class);

    gcsUtil.googleCloudStorage = mockStorage;

    when(mockStorage.create(any(), any())).thenReturn(mockChannel);

    GcsPath path = GcsPath.fromUri("gs://testbucket/testdirectory/otherfile");
    CreateOptions createOptions = CreateOptions.builder().build();

    assertEquals(mockChannel, gcsUtil.create(path, createOptions));
}
public static String getPathTemplate(final ExtendedUriInfo uriInfo) {
    // Matched templates are reported innermost-first; walk them from the end so the
    // concatenated pattern reads root-to-leaf.
    final StringBuilder pathBuilder = new StringBuilder();
    final int count = uriInfo.getMatchedTemplates().size();
    for (int i = count - 1; i >= 0; i--) {
        pathBuilder.append(uriInfo.getMatchedTemplates().get(i).getTemplate());
    }
    return pathBuilder.toString();
}
@Test
void testGetPathTemplate() {
    // Templates are matched innermost-first, so the util must reverse them when joining.
    final ExtendedUriInfo uriInfo = mock(ExtendedUriInfo.class);
    when(uriInfo.getMatchedTemplates()).thenReturn(Arrays.asList(
        new UriTemplate("/{param}/{moreDifferentParam}"),
        new UriTemplate("/second"),
        new UriTemplate("/first")));

    assertEquals("/first/second/{param}/{moreDifferentParam}", UriInfoUtil.getPathTemplate(uriInfo));
}
/**
 * Saves the connection details to the given metastore.
 *
 * <p>Delegates to the three-argument overload with {@code true} as the final flag —
 * NOTE(review): confirm what that flag toggles in the overload; it is not visible here.
 *
 * @return true when the details were saved successfully, false otherwise
 */
@SuppressWarnings( "unchecked" )
public <T extends ConnectionDetails> boolean save( IMetaStore metaStore, T connectionDetails ) {
    return save( metaStore, connectionDetails, true );
}
@Test
public void testSaveConnectionError() {
    // BadConnectionDetails presumably cannot be persisted; save() must report failure
    // by returning false rather than throwing.
    assertFalse( connectionManager.save( new BadConnectionDetails() ) );
}
public static void trackClientMd5(String ip, String groupKey, String clientMd5) {
    // Refresh the client's record: remember the md5 it reported for this group and
    // stamp both the record and the group with the same polling time.
    ClientRecord record = getClientRecord(ip);
    long now = System.currentTimeMillis();
    record.setLastTime(now);
    record.getGroupKey2md5Map().put(groupKey, clientMd5);
    record.getGroupKey2pollingTsMap().put(groupKey, now);
}
@Test
void testTrackClientMd5() {
    // Track a client's md5 twice (idempotent) and verify it is considered up-to-date
    // until the server-side value changes.
    String clientIp = "1.1.1.1";
    String dataId = "com.taobao.session.xml";
    String group = "online";
    String groupKey = GroupKey2.getKey(dataId, group);
    String md5 = "xxxxxxxxxxxxx";

    ConfigCacheService.updateMd5(groupKey, md5, System.currentTimeMillis(), "");

    ClientTrackService.trackClientMd5(clientIp, groupKey, md5);
    ClientTrackService.trackClientMd5(clientIp, groupKey, md5);

    assertTrue(ClientTrackService.isClientUptodate(clientIp).get(groupKey));
    assertEquals(1, ClientTrackService.subscribeClientCount());
    assertEquals(1, ClientTrackService.subscriberCount());

    // Server-side value updated: the client's cached md5 is now stale.
    ConfigCacheService.updateMd5(groupKey, md5 + "111", System.currentTimeMillis(), "");
    assertFalse(ClientTrackService.isClientUptodate(clientIp).get(groupKey));
}
/**
 * Validates that the projection references every key column of this source's schema,
 * either qualified by the source alias or unqualified.
 *
 * <p>No-op for schemas without key columns. On the first missing key column, throws
 * via {@code throwKeysNotIncludedError}, reporting the full key column list.
 */
@Override
void validateKeyPresent(final SourceName sinkName, final Projection projection) {
  if (getSchema().key().isEmpty()) {
    // No key column.
    return;
  }

  final List<Column> keys = getSchema().key();
  for (final Column keyCol : keys) {
    final ColumnName keyName = keyCol.name();
    // Accept either "alias.key" or bare "key" in the projection.
    if (!projection.containsExpression(new QualifiedColumnReferenceExp(getAlias(), keyName))
        && !projection.containsExpression(new UnqualifiedColumnReferenceExp(keyName))
    ) {
      throwKeysNotIncludedError(
          sinkName,
          "key column",
          keys.stream()
              .map(Column::name)
              .map(UnqualifiedColumnReferenceExp::new)
              .collect(Collectors.toList())
      );
    }
  }
}
@Test public void shouldNotThrowIfProjectionContainsKeyColumns() { // Given: when(projection.containsExpression(any())).thenReturn(true); // When: node.validateKeyPresent(SOURCE_NAME, projection); // Then: did not throw. }
/**
 * Starts a fuzzy comparison over this float array: subsequent iterable-style
 * assertions treat two values as equal when they differ by at most {@code tolerance}.
 */
public FloatArrayAsIterable usingTolerance(double tolerance) {
  return new FloatArrayAsIterable(tolerance(tolerance), iterableSubject());
}
@Test
public void usingTolerance_containsAnyOf_primitiveFloatArray_failure() {
    // None of the expected values are within tolerance of the actual array,
    // so the assertion must fail with the standard "contain any of" message parts.
    expectFailureWhenTestingThat(array(1.1f, TOLERABLE_2POINT2, 3.3f))
        .usingTolerance(DEFAULT_TOLERANCE)
        .containsAnyOf(array(99.99f, 999.999f));
    assertFailureKeys("value of", "expected to contain any of", "testing whether", "but was");
    assertFailureValue("expected to contain any of", "[" + 99.99f + ", " + 999.999f + "]");
}
public CompletableFuture<Optional<Account>> getByPhoneNumberIdentifierAsync(final UUID pni) { return checkRedisThenAccountsAsync( getByNumberTimer, () -> redisGetBySecondaryKeyAsync(getAccountMapKey(pni.toString()), redisPniGetTimer), () -> accounts.getByPhoneNumberIdentifierAsync(pni) ); }
@Test void testGetAccountByPniNotInCacheAsync() { UUID uuid = UUID.randomUUID(); UUID pni = UUID.randomUUID(); Account account = AccountsHelper.generateTestAccount("+14152222222", uuid, pni, new ArrayList<>(), new byte[UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH]); when(asyncCommands.get(eq("AccountMap::" + pni))).thenReturn(MockRedisFuture.completedFuture(null)); when(asyncCommands.setex(any(), anyLong(), any())).thenReturn(MockRedisFuture.completedFuture("OK")); when(accounts.getByPhoneNumberIdentifierAsync(pni)) .thenReturn(CompletableFuture.completedFuture(Optional.of(account))); Optional<Account> retrieved = accountsManager.getByPhoneNumberIdentifierAsync(pni).join(); assertTrue(retrieved.isPresent()); assertSame(retrieved.get(), account); verify(asyncCommands).get(eq("AccountMap::" + pni)); verify(asyncCommands).setex(eq("AccountMap::" + pni), anyLong(), eq(uuid.toString())); verify(asyncCommands).setex(eq("Account3::" + uuid), anyLong(), anyString()); verifyNoMoreInteractions(asyncCommands); verify(accounts).getByPhoneNumberIdentifierAsync(pni); verifyNoMoreInteractions(accounts); }
Duration getLockAtLeastFor(AnnotationData annotation) { return getValue( annotation.getLockAtLeastFor(), annotation.getLockAtLeastForString(), this.defaultLockAtLeastFor, "lockAtLeastForString"); }
@Test public void shouldGetZeroGracePeriodFromAnnotation() throws NoSuchMethodException { noopResolver(); SpringLockConfigurationExtractor.AnnotationData annotation = getAnnotation("annotatedMethodWithZeroGracePeriod"); TemporalAmount gracePeriod = extractor.getLockAtLeastFor(annotation); assertThat(gracePeriod).isEqualTo(Duration.ZERO); }
@Override public ReferenceConfig<T> build() { ReferenceConfig<T> reference = new ReferenceConfig<>(); super.build(reference); reference.setInterface(interfaceName); if (interfaceClass != null) { reference.setInterface(interfaceClass); } reference.setClient(client); reference.setUrl(url); reference.setMethods(methods); reference.setConsumer(consumer); reference.setProtocol(protocol); // @since 2.7.8 reference.setServices(services); return reference; }
@Test void build() { ConsumerConfig consumer = new ConsumerConfig(); MethodConfig method = new MethodConfig(); ReferenceBuilder<DemoService> builder = new ReferenceBuilder<>(); builder.id("id") .interfaceClass(DemoService.class) .protocol("protocol") .client("client") .url("url") .consumer(consumer) .addMethod(method) // introduced since 2.7.8 .services("test-service", "test-service2"); ReferenceConfig config = builder.build(); ReferenceConfig config2 = builder.build(); Assertions.assertEquals("org.apache.dubbo.config.api.DemoService", config.getInterface()); Assertions.assertEquals(DemoService.class, config.getInterfaceClass()); Assertions.assertEquals("protocol", config.getProtocol()); Assertions.assertEquals("client", config.getClient()); Assertions.assertEquals("url", config.getUrl()); Assertions.assertEquals(consumer, config.getConsumer()); Assertions.assertEquals("test-service,test-service2", config.getServices()); Assertions.assertEquals(ofSet("test-service", "test-service2"), config.getSubscribedServices()); Assertions.assertTrue(config.getMethods().contains(method)); Assertions.assertEquals(1, config.getMethods().size()); Assertions.assertNotSame(config, config2); }
@Udf(description = "Returns the sine of an INT value") public Double sin( @UdfParameter( value = "value", description = "The value in radians to get the sine of." ) final Integer value ) { return sin(value == null ? null : value.doubleValue()); }
@Test public void shouldHandlePositive() { assertThat(udf.sin(0.43), closeTo(0.41687080242921076, 0.000000000000001)); assertThat(udf.sin(Math.PI), closeTo(0, 0.000000000000001)); assertThat(udf.sin(Math.PI * 2), closeTo(0, 0.000000000000001)); assertThat(udf.sin(6), closeTo(-0.27941549819892586, 0.000000000000001)); assertThat(udf.sin(6L), closeTo(-0.27941549819892586, 0.000000000000001)); }
public static void register(ApplicationId appId) { SecurityAdminService service = getSecurityService(); if (service != null) { service.register(appId); } }
@Test public void testRegister() { service.register(appId); }
@Override public boolean shouldKeepLeft(ServiceUnitStateData from, ServiceUnitStateData to) { if (to == null) { return false; } if (from != null) { if (from.versionId() == Long.MAX_VALUE && to.versionId() == Long.MIN_VALUE) { // overflow } else if (from.versionId() >= to.versionId()) { return true; } else if (from.versionId() < to.versionId() - 1) { // Compacted return false; } // else from.versionId() == to.versionId() - 1 // continue to check further } if (to.force()) { return false; } ServiceUnitState prevState = state(from); ServiceUnitState state = state(to); if (!ServiceUnitState.isValidTransition(prevState, state)) { return true; } if (checkBrokers) { switch (prevState) { case Owned: switch (state) { case Splitting: return isNotBlank(to.dstBroker()) || !from.dstBroker().equals(to.sourceBroker()); case Releasing: return invalidUnload(from, to); } case Assigning: switch (state) { case Owned: return notEquals(from, to); } case Releasing: switch (state) { case Assigning: return isBlank(to.dstBroker()) || notEquals(from, to); case Free: return notEquals(from, to); } case Splitting: switch (state) { case Deleted: return notEquals(from, to); } case Free: switch (state) { case Assigning: return isNotBlank(to.sourceBroker()) || isBlank(to.dstBroker()); } } } return false; }
@Test public void testTransitionsAndBrokers() { assertTrue(strategy.shouldKeepLeft(data(Init), data2(Init))); assertFalse(strategy.shouldKeepLeft(data(Init), data2(Free))); assertFalse(strategy.shouldKeepLeft(data(Init), data2(Assigning))); assertFalse(strategy.shouldKeepLeft(data(Init), data2(Owned))); assertFalse(strategy.shouldKeepLeft(data(Init), data2(Releasing))); assertFalse(strategy.shouldKeepLeft(data(Init), data2(Splitting))); assertFalse(strategy.shouldKeepLeft(data(Init), data2(Deleted))); assertTrue(strategy.shouldKeepLeft(data(Assigning), data2(Init))); assertTrue(strategy.shouldKeepLeft(data(Assigning), data2(Free))); assertTrue(strategy.shouldKeepLeft(data(Assigning), data2(Assigning))); assertTrue(strategy.shouldKeepLeft(data(Assigning, "dst1"), data2(Owned, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Assigning, dst), data2(Owned, src, dst))); assertFalse(strategy.shouldKeepLeft(data(Assigning, dst), data2(Owned, dst))); assertFalse(strategy.shouldKeepLeft(data(Assigning, src, dst), data2(Owned, src, dst))); assertTrue(strategy.shouldKeepLeft(data(Assigning, src, dst), data2(Releasing, dst))); assertTrue(strategy.shouldKeepLeft(data(Assigning, src, dst), data2(Releasing, src, dst))); assertTrue(strategy.shouldKeepLeft(data(Assigning), data2(Splitting, dst))); assertTrue(strategy.shouldKeepLeft(data(Assigning), data2(Deleted, dst))); assertTrue(strategy.shouldKeepLeft(data(Owned), data2(Init))); assertTrue(strategy.shouldKeepLeft(data(Owned), data2(Free))); assertTrue(strategy.shouldKeepLeft(data(Owned), data2(Assigning))); assertTrue(strategy.shouldKeepLeft(data(Owned), data2(Owned))); assertTrue(strategy.shouldKeepLeft(data(Owned), data2(Releasing, dst))); assertTrue(strategy.shouldKeepLeft(data(Owned, src, "dst1"), data2(Releasing, src, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Owned, "dst1"), data2(Releasing, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Owned, dst), data2(Releasing, dst))); 
assertTrue(strategy.shouldKeepLeft(data(Owned, src, dst), data2(Releasing, null, dst))); assertTrue(strategy.shouldKeepLeft(data(Owned, src, dst), data2(Releasing, src, null))); assertFalse(strategy.shouldKeepLeft(data(Owned, src, dst), data2(Releasing, dst, null))); assertTrue(strategy.shouldKeepLeft(data(Owned, src, "dst1"), data2(Releasing, src, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Owned, "src1", dst), data2(Releasing, "src2", dst))); assertTrue(strategy.shouldKeepLeft(data(Owned, src, dst), data2(Releasing, src, dst))); assertFalse(strategy.shouldKeepLeft(data(Owned, src, dst), data2(Releasing, dst, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Owned, src, "dst1"), data2(Splitting, src, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Owned, "dst1"), data2(Splitting, "dst2"))); assertFalse(strategy.shouldKeepLeft(data(Owned, dst), data2(Splitting, dst, null))); assertFalse(strategy.shouldKeepLeft(data(Owned, src, dst), data2(Splitting, dst, null))); assertTrue(strategy.shouldKeepLeft(data(Owned), data2(Deleted, dst))); assertTrue(strategy.shouldKeepLeft(data(Releasing), data2(Init))); assertFalse(strategy.shouldKeepLeft(data(Releasing), data2(Free))); assertTrue(strategy.shouldKeepLeft(data(Releasing, "dst1"), data2(Free, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Releasing, "src1", dst), data2(Free, "src2", dst))); assertTrue(strategy.shouldKeepLeft(data(Releasing, src, "dst1"), data2(Assigning, src, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Releasing, src, "dst1"), data2(Assigning, src, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Releasing, "src1", dst), data2(Assigning, "src2", dst))); assertFalse(strategy.shouldKeepLeft(data(Releasing, src, dst), data2(Assigning, src, dst))); assertTrue(strategy.shouldKeepLeft(data(Releasing), data2(Owned))); assertTrue(strategy.shouldKeepLeft(data(Releasing), data2(Releasing))); assertTrue(strategy.shouldKeepLeft(data(Releasing), data2(Splitting))); 
assertTrue(strategy.shouldKeepLeft(data(Releasing), data2(Deleted, dst))); assertTrue(strategy.shouldKeepLeft(data(Splitting), data2(Init))); assertTrue(strategy.shouldKeepLeft(data(Splitting), data2(Free))); assertTrue(strategy.shouldKeepLeft(data(Splitting), data2(Assigning))); assertTrue(strategy.shouldKeepLeft(data(Splitting), data2(Owned))); assertTrue(strategy.shouldKeepLeft(data(Splitting), data2(Releasing))); assertTrue(strategy.shouldKeepLeft(data(Splitting), data2(Splitting))); assertTrue(strategy.shouldKeepLeft(data(Splitting, src, "dst1"), data2(Deleted, src, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Splitting, "dst1"), data2(Deleted, "dst2"))); assertTrue(strategy.shouldKeepLeft(data(Splitting, "src1", dst), data2(Deleted, "src2", dst))); assertFalse(strategy.shouldKeepLeft(data(Splitting, dst), data2(Deleted, dst))); assertFalse(strategy.shouldKeepLeft(data(Splitting, src, dst), data2(Deleted, src, dst))); assertFalse(strategy.shouldKeepLeft(data(Deleted), data2(Init))); assertTrue(strategy.shouldKeepLeft(data(Deleted), data2(Free))); assertTrue(strategy.shouldKeepLeft(data(Deleted), data2(Assigning))); assertTrue(strategy.shouldKeepLeft(data(Deleted), data2(Owned))); assertTrue(strategy.shouldKeepLeft(data(Deleted), data2(Releasing))); assertTrue(strategy.shouldKeepLeft(data(Deleted), data2(Splitting))); assertTrue(strategy.shouldKeepLeft(data(Deleted), data2(Deleted))); assertFalse(strategy.shouldKeepLeft(data(Free), data2(Init))); assertTrue(strategy.shouldKeepLeft(data(Free), data2(Free))); assertFalse(strategy.shouldKeepLeft(data(Free), data2(Assigning))); assertTrue(strategy.shouldKeepLeft(data(Free), data2(Assigning, src, dst))); assertTrue(strategy.shouldKeepLeft(data(Free), data2(Owned))); assertTrue(strategy.shouldKeepLeft(data(Free), data2(Releasing))); assertTrue(strategy.shouldKeepLeft(data(Free), data2(Splitting))); assertTrue(strategy.shouldKeepLeft(data(Free), data2(Deleted))); }
public static DataMap getAnnotationsMap(Annotation[] as) { return annotationsToData(as, true); }
@Test(description = "Unsafe call: null input", expectedExceptions = NullPointerException.class) public void failsOnNullInput() { ResourceModelAnnotation.getAnnotationsMap(null); Assert.fail("Should fail throwing a NullPointerException"); }
public static MetricNamingStrategy<MetricName> getClientTelemetryMetricNamingStrategy(String prefix) { Objects.requireNonNull(prefix, "prefix cannot be null"); return new MetricNamingStrategy<MetricName>() { @Override public MetricKey metricKey(MetricName metricName) { Objects.requireNonNull(metricName, "metric name cannot be null"); return new MetricKey(fullMetricName(prefix, metricName.group(), metricName.name()), Collections.unmodifiableMap(cleanTags(metricName.tags()))); } @Override public MetricKey derivedMetricKey(MetricKey key, String derivedComponent) { Objects.requireNonNull(derivedComponent, "derived component cannot be null"); return new MetricKey(key.name() + NAME_JOINER + derivedComponent, key.tags()); } }; }
@Test public void testNullPrefix() { Exception e = assertThrows(NullPointerException.class, () -> TelemetryMetricNamingConvention .getClientTelemetryMetricNamingStrategy(null)); assertEquals("prefix cannot be null", e.getMessage()); }
@Override public boolean supportsConvert() { return false; }
@Test void assertSupportsConvert() { assertFalse(metaData.supportsConvert()); }
@VisibleForTesting static boolean areProxyPropertiesSet(String protocol) { return PROXY_PROPERTIES.stream() .anyMatch(property -> System.getProperty(protocol + "." + property) != null); }
@Test public void testAreProxyPropertiesSet_httpsPortSet() { System.setProperty("https.proxyPort", "port"); Assert.assertFalse(MavenSettingsProxyProvider.areProxyPropertiesSet("http")); Assert.assertTrue(MavenSettingsProxyProvider.areProxyPropertiesSet("https")); }
public static void retryWithBackoff( final int maxRetries, final int initialWaitMs, final int maxWaitMs, final Runnable runnable, final Class<?>... passThroughExceptions) { retryWithBackoff( maxRetries, initialWaitMs, maxWaitMs, runnable, () -> false, Arrays.stream(passThroughExceptions) .map(c -> (Predicate<Exception>) c::isInstance) .collect(Collectors.toList()) ); }
@Test public void shouldRespectMaxWait() { doThrow(new RuntimeException("error")).when(runnable).run(); try { RetryUtil.retryWithBackoff(3, 1, 3, runnable, sleep, () -> false, Collections.emptyList()); fail("retry should have thrown"); } catch (final RuntimeException e) { } verify(runnable, times(4)).run(); final InOrder inOrder = Mockito.inOrder(sleep); inOrder.verify(sleep).accept((long) 1); inOrder.verify(sleep).accept((long) 2); inOrder.verify(sleep).accept((long) 3); inOrder.verifyNoMoreInteractions(); }
public static boolean testURLPassesExclude(String url, String exclude) { // If the url doesn't decode to UTF-8 then return false, it could be trying to get around our rules with nonstandard encoding // If the exclude rule includes a "?" character, the url must exactly match the exclude rule. // If the exclude rule does not contain the "?" character, we chop off everything starting at the first "?" // in the URL and then the resulting url must exactly match the exclude rule. If the exclude ends with a "*" // (wildcard) character, and wildcards are allowed in excludes, then the URL is allowed if it exactly // matches everything before the * and there are no ".." even encoded ones characters after the "*". String decodedUrl = null; try { decodedUrl = URLDecoder.decode(url, "UTF-8"); } catch (Exception e) { return false; } if (exclude.endsWith("*") && ALLOW_WILDCARDS_IN_EXCLUDES.getValue()) { if (url.startsWith(exclude.substring(0, exclude.length()-1))) { // Now make sure that there are no ".." characters in the rest of the URL. if (!decodedUrl.contains("..")) { return true; } } } else if (exclude.contains("?")) { if (url.equals(exclude)) { return true; } } else { int paramIndex = url.indexOf("?"); if (paramIndex != -1) { url = url.substring(0, paramIndex); } if (url.equals(exclude)) { return true; } } return false; }
@Test public void pathTraversalDetectedWhenWildcardsAllowed() throws Exception { AuthCheckFilter.ALLOW_WILDCARDS_IN_EXCLUDES.setValue(true); assertFalse(AuthCheckFilter.testURLPassesExclude("setup/setup-/../../log.jsp?log=info&mode=asc&lines=All","setup/setup-*")); assertFalse(AuthCheckFilter.testURLPassesExclude("setup/setup-/%2E%2E/%2E%2E/log.jsp?log=info&mode=asc&lines=All","setup/setup-*")); assertFalse(AuthCheckFilter.testURLPassesExclude("setup/setup-s/%u002e%u002e/%u002e%u002e/log.jsp?log=info&mode=asc&lines=All", "setup/setup-*")); }
public ModuleBuilder organization(String organization) { this.organization = organization; return getThis(); }
@Test void organization() { ModuleBuilder builder = ModuleBuilder.newBuilder(); builder.organization("organization"); Assertions.assertEquals("organization", builder.build().getOrganization()); }
public boolean initAndAddIssue(Issue issue) { DefaultInputComponent inputComponent = (DefaultInputComponent) issue.primaryLocation().inputComponent(); if (noSonar(inputComponent, issue)) { return false; } ActiveRule activeRule = activeRules.find(issue.ruleKey()); if (activeRule == null) { // rule does not exist or is not enabled -> ignore the issue return false; } ScannerReport.Issue rawIssue = createReportIssue(issue, inputComponent.scannerId(), activeRule.severity()); if (filters.accept(inputComponent, rawIssue)) { write(inputComponent.scannerId(), rawIssue); return true; } return false; }
@Test public void ignore_null_active_rule() { RuleKey INACTIVE_RULE_KEY = RuleKey.of("repo", "inactive"); initModuleIssues(); DefaultIssue issue = new DefaultIssue(project) .at(new DefaultIssueLocation().on(file).at(file.selectLine(3)).message("Foo")) .forRule(INACTIVE_RULE_KEY); boolean added = moduleIssues.initAndAddIssue(issue); assertThat(added).isFalse(); verifyNoInteractions(reportPublisher); }
@Override public boolean remove(Object o) { throw new UnsupportedOperationException("LazySet is not modifiable"); }
@Test(expected = UnsupportedOperationException.class) public void testRemove_throwsException() { set.remove(null); }
@Override public boolean addClass(final Class<?> stepClass) { if (stepClasses.contains(stepClass)) { return true; } checkNoComponentAnnotations(stepClass); if (hasCucumberContextConfiguration(stepClass)) { checkOnlyOneClassHasCucumberContextConfiguration(stepClass); withCucumberContextConfiguration = stepClass; } stepClasses.add(stepClass); return true; }
@Test void shouldBeStoppableWhenFacedWithMissingContextConfiguration() { final ObjectFactory factory = new SpringFactory(); factory.addClass(WithoutContextConfiguration.class); IllegalStateException exception = assertThrows(IllegalStateException.class, factory::start); assertThat(exception.getMessage(), containsString("Failed to load ApplicationContext")); assertDoesNotThrow(factory::stop); }
@VisibleForTesting void validateRoleDuplicate(String name, String code, Long id) { // 0. 超级管理员,不允许创建 if (RoleCodeEnum.isSuperAdmin(code)) { throw exception(ROLE_ADMIN_CODE_ERROR, code); } // 1. 该 name 名字被其它角色所使用 RoleDO role = roleMapper.selectByName(name); if (role != null && !role.getId().equals(id)) { throw exception(ROLE_NAME_DUPLICATE, name); } // 2. 是否存在相同编码的角色 if (!StringUtils.hasText(code)) { return; } // 该 code 编码被其它角色所使用 role = roleMapper.selectByCode(code); if (role != null && !role.getId().equals(id)) { throw exception(ROLE_CODE_DUPLICATE, code); } }
@Test public void testValidateRoleDuplicate_nameDuplicate() { // mock 数据 RoleDO roleDO = randomPojo(RoleDO.class, o -> o.setName("role_name")); roleMapper.insert(roleDO); // 准备参数 String name = "role_name"; // 调用,并断言异常 assertServiceException(() -> roleService.validateRoleDuplicate(name, randomString(), null), ROLE_NAME_DUPLICATE, name); }
public RepositoryList getRecentRepo(String serverUrl, String token) { HttpUrl url = buildUrl(serverUrl, "/rest/api/1.0/profile/recent/repos"); return doGet(token, url, body -> buildGson().fromJson(body, RepositoryList.class)); }
@Test public void get_recent_repos() { server.enqueue(new MockResponse() .setHeader("Content-Type", "application/json;charset=UTF-8") .setBody("{\n" + " \"isLastPage\": true,\n" + " \"values\": [\n" + " {\n" + " \"slug\": \"banana\",\n" + " \"id\": 2,\n" + " \"name\": \"banana\",\n" + " \"project\": {\n" + " \"key\": \"HOY\",\n" + " \"id\": 2,\n" + " \"name\": \"hoy\"\n" + " }\n" + " },\n" + " {\n" + " \"slug\": \"potato\",\n" + " \"id\": 1,\n" + " \"name\": \"potato\",\n" + " \"project\": {\n" + " \"key\": \"HEY\",\n" + " \"id\": 1,\n" + " \"name\": \"hey\"\n" + " }\n" + " }\n" + " ]\n" + "}")); RepositoryList gsonBBSRepoList = underTest.getRecentRepo(server.url("/").toString(), "token"); assertThat(gsonBBSRepoList.isLastPage()).isTrue(); assertThat(gsonBBSRepoList.getValues()).hasSize(2); assertThat(gsonBBSRepoList.getValues()).extracting(Repository::getId, Repository::getName, Repository::getSlug, g -> g.getProject().getId(), g -> g.getProject().getKey(), g -> g.getProject().getName()) .containsExactlyInAnyOrder( tuple(2L, "banana", "banana", 2L, "HOY", "hoy"), tuple(1L, "potato", "potato", 1L, "HEY", "hey")); }
public void transferAllStateDataToDirectory( Collection<StateHandleDownloadSpec> downloadRequests, CloseableRegistry closeableRegistry) throws Exception { // We use this closer for fine-grained shutdown of all parallel downloading. CloseableRegistry internalCloser = new CloseableRegistry(); // Make sure we also react to external close signals. closeableRegistry.registerCloseable(internalCloser); try { // We have to wait for all futures to be completed, to make sure in // case of failure that we will clean up all the files FutureUtils.completeAll( createDownloadRunnables(downloadRequests, internalCloser).stream() .map( runnable -> CompletableFuture.runAsync( runnable, transfer.getExecutorService())) .collect(Collectors.toList())) .get(); } catch (Exception e) { downloadRequests.stream() .map(StateHandleDownloadSpec::getDownloadDestination) .map(Path::toFile) .forEach(FileUtils::deleteDirectoryQuietly); // Error reporting Throwable throwable = ExceptionUtils.stripExecutionException(e); throwable = ExceptionUtils.stripException(throwable, RuntimeException.class); if (throwable instanceof IOException) { throw (IOException) throwable; } else { throw new FlinkRuntimeException("Failed to download data for state handles.", e); } } finally { // Unregister and close the internal closer. if (closeableRegistry.unregisterCloseable(internalCloser)) { IOUtils.closeQuietly(internalCloser); } } }
@Test public void testMultiThreadRestoreThreadPoolExceptionRethrow() { SpecifiedException expectedCause = new SpecifiedException("throw exception while multi thread restore."); StreamStateHandle stateHandle = new ThrowingStateHandle(expectedCause); List<HandleAndLocalPath> stateHandles = new ArrayList<>(1); stateHandles.add(HandleAndLocalPath.of(stateHandle, "state1")); IncrementalRemoteKeyedStateHandle incrementalKeyedStateHandle = new IncrementalRemoteKeyedStateHandle( UUID.randomUUID(), KeyGroupRange.EMPTY_KEY_GROUP_RANGE, 1, stateHandles, stateHandles, stateHandle); try (RocksDBStateDownloader rocksDBStateDownloader = new RocksDBStateDownloader(5)) { rocksDBStateDownloader.transferAllStateDataToDirectory( Collections.singletonList( new StateHandleDownloadSpec( incrementalKeyedStateHandle, temporaryFolder.newFolder().toPath())), new CloseableRegistry()); fail(); } catch (Exception e) { assertEquals(expectedCause, e.getCause()); } }
public Map<String, String> getAllConfigPropsWithSecretsObfuscated() { final Map<String, String> allPropsCleaned = new HashMap<>(); // build a properties map with obfuscated values for sensitive configs. // Obfuscation is handled by ConfigDef.convertToString allPropsCleaned.putAll(getKsqlConfigPropsWithSecretsObfuscated()); allPropsCleaned.putAll( getKsqlStreamConfigPropsWithSecretsObfuscated().entrySet().stream().collect( Collectors.toMap( e -> KSQL_STREAMS_PREFIX + e.getKey(), Map.Entry::getValue ) ) ); return Collections.unmodifiableMap(allPropsCleaned); }
@Test public void shouldListKnownKsqlConfig() { // Given: final KsqlConfig config = new KsqlConfig(ImmutableMap.of( KsqlConfig.KSQL_SERVICE_ID_CONFIG, "not sensitive", SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "sensitive!" )); // When: final Map<String, String> result = config.getAllConfigPropsWithSecretsObfuscated(); // Then: assertThat(result.get(KsqlConfig.KSQL_SERVICE_ID_CONFIG), is("not sensitive")); }
@VisibleForTesting public static JobGraph createJobGraph(StreamGraph streamGraph) { return new StreamingJobGraphGenerator( Thread.currentThread().getContextClassLoader(), streamGraph, null, Runnable::run) .createJobGraph(); }
@Test void testResourcesForChainedSourceSink() throws Exception { ResourceSpec resource1 = ResourceSpec.newBuilder(0.1, 100).build(); ResourceSpec resource2 = ResourceSpec.newBuilder(0.2, 200).build(); ResourceSpec resource3 = ResourceSpec.newBuilder(0.3, 300).build(); ResourceSpec resource4 = ResourceSpec.newBuilder(0.4, 400).build(); ResourceSpec resource5 = ResourceSpec.newBuilder(0.5, 500).build(); Method opMethod = getSetResourcesMethodAndSetAccessible(SingleOutputStreamOperator.class); Method sinkMethod = getSetResourcesMethodAndSetAccessible(DataStreamSink.class); StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStream<Tuple2<Integer, Integer>> source = env.addSource( new ParallelSourceFunction<Tuple2<Integer, Integer>>() { @Override public void run(SourceContext<Tuple2<Integer, Integer>> ctx) throws Exception {} @Override public void cancel() {} }); opMethod.invoke(source, resource1); DataStream<Tuple2<Integer, Integer>> map = source.map( new MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>>() { @Override public Tuple2<Integer, Integer> map(Tuple2<Integer, Integer> value) throws Exception { return value; } }); opMethod.invoke(map, resource2); // CHAIN(Source -> Map -> Filter) DataStream<Tuple2<Integer, Integer>> filter = map.filter( new FilterFunction<Tuple2<Integer, Integer>>() { @Override public boolean filter(Tuple2<Integer, Integer> value) throws Exception { return false; } }); opMethod.invoke(filter, resource3); DataStream<Tuple2<Integer, Integer>> reduce = filter.keyBy(0) .reduce( new ReduceFunction<Tuple2<Integer, Integer>>() { @Override public Tuple2<Integer, Integer> reduce( Tuple2<Integer, Integer> value1, Tuple2<Integer, Integer> value2) throws Exception { return new Tuple2<>(value1.f0, value1.f1 + value2.f1); } }); opMethod.invoke(reduce, resource4); DataStreamSink<Tuple2<Integer, Integer>> sink = reduce.addSink( new SinkFunction<Tuple2<Integer, Integer>>() { @Override public void 
invoke(Tuple2<Integer, Integer> value) throws Exception {} }); sinkMethod.invoke(sink, resource5); JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph()); JobVertex sourceMapFilterVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(0); JobVertex reduceSinkVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(1); assertThat(sourceMapFilterVertex.getMinResources()) .isEqualTo(resource3.merge(resource2).merge(resource1)); assertThat(reduceSinkVertex.getPreferredResources()).isEqualTo(resource4.merge(resource5)); }
public static String formatChineseDate(Calendar calendar, boolean withTime) { final StringBuilder result = StrUtil.builder(); // 年 final String year = String.valueOf(calendar.get(Calendar.YEAR)); final int length = year.length(); for (int i = 0; i < length; i++) { result.append(NumberChineseFormatter.numberCharToChinese(year.charAt(i), false)); } result.append('年'); // 月 int month = calendar.get(Calendar.MONTH) + 1; result.append(NumberChineseFormatter.formatThousand(month, false)); result.append('月'); // 日 int day = calendar.get(Calendar.DAY_OF_MONTH); result.append(NumberChineseFormatter.formatThousand(day, false)); result.append('日'); // 只替换年月日,时分秒中零不需要替换 String temp = result.toString().replace('零', '〇'); result.delete(0, result.length()); result.append(temp); if (withTime) { // 时 int hour = calendar.get(Calendar.HOUR_OF_DAY); result.append(NumberChineseFormatter.formatThousand(hour, false)); result.append('时'); // 分 int minute = calendar.get(Calendar.MINUTE); result.append(NumberChineseFormatter.formatThousand(minute, false)); result.append('分'); // 秒 int second = calendar.get(Calendar.SECOND); result.append(NumberChineseFormatter.formatThousand(second, false)); result.append('秒'); } return result.toString(); }
@Test public void formatChineseDate() { Calendar calendar = Objects.requireNonNull(DateUtil.parse("2018-02-24 12:13:14")).toCalendar(); final String chineseDate = CalendarUtil.formatChineseDate(calendar, false); assertEquals("二〇一八年二月二十四日", chineseDate); final String chineseDateTime = CalendarUtil.formatChineseDate(calendar, true); assertEquals("二〇一八年二月二十四日十二时十三分十四秒", chineseDateTime); }
public BlobOperationResponse createContainer(final Exchange exchange) { final Map<String, String> metadata = configurationProxy.getMetadata(exchange); final PublicAccessType publicAccessType = configurationProxy.getPublicAccessType(exchange); final Duration timeout = configurationProxy.getTimeout(exchange); final BlobExchangeHeaders blobExchangeHeaders = new BlobExchangeHeaders().httpHeaders(client.createContainer(metadata, publicAccessType, timeout)); return BlobOperationResponse.createWithEmptyBody(blobExchangeHeaders.toMap()); }
@Test void testCreateContainer() { when(client.createContainer(any(), any(), any())).thenReturn(createContainerMock()); final BlobContainerOperations blobContainerOperations = new BlobContainerOperations(configuration, client); final BlobOperationResponse response = blobContainerOperations.createContainer(null); assertNotNull(response); assertNotNull(response.getHeaders().get(BlobConstants.RAW_HTTP_HEADERS)); assertTrue((boolean) response.getBody()); }
@Bean public RateLimiterRegistry rateLimiterRegistry( RateLimiterConfigurationProperties rateLimiterProperties, EventConsumerRegistry<RateLimiterEvent> rateLimiterEventsConsumerRegistry, RegistryEventConsumer<RateLimiter> rateLimiterRegistryEventConsumer, @Qualifier("compositeRateLimiterCustomizer") CompositeCustomizer<RateLimiterConfigCustomizer> compositeRateLimiterCustomizer) { RateLimiterRegistry rateLimiterRegistry = createRateLimiterRegistry(rateLimiterProperties, rateLimiterRegistryEventConsumer, compositeRateLimiterCustomizer); registerEventConsumer(rateLimiterRegistry, rateLimiterEventsConsumerRegistry, rateLimiterProperties); rateLimiterProperties.getInstances().forEach( (name, properties) -> rateLimiterRegistry .rateLimiter(name, rateLimiterProperties .createRateLimiterConfig(properties, compositeRateLimiterCustomizer, name)) ); return rateLimiterRegistry; }
@Test public void testCreateRateLimiterRegistryWithUnknownConfig() { RateLimiterConfigurationProperties rateLimiterConfigurationProperties = new RateLimiterConfigurationProperties(); io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties instanceProperties = new io.github.resilience4j.common.ratelimiter.configuration.CommonRateLimiterConfigurationProperties.InstanceProperties(); instanceProperties.setBaseConfig("unknownConfig"); rateLimiterConfigurationProperties.getInstances().put("backend", instanceProperties); RateLimiterConfiguration rateLimiterConfiguration = new RateLimiterConfiguration(); DefaultEventConsumerRegistry<RateLimiterEvent> eventConsumerRegistry = new DefaultEventConsumerRegistry<>(); assertThatThrownBy(() -> rateLimiterConfiguration .rateLimiterRegistry(rateLimiterConfigurationProperties, eventConsumerRegistry, new CompositeRegistryEventConsumer<>(emptyList()), compositeRateLimiterCustomizerTest())) .isInstanceOf(ConfigurationNotFoundException.class) .hasMessage("Configuration with name 'unknownConfig' does not exist"); }
public void loadConnectionData() { // HACK: need to check if onload event was already fired. // It is called from XulDatabaseDialog from dcDialog.getSwtInstance(shell); AND dialog.show(); // Multiple calls lead to multiple numbers of database types. // Therefore we check if the connectionBox was already filled. if ( connectionBox != null ) { return; } getControls(); // Add sorted types to the listbox now. final SortedSet<String> keys = new TreeSet<String>( connectionMap.keySet() ); for ( String key : keys ) { connectionBox.addItem( key ); } PluginRegistry registry = PluginRegistry.getInstance(); registry.addPluginListener( DatabasePluginType.class, new DatabaseTypeListener( registry ) { @Override public void databaseTypeAdded( String pluginName, DatabaseInterface databaseInterface ) { if ( keys.add( pluginName ) ) { update(); } } @Override public void databaseTypeRemoved( String pluginName ) { if ( keys.remove( pluginName ) ) { update(); } } private void update() { Display.getDefault().syncExec( new Runnable() { @Override public void run() { connectionBox.removeItems(); for ( String key : keys ) { connectionBox.addItem( key ); } } } ); } } ); // HACK: Need to force height of list control, as it does not behave // well when using relative layouting connectionBox.setRows( connectionBox.getRows() ); Object key = connectionBox.getSelectedItem(); // Nothing selected yet...select first item. // TODO Implement a connection type preference, // and use that type as the default for // new databases. if ( key == null ) { key = connectionMap.firstKey(); connectionBox.setSelectedItem( key ); } // HACK: Need to force selection of first panel if ( dialogDeck != null ) { setDeckChildIndex(); } setDefaultPoolParameters(); // HACK: reDim the pooling table if ( poolParameterTree != null ) { poolParameterTree.setRows( poolParameterTree.getRows() ); } }
// When the connection box already reports a selected item, loadConnectionData()
// must not reset the selection to the first entry of the connection map.
@Test
public void testLoadConnectionDataWithSelectedItem() throws Exception {
  DatabaseInterface dbInterface = mock( DatabaseInterface.class );
  when( dbInterface.getDefaultDatabasePort() ).thenReturn( 5309 );
  when( connectionBox.getSelectedItem() ).thenReturn( "myDb" );
  dataHandler.loadConnectionData();
}
/**
 * Capitalizes the first character of the given string.
 *
 * <p>Thin delegation to {@code changeFirstCharacterCase(str, true)}; null and
 * empty-string handling is performed by that delegate (the accompanying test
 * expects {@code null} in -> {@code null} out).
 *
 * @param str the string to capitalize, may be null
 * @return the capitalized string, or null if the input was null
 */
public static String capitalize(String str) {
    return changeFirstCharacterCase(str, true);
}
// capitalize() must pass through null and uppercase only the first character otherwise.
@Test
public void testCapitalize() {
    Assert.assertNull(StringUtil.capitalize(null));
    Assert.assertEquals("Foo", StringUtil.capitalize("foo"));
}
/**
 * Wraps a {@link Callable} so that the given MDC context data is installed for the
 * duration of every invocation of the returned callable and restored afterwards.
 *
 * @param contextData MDC key/value pairs to install around each call
 * @param command the callable to execute under that context
 * @param <T> result type of the callable
 * @return a callable that runs {@code command} with {@code contextData} active
 */
public static <T> Callable<T> wrapCallable(
        Map<String, String> contextData, Callable<T> command) {
    return new Callable<T>() {
        @Override
        public T call() throws Exception {
            // try-with-resources guarantees the previous MDC state is restored
            // even when command.call() throws.
            try (MdcCloseable context = withContext(contextData)) {
                return command.call();
            }
        }
    };
}
// The callable returned by wrapCallable must run with the supplied MDC context data
// installed, so the job ID appears in log output emitted from inside the callable.
@Test
void testWrapCallable() throws Exception {
    assertJobIDLogged(
            jobID ->
                    wrapCallable(
                                    asContextData(jobID),
                                    () -> {
                                        LOGGER.info("ignore");
                                        return null;
                                    })
                            .call());
}
@CanIgnoreReturnValue @SuppressWarnings("deprecation") // TODO(b/134064106): design an alternative to no-arg check() public final Ordered containsExactly() { return check().about(iterableEntries()).that(checkNotNull(actual).entries()).containsExactly(); }
// Failure diagnostics for containsExactly must render empty-string keys and values
// as "" (empty String) in the missing/unexpected/expected/but-was facts.
@Test
public void containsExactlyFailureWithEmptyStringBoth() {
  expectFailureWhenTestingThat(ImmutableMultimap.of("a", "")).containsExactly("", "a");
  assertFailureKeys("missing", "unexpected", "---", "expected", "but was");
  assertFailureValue("missing", "{\"\" (empty String)=[a]}");
  assertFailureValue("unexpected", "{a=[\"\" (empty String)]}");
  assertFailureValue("expected", "{\"\" (empty String)=[a]}");
  assertFailureValue("but was", "{a=[]}");
}
/**
 * Returns the advance width of the given text in glyph space, computed by
 * encoding the text into this font's character codes and summing each code's width.
 *
 * @param text the text to measure
 * @return the total width of all encoded codes
 * @throws IOException if the text cannot be encoded or a code cannot be read
 */
public float getStringWidth(String text) throws IOException {
    ByteArrayInputStream codes = new ByteArrayInputStream(encode(text));
    float totalWidth = 0;
    while (codes.available() > 0) {
        totalWidth += getWidth(readCode(codes));
    }
    return totalWidth;
}
// PDFBOX-5048: a damaged font must report zero height and zero string width
// instead of throwing while the font program is being processed.
// NOTE(review): downloads the fixture PDF from JIRA at test time — requires
// network access; consider caching the attachment locally.
@Test
void testPDFox5048() throws IOException, URISyntaxException {
    try (PDDocument doc = Loader.loadPDF(RandomAccessReadBuffer.createBufferFromStream(
            new URI("https://issues.apache.org/jira/secure/attachment/13017227/stringwidth.pdf")
                    .toURL().openStream()))) {
        PDPage page = doc.getPage(0);
        PDFont font = page.getResources().getFont(COSName.getPDFName("F70"));
        assertTrue(font.isDamaged());
        assertEquals(0, font.getHeight(0));
        assertEquals(0, font.getStringWidth("Pa"));
    }
}
/**
 * Removes the slot with the given allocation ID from the pool, keeping the
 * per-task-executor index in sync: the allocation ID is removed from its owning
 * task executor's set, and the whole map entry is dropped once that set is empty
 * (computeIfPresent returning null removes the key).
 *
 * @param allocationId ID of the slot to remove
 * @return the removed slot, or {@link Optional#empty()} if no such slot was pooled
 */
@Override
public Optional<AllocatedSlot> removeSlot(AllocationID allocationId) {
    final AllocatedSlot removedSlot = removeSlotInternal(allocationId);

    if (removedSlot != null) {
        final ResourceID owner = removedSlot.getTaskManagerId();

        slotsPerTaskExecutor.computeIfPresent(
                owner,
                (resourceID, allocationIds) -> {
                    allocationIds.remove(allocationId);

                    // Returning null drops the task executor's entry entirely
                    // once it owns no more slots.
                    if (allocationIds.isEmpty()) {
                        return null;
                    }

                    return allocationIds;
                });

        return Optional.of(removedSlot);
    } else {
        return Optional.empty();
    }
}
// Removing one slot from the pool must leave exactly the remaining slots behind.
@Test
void testRemoveSlot() {
    final DefaultAllocatedSlotPool slotPool = new DefaultAllocatedSlotPool();

    final Collection<AllocatedSlot> slots = createAllocatedSlots();
    slotPool.addSlots(slots, 0);

    // Drop the first slot from the expectation set and from the pool alike.
    final Iterator<AllocatedSlot> iterator = slots.iterator();
    final AllocatedSlot removedSlot = iterator.next();
    iterator.remove();

    slotPool.removeSlot(removedSlot.getAllocationId());

    assertSlotPoolContainsSlots(slotPool, slots);
}
public FEELFnResult<List<Object>> invoke(@ParameterName("list") Object[] lists) { if ( lists == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } // spec requires us to return a new list final List<Object> result = new ArrayList<>(); for ( Object list : lists ) { if ( list == null ) { // TODO review accordingly to spec, original behavior was: return null; return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "lists", "one of the elements in the list is null")); } else if ( list instanceof Collection ) { result.addAll( (Collection) list ); } else { result.add( list ); } } return FEELFnResult.ofResult( result ); }
// concatenate() must report an InvalidParametersEvent when any element of the
// argument array is null, regardless of its position.
@Test
void invokeArrayWithNull() {
    FunctionTestUtil.assertResultError(concatenateFunction.invoke(new Object[]{null}), InvalidParametersEvent.class);
    FunctionTestUtil.assertResultError(concatenateFunction.invoke(new Object[]{1, null}), InvalidParametersEvent.class);
}
/**
 * Builds the set of OpenStack/Swift authentication request candidates for the given
 * bookmark, selecting the Keystone API version from the protocol context path:
 * Rackspace identity endpoints always get the RAX v2.0 username/key request; paths
 * containing "1.0"/"1.1" get legacy username/key requests; "2.0" gets username+tenant
 * requests (prompting for the tenant if it is not encoded as "tenant:user" in the
 * username); "3" gets a project-scoped v3 request (prompting for project and domain
 * as needed, encoded as "project:domain:user"); anything else falls back to v1.0.
 *
 * @param bookmark host with protocol, hostname, port and credentials
 * @param prompt callback used to ask the user for tenant / project / domain names
 * @return one or more authentication requests to try in order
 * @throws LoginCanceledException when the user dismisses a prompt
 */
public Set<? extends AuthenticationRequest> getRequest(final Host bookmark, final LoginCallback prompt) throws LoginCanceledException {
    final StringBuilder url = new StringBuilder();
    url.append(bookmark.getProtocol().getScheme().toString()).append("://");
    url.append(bookmark.getHostname());
    // Only append an explicit port when it differs from the scheme default.
    if(!(bookmark.getProtocol().getScheme().getPort() == bookmark.getPort())) {
        url.append(":").append(bookmark.getPort());
    }
    final String context = PathNormalizer.normalize(bookmark.getProtocol().getContext());
    // Custom authentication context
    url.append(context);
    if(bookmark.getProtocol().getDefaultHostname().endsWith("identity.api.rackspacecloud.com")
            || bookmark.getHostname().endsWith("identity.api.rackspacecloud.com")) {
        // Rackspace identity service uses its own v2.0 username/key request.
        return Collections.singleton(new Authentication20RAXUsernameKeyRequest(
                URI.create(url.toString()),
                bookmark.getCredentials().getUsername(),
                bookmark.getCredentials().getPassword(), null)
        );
    }
    final LoginOptions options = new LoginOptions(bookmark.getProtocol()).password(false).anonymous(false).publickey(false);
    if(context.contains("1.0")) {
        return Collections.singleton(new Authentication10UsernameKeyRequest(URI.create(url.toString()),
                bookmark.getCredentials().getUsername(), bookmark.getCredentials().getPassword()));
    }
    else if(context.contains("1.1")) {
        return Collections.singleton(new Authentication11UsernameKeyRequest(URI.create(url.toString()),
                bookmark.getCredentials().getUsername(), bookmark.getCredentials().getPassword()));
    }
    else if(context.contains("2.0")) {
        // Prompt for tenant
        final String user;
        final String tenant;
        if(StringUtils.contains(bookmark.getCredentials().getUsername(), ':')) {
            // Username already carries the tenant as "tenant:user".
            final String[] parts = StringUtils.splitPreserveAllTokens(bookmark.getCredentials().getUsername(), ':');
            tenant = parts[0];
            user = parts[1];
        }
        else {
            user = bookmark.getCredentials().getUsername();
            tenant = prompt.prompt(bookmark, bookmark.getCredentials().getUsername(),
                    LocaleFactory.localizedString("Provide additional login credentials", "Credentials"),
                    LocaleFactory.localizedString("Tenant Name", "Mosso"), options
                            .usernamePlaceholder(LocaleFactory.localizedString("Tenant Name", "Mosso"))).getUsername();
            // Save tenant in username
            bookmark.getCredentials().setUsername(String.format("%s:%s", tenant, bookmark.getCredentials().getUsername()));
        }
        // Try multiple v2.0 request flavors; LinkedHashSet preserves attempt order.
        final Set<AuthenticationRequest> requests = new LinkedHashSet<>();
        requests.add(new Authentication20UsernamePasswordRequest(
                URI.create(url.toString()),
                user, bookmark.getCredentials().getPassword(), tenant)
        );
        requests.add(new Authentication20UsernamePasswordTenantIdRequest(
                URI.create(url.toString()),
                user, bookmark.getCredentials().getPassword(), tenant)
        );
        requests.add(new Authentication20AccessKeySecretKeyRequest(
                URI.create(url.toString()),
                user, bookmark.getCredentials().getPassword(), tenant));
        return requests;
    }
    else if(context.contains("3")) {
        // Prompt for project
        final String user;
        final String project;
        final String domain;
        if(StringUtils.contains(bookmark.getCredentials().getUsername(), ':')) {
            // Username carries "project:domain:user" (3 parts) or "project:user" (2 parts).
            final String[] parts = StringUtils.splitPreserveAllTokens(bookmark.getCredentials().getUsername(), ':');
            if(parts.length == 3) {
                project = parts[0];
                domain = parts[1];
                user = parts[2];
            }
            else {
                project = parts[0];
                user = parts[1];
                domain = prompt.prompt(bookmark, bookmark.getCredentials().getUsername(),
                        LocaleFactory.localizedString("Provide additional login credentials", "Credentials"),
                        LocaleFactory.localizedString("Project Domain Name", "Mosso"), options
                                .usernamePlaceholder(LocaleFactory.localizedString("Project Domain Name", "Mosso"))).getUsername();
                // Save project name and domain in username
                bookmark.getCredentials().setUsername(String.format("%s:%s:%s", project, domain, bookmark.getCredentials().getUsername()));
            }
        }
        else {
            user = bookmark.getCredentials().getUsername();
            final Credentials projectName = prompt.prompt(bookmark, bookmark.getCredentials().getUsername(),
                    LocaleFactory.localizedString("Provide additional login credentials", "Credentials"),
                    LocaleFactory.localizedString("Project Name", "Mosso"), options
                            .usernamePlaceholder(LocaleFactory.localizedString("Project Name", "Mosso")));
            // NOTE(review): this condition re-tests the bookmark username, which was
            // just established NOT to contain ':' — so this branch looks unreachable.
            // The split below uses projectName.getUsername(); presumably the condition
            // was meant to test projectName.getUsername() as well — confirm with callers.
            if(StringUtils.contains(bookmark.getCredentials().getUsername(), ':')) {
                final String[] parts = StringUtils.splitPreserveAllTokens(projectName.getUsername(), ':');
                project = parts[0];
                domain = parts[1];
            }
            else {
                project = projectName.getUsername();
                domain = prompt.prompt(bookmark, bookmark.getCredentials().getUsername(),
                        LocaleFactory.localizedString("Provide additional login credentials", "Credentials"),
                        LocaleFactory.localizedString("Project Domain Name", "Mosso"), options
                                .usernamePlaceholder(LocaleFactory.localizedString("Project Domain Name", "Mosso"))).getUsername();
            }
            // Save project name and domain in username
            bookmark.getCredentials().setUsername(String.format("%s:%s:%s", project, domain, bookmark.getCredentials().getUsername()));
        }
        final Set<AuthenticationRequest> requests = new LinkedHashSet<>();
        requests.add(new Authentication3UsernamePasswordProjectRequest(
                URI.create(url.toString()),
                user, bookmark.getCredentials().getPassword(), project, domain)
        );
        return requests;
    }
    else {
        log.warn(String.format("Unknown context version in %s. Default to v1 authentication.", context));
        // Default to 1.0
        return Collections.singleton(new Authentication10UsernameKeyRequest(URI.create(url.toString()),
                bookmark.getCredentials().getUsername(), bookmark.getCredentials().getPassword()));
    }
}
// Verifies auth-version selection: Rackspace identity hosts get v2.0 (POST),
// everything else with a /v1.0 context gets v1.0 (GET), non-default ports are
// kept in the endpoint URI, and the concrete request class matches the version.
@Test
public void testGetRequest() throws Exception {
    final SwiftAuthenticationService s = new SwiftAuthenticationService();
    final SwiftProtocol protocol = new SwiftProtocol() {
        @Override
        public String getContext() {
            return "/v1.0";
        }
    };
    assertEquals(Client.AuthVersion.v20,
        s.getRequest(new Host(protocol, "identity.api.rackspacecloud.com", new Credentials("u", "P")), new DisabledLoginCallback())
            .iterator().next().getVersion());
    assertEquals(Client.AuthVersion.v10,
        s.getRequest(new Host(protocol, "region-b.geo-1.identity.hpcloudsvc.com", new Credentials("u", "P")), new DisabledLoginCallback())
            .iterator().next().getVersion());
    assertEquals(Client.AuthVersion.v10,
        s.getRequest(new Host(protocol, "myhost", new Credentials("u", "P")), new DisabledLoginCallback())
            .iterator().next().getVersion());
    assertEquals(Client.AuthVersion.v10,
        s.getRequest(new Host(protocol, "myhost", new Credentials("u", "P")), new DisabledLoginCallback())
            .iterator().next().getVersion());
    assertEquals("GET",
        s.getRequest(new Host(protocol, "myhost", new Credentials("u", "P")), new DisabledLoginCallback())
            .iterator().next().getMethod());
    assertEquals("POST",
        s.getRequest(new Host(protocol, "lon.identity.api.rackspacecloud.com", new Credentials("u", "P")), new DisabledLoginCallback())
            .iterator().next().getMethod());
    final Host host = new Host(protocol, "identity.openstack.com", new Credentials("u", "P"));
    host.setPort(3451);
    assertEquals(URI.create("https://identity.openstack.com:3451/v1.0"),
        s.getRequest(host, new DisabledLoginCallback()).iterator().next().getURI());
    assertEquals(Client.AuthVersion.v10,
        s.getRequest(host, new DisabledLoginCallback()).iterator().next().getVersion());
    assertEquals(Authentication10UsernameKeyRequest.class,
        s.getRequest(host, new DisabledLoginCallback()).iterator().next().getClass());
}
/**
 * Runs every registered node health check and merges their results into a single
 * {@link Health}, starting from GREEN. Checks are evaluated in registration order
 * and folded left with {@code HealthReducer.merge}.
 *
 * @return the merged health of this node
 */
@Override
public Health checkNode() {
    Health aggregate = Health.GREEN;
    for (NodeHealthCheck nodeHealthCheck : nodeHealthChecks) {
        aggregate = HealthReducer.merge(aggregate, nodeHealthCheck.check());
    }
    return aggregate;
}
// The merged node Health must aggregate the causes of every NodeHealthCheck,
// regardless of each individual check's status. Uses a randomized number of
// checks (1..20) each with a random number of causes (0..2).
@Test
public void checkNode_returns_causes_of_all_NodeHealthCheck_whichever_their_status() {
    NodeHealthCheck[] nodeHealthChecks = IntStream.range(0, 1 + random.nextInt(20))
        .mapToObj(s -> new HardcodedHealthNodeCheck(IntStream.range(0, random.nextInt(3)).mapToObj(i -> randomAlphanumeric(3)).toArray(String[]::new)))
        .map(NodeHealthCheck.class::cast)
        .toArray(NodeHealthCheck[]::new);
    String[] expected = Arrays.stream(nodeHealthChecks).map(NodeHealthCheck::check).flatMap(s -> s.getCauses().stream()).toArray(String[]::new);
    HealthCheckerImpl underTest = new HealthCheckerImpl(nodeInformation, nodeHealthChecks);

    assertThat(underTest.checkNode().getCauses()).containsOnly(expected);
}
/**
 * Parses a SCRAM credential from its string form, which must contain exactly the
 * four properties salt, stored_key, server_key and iterations. The key material
 * is base64-encoded; iterations is a decimal integer.
 *
 * @param str serialized credential as produced by {@code credentialToString}
 * @return the parsed credential
 * @throws IllegalArgumentException if any property is missing or extra properties exist
 */
public static ScramCredential credentialFromString(String str) {
    Properties props = toProps(str);
    // Exactly the four expected keys, nothing more, nothing less.
    boolean valid = props.size() == 4
            && props.containsKey(SALT)
            && props.containsKey(STORED_KEY)
            && props.containsKey(SERVER_KEY)
            && props.containsKey(ITERATIONS);
    if (!valid) {
        throw new IllegalArgumentException("Credentials not valid: " + str);
    }
    Base64.Decoder decoder = Base64.getDecoder();
    return new ScramCredential(
            decoder.decode(props.getProperty(SALT)),
            decoder.decode(props.getProperty(STORED_KEY)),
            decoder.decode(props.getProperty(SERVER_KEY)),
            Integer.parseInt(props.getProperty(ITERATIONS)));
}
// A string lacking the required salt/stored_key/server_key/iterations properties
// must be rejected with IllegalArgumentException.
@Test
public void invalidCredential() {
    assertThrows(IllegalArgumentException.class, () -> ScramCredentialUtils.credentialFromString("abc"));
}
/**
 * Sends a base64-encoded provisioning message to the given destination address.
 *
 * <p>Rejects oversized bodies with 400 (and counts them per platform), applies the
 * per-account messages rate limiter (RateLimitExceededException propagates to the
 * framework's 429 mapping), and responds 404 when no provisioning client is
 * listening on the destination address.
 */
@Path("/{destination}")
@PUT
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public void sendProvisioningMessage(@ReadOnly @Auth AuthenticatedDevice auth,
    @PathParam("destination") String destinationName,
    @NotNull @Valid ProvisioningMessage message,
    @HeaderParam(HttpHeaders.USER_AGENT) String userAgent)
    throws RateLimitExceededException {

  if (message.body().length() > MAX_MESSAGE_SIZE) {
    // Counted before rejecting so oversize attempts are observable per platform.
    Metrics.counter(REJECT_OVERSIZE_MESSAGE_COUNTER, Tags.of(UserAgentTagUtil.getPlatformTag(userAgent)))
        .increment();

    throw new WebApplicationException(Response.Status.BAD_REQUEST);
  }

  rateLimiters.getMessagesLimiter().validate(auth.getAccount().getUuid());

  if (!provisioningManager.sendProvisioningMessage(ProvisioningAddress.create(destinationName),
      Base64.getMimeDecoder().decode(message.body()))) {

    throw new WebApplicationException(Response.Status.NOT_FOUND);
  }
}
// When the messages rate limiter rejects the sender, the endpoint must respond 429
// and must not attempt to deliver the provisioning message.
@Test
void sendProvisioningMessageRateLimited() throws RateLimitExceededException {
  final String destination = UUID.randomUUID().toString();
  final byte[] messageBody = "test".getBytes(StandardCharsets.UTF_8);

  doThrow(new RateLimitExceededException(Duration.ZERO))
      .when(messagesRateLimiter).validate(AuthHelper.VALID_UUID);

  try (final Response response = RESOURCE_EXTENSION.getJerseyTest()
      .target("/v1/provisioning/" + destination)
      .request()
      .header("Authorization", AuthHelper.getAuthHeader(AuthHelper.VALID_UUID, AuthHelper.VALID_PASSWORD))
      .put(Entity.entity(new ProvisioningMessage(Base64.getMimeEncoder().encodeToString(messageBody)),
          MediaType.APPLICATION_JSON))) {

    assertEquals(429, response.getStatus());

    verify(provisioningManager, never()).sendProvisioningMessage(any(), any());
  }
}
/**
 * Z-order (Morton) encodes the input values into a byte array "address" by
 * interleaving their bits from most significant to least significant, which
 * preserves data locality across the encoded dimensions.
 *
 * <p>When negative values are allowed, one sign bit per input is written first
 * (0 for negative, 1 for non-negative) so that encoded ordering matches signed
 * integer ordering.
 *
 * @param input one value per dimension; validated by checkEncodeInputValidity
 * @return the packed address, ceil(totalBitLength / 8) bytes long
 */
public byte[] encodeToByteArray(List<Integer> input)
{
    checkEncodeInputValidity(input);

    // Find address byte length by rounding up (totalBitLength / 8)
    byte[] address = new byte[(totalBitLength + 7) >> 3];

    if (!positiveIntegersOnly) {
        // Modify sign bits to preserve ordering between positive and negative integers
        int bitIndex = totalBitLength - 1;
        for (int value : input) {
            byte signBit = (value < 0) ? (byte) 0 : 1;
            // bitIndex >> 3 selects the byte; bitIndex & 7 selects the bit within it.
            address[bitIndex >> 3] |= signBit << (bitIndex & 7);
            bitIndex--;
        }
    }

    // Start below the sign bits (if any) and fill toward the least significant bit.
    int bitIndex = positiveIntegersOnly ? totalBitLength - 1 : totalBitLength - encodingBits.size() - 1;
    // Interweave input bits into address from the most significant bit to preserve data locality
    for (int bitsProcessed = 0; bitsProcessed < maxBitLength; bitsProcessed++) {
        for (int index = 0; index < input.size(); index++) {
            // Dimensions with fewer encoding bits stop contributing early.
            if (bitsProcessed >= encodingBits.get(index)) {
                continue;
            }
            int bitPosition = encodingBits.get(index) - bitsProcessed - 1;
            byte maskedBit = (byte) ((input.get(index) >> bitPosition) & 1);
            address[bitIndex >> 3] |= maskedBit << (bitIndex & 7);
            bitIndex--;
        }
    }

    return address;
}
// Encoding an empty value list must be rejected with a descriptive IllegalArgumentException.
@Test
public void testZOrderEmptyInput()
{
    ZOrder zOrder = new ZOrder(ImmutableList.of(8, 8), true);
    List<Integer> intColumns = ImmutableList.of();
    try {
        zOrder.encodeToByteArray(intColumns);
        fail("Expected test to fail: input size should be greater than zero.");
    }
    catch (IllegalArgumentException e) {
        String expectedMessage = "Input list size should be greater than zero.";
        assertEquals(e.getMessage(), expectedMessage,
            format("Expected exception message '%s' to match '%s'", e.getMessage(), expectedMessage));
    }
}
/**
 * Structural equality: two cells are equal when their dirty state, current
 * distribution value ({@code value.get()}), and metric name all match.
 *
 * <p>NOTE(review): assumes {@code value.get()} is cheap and side-effect free, and
 * that hashCode (not visible here) is computed from the same three fields so the
 * equals/hashCode contract holds — confirm against the rest of the class.
 */
@Override
public boolean equals(@Nullable Object object) {
    if (object instanceof DistributionCell) {
        DistributionCell distributionCell = (DistributionCell) object;
        return Objects.equals(dirty, distributionCell.dirty)
            && Objects.equals(value.get(), distributionCell.value.get())
            && Objects.equals(name, distributionCell.name);
    }
    return false;
}
// Two freshly constructed cells with the same metric name must be equal and
// produce the same hash code.
@Test
public void testEquals() {
    DistributionCell distributionCell = new DistributionCell(MetricName.named("namespace", "name"));
    DistributionCell equal = new DistributionCell(MetricName.named("namespace", "name"));
    Assert.assertEquals(distributionCell, equal);
    Assert.assertEquals(distributionCell.hashCode(), equal.hashCode());
}
/**
 * Removes a single trailing slash from the URL path while leaving the query
 * string and fragment intact (e.g. "/blah/?p=v" becomes "/blah?p=v").
 *
 * @return this normalizer, for chaining
 */
public URLNormalizer removeTrailingSlash() {
    String urlRoot = HttpURL.getRoot(url);
    String path = toURL().getPath();
    // Anchor the replacement on root + path so only the path portion is rewritten.
    String urlRootAndPath = urlRoot + path;

    if (path.endsWith("/")) {
        String newPath = StringUtils.removeEnd(path, "/");
        String newUrlRootAndPath = urlRoot + newPath;
        // replaceOnce: only the first occurrence is rewritten, so an identical
        // substring appearing later (e.g. inside the query string) is untouched.
        url = StringUtils.replaceOnce(
                url, urlRootAndPath, newUrlRootAndPath);
    }
    return this;
}
// removeTrailingSlash() must strip exactly one trailing path slash, leave
// slash-free URLs untouched, and preserve query strings and fragments.
@Test
public void testRemoveTrailingSlash() {
    s = "http://www.example.com/alice/";
    t = "http://www.example.com/alice";
    assertEquals(t, n(s).removeTrailingSlash().toString());
    s = "http://www.example.com/alice.html";
    t = "http://www.example.com/alice.html";
    assertEquals(t, n(s).removeTrailingSlash().toString());
    s = "http://www.example.com";
    t = "http://www.example.com";
    assertEquals(t, n(s).removeTrailingSlash().toString());
    s = "http://www.example.com/blah/?param=value";
    t = "http://www.example.com/blah?param=value";
    assertEquals(t, n(s).removeTrailingSlash().toString());
    s = "http://www.example.com/blah?param=value";
    t = "http://www.example.com/blah?param=value";
    assertEquals(t, n(s).removeTrailingSlash().toString());
    s = "http://www.example.com/blah/#value";
    t = "http://www.example.com/blah#value";
    assertEquals(t, n(s).removeTrailingSlash().toString());
    s = "http://www.example.com/blah#value";
    t = "http://www.example.com/blah#value";
    assertEquals(t, n(s).removeTrailingSlash().toString());
    s = "http://www.example.com/";
    t = "http://www.example.com";
    assertEquals(t, n(s).removeTrailingSlash().toString());
}
/**
 * Validates the given connector configuration and reports the result through the
 * callback. Thin delegation to the three-argument overload with a {@code true}
 * flag — presumably enabling the overload's default behavior (e.g. async or
 * logging); confirm against the overload's declaration.
 */
@Override
public void validateConnectorConfig(Map<String, String> connectorProps, Callback<ConfigInfos> callback) {
    validateConnectorConfig(connectorProps, callback, true);
}
// Two null-valued config entries must each produce a validation error (errorCount == 2),
// and validation must run inside the connector's isolated plugin classloader.
@Test
public void testConfigValidationMultipleNullConfig() {
    AbstractHerder herder = createConfigValidationHerder(SampleSourceConnector.class, noneConnectorClientConfigOverridePolicy);

    Map<String, String> config = new HashMap<>();
    config.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, SampleSourceConnector.class.getName());
    config.put("name", "somename");
    config.put("required", "value");
    config.put("testKey", null);
    config.put("secondTestKey", null);

    final ConfigInfos configInfos = herder.validateConnectorConfig(config, s -> null, false);
    assertEquals(2, configInfos.errorCount());
    assertErrorForKey(configInfos, "testKey");
    assertErrorForKey(configInfos, "secondTestKey");

    verifyValidationIsolation();
}
/**
 * Parses a MAC-48/EUI-48 (17-char, e.g. {@code 00-AA-11-FF-FE-BB}) or EUI-64
 * (23-char) address string into its byte representation. Each byte is two hex
 * digits followed by a single separator character, which is taken from index 2
 * and must be used consistently throughout the string.
 *
 * @param value the textual MAC address
 * @return the decoded machine ID bytes (6 for EUI-48, 8 for EUI-64)
 * @throws IllegalArgumentException if the length is unsupported, the separator
 *         is invalid, or a separator position holds the wrong character
 */
public static byte[] parseMAC(String value) {
    final byte[] machineId;
    final char separator;
    switch (value.length()) {
        case 17:
            separator = value.charAt(2);
            validateMacSeparator(separator);
            machineId = new byte[EUI48_MAC_ADDRESS_LENGTH];
            break;
        case 23:
            separator = value.charAt(2);
            validateMacSeparator(separator);
            machineId = new byte[EUI64_MAC_ADDRESS_LENGTH];
            break;
        default:
            throw new IllegalArgumentException("value is not supported [MAC-48, EUI-48, EUI-64]");
    }

    final int end = machineId.length - 1;
    int j = 0;
    for (int i = 0; i < end; ++i, j += 3) {
        final int sIndex = j + 2;

        machineId[i] = StringUtil.decodeHexByte(value, j);
        if (value.charAt(sIndex) != separator) {
            // Fixed: original message omitted the closing quote after the
            // expected separator character ("...separator 'X but got...").
            throw new IllegalArgumentException("expected separator '" + separator + "' but got '" +
                    value.charAt(sIndex) + "' at index: " + sIndex);
        }
    }

    // The last byte has no trailing separator to validate.
    machineId[end] = StringUtil.decodeHexByte(value, j);

    return machineId;
}
// A trailing separator makes the string 24 characters long, which is not a valid
// MAC-48/EUI-48 (17) or EUI-64 (23) length, so parseMAC must reject it.
// Modernized: replaced the anonymous Executable with an equivalent lambda.
@Test
public void testParseMacInvalidEUI64TrailingSeparatorA() {
    assertThrows(IllegalArgumentException.class, () -> parseMAC("00-AA-11-FF-FE-BB-22-CC-"));
}