focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Bridges the metadata-extractor library's Metadata container to the directory-level
// handler: extracts the directory iterator and delegates to handle(Iterator).
protected void handle(com.drew.metadata.Metadata metadataExtractor) throws MetadataException { handle(metadataExtractor.getDirectories().iterator()); }
// Verifies the EXIF handler falls back to TAG_DATETIME (plain Date/Time) for
// TikaCoreProperties.CREATED when Date/Time Original is not present.
// The mocked directory reports a UTC 1999-01-01 date, matching metadata-extractor's convention.
@Test public void testExifHandlerParseDateFallback() throws MetadataException { ExifIFD0Directory exif = Mockito.mock(ExifIFD0Directory.class); Mockito.when(exif.containsTag(ExifIFD0Directory.TAG_DATETIME)).thenReturn(true); GregorianCalendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"), Locale.ROOT); calendar.setTimeInMillis(0); calendar.set(1999, 0, 1, 0, 0, 0); Mockito.when(exif.getDate(ExifIFD0Directory.TAG_DATETIME)) .thenReturn(calendar.getTime()); // UTC timezone as in Metadata Extractor Metadata metadata = new Metadata(); new ImageMetadataExtractor.ExifHandler().handle(exif, metadata); assertEquals("1999-01-01T00:00:00", metadata.get(TikaCoreProperties.CREATED), "Should try EXIF Date/Time if Original is not set"); }
// Records one call outcome into both the running total aggregation and the bucket for
// the current window slot (moveWindowByOne() advances the slot; presumably it also
// evicts the oldest bucket from the total — TODO confirm against moveWindowByOne()).
// synchronized: window state is shared across threads. Returns a snapshot of the total.
@Override public synchronized Snapshot record(long duration, TimeUnit durationUnit, Outcome outcome) { totalAggregation.record(duration, durationUnit, outcome); moveWindowByOne().record(duration, durationUnit, outcome); return new SnapshotImpl(totalAggregation); }
// Exercises a size-4 sliding window: one ERROR followed by four SUCCESSes.
// Failure rate evolves 100 -> 50 -> 33.3 -> 25 as successes accumulate, and the fifth
// record evicts the initial error, so the final snapshot shows 4 calls, 0 failures.
@Test public void testSlidingWindowMetrics() { FixedSizeSlidingWindowMetrics metrics = new FixedSizeSlidingWindowMetrics(4); Snapshot snapshot = metrics.record(100, TimeUnit.MILLISECONDS, Metrics.Outcome.ERROR); assertThat(snapshot.getTotalNumberOfCalls()).isEqualTo(1); assertThat(snapshot.getNumberOfSuccessfulCalls()).isZero(); assertThat(snapshot.getNumberOfFailedCalls()).isEqualTo(1); assertThat(snapshot.getTotalDuration().toMillis()).isEqualTo(100); assertThat(snapshot.getAverageDuration().toMillis()).isEqualTo(100); assertThat(snapshot.getFailureRate()).isEqualTo(100); snapshot = metrics.record(100, TimeUnit.MILLISECONDS, Metrics.Outcome.SUCCESS); assertThat(snapshot.getTotalNumberOfCalls()).isEqualTo(2); assertThat(snapshot.getNumberOfSuccessfulCalls()).isEqualTo(1); assertThat(snapshot.getNumberOfFailedCalls()).isEqualTo(1); assertThat(snapshot.getTotalDuration().toMillis()).isEqualTo(200); assertThat(snapshot.getAverageDuration().toMillis()).isEqualTo(100); assertThat(snapshot.getFailureRate()).isEqualTo(50); snapshot = metrics.record(100, TimeUnit.MILLISECONDS, Metrics.Outcome.SUCCESS); assertThat(snapshot.getTotalNumberOfCalls()).isEqualTo(3); assertThat(snapshot.getNumberOfSuccessfulCalls()).isEqualTo(2); assertThat(snapshot.getNumberOfFailedCalls()).isEqualTo(1); assertThat(snapshot.getTotalDuration().toMillis()).isEqualTo(300); assertThat(snapshot.getAverageDuration().toMillis()).isEqualTo(100); assertThat(snapshot.getFailureRate()).isEqualTo(33.333332f); snapshot = metrics.record(100, TimeUnit.MILLISECONDS, Metrics.Outcome.SUCCESS); assertThat(snapshot.getTotalNumberOfCalls()).isEqualTo(4); assertThat(snapshot.getNumberOfSuccessfulCalls()).isEqualTo(3); assertThat(snapshot.getNumberOfFailedCalls()).isEqualTo(1); assertThat(snapshot.getTotalDuration().toMillis()).isEqualTo(400); assertThat(snapshot.getAverageDuration().toMillis()).isEqualTo(100); assertThat(snapshot.getFailureRate()).isEqualTo(25); snapshot = metrics.record(100, TimeUnit.MILLISECONDS, 
Metrics.Outcome.SUCCESS); assertThat(snapshot.getTotalNumberOfCalls()).isEqualTo(4); assertThat(snapshot.getNumberOfSuccessfulCalls()).isEqualTo(4); assertThat(snapshot.getNumberOfFailedCalls()).isZero(); assertThat(snapshot.getTotalDuration().toMillis()).isEqualTo(400); assertThat(snapshot.getAverageDuration().toMillis()).isEqualTo(100); assertThat(snapshot.getFailureRate()).isZero(); }
// Contract: mutate the given Metadata in place (subclasses define the transformation).
// Throws TikaException when filtering fails.
public abstract void filter(Metadata metadata) throws TikaException;
// Verifies that a timezone-less CREATED date, interpreted in the configured default
// zone (America/Los_Angeles), is normalized to the equivalent UTC instant (+7h in July, PDT).
@Test public void testDateNormalizingFilter() throws Exception { //test that a Date lacking a timezone, if interpreted as Los Angeles, for example, //yields a UTC string that is properly +7 hours. Metadata m = new Metadata(); m.set(TikaCoreProperties.CREATED, "2021-07-23T01:02:24"); DateNormalizingMetadataFilter filter = new DateNormalizingMetadataFilter(); filter.setDefaultTimeZone("America/Los_Angeles"); filter.filter(m); assertEquals("2021-07-23T08:02:24Z", m.get(TikaCoreProperties.CREATED)); }
/**
 * Converts the detected clone groups of a file into report duplications and writes them
 * to the scanner report, keeping at most MAX_CLONE_GROUP_PER_FILE groups (a warning is
 * logged when the cap truncates the list).
 */
final void saveDuplications(final DefaultInputComponent component, List<CloneGroup> duplications) {
  if (duplications.size() > MAX_CLONE_GROUP_PER_FILE) {
    LOG.warn("Too many duplication groups on file {}. Keep only the first {} groups.", component, MAX_CLONE_GROUP_PER_FILE);
  }
  // Protobuf builders are created once and reused for every group, as in the
  // original anonymous-class version where they were instance fields.
  final ScannerReport.Duplication.Builder dupBuilder = ScannerReport.Duplication.newBuilder();
  final ScannerReport.Duplicate.Builder blockBuilder = ScannerReport.Duplicate.newBuilder();
  Iterable<ScannerReport.Duplication> reportDuplications = duplications.stream()
    .limit(MAX_CLONE_GROUP_PER_FILE)
    .map(group -> toReportDuplication(component, dupBuilder, blockBuilder, group))
    ::iterator;
  publisher.getWriter().writeComponentDuplications(component.scannerId(), reportDuplications);
}
// Two clone groups sharing file 1 as origin, duplicated into files 2 and 3 respectively;
// both must be persisted and attributed to the correct counterpart component.
@Test public void should_save_two_duplicated_groups_involving_three_files() { List<CloneGroup> groups = Arrays.asList( newCloneGroup(new ClonePart(batchComponent1.key(), 0, 5, 204), new ClonePart(batchComponent2.key(), 0, 15, 214)), newCloneGroup(new ClonePart(batchComponent1.key(), 0, 15, 214), new ClonePart(batchComponent3.key(), 0, 15, 214))); executor.saveDuplications(batchComponent1, groups); Duplication[] dups = readDuplications(2); assertDuplication(dups[0], 5, 204, batchComponent2.scannerId(), 15, 214); assertDuplication(dups[1], 15, 214, batchComponent3.scannerId(), 15, 214); }
// Returns the current consumer group metadata under the consumer's acquire/release
// guard (single-threaded access + closed-state check). Throws if no group id is
// configured. The double get() suggests an Optional/holder wrapper — TODO confirm.
@Override public ConsumerGroupMetadata groupMetadata() { acquireAndEnsureOpen(); try { maybeThrowInvalidGroupIdException(); return groupMetadata.get().get(); } finally { release(); } }
// Verifies that a member epoch/id update delivered through the MemberStateListener is
// reflected by subsequent groupMetadata() calls, while groupId and groupInstanceId stay stable.
@Test public void testGroupMetadataUpdate() { final String groupId = "consumerGroupA"; final ConsumerConfig config = new ConsumerConfig(requiredConsumerConfigAndGroupId(groupId)); try (final MockedStatic<RequestManagers> requestManagers = mockStatic(RequestManagers.class)) { consumer = newConsumer(config); final ConsumerGroupMetadata oldGroupMetadata = consumer.groupMetadata(); final MemberStateListener groupMetadataUpdateListener = captureGroupMetadataUpdateListener(requestManagers); final int expectedMemberEpoch = 42; final String expectedMemberId = "memberId"; groupMetadataUpdateListener.onMemberEpochUpdated( Optional.of(expectedMemberEpoch), Optional.of(expectedMemberId) ); final ConsumerGroupMetadata newGroupMetadata = consumer.groupMetadata(); assertEquals(oldGroupMetadata.groupId(), newGroupMetadata.groupId()); assertEquals(expectedMemberId, newGroupMetadata.memberId()); assertEquals(expectedMemberEpoch, newGroupMetadata.generationId()); assertEquals(oldGroupMetadata.groupInstanceId(), newGroupMetadata.groupInstanceId()); } }
// Builds the struct type of the grouping key: a projection over the partition fields
// whose source columns are active in the given schema, common to all provided specs.
public static StructType groupingKeyType(Schema schema, Collection<PartitionSpec> specs) { return buildPartitionProjectionType("grouping key", specs, commonActiveFieldIds(schema, specs)); }
// A V1 table evolved from partitioned-by-data to unpartitioned has two specs; the
// grouping key common to both must be the empty struct.
@Test public void testGroupingKeyTypeWithEvolvedIntoUnpartitionedSpecV1Table() { TestTables.TestTable table = TestTables.create(tableDir, "test", SCHEMA, BY_DATA_SPEC, V1_FORMAT_VERSION); table.updateSpec().removeField("data").commit(); assertThat(table.specs()).hasSize(2); StructType expectedType = StructType.of(); StructType actualType = Partitioning.groupingKeyType(table.schema(), table.specs().values()); assertThat(actualType).isEqualTo(expectedType); }
/**
 * Wraps a {@link RestrictionTracker} so that claim calls are reported to the given
 * observer. If the tracker also reports progress (implements HasProgress), the wrapper
 * preserves that capability so callers can still query progress through it.
 */
public static <RestrictionT, PositionT> RestrictionTracker<RestrictionT, PositionT> observe(
    RestrictionTracker<RestrictionT, PositionT> restrictionTracker,
    ClaimObserver<PositionT> claimObserver) {
  return restrictionTracker instanceof RestrictionTracker.HasProgress
      ? new RestrictionTrackerObserverWithProgress<>(restrictionTracker, claimObserver)
      : new RestrictionTrackerObserver<>(restrictionTracker, claimObserver);
}
// Wrapping a progress-reporting tracker must yield a wrapper that still implements
// HasProgress (the observer argument is irrelevant here, hence null).
@Test public void testClaimObserversMaintainBacklogInterfaces() { RestrictionTracker hasSize = RestrictionTrackers.observe(new RestrictionTrackerWithProgress(), null); assertThat(hasSize, instanceOf(HasProgress.class)); }
// Derives the QueryId for a statement. Precedence order (first match wins):
//  1. explicit WITH(QUERY_ID=...) -> validated, upper-cased, used verbatim;
//  2. CREATE SOURCE TABLE -> CST prefix + table name + generated id;
//  3. no sink (transient query) -> "transient_" + leftmost source alias + random long;
//  4. INSERT INTO (not createInto) -> INSERT prefix + generated id;
//  5. sink already written by exactly one query -> reuse its id (CREATE OR REPLACE),
//     or fail if createOrReplace is disabled; >1 writer is always an error;
//  6. otherwise a fresh CTAS/CSAS id depending on whether the output is a table or stream.
static QueryId buildId( final Statement statement, final EngineContext engineContext, final QueryIdGenerator idGenerator, final OutputNode outputNode, final boolean createOrReplaceEnabled, final Optional<String> withQueryId) { if (withQueryId.isPresent()) { final String queryId = withQueryId.get().toUpperCase(); validateWithQueryId(queryId); return new QueryId(queryId); } if (statement instanceof CreateTable && ((CreateTable) statement).isSource()) { // Use the CST name as part of the QueryID final String suffix = ((CreateTable) statement).getName().text().toUpperCase() + "_" + idGenerator.getNext().toUpperCase(); return new QueryId(ReservedQueryIdsPrefixes.CST + suffix); } if (!outputNode.getSinkName().isPresent()) { final String prefix = "transient_" + outputNode.getSource().getLeftmostSourceNode().getAlias().text() + "_"; return new QueryId(prefix + Math.abs(ThreadLocalRandom.current().nextLong())); } final KsqlStructuredDataOutputNode structured = (KsqlStructuredDataOutputNode) outputNode; if (!structured.createInto()) { return new QueryId(ReservedQueryIdsPrefixes.INSERT + idGenerator.getNext()); } final SourceName sink = outputNode.getSinkName().get(); final Set<QueryId> queriesForSink = engineContext.getQueryRegistry().getQueriesWithSink(sink); if (queriesForSink.size() > 1) { throw new KsqlException("REPLACE for sink " + sink + " is not supported because there are " + "multiple queries writing into it: " + queriesForSink); } else if (!queriesForSink.isEmpty()) { if (!createOrReplaceEnabled) { final String type = outputNode.getNodeOutputType().getKsqlType().toLowerCase(); throw new UnsupportedOperationException( String.format( "Cannot add %s '%s': A %s with the same name already exists", type, sink.text(), type)); } return Iterables.getOnlyElement(queriesForSink); } final String suffix = outputNode.getId().toString().toUpperCase() + "_" + idGenerator.getNext().toUpperCase(); return new QueryId( outputNode.getNodeOutputType() == DataSourceType.KTABLE ? 
ReservedQueryIdsPrefixes.CTAS + suffix : ReservedQueryIdsPrefixes.CSAS + suffix ); }
// A fresh CSAS (stream output, createInto, no existing writers) gets id CSAS_<planId>_<gen>.
@Test public void shouldComputeQueryIdCorrectlyForNewStream() { // Given: when(plan.getSinkName()).thenReturn(Optional.of(SINK)); when(plan.getId()).thenReturn(new PlanNodeId("FOO")); when(plan.getNodeOutputType()).thenReturn(DataSourceType.KSTREAM); when(plan.createInto()).thenReturn(true); when(idGenerator.getNext()).thenReturn("1"); when(queryRegistry.getQueriesWithSink(SINK)).thenReturn(ImmutableSet.of()); // When: final QueryId queryId = QueryIdUtil.buildId(statement, engineContext, idGenerator, plan, false, Optional.empty()); // Then: assertThat(queryId, is(new QueryId("CSAS_FOO_1"))); }
/**
 * Persists the compost state for a farming patch in the RuneScape profile config.
 * A null state clears the stored entry instead of writing a value.
 */
public void setCompostState(FarmingPatch fp, CompostState state) {
  log.debug("Storing compost state [{}] for patch [{}]", state, fp);
  if (state != null) {
    configManager.setRSProfileConfiguration(TimeTrackingConfig.CONFIG_GROUP, configKey(fp), state);
  } else {
    configManager.unsetRSProfileConfiguration(TimeTrackingConfig.CONFIG_GROUP, configKey(fp));
  }
}
// Storing a null compost state must clear the config entry rather than write "null".
@Test public void setCompostState_storesNullChangesByClearingConfig() { compostTracker.setCompostState(farmingPatch, null); verify(configManager).unsetRSProfileConfiguration("timetracking", "MOCK.compost"); }
// Registers a source node in the topology. Validation order matters and is observable:
// topics must be non-empty, the node name must be non-null and unused, then each topic is
// null-checked, checked for prior registration, added to the offset-reset list, and recorded
// as a raw source topic — all before the node factory is created. Finally the cached
// nodeGroups are invalidated because the topology changed.
public final void addSource(final Topology.AutoOffsetReset offsetReset, final String name, final TimestampExtractor timestampExtractor, final Deserializer<?> keyDeserializer, final Deserializer<?> valDeserializer, final String... topics) { if (topics.length == 0) { throw new TopologyException("You must provide at least one topic"); } Objects.requireNonNull(name, "name must not be null"); if (nodeFactories.containsKey(name)) { throw new TopologyException("Processor " + name + " is already added."); } for (final String topic : topics) { Objects.requireNonNull(topic, "topic names cannot be null"); validateTopicNotAlreadyRegistered(topic); maybeAddToResetList(earliestResetTopics, latestResetTopics, offsetReset, topic); rawSourceTopicNames.add(topic); } nodeFactories.put(name, new SourceNodeFactory<>(name, topics, null, timestampExtractor, keyDeserializer, valDeserializer)); nodeToSourceTopics.put(name, Arrays.asList(topics)); nodeGrouper.add(name); nodeGroups = null; }
// Adding a second source with the same node name (even on a different topic and with a
// different reset policy) must fail with TopologyException.
@Test public void shouldNotAllowOffsetResetSourceWithDuplicateSourceName() { builder.addSource(Topology.AutoOffsetReset.EARLIEST, "source", null, stringSerde.deserializer(), stringSerde.deserializer(), "topic-1"); try { builder.addSource(Topology.AutoOffsetReset.LATEST, "source", null, stringSerde.deserializer(), stringSerde.deserializer(), "topic-2"); fail("Should throw TopologyException for duplicate source name"); } catch (final TopologyException expected) { /* ok */ } }
// Serializes a Dropwizard metric to a generic map: full_name, short name (after the
// last dot), a "type" discriminator, and a "metric" payload. Timers/meters/histograms
// are converted via helpers; counters and gauges are embedded as-is.
// NOTE(review): the instanceof chain order may matter if a metric class implements
// several of these interfaces — verify before reordering. Unknown types are rejected.
public static Map<String, Object> map(String metricName, Metric metric) { final Map<String, Object> metricMap = Maps.newHashMap(); metricMap.put("full_name", metricName); metricMap.put("name", metricName.substring(metricName.lastIndexOf(".") + 1)); if (metric instanceof Timer) { metricMap.put("metric", buildTimerMap((Timer) metric)); metricMap.put("type", "timer"); } else if(metric instanceof Meter) { metricMap.put("metric", buildMeterMap((Meter) metric)); metricMap.put("type", "meter"); } else if(metric instanceof Histogram) { metricMap.put("metric", buildHistogramMap((Histogram) metric)); metricMap.put("type", "histogram"); } else if(metric instanceof Counter) { metricMap.put("metric", metric); metricMap.put("type", "counter"); } else if(metric instanceof Gauge) { metricMap.put("metric", metric); metricMap.put("type", "gauge"); } else { throw new IllegalArgumentException("Unknown metric type " + metric.getClass()); } return metricMap; }
// A lambda-implemented Gauge must be classified as "gauge" and embedded so its value
// is reachable via the "metric" entry.
@Test public void mapSupportsGaugeLambda() { final Gauge<Integer> gauge = () -> 23; final Map<String, Object> map = MetricUtils.map("metric", gauge); assertThat(map) .containsEntry("type", "gauge") .extracting("metric") .extracting("value") .isEqualTo(23); }
/**
 * Creates a temporary local file for the given path, using a freshly generated
 * random UUID-based string as the unique segment (delegates to create(String, Path)).
 */
@Override
public Local create(final Path file) {
  final String uid = new UUIDRandomStringService().random();
  return this.create(uid, file);
}
// Temp path layout: <tmpdir>/<uid>/<parent>/<checksum>/<filename>. The constant
// 1742810335 is presumably a hash derived from the path/attributes — TODO confirm;
// the second assertion shows setting a region does not change it.
@Test public void testCreateFile() { final String temp = StringUtils.removeEnd(System.getProperty("java.io.tmpdir"), File.separator); final String s = System.getProperty("file.separator"); assertEquals(String.format("%s%su%sp%s1742810335%sf", temp, s, s, s, s), new DefaultTemporaryFileService().create("u", new Path("/p/f", EnumSet.of(Path.Type.file))).getAbsolute()); final Path file = new Path("/p/f", EnumSet.of(Path.Type.file)); file.attributes().setRegion("region"); assertEquals(String.format("%s%su%sp%s1742810335%sf", temp, s, s, s, s), new DefaultTemporaryFileService().create("u", file).getAbsolute()); }
// Forwards the periodic server ping (with the cluster profiles) to the elastic-agent
// plugin extension, with debug logging around the call.
public void serverPing(String pluginId, List<Map<String, String>> clusterProfiles) { LOGGER.debug("Processing server ping for plugin {} with clusters {}", pluginId, clusterProfiles); extension.serverPing(pluginId, clusterProfiles); LOGGER.debug("Done processing server ping for plugin {} with clusters {}", pluginId, clusterProfiles); }
// serverPing must delegate exactly once to the extension with unchanged arguments.
@Test public void shouldTalkToExtensionToExecuteServerPingCall() { final Map<String, String> clusterProfileProperties = Map.of("GoServerURL", "foo"); elasticAgentPluginRegistry.serverPing(PLUGIN_ID, List.of(clusterProfileProperties)); verify(elasticAgentExtension, times(1)).serverPing(PLUGIN_ID, List.of(clusterProfileProperties)); verifyNoMoreInteractions(elasticAgentExtension); }
/**
 * Walks the AST to collect every data source referenced by the statement, then
 * returns the accumulated set. The Visitor records sources as a side effect of
 * process(); getAllSources() exposes what it gathered.
 */
public Set<Analysis.AliasedDataSource> extractDataSources(final AstNode node) {
  final Visitor visitor = new Visitor();
  visitor.process(node, null);
  return getAllSources();
}
// Referencing an unregistered source in FROM must surface a KsqlException naming it.
@Test public void shouldThrowIfSourceDoesNotExist() { // Given: final AstNode stmt = givenQuery("SELECT * FROM UNKNOWN;"); // When: final Exception e = assertThrows( KsqlException.class, () -> extractor.extractDataSources(stmt) ); // Then: assertThat(e.getMessage(), containsString( "UNKNOWN does not exist.")); }
// Intentional no-op: the top-level command only hosts subcommands, so running it
// directly does nothing.
@Override public void run() { // top-level command, do nothing }
// Cancelling a job by its name should eventually drive the job to FAILED status.
@Test public void test_cancelJob_byJobName() { // Given Job job = newJob(); // When run("cancel", job.getName()); // Then assertThat(job).eventuallyHasStatus(JobStatus.FAILED); }
// Parses a Bitcoin "version" message from the wire in strict field order: client
// version (rejected below MINIMUM), local services, timestamp, receiving
// services/address/port, then skips addr_from and the 8-byte localhost nonce (only
// used to detect self-connections, which never happen here since we don't accept
// inbound connections), subVer string, best height, and — only for clients at or above
// the BLOOM_FILTER protocol version — the relay flag (defaults to true otherwise).
// Throws BufferUnderflowException on truncated payloads.
public static VersionMessage read(ByteBuffer payload) throws BufferUnderflowException, ProtocolException { int clientVersion = (int) ByteUtils.readUint32(payload); check(clientVersion >= ProtocolVersion.MINIMUM.intValue(), ProtocolException::new); Services localServices = Services.read(payload); Instant time = Instant.ofEpochSecond(ByteUtils.readInt64(payload)); Services receivingServices = Services.read(payload); InetAddress receivingInetAddress = PeerAddress.getByAddress(Buffers.readBytes(payload, 16)); int receivingPort = ByteUtils.readUint16BE(payload); InetSocketAddress receivingAddr = new InetSocketAddress(receivingInetAddress, receivingPort); Buffers.skipBytes(payload, NETADDR_BYTES); // addr_from // uint64 localHostNonce (random data) // We don't care about the localhost nonce. It's used to detect connecting back to yourself in cases where // there are NATs and proxies in the way. However we don't listen for inbound connections so it's // irrelevant. Buffers.skipBytes(payload, 8); // string subVer (currently "") String subVer = Buffers.readLengthPrefixedString(payload); // int bestHeight (size of known block chain). long bestHeight = ByteUtils.readUint32(payload); boolean relayTxesBeforeFilter = clientVersion >= ProtocolVersion.BLOOM_FILTER.intValue() ? payload.get() != 0 : true; return new VersionMessage(clientVersion, localServices, time, receivingServices, receivingAddr, subVer, bestHeight, relayTxesBeforeFilter); }
// A payload truncated before subVer/bestHeight must be rejected with ProtocolException.
// The local variable is unused on purpose: only the thrown exception matters.
@Test(expected = ProtocolException.class) public void decode_relay_noBestHeight_noSubVer() { String hex = "00000000000000000000000048e5e95000000000000000000000000000000000000000000000ffff7f000001479d000000000000000000000000000000000000ffff7f000001479d0000000000000000"; VersionMessage ver = VersionMessage.read(ByteBuffer.wrap(ByteUtils.parseHex(hex))); }
// Convenience factory: builds a coder for Avro GenericRecords with the given schema
// by delegating to AvroGenericCoder's own factory.
public static AvroGenericCoder of(Schema schema) { return AvroGenericCoder.of(schema); }
// Determinism by field type: empty/int/string/array/enum records and a union are
// deterministic; a map field is not (Avro MAPs are encoded via HashMap iteration order).
@Test public void testAvroCoderSimpleSchemaDeterminism() { assertDeterministic(AvroCoder.of(SchemaBuilder.record("someRecord").fields().endRecord())); assertDeterministic( AvroCoder.of( SchemaBuilder.record("someRecord") .fields() .name("int") .type() .intType() .noDefault() .endRecord())); assertDeterministic( AvroCoder.of( SchemaBuilder.record("someRecord") .fields() .name("string") .type() .stringType() .noDefault() .endRecord())); assertNonDeterministic( AvroCoder.of( SchemaBuilder.record("someRecord") .fields() .name("map") .type() .map() .values() .stringType() .noDefault() .endRecord()), reason("someRecord.map", "HashMap to represent MAPs")); assertDeterministic( AvroCoder.of( SchemaBuilder.record("someRecord") .fields() .name("array") .type() .array() .items() .stringType() .noDefault() .endRecord())); assertDeterministic( AvroCoder.of( SchemaBuilder.record("someRecord") .fields() .name("enum") .type() .enumeration("anEnum") .symbols("s1", "s2") .enumDefault("s1") .endRecord())); assertDeterministic( AvroCoder.of( SchemaBuilder.unionOf() .intType() .and() .record("someRecord") .fields() .nullableString("someField", "") .endRecord() .endUnion())); }
// Creates an empty file, first computing the checksum of a zero-length stream and
// attaching it to the transfer status so the upload carries a valid checksum.
@Override public Path touch(final Path file, final TransferStatus status) throws BackgroundException { return super.touch(file, status.withChecksum(write.checksum(file, status).compute(new NullInputStream(0L), status))); }
// Integration test: touching an S3 key containing URI-special characters ("-+*~@([")
// must succeed, be findable, round-trip attributes, and be deletable.
@Test public void testTouchUriEncoding() throws Exception { final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory)); final Path test = new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch( new Path(container, String.format("%s-+*~@([", new AsciiRandomStringService().random()), EnumSet.of(Path.Type.file)), new TransferStatus()); assertNull(test.attributes().getVersionId()); assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(test)); assertEquals(test.attributes(), new S3AttributesFinderFeature(session, new S3AccessControlListFeature(session)).find(test)); new S3DefaultDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); assertFalse(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(test)); }
/**
 * Starts a YARN service application by PUTting a STARTED-state Service record to the
 * API server. Returns the processed response code on success, or EXIT_EXCEPTION_THROWN
 * if anything fails (the failure is logged, never propagated).
 */
@Override
public int actionStart(String appName) throws IOException, YarnException {
  try {
    Service service = new Service();
    service.setName(appName);
    service.setState(ServiceState.STARTED);
    String body = jsonSerDeser.toJson(service);
    ClientResponse response = getApiClient(getServicePath(appName))
        .put(ClientResponse.class, body);
    return processResponse(response);
  } catch (Exception e) {
    // Best-effort CLI action: report failure via exit code, not an exception.
    LOG.error("Fail to start application: ", e);
    return EXIT_EXCEPTION_THROWN;
  }
}
// Starting an unknown app against a bad client must return EXIT_EXCEPTION_THROWN
// rather than throwing.
@Test void testBadStart() { String appName = "unknown_app"; try { int result = badAsc.actionStart(appName); assertEquals(EXIT_EXCEPTION_THROWN, result); } catch (IOException | YarnException e) { fail(); } }
/**
 * Refreshes the locks of running job triggers, but only when at least one worker
 * slot is in use — with no running jobs there is nothing to keep locked.
 */
public void updateLockedJobs() {
  if (!workerPool.anySlotsUsed()) {
    return;
  }
  jobTriggerService.updateLockedJobTriggers();
}
// Trigger locks are refreshed only while workers occupy slots: first call (no slots
// used) is a no-op, second call (slots used) performs exactly one update.
@Test void updateLockedJobsOnlyIfSomeJobWorkersRun() { JobExecutionEngine underTest = new JobExecutionEngine(jobTriggerService, jobDefinitionService, eventBus, scheduleStrategies, jobTriggerUpdatesFactory, refreshingLockServiceFactory, jobFactory, workerPool, jobSchedulerConfig, metricRegistry); underTest.updateLockedJobs(); given(workerPool.anySlotsUsed()).willReturn(true); underTest.updateLockedJobs(); verify(jobTriggerService, times(1)).updateLockedJobTriggers(); }
// End-to-end latency benchmark. Args: broker_list topic num_messages producer_acks
// message_size_bytes [properties_file]. acks must be "1" or "all" — latency measurement
// requires synchronous acknowledgement. Creates the topic if missing, then for each
// iteration sends a random payload synchronously and polls it back, timing the round
// trip. Progress is printed every 1000 messages. Note: latencies[] stores whole
// milliseconds via integer division (sub-ms precision only in totalTime/printResults).
static void execute(String... args) throws Exception { if (args.length != 5 && args.length != 6) { throw new TerseException("USAGE: java " + EndToEndLatency.class.getName() + " broker_list topic num_messages producer_acks message_size_bytes [optional] properties_file"); } String brokers = args[0]; String topic = args[1]; int numMessages = Integer.parseInt(args[2]); String acks = args[3]; int messageSizeBytes = Integer.parseInt(args[4]); Optional<String> propertiesFile = (args.length > 5 && !Utils.isBlank(args[5])) ? Optional.of(args[5]) : Optional.empty(); if (!Arrays.asList("1", "all").contains(acks)) { throw new IllegalArgumentException("Latency testing requires synchronous acknowledgement. Please use 1 or all"); } try (KafkaConsumer<byte[], byte[]> consumer = createKafkaConsumer(propertiesFile, brokers); KafkaProducer<byte[], byte[]> producer = createKafkaProducer(propertiesFile, brokers, acks)) { if (!consumer.listTopics().containsKey(topic)) { createTopic(propertiesFile, brokers, topic); } setupConsumer(topic, consumer); double totalTime = 0.0; long[] latencies = new long[numMessages]; Random random = new Random(0); for (int i = 0; i < numMessages; i++) { byte[] message = randomBytesOfLen(random, messageSizeBytes); long begin = System.nanoTime(); //Send message (of random bytes) synchronously then immediately poll for it producer.send(new ProducerRecord<>(topic, message)).get(); ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofMillis(POLL_TIMEOUT_MS)); long elapsed = System.nanoTime() - begin; validate(consumer, message, records); //Report progress if (i % 1000 == 0) System.out.println(i + "\t" + elapsed / 1000.0 / 1000.0); totalTime += elapsed; latencies[i] = elapsed / 1000 / 1000; } printResults(numMessages, totalTime, latencies); consumer.commitSync(); } }
// acks="0" (fire-and-forget) is invalid for latency testing and must be rejected
// before any client is created.
@Test public void shouldFailWhenProducerAcksAreNotSynchronised() { String[] args = new String[] {"localhost:9092", "test", "10000", "0", "200"}; assertThrows(IllegalArgumentException.class, () -> EndToEndLatency.execute(args)); }
// Lists schemas visible in the given scope via SHOW SCHEMAS: account-wide for ROOT,
// or scoped with "IN DATABASE IDENTIFIER(?)" for DATABASE (the identifier passed as a
// bind parameter). SQLExceptions are translated to Iceberg exceptions with scope
// context; interruption becomes UncheckedInterruptedException. A post-condition
// asserts every returned identifier is of type SCHEMA.
@Override public List<SnowflakeIdentifier> listSchemas(SnowflakeIdentifier scope) { StringBuilder baseQuery = new StringBuilder("SHOW SCHEMAS"); String[] queryParams = null; switch (scope.type()) { case ROOT: // account-level listing baseQuery.append(" IN ACCOUNT"); break; case DATABASE: // database-level listing baseQuery.append(" IN DATABASE IDENTIFIER(?)"); queryParams = new String[] {scope.toIdentifierString()}; break; default: throw new IllegalArgumentException( String.format("Unsupported scope type for listSchemas: %s", scope)); } final String finalQuery = baseQuery.toString(); final String[] finalQueryParams = queryParams; List<SnowflakeIdentifier> schemas; try { schemas = connectionPool.run( conn -> queryHarness.query( conn, finalQuery, SCHEMA_RESULT_SET_HANDLER, finalQueryParams)); } catch (SQLException e) { throw snowflakeExceptionToIcebergException( scope, e, String.format("Failed to list schemas for scope '%s'", scope)); } catch (InterruptedException e) { throw new UncheckedInterruptedException( e, "Interrupted while listing schemas for scope '%s'", scope); } schemas.forEach( schema -> Preconditions.checkState( schema.type() == SnowflakeIdentifier.Type.SCHEMA, "Expected SCHEMA, got identifier '%s' for scope '%s'", schema, scope)); return schemas; }
// Database-scoped listing must issue "SHOW SCHEMAS IN DATABASE IDENTIFIER(?)" with
// the database name bound, and map the result rows to schema identifiers.
@SuppressWarnings("unchecked") @Test public void testListSchemasInDatabase() throws SQLException { when(mockResultSet.next()).thenReturn(true).thenReturn(true).thenReturn(false); when(mockResultSet.getString("database_name")).thenReturn("DB_1").thenReturn("DB_1"); when(mockResultSet.getString("name")).thenReturn("SCHEMA_1").thenReturn("SCHEMA_2"); List<SnowflakeIdentifier> actualList = snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1")); verify(mockQueryHarness) .query( eq(mockConnection), eq("SHOW SCHEMAS IN DATABASE IDENTIFIER(?)"), any(JdbcSnowflakeClient.ResultSetParser.class), eq("DB_1")); assertThat(actualList) .containsExactly( SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_1"), SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_2")); }
/**
 * Entry point of the CoGroup builder: starts a join definition from the given
 * field-matching clause.
 */
public static Impl join(By clause) {
  final JoinArguments arguments = new JoinArguments(clause);
  return new Impl(arguments);
}
// Three-way inner join on (user, country) across three PCollections with differing
// field names; checks both the composite row schema (one nested row per input) and
// the full cross-product of matching rows.
@Test @Category(NeedsRunner.class) public void testInnerJoin() { List<Row> pc1Rows = Lists.newArrayList( Row.withSchema(CG_SCHEMA_1).addValues("user1", 1, "us").build(), Row.withSchema(CG_SCHEMA_1).addValues("user1", 2, "us").build(), Row.withSchema(CG_SCHEMA_1).addValues("user1", 3, "il").build(), Row.withSchema(CG_SCHEMA_1).addValues("user1", 4, "il").build(), Row.withSchema(CG_SCHEMA_1).addValues("user2", 5, "fr").build(), Row.withSchema(CG_SCHEMA_1).addValues("user2", 6, "fr").build(), Row.withSchema(CG_SCHEMA_1).addValues("user2", 7, "ar").build(), Row.withSchema(CG_SCHEMA_1).addValues("user2", 8, "ar").build()); List<Row> pc2Rows = Lists.newArrayList( Row.withSchema(CG_SCHEMA_2).addValues("user1", 9, "us").build(), Row.withSchema(CG_SCHEMA_2).addValues("user1", 10, "us").build(), Row.withSchema(CG_SCHEMA_2).addValues("user1", 11, "il").build(), Row.withSchema(CG_SCHEMA_2).addValues("user1", 12, "il").build(), Row.withSchema(CG_SCHEMA_2).addValues("user2", 13, "fr").build(), Row.withSchema(CG_SCHEMA_2).addValues("user2", 14, "fr").build(), Row.withSchema(CG_SCHEMA_2).addValues("user2", 15, "ar").build(), Row.withSchema(CG_SCHEMA_2).addValues("user2", 16, "ar").build()); List<Row> pc3Rows = Lists.newArrayList( Row.withSchema(CG_SCHEMA_3).addValues("user1", 17, "us").build(), Row.withSchema(CG_SCHEMA_3).addValues("user1", 18, "us").build(), Row.withSchema(CG_SCHEMA_3).addValues("user1", 19, "il").build(), Row.withSchema(CG_SCHEMA_3).addValues("user1", 20, "il").build(), Row.withSchema(CG_SCHEMA_3).addValues("user2", 21, "fr").build(), Row.withSchema(CG_SCHEMA_3).addValues("user2", 22, "fr").build(), Row.withSchema(CG_SCHEMA_3).addValues("user2", 23, "ar").build(), Row.withSchema(CG_SCHEMA_3).addValues("user2", 24, "ar").build()); PCollection<Row> pc1 = pipeline.apply("Create1", Create.of(pc1Rows)).setRowSchema(CG_SCHEMA_1); PCollection<Row> pc2 = pipeline.apply("Create2", Create.of(pc2Rows)).setRowSchema(CG_SCHEMA_2); PCollection<Row> pc3 = 
pipeline.apply("Create3", Create.of(pc3Rows)).setRowSchema(CG_SCHEMA_3); Schema expectedSchema = Schema.builder() .addRowField("pc1", CG_SCHEMA_1) .addRowField("pc2", CG_SCHEMA_2) .addRowField("pc3", CG_SCHEMA_3) .build(); PCollection<Row> joined = PCollectionTuple.of("pc1", pc1, "pc2", pc2, "pc3", pc3) .apply( "CoGroup", CoGroup.join("pc1", By.fieldNames("user", "country")) .join("pc2", By.fieldNames("user2", "country2")) .join("pc3", By.fieldNames("user3", "country3")) .crossProductJoin()); assertEquals(expectedSchema, joined.getSchema()); List<Row> expectedJoinedRows = JoinTestUtils.innerJoin( pc1Rows, pc2Rows, pc3Rows, new String[] {"user", "country"}, new String[] {"user2", "country2"}, new String[] {"user3", "country3"}, expectedSchema); PAssert.that(joined).containsInAnyOrder(expectedJoinedRows); pipeline.run(); }
// Splits text into tokens by repeatedly finding matches of regexExpression: each
// non-empty gap before a match becomes a token, then the match itself, then the
// trailing remainder. NOTE(review): after each match, the resume position is
// decremented when the character at the match end is not '_' — the author's comment
// says this compensates for the matcher sometimes ending after an '_' it should share
// with the next match (see overlapping tokens like "_100_101_102_" / "_103_104_").
// This also means the following gap substring can start one char inside the previous
// match; behavior is deliberately preserved as-is — confirm against tokenizer tests
// before changing.
public List<String> tokenize(String text) { List<String> tokens = new ArrayList<>(); Matcher regexMatcher = regexExpression.matcher(text); int lastIndexOfPrevMatch = 0; while (regexMatcher.find(lastIndexOfPrevMatch)) // this is where the magic happens: // the regexp is used to find a matching pattern for substitution { int beginIndexOfNextMatch = regexMatcher.start(); String prevToken = text.substring(lastIndexOfPrevMatch, beginIndexOfNextMatch); if (!prevToken.isEmpty()) { tokens.add(prevToken); } String currentMatch = regexMatcher.group(); tokens.add(currentMatch); lastIndexOfPrevMatch = regexMatcher.end(); if (lastIndexOfPrevMatch < text.length() && text.charAt(lastIndexOfPrevMatch) != '_') { // beause it is sometimes positioned after the "_", but it should be positioned // before the "_" --lastIndexOfPrevMatch; } } String tail = text.substring(lastIndexOfPrevMatch); if (!tail.isEmpty()) { tokens.add(tail); } return tokens; }
// Overlapping vocabulary entries share a boundary '_' in the input; the tokenizer
// must still produce both compound tokens, consuming the shared underscore once.
@Test void testTokenize_happyPath_8() { // given CompoundCharacterTokenizer tokenizer = new CompoundCharacterTokenizer( new HashSet<>(Arrays.asList("_100_101_102_", "_101_102_", "_103_104_"))); String text = "_100_101_102_103_104_"; // when List<String> tokens = tokenizer.tokenize(text); // then assertEquals(Arrays.asList("_100_101_102_", "_103_104_"), tokens); }
// Returns the configured sprites value, or null when the SPRITES key is unset.
public String sprites() { return get(SPRITES, null); }
// Round-trips the sprites setting: layout default, overwrite with a new value, then
// clear back to null via a null argument.
@Test public void setSprites() { loadLayout(L3); assertEquals("sprites not westminster", UK_LONDON_WEST, cfg.sprites()); cfg.sprites(NEW_SPR); assertEquals("not new sprites", NEW_SPR, cfg.sprites()); cfg.sprites(null); assertNull("sprites not cleared", cfg.sprites()); }
// Converts the given annotations to a DataMap; the boolean flag selects top-level
// handling in annotationsToData (presumably filtering to RestSpec-relevant
// annotations — TODO confirm against annotationsToData).
public static DataMap getAnnotationsMap(Annotation[] as) { return annotationsToData(as, true); }
// An annotation that is not a RestSpecAnnotation must be dropped, yielding an empty map.
@Test(description = "Non-RestSpecAnnotation annotation: annotation is not recorded") public void succeedsOnNonRestSpecAnnotation() { @NonRestSpecAnnotation class LocalClass { } final Annotation[] annotations = LocalClass.class.getAnnotations(); final DataMap actual = ResourceModelAnnotation.getAnnotationsMap(annotations); Assert.assertEquals(EMPTY_DATA_MAP, actual); }
/**
 * Resolves the client's configured member addresses. Falls back to 127.0.0.1 when
 * none are configured (note: this appends to the list returned by the network config,
 * mirroring the original behavior). Each configured entry is expanded into socket
 * addresses; the flattened primary+secondary list is reported to the listener runner
 * before the Addresses aggregate is returned.
 */
@Override
public Addresses loadAddresses(ClientConnectionProcessListenerRegistry listenerRunner) {
  List<String> configured = networkConfig.getAddresses();
  if (configured.isEmpty()) {
    configured.add("127.0.0.1");
  }
  Addresses resolved = new Addresses();
  for (String entry : configured) {
    resolved.addAll(AddressHelper.getSocketAddresses(entry, listenerRunner));
  }
  List<Address> flattened = new ArrayList<>(resolved.primary());
  flattened.addAll(resolved.secondary());
  listenerRunner.onPossibleAddressesCollected(flattened);
  return resolved;
}
// Addresses configured with explicit ports must be kept verbatim and in order as
// primary addresses, with no secondary (port-scanned) fallbacks generated.
@Test public void whenExplicitPorts() throws UnknownHostException { ClientNetworkConfig config = new ClientNetworkConfig(); config.addAddress("10.0.0.1:5703"); config.addAddress("10.0.0.1:5702"); DefaultAddressProvider provider = new DefaultAddressProvider(config, () -> false); Addresses addresses = provider.loadAddresses(createConnectionProcessListenerRunner()); assertPrimary(addresses, new Address("10.0.0.1", 5703), new Address("10.0.0.1", 5702)); assertSecondaryEmpty(addresses); }
/**
 * Parses a comma-separated list of event codes into an {@link EnumSet}.
 * Each token is resolved in order of preference: a special alias expanding to a
 * set of codes, then the enum constant name, then the numeric code id.
 * Unresolvable tokens are reported on {@code err} and skipped.
 *
 * @param eventCodeType   enum class of the event codes
 * @param eventCodes      comma-separated tokens; may be null/empty
 * @param specialEvents   alias -&gt; set-of-codes expansions
 * @param eventCodeById   lookup by numeric id
 * @param eventCodeByName lookup by constant name
 * @return the set of resolved event codes (empty for empty input)
 */
static <E extends Enum<E>> EnumSet<E> parseEventCodes(
    final Class<E> eventCodeType,
    final String eventCodes,
    final Map<String, EnumSet<E>> specialEvents,
    final IntFunction<E> eventCodeById,
    final Function<String, E> eventCodeByName)
{
    if (Strings.isEmpty(eventCodes))
    {
        return EnumSet.noneOf(eventCodeType);
    }

    final EnumSet<E> result = EnumSet.noneOf(eventCodeType);
    for (final String token : eventCodes.split(","))
    {
        final EnumSet<E> expansion = specialEvents.get(token);
        if (null != expansion)
        {
            result.addAll(expansion);
            continue;
        }

        E resolved;
        try
        {
            resolved = eventCodeByName.apply(token);
        }
        catch (final IllegalArgumentException ignore)
        {
            resolved = null;
        }

        if (null == resolved)
        {
            try
            {
                // NumberFormatException is an IllegalArgumentException, so this
                // also covers non-numeric tokens.
                resolved = eventCodeById.apply(Integer.parseInt(token));
            }
            catch (final IllegalArgumentException ignore)
            {
            }
        }

        if (null != resolved)
        {
            result.add(resolved);
        }
        else
        {
            err.println("unknown event code: " + token);
        }
    }

    return result;
}
// Invalid tokens ("A" resolves to neither a name nor an id) must be skipped
// with a diagnostic on stderr, while valid name ("FOO") and id ("2") tokens
// are still parsed.
@Test
void parseEventCodesShouldIgnoreInvalidEventCodes() {
    final PrintStream err = System.err;
    final ByteArrayOutputStream stderr = new ByteArrayOutputStream();
    System.setErr(new PrintStream(stderr));
    try {
        final EnumSet<TestEvent> parsedEvents = parseEventCodes(
            TestEvent.class,
            "A,FOO,2",
            Collections.emptyMap(),
            (i) -> TestEvent.values()[i],
            TestEvent::valueOf);
        assertEquals(EnumSet.of(TestEvent.FOO, TestEvent.BAZ), parsedEvents);
        assertThat(stderr.toString(), startsWith("unknown event code: A"));
    } finally {
        // always restore the real stderr
        System.setErr(err);
    }
}
/**
 * Returns the SCM revision of the analyzed code, if known.
 *
 * @return an empty Optional when the revision holder has not been initialized
 *         or holds a null value; otherwise the revision string
 */
@Override
public Optional<String> getScmRevision() {
    return scmRevision.isInitialized()
        ? Optional.ofNullable(scmRevision.getProperty())
        : Optional.empty();
}
// A freshly constructed holder has no SCM revision: the getter must return
// an empty Optional rather than throwing or returning null.
@Test
public void getScmRevision_returns_empty_if_scmRevision_is_not_initialized() {
    AnalysisMetadataHolderImpl underTest = new AnalysisMetadataHolderImpl(editionProvider);
    assertThat(underTest.getScmRevision()).isNotPresent();
}
/**
 * Returns the shard iterators to poll, keyed by shard id.
 *
 * On the first call (empty cache) the stream is described, the shard tree is
 * populated, and fresh iterators are requested according to the configured
 * {@link StreamIteratorType}. On subsequent calls, cached iterators for shards
 * that have been split/merged are replaced by TRIM_HORIZON iterators for their
 * child shards; still-active leaf shards keep their cached iterator.
 */
Map<String, String> getShardIterators() {
    if (streamArn == null) {
        // lazily resolve and cache the stream ARN
        streamArn = getStreamArn();
    }
    // Either return cached ones or get new ones via GetShardIterator requests.
    if (currentShardIterators.isEmpty()) {
        DescribeStreamResponse streamDescriptionResult = getClient().describeStream(DescribeStreamRequest.builder().streamArn(streamArn).build());
        shardTree.populate(streamDescriptionResult.streamDescription().shards());
        StreamIteratorType streamIteratorType = getEndpoint().getConfiguration().getStreamIteratorType();
        currentShardIterators = getCurrentShardIterators(streamIteratorType);
    } else {
        Map<String, String> childShardIterators = new HashMap<>();
        for (Entry<String, String> currentShardIterator : currentShardIterators.entrySet()) {
            List<Shard> children = shardTree.getChildren(currentShardIterator.getKey());
            if (children.isEmpty()) {
                // This is still an active leaf shard, reuse it.
                childShardIterators.put(currentShardIterator.getKey(), currentShardIterator.getValue());
            } else {
                for (Shard child : children) {
                    // Inactive shard, move down to its children.
                    // TRIM_HORIZON so no records of the child shard are skipped.
                    String shardIterator = getShardIterator(child.shardId(), ShardIteratorType.TRIM_HORIZON);
                    childShardIterators.put(child.shardId(), shardIterator);
                }
            }
        }
        currentShardIterators = childShardIterators;
    }
    LOG.trace("Shard Iterators are: {}", currentShardIterators);
    return currentShardIterators;
}
// With FROM_LATEST, only the leaf shards of the shard tree (3..6) should get
// iterators; parent shards that were split/merged must be excluded.
@Test
void shouldReturnLeafShardIterators() throws Exception {
    component.getConfiguration().setStreamIteratorType(StreamIteratorType.FROM_LATEST);
    Ddb2StreamEndpoint endpoint = (Ddb2StreamEndpoint) component.createEndpoint("aws2-ddbstreams://myTable");
    ShardIteratorHandler underTest = new ShardIteratorHandler(endpoint);
    endpoint.doStart();
    Map<String, String> expectedShardIterators = new HashMap<>();
    expectedShardIterators.put(SHARD_3.shardId(), SHARD_ITERATOR_3);
    expectedShardIterators.put(SHARD_4.shardId(), SHARD_ITERATOR_4);
    expectedShardIterators.put(SHARD_5.shardId(), SHARD_ITERATOR_5);
    expectedShardIterators.put(SHARD_6.shardId(), SHARD_ITERATOR_6);
    assertEquals(expectedShardIterators, underTest.getShardIterators());
}
/**
 * Registers schemas for CREATE ... AS SELECT and CREATE SOURCE statements,
 * translating generic KsqlExceptions into KsqlStatementExceptions carrying the
 * masked statement text. KsqlStatementExceptions are rethrown unchanged so the
 * original statement context is preserved.
 *
 * @return the statement with the schema-id config stripped from its SessionConfig
 */
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
    final ConfiguredStatement<T> statement
) {
    try {
        if (statement.getStatement() instanceof CreateAsSelect) {
            registerForCreateAs((ConfiguredStatement<? extends CreateAsSelect>) statement);
        } else if (statement.getStatement() instanceof CreateSource) {
            registerForCreateSource((ConfiguredStatement<? extends CreateSource>) statement);
        }
    } catch (final KsqlStatementException e) {
        // already carries statement context; do not re-wrap
        throw e;
    } catch (final KsqlException e) {
        throw new KsqlStatementException(
            ErrorMessageUtil.buildErrorMessage(e),
            statement.getMaskedStatementText(),
            e.getCause());
    }
    // Remove schema id from SessionConfig
    return stripSchemaIdConfig(statement);
}
// Supplying VALUE_SCHEMA_ID for a format that does not register schemas in
// Schema Registry (JSON) must be rejected with a clear error.
@Test
public void shouldThrowWrongValueFormatExceptionWithOverrideSchema() {
    // Given:
    final SchemaAndId schemaAndId = SchemaAndId.schemaAndId(SCHEMA.value(), AVRO_SCHEMA, 1);
    givenStatement("CREATE STREAM source (id int key, f1 varchar) "
        + "WITH ("
        + "kafka_topic='expectedName', "
        + "key_format='KAFKA', "
        + "value_format='JSON', "
        + "value_schema_id=1, "
        + "partitions=1"
        + ");", Pair.of(null, schemaAndId));

    // When:
    final Exception e = assertThrows(
        KsqlStatementException.class,
        () -> injector.inject(statement)
    );

    // Then:
    assertThat(e.getMessage(), containsString(
        "VALUE_SCHEMA_ID is provided but format JSON doesn't "
            + "support registering in Schema Registry"));
}
/**
 * Renders each row of the table as an aligned, pipe-delimited string.
 * A single format string (sized to the widest cell of each column) is applied
 * to every row so columns line up across the whole table.
 *
 * @param table rows of cells; all rows are expected to have the same arity
 * @return one formatted string per row, in table order
 */
public static List<String> getTableStrings(List<List<String>> table) {
    final String rowFormat = getShortestTableStringFormat(table);
    return table.stream()
        .map(row -> format(rowFormat, row.toArray()))
        .collect(toImmutableList());
}
// Columns must be padded to the widest cell in each column, producing
// vertically aligned pipe separators.
@Test
public void testGetTableStringsSimpleSuccess() {
    List<List<String>> table = Arrays.asList(
        Arrays.asList("Header1", "Header2", "Headr3"),
        Arrays.asList("Value1", "Value2", "Value3"),
        Arrays.asList("LongValue1", "SVal2", "SVal3"));
    assertEquals(
        StringTableUtils.getTableStrings(table),
        Arrays.asList(
            "| Header1    | Header2 | Headr3 |",
            "| Value1     | Value2  | Value3 |",
            "| LongValue1 | SVal2   | SVal3  |"));
}
/**
 * Compiles the given file (read from {@code documentResource}) through the
 * Efesto compilation manager and returns the generated resources per model type.
 *
 * @param fileName          logical name of the resource to compile
 * @param parentClassLoader parent classloader for the compilation context
 * @return generated resources keyed by model type (e.g. "pmml")
 * @throws KieCompilerServiceException wrapping any failure, with a hint to
 *                                     check the classpath
 */
protected Map<String, GeneratedResources> compileFile(String fileName, ClassLoader parentClassLoader) {
    try {
        final EfestoCompilationContext context =
                EfestoCompilationContextUtils.buildWithParentClassLoader(parentClassLoader);
        final EfestoInputStreamResource resource =
                new EfestoInputStreamResource(documentResource.getInputStream(), fileName);
        compilationManager.processResource(context, resource);
        return context.getGeneratedResourcesMap();
    } catch (Exception e) {
        final String errorMessage = String.format("Compilation error for %s due to %s: please %s",
                fileName, e.getMessage(), CHECK_CLASSPATH);
        LOG.error(errorMessage);
        throw new KieCompilerServiceException(errorMessage, e);
    }
}
// Compiling the PMML fixture must yield a non-empty resources map containing
// the "pmml" model type.
@Test
void compileFile() {
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    Map<String, GeneratedResources> retrieved =
        dmnKiePMMLTrustyInvocationEvaluator.compileFile(pmmlFileName, classLoader);
    assertThat(retrieved).isNotNull().isNotEmpty().containsKey("pmml");
}
/**
 * Probability mass function of the geometric distribution:
 * P(X = k) = p * (1 - p)^k for k &gt;= 0, and 0 outside the support.
 *
 * @param k number of failures before the first success
 * @return the probability of exactly {@code k}
 */
@Override
public double p(int k) {
    return (k < 0) ? 0.0 : p * Math.pow(1 - p, k);
}
// Spot-checks the geometric pmf p*(1-p)^k at several k for p = 0.3.
@Test
public void testP() {
    System.out.println("p");
    GeometricDistribution instance = new GeometricDistribution(0.3);
    instance.rand();
    assertEquals(0.3, instance.p(0), 1E-6);
    assertEquals(0.21, instance.p(1), 1E-6);
    assertEquals(0.147, instance.p(2), 1E-6);
    assertEquals(0.1029, instance.p(3), 1E-6);
    assertEquals(0.07203, instance.p(4), 1E-6);
    assertEquals(0.008474257, instance.p(10), 1E-6);
    assertEquals(0.0002393768, instance.p(20), 1E-6);
}
/**
 * Returns the latch that is counted down when the watched condition is met,
 * allowing callers to await completion.
 */
public CountDownLatch getCountDownLatch() {
    return countDownLatch;
}
// Closing the watch must release the phase watcher's latch so callers are not
// blocked waiting for a phase that will never arrive.
@Test
void testPhaseWithError() throws InterruptedException {
    // CREATE
    client.pods().inNamespace("ns1")
        .create(new PodBuilder().withNewMetadata().withName("pod1").endMetadata().build());
    // READ
    PodList podList = client.pods().inNamespace("ns1").list();
    assertNotNull(podList);
    assertEquals(1, podList.getItems().size());
    // WATCH
    PodPhaseWatcher podWatcher = new PodPhaseWatcher(
        phase -> StringUtils.equalsAnyIgnoreCase(phase, "Succeeded", "Failed", "Running"));
    Watch watch = client.pods().inNamespace("ns1").withName("pod1").watch(podWatcher);
    // In the case of close, we do not block thread execution
    watch.close();
    assertTrue(podWatcher.getCountDownLatch().await(1, TimeUnit.SECONDS));
}
/**
 * Runs every registered post-analysis task, in array order, against a single
 * shared ProjectAnalysis whose status reflects whether all steps executed.
 *
 * @param allStepsExecuted true when the analysis completed successfully
 */
@Override
public void finished(boolean allStepsExecuted) {
    if (postProjectAnalysisTasks.length == 0) {
        return;
    }
    // one analysis instance is shared by all tasks
    final ProjectAnalysisImpl analysis = createProjectAnalysis(allStepsExecuted ? SUCCESS : FAILED);
    for (PostProjectAnalysisTask task : postProjectAnalysisTasks) {
        executeTask(analysis, task);
    }
}
// Tasks must run in array order, each receiving the *same* ProjectAnalysis
// instance, and each execution must be logged with status and timing.
@Test
@UseDataProvider("booleanValues")
public void finished_calls_all_PostProjectAnalysisTask_in_order_of_the_array_and_passes_the_same_object_to_all(boolean allStepsExecuted) {
    PostProjectAnalysisTask postProjectAnalysisTask1 = newPostProjectAnalysisTask("PT1");
    PostProjectAnalysisTask postProjectAnalysisTask2 = newPostProjectAnalysisTask("PT2");
    InOrder inOrder = inOrder(postProjectAnalysisTask1, postProjectAnalysisTask2);

    new PostProjectAnalysisTasksExecutor(
        ceTask, analysisMetadataHolder, qualityGateHolder, qualityGateStatusHolder,
        reportReader,
        new PostProjectAnalysisTask[] {postProjectAnalysisTask1, postProjectAnalysisTask2})
        .finished(allStepsExecuted);

    // strict ordering: task1 fully handled before task2
    inOrder.verify(postProjectAnalysisTask1).finished(taskContextCaptor.capture());
    inOrder.verify(postProjectAnalysisTask1).getDescription();
    inOrder.verify(postProjectAnalysisTask2).finished(taskContextCaptor.capture());
    inOrder.verify(postProjectAnalysisTask2).getDescription();
    inOrder.verifyNoMoreInteractions();

    ArgumentCaptor<PostProjectAnalysisTask.Context> taskContextCaptor = this.taskContextCaptor;
    List<PostProjectAnalysisTask.ProjectAnalysis> allValues = getAllProjectAnalyses(taskContextCaptor);
    assertThat(allValues).hasSize(2);
    // both tasks saw the identical analysis object
    assertThat(allValues.get(0)).isSameAs(allValues.get(1));

    assertThat(logTester.logs()).hasSize(2);
    List<String> logs = logTester.logs(Level.INFO);
    assertThat(logs).hasSize(2);
    assertThat(logs.get(0)).matches("^PT1 \\| status=SUCCESS \\| time=\\d+ms$");
    assertThat(logs.get(1)).matches("^PT2 \\| status=SUCCESS \\| time=\\d+ms$");
}
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) { CharStream input = CharStreams.fromString(source); FEEL_1_1Lexer lexer = new FEEL_1_1Lexer( input ); CommonTokenStream tokens = new CommonTokenStream( lexer ); FEEL_1_1Parser parser = new FEEL_1_1Parser( tokens ); ParserHelper parserHelper = new ParserHelper(eventsManager); additionalFunctions.forEach(f -> parserHelper.getSymbolTable().getBuiltInScope().define(f.getSymbol())); parser.setHelper(parserHelper); parser.setErrorHandler( new FEELErrorHandler() ); parser.removeErrorListeners(); // removes the error listener that prints to the console parser.addErrorListener( new FEELParserErrorListener( eventsManager ) ); // pre-loads the parser with symbols defineVariables( inputVariableTypes, inputVariables, parser ); if (typeRegistry != null) { parserHelper.setTypeRegistry(typeRegistry); } return parser; }
// "and" binds tighter than "or": the tree must be OR(AND(<, =), baz).
@Test
void conditionalLogicalOp() {
    String inputExpression = "foo < 10 and bar = \"x\" or baz";
    BaseNode infix = parse(inputExpression);

    assertThat(infix).isInstanceOf(InfixOpNode.class);
    assertThat(infix.getResultType()).isEqualTo(BuiltInType.BOOLEAN);
    assertThat(infix.getText()).isEqualTo(inputExpression);

    // top level is the OR, with the whole AND expression on its left
    InfixOpNode or = (InfixOpNode) infix;
    assertThat(or.getLeft()).isInstanceOf(InfixOpNode.class);
    assertThat(or.getLeft().getText()).isEqualTo("foo < 10 and bar = \"x\"");
    assertThat(or.getOperator()).isEqualTo(InfixOperator.OR);
    assertThat(or.getRight()).isInstanceOf(NameRefNode.class);
    assertThat(or.getRight().getText()).isEqualTo("baz");

    InfixOpNode and = (InfixOpNode) or.getLeft();
    assertThat(and.getLeft()).isInstanceOf(InfixOpNode.class);
    assertThat(and.getLeft().getText()).isEqualTo("foo < 10");
    assertThat(and.getOperator()).isEqualTo(InfixOperator.AND);
    assertThat(and.getRight()).isInstanceOf(InfixOpNode.class);
    assertThat(and.getRight().getText()).isEqualTo("bar = \"x\"");
}
/**
 * Carries dual-stack networking fields from the live Service into the desired
 * one before diffing: ipFamilies always mirrors the live value, while
 * ipFamilyPolicy is inherited only when the desired spec leaves it unset
 * (an explicitly requested policy wins).
 *
 * @param current the Service as it exists in the cluster
 * @param desired the Service we are about to apply (mutated in place)
 */
protected void patchDualStackNetworking(Service current, Service desired) {
    desired.getSpec().setIpFamilies(current.getSpec().getIpFamilies());

    if (desired.getSpec().getIpFamilyPolicy() != null) {
        // an explicit desired policy is kept as-is
        return;
    }
    desired.getSpec().setIpFamilyPolicy(current.getSpec().getIpFamilyPolicy());
}
// Patching must always copy ipFamilies from the live service into the desired
// one, but only inherit ipFamilyPolicy when the desired spec has none set.
@Test
public void testDualStackNetworkingPatching() {
    KubernetesClient client = mock(KubernetesClient.class);

    // live service with SingleStack / IPv6
    Service current = new ServiceBuilder()
            .withNewMetadata()
                .withNamespace(NAMESPACE)
                .withName(RESOURCE_NAME)
            .endMetadata()
            .withNewSpec()
                .withType("ClusterIp")
                .withPorts(
                    new ServicePortBuilder()
                        .withName("port1")
                        .withPort(1234)
                        .withTargetPort(new IntOrString(1234))
                        .build(),
                    new ServicePortBuilder()
                        .withName("port2")
                        .withPort(5678)
                        .withTargetPort(new IntOrString(5678))
                        .build()
                )
                .withIpFamilyPolicy("SingleStack")
                .withIpFamilies("IPv6")
            .endSpec()
            .build();

    // live service with PreferDualStack / IPv4+IPv6
    Service current2 = new ServiceBuilder()
            .withNewMetadata()
                .withNamespace(NAMESPACE)
                .withName(RESOURCE_NAME)
            .endMetadata()
            .withNewSpec()
                .withType("ClusterIp")
                .withPorts(
                    new ServicePortBuilder()
                        .withName("port1")
                        .withPort(1234)
                        .withTargetPort(new IntOrString(1234))
                        .build(),
                    new ServicePortBuilder()
                        .withName("port2")
                        .withPort(5678)
                        .withTargetPort(new IntOrString(5678))
                        .build()
                )
                .withIpFamilyPolicy("PreferDualStack")
                .withIpFamilies("IPv4", "IPv6")
            .endSpec()
            .build();

    // desired service with no dual-stack fields set
    Service desired = new ServiceBuilder()
            .withNewMetadata()
                .withNamespace(NAMESPACE)
                .withName(RESOURCE_NAME)
            .endMetadata()
            .withNewSpec()
                .withType("NodePort")
                .withPorts(
                    new ServicePortBuilder()
                        .withName("port2")
                        .withPort(5678)
                        .withTargetPort(new IntOrString(5678))
                        .build(),
                    new ServicePortBuilder()
                        .withName("port1")
                        .withPort(1234)
                        .withTargetPort(new IntOrString(1234))
                        .build()
                )
            .endSpec()
            .build();

    // desired service that explicitly requests RequireDualStack
    Service desired2 = new ServiceBuilder()
            .withNewMetadata()
                .withNamespace(NAMESPACE)
                .withName(RESOURCE_NAME)
            .endMetadata()
            .withNewSpec()
                .withType("NodePort")
                .withPorts(
                    new ServicePortBuilder()
                        .withName("port2")
                        .withPort(5678)
                        .withTargetPort(new IntOrString(5678))
                        .build(),
                    new ServicePortBuilder()
                        .withName("port1")
                        .withPort(1234)
                        .withTargetPort(new IntOrString(1234))
                        .build()
                )
                .withIpFamilyPolicy("RequireDualStack")
                .withIpFamilies("IPv4", "IPv6")
            .endSpec()
            .build();

    ServiceOperator op = new ServiceOperator(vertx, client);

    // unset desired policy => inherit both fields from live service
    op.patchDualStackNetworking(current, desired);
    assertThat(current.getSpec().getIpFamilyPolicy(), is(desired.getSpec().getIpFamilyPolicy()));
    assertThat(current.getSpec().getIpFamilies(), is(desired.getSpec().getIpFamilies()));

    op.patchDualStackNetworking(current2, desired);
    assertThat(current2.getSpec().getIpFamilyPolicy(), is(not(desired.getSpec().getIpFamilyPolicy())));
    assertThat(current2.getSpec().getIpFamilies(), is(desired.getSpec().getIpFamilies()));

    // explicit desired policy wins; ipFamilies still copied from live service
    op.patchDualStackNetworking(current, desired2);
    assertThat(current.getSpec().getIpFamilyPolicy(), is(not(desired2.getSpec().getIpFamilyPolicy())));
    assertThat(current.getSpec().getIpFamilies(), is(desired2.getSpec().getIpFamilies()));

    op.patchDualStackNetworking(current2, desired2);
    assertThat(current2.getSpec().getIpFamilyPolicy(), is(not(desired2.getSpec().getIpFamilyPolicy())));
    assertThat(current2.getSpec().getIpFamilies(), is(desired2.getSpec().getIpFamilies()));
}
/**
 * Looks up a Hive partition through the partition cache, keyed by
 * (database, table, partition values).
 *
 * @param dbName          database name
 * @param tblName         table name
 * @param partitionValues partition key values identifying the partition
 * @return the (possibly cached) partition
 */
public Partition getPartition(String dbName, String tblName, List<String> partitionValues) {
    return get(partitionCache, HivePartitionName.of(dbName, tblName, partitionValues));
}
// The cached lookup must return the same partition metadata (format, size)
// as the underlying metastore, both with and without partition values.
@Test
public void testGetPartition() {
    CachingHiveMetastore cachingHiveMetastore = new CachingHiveMetastore(
            metastore, executor, expireAfterWriteSec, refreshAfterWriteSec, 1000, false);
    Partition partition = cachingHiveMetastore.getPartition(
            "db1", "tbl1", Lists.newArrayList("par1"));
    Assert.assertEquals(ORC, partition.getFileFormat());
    Assert.assertEquals("100", partition.getParameters().get(TOTAL_SIZE));

    // direct (uncached) metastore call with empty partition values
    partition = metastore.getPartition("db1", "tbl1", Lists.newArrayList());
    Assert.assertEquals("100", partition.getParameters().get(TOTAL_SIZE));
}
/**
 * Collects all bean property fields of a class and its superclasses, keyed by
 * field name, skipping transient and static fields. Fields are made accessible.
 * Because the walk goes subclass first, a superclass field with the same name
 * replaces the subclass entry in the map (same as the original behaviour).
 *
 * @param cl class to inspect; may be null (yields an empty map)
 * @return mutable map of field name to accessible Field
 */
public static Map<String, Field> getBeanPropertyFields(Class cl) {
    Map<String, Field> properties = new HashMap<>();
    while (cl != null) {
        for (Field field : cl.getDeclaredFields()) {
            int modifiers = field.getModifiers();
            if (Modifier.isTransient(modifiers) || Modifier.isStatic(modifiers)) {
                continue;
            }
            field.setAccessible(true);
            properties.put(field.getName(), field);
        }
        cl = cl.getSuperclass();
    }
    return properties;
}
// All non-transient, non-static fields of the fixture must be returned,
// and each returned Field must have been made accessible.
@Test
void testGetBeanPropertyFields() {
    Map<String, Field> map = ReflectUtils.getBeanPropertyFields(EmptyClass.class);
    assertThat(map.size(), is(2));
    assertThat(map, hasKey("set"));
    assertThat(map, hasKey("property"));
    for (Field f : map.values()) {
        if (!f.isAccessible()) {
            fail();
        }
    }
}
/**
 * Returns the value of the given column as a BigDecimal, delegating the
 * retrieval to the merge result set and any type coercion to ResultSetUtils.
 *
 * @param columnIndex 1-based column index
 * @throws SQLException propagated from the underlying result set
 */
@Override
public BigDecimal getBigDecimal(final int columnIndex) throws SQLException {
    return (BigDecimal) ResultSetUtils.convertValue(mergeResultSet.getValue(columnIndex, BigDecimal.class), BigDecimal.class);
}
// The deprecated scale-taking overload must still return the underlying value
// (the scale argument is accepted but the value itself is asserted unchanged).
@Test
void assertGetBigDecimalAndScaleWithColumnIndex() throws SQLException {
    when(mergeResultSet.getValue(1, BigDecimal.class)).thenReturn(new BigDecimal("1"));
    assertThat(shardingSphereResultSet.getBigDecimal(1, 10), is(new BigDecimal("1")));
}
private void addSubscriberIndexes(Service service, String clientId) { Set<String> clientIds = subscriberIndexes.computeIfAbsent(service, key -> new ConcurrentHashSet<>()); // Fix #5404, Only first time add need notify event. if (clientIds.add(clientId)) { NotifyCenter.publishEvent(new ServiceEvent.ServiceSubscribedEvent(service, clientId)); } }
// Invokes the private addSubscriberIndexes via reflection and checks the new
// client is indexed (fixture already holds one subscriber, hence size 2).
@Test
void testAddSubscriberIndexes() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    String clientId = "clientId";
    Class<ClientServiceIndexesManager> clientServiceIndexesManagerClass = ClientServiceIndexesManager.class;
    Method addSubscriberIndexes =
        clientServiceIndexesManagerClass.getDeclaredMethod("addSubscriberIndexes", Service.class, String.class);
    addSubscriberIndexes.setAccessible(true);
    addSubscriberIndexes.invoke(clientServiceIndexesManager, service, clientId);
    Collection<String> allClientsSubscribeService =
        clientServiceIndexesManager.getAllClientsSubscribeService(service);
    assertNotNull(allClientsSubscribeService);
    assertEquals(2, allClientsSubscribeService.size());
}
/**
 * Converts reservation allocations into their client-facing
 * {@link ReservationAllocationState} form.
 *
 * @param res                        allocations to convert
 * @param includeResourceAllocations when true, each interval-&gt;resource request
 *                                   is expanded into a ResourceAllocationRequest;
 *                                   when false, the request list stays empty
 * @return one state entry per allocation, in iteration order of {@code res}
 */
public static List<ReservationAllocationState> convertAllocationsToReservationInfo(Set<ReservationAllocation> res, boolean includeResourceAllocations) {
    List<ReservationAllocationState> reservationInfo = new ArrayList<>();
    for (ReservationAllocation allocation : res) {
        List<ResourceAllocationRequest> allocations = new ArrayList<>();
        if (includeResourceAllocations) {
            for (Map.Entry<ReservationInterval, Resource> request
                    : allocation.getAllocationRequests().entrySet()) {
                ReservationInterval interval = request.getKey();
                allocations.add(ResourceAllocationRequest.newInstance(
                        interval.getStartTime(), interval.getEndTime(), request.getValue()));
            }
        }
        reservationInfo.add(ReservationAllocationState.newInstance(
                allocation.getAcceptanceTime(), allocation.getUser(), allocations,
                allocation.getReservationId(), allocation.getReservationDefinition()));
    }
    return reservationInfo;
}
// With includeResourceAllocations=false, the state entry must still be
// produced (correct id) but with an empty resource-request list.
@Test
public void testConvertAllocationsToReservationInfoEmptyAllocations() {
    long startTime = new Date().getTime();
    long step = 10000;
    int[] alloc = {};
    ReservationId id = ReservationSystemTestUtil.getNewReservationId();
    ReservationAllocation allocation = createReservationAllocation(
        startTime, startTime + 10 * step, step, alloc, id, createResource(4000, 2));

    List<ReservationAllocationState> infoList = ReservationSystemUtil
        .convertAllocationsToReservationInfo(
            Collections.singleton(allocation), false);

    Assert.assertEquals(infoList.size(), 1);
    Assert.assertEquals(infoList.get(0).getReservationId().toString(), id.toString());
    Assert.assertTrue(infoList.get(0).getResourceAllocationRequests().isEmpty());
}
/**
 * Returns the stored value as a primitive float.
 */
@Override
public float floatValue() {
    return value;
}
// Magnitudes below Float.MIN_VALUE must underflow to 0.0f (not NaN or error),
// in both scientific and plain decimal notation, for both signs.
@Test
void testVerySmallValues() throws IOException {
    double smallValue = Float.MIN_VALUE / 10d;
    assertEquals(-1, Double.compare(smallValue, Float.MIN_VALUE),
        "Test must be performed with a value smaller than Float.MIN_VALUE.");

    // 1.4012984643248171E-46
    String asString = String.valueOf(smallValue);
    COSFloat cosFloat = new COSFloat(asString);
    assertEquals(0.0f, cosFloat.floatValue());

    // 0.00000000000000000000000000000000000000000000014012984643248171
    asString = new BigDecimal(asString).toPlainString();
    cosFloat = new COSFloat(asString);
    assertEquals(0.0f, cosFloat.floatValue());

    smallValue *= -1;

    // -1.4012984643248171E-46
    asString = String.valueOf(smallValue);
    cosFloat = new COSFloat(asString);
    assertEquals(0.0f, cosFloat.floatValue());

    // -0.00000000000000000000000000000000000000000000014012984643248171
    asString = new BigDecimal(asString).toPlainString();
    cosFloat = new COSFloat(asString);
    assertEquals(0.0f, cosFloat.floatValue());
}
/**
 * Stores the chosen service instance into the metadata context for downstream
 * load-balancer consumers, then returns the request unchanged.
 */
@Override
public HttpRequest transformRequest(HttpRequest request, ServiceInstance instance) {
    if (instance != null) {
        MetadataContextHolder.get().setLoadbalancer(LOAD_BALANCER_SERVICE_INSTANCE, instance);
    }
    return request;
}
// The transformed request must leave the chosen instance retrievable from the
// metadata context under the load-balancer key.
@Test
public void test() throws Throwable {
    transformer.transformRequest(clientRequest, serviceInstance);
    assertThat(MetadataContextHolder.get().getLoadbalancerMetadata().get(LOAD_BALANCER_SERVICE_INSTANCE))
        .isEqualTo(serviceInstance);
}
/**
 * Sets the prefix prepended to the metric names registered by this component.
 */
public void setPrefix(String prefix) {
    this.prefix = prefix;
}
// A custom prefix must apply to every registered metric name, and stopping
// the pool must remove all of its metrics from the registry.
@Test
public void customMetricsPrefix() throws Exception {
    iqtp.setPrefix(PREFIX);
    iqtp.start();

    assertThat(metricRegistry.getNames())
        .overridingErrorMessage("Custom metrics prefix doesn't match")
        .allSatisfy(name -> assertThat(name).startsWith(PREFIX));

    iqtp.stop();
    assertThat(metricRegistry.getMetrics())
        .overridingErrorMessage("The default metrics prefix was changed")
        .isEmpty();
}
/**
 * Builds the single result row for SHOW SQL_TRANSLATOR RULE, exposing the
 * rule type, its properties and the use-original-SQL-on-failure flag.
 */
@Override
public Collection<LocalDataQueryResultRow> getRows(final ShowSQLTranslatorRuleStatement sqlStatement, final ContextManager contextManager) {
    final SQLTranslatorRuleConfiguration config = rule.getConfiguration();
    final LocalDataQueryResultRow row = new LocalDataQueryResultRow(
            config.getType(), config.getProps(), config.isUseOriginalSQLWhenTranslatingFailed());
    return Collections.singleton(row);
}
// The single row must surface type=NATIVE, empty props, and the
// use-original-SQL flag rendered as "true".
@Test
void assertExecute() throws SQLException {
    engine.executeQuery();
    Collection<LocalDataQueryResultRow> actual = engine.getRows();
    assertThat(actual.size(), is(1));
    Iterator<LocalDataQueryResultRow> iterator = actual.iterator();
    LocalDataQueryResultRow row = iterator.next();
    assertThat(row.getCell(1), is("NATIVE"));
    assertThat(row.getCell(2), is(""));
    assertThat(row.getCell(3), is("true"));
}
/**
 * Prefixes the given name with the current-directory internal variable
 * reference, producing e.g. "${Internal.Entry.Current.Directory}/name".
 */
String getEntryName( String name ) {
    return "${" + Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY + "}/" + name;
}
// getEntryName must prepend the internal current-directory variable reference.
// Only getEntryName is real on the mock; everything else stays stubbed.
@Test
public void testEntryName() {
    dialog = mock( JobEntryTransDialog.class );
    doCallRealMethod().when( dialog ).getEntryName( any() );
    assertEquals( dialog.getEntryName( FILE_NAME ),
        "${Internal.Entry.Current.Directory}/" + FILE_NAME );
}
/**
 * Performs an HTTP GET and returns the response body decoded with the given
 * charset.
 *
 * @param urlString     URL to request
 * @param customCharset charset used to decode the response body
 * @return the response body as a string
 */
public static String get(String urlString, Charset customCharset) {
    return HttpRequest.get(urlString).charset(customCharset).execute().body();
}
// Manual smoke test against a live endpoint (hence @Disabled): fetches a
// GBK-encoded page and prints the decoded body for visual inspection.
@Test
@Disabled
public void getTest() {
    final String result1 = HttpUtil.get(
        "http://photo.qzone.qq.com/fcgi-bin/fcg_list_album?uin=88888&outstyle=2",
        CharsetUtil.CHARSET_GBK);
    Console.log(result1);
}
@Subscribe public void onChatMessage(ChatMessage chatMessage) { if (chatMessage.getType() != ChatMessageType.TRADE && chatMessage.getType() != ChatMessageType.GAMEMESSAGE && chatMessage.getType() != ChatMessageType.SPAM && chatMessage.getType() != ChatMessageType.FRIENDSCHATNOTIFICATION) { return; } String message = chatMessage.getMessage(); Matcher matcher = KILLCOUNT_PATTERN.matcher(message); if (matcher.find()) { final String boss = matcher.group("boss"); final int kc = Integer.parseInt(matcher.group("kc")); final String pre = matcher.group("pre"); final String post = matcher.group("post"); if (Strings.isNullOrEmpty(pre) && Strings.isNullOrEmpty(post)) { unsetKc(boss); return; } String renamedBoss = KILLCOUNT_RENAMES .getOrDefault(boss, boss) // The config service doesn't support keys with colons in them .replace(":", ""); if (boss != renamedBoss) { // Unset old TOB kc unsetKc(boss); unsetPb(boss); unsetKc(boss.replace(":", ".")); unsetPb(boss.replace(":", ".")); // Unset old story mode unsetKc("Theatre of Blood Story Mode"); unsetPb("Theatre of Blood Story Mode"); } setKc(renamedBoss, kc); // We either already have the pb, or need to remember the boss for the upcoming pb if (lastPb > -1) { log.debug("Got out-of-order personal best for {}: {}", renamedBoss, lastPb); if (renamedBoss.contains("Theatre of Blood")) { // TOB team size isn't sent in the kill message, but can be computed from varbits int tobTeamSize = tobTeamSize(); lastTeamSize = tobTeamSize == 1 ? "Solo" : (tobTeamSize + " players"); } else if (renamedBoss.contains("Tombs of Amascut")) { // TOA team size isn't sent in the kill message, but can be computed from varbits int toaTeamSize = toaTeamSize(); lastTeamSize = toaTeamSize == 1 ? 
"Solo" : (toaTeamSize + " players"); } final double pb = getPb(renamedBoss); // If a raid with a team size, only update the pb if it is lower than the existing pb // so that the pb is the overall lowest of any team size if (lastTeamSize == null || pb == 0 || lastPb < pb) { log.debug("Setting overall pb (old: {})", pb); setPb(renamedBoss, lastPb); } if (lastTeamSize != null) { log.debug("Setting team size pb: {}", lastTeamSize); setPb(renamedBoss + " " + lastTeamSize, lastPb); } lastPb = -1; lastTeamSize = null; } else { lastBossKill = renamedBoss; lastBossTime = client.getTickCount(); } return; } matcher = DUEL_ARENA_WINS_PATTERN.matcher(message); if (matcher.find()) { final int oldWins = getKc("Duel Arena Wins"); final int wins = matcher.group(2).equals("one") ? 1 : Integer.parseInt(matcher.group(2).replace(",", "")); final String result = matcher.group(1); int winningStreak = getKc("Duel Arena Win Streak"); int losingStreak = getKc("Duel Arena Lose Streak"); if (result.equals("won") && wins > oldWins) { losingStreak = 0; winningStreak += 1; } else if (result.equals("were defeated")) { losingStreak += 1; winningStreak = 0; } else { log.warn("unrecognized duel streak chat message: {}", message); } setKc("Duel Arena Wins", wins); setKc("Duel Arena Win Streak", winningStreak); setKc("Duel Arena Lose Streak", losingStreak); } matcher = DUEL_ARENA_LOSSES_PATTERN.matcher(message); if (matcher.find()) { int losses = matcher.group(1).equals("one") ? 
1 : Integer.parseInt(matcher.group(1).replace(",", "")); setKc("Duel Arena Losses", losses); } matcher = KILL_DURATION_PATTERN.matcher(message); if (matcher.find()) { matchPb(matcher); } matcher = NEW_PB_PATTERN.matcher(message); if (matcher.find()) { matchPb(matcher); } matcher = RAIDS_PB_PATTERN.matcher(message); if (matcher.find()) { matchPb(matcher); } matcher = RAIDS_DURATION_PATTERN.matcher(message); if (matcher.find()) { matchPb(matcher); } matcher = HS_PB_PATTERN.matcher(message); if (matcher.find()) { int floor = Integer.parseInt(matcher.group("floor")); String floortime = matcher.group("floortime"); String floorpb = matcher.group("floorpb"); String otime = matcher.group("otime"); String opb = matcher.group("opb"); String pb = MoreObjects.firstNonNull(floorpb, floortime); setPb("Hallowed Sepulchre Floor " + floor, timeStringToSeconds(pb)); if (otime != null) { pb = MoreObjects.firstNonNull(opb, otime); setPb("Hallowed Sepulchre", timeStringToSeconds(pb)); } } matcher = HS_KC_FLOOR_PATTERN.matcher(message); if (matcher.find()) { int floor = Integer.parseInt(matcher.group(1)); int kc = Integer.parseInt(matcher.group(2).replaceAll(",", "")); setKc("Hallowed Sepulchre Floor " + floor, kc); } matcher = HS_KC_GHC_PATTERN.matcher(message); if (matcher.find()) { int kc = Integer.parseInt(matcher.group(1).replaceAll(",", "")); setKc("Hallowed Sepulchre", kc); } matcher = HUNTER_RUMOUR_KC_PATTERN.matcher(message); if (matcher.find()) { int kc = Integer.parseInt(matcher.group(1).replaceAll(",", "")); setKc("Hunter Rumours", kc); } if (lastBossKill != null && lastBossTime != client.getTickCount()) { lastBossKill = null; lastBossTime = -1; } matcher = COLLECTION_LOG_ITEM_PATTERN.matcher(message); if (matcher.find()) { String item = matcher.group(1); int petId = findPet(item); if (petId != -1) { final List<Integer> petList = new ArrayList<>(getPetList()); if (!petList.contains(petId)) { log.debug("New pet added: {}/{}", item, petId); petList.add(petId); 
setPetList(petList); } } } matcher = GUARDIANS_OF_THE_RIFT_PATTERN.matcher(message); if (matcher.find()) { int kc = Integer.parseInt(matcher.group(1)); setKc("Guardians of the Rift", kc); } }
// Hallowed Sepulchre: floor pb without a new pb keeps the existing floor pb,
// while a new overall pb is recorded; both mm:ss and mm:ss.SS forms parse.
@Test
public void testHsOverallNoPb_Pb() {
    ChatMessage chatMessage = new ChatMessage(null, GAMEMESSAGE, "",
        "Floor 5 time: <col=ff0000>3:10</col>. Personal best: 3:04<br>Overall time: <col=ff0000>7:47</col> (new personal best)<br>",
        null, 0);
    chatCommandsPlugin.onChatMessage(chatMessage);
    verify(configManager).setRSProfileConfiguration("personalbest", "hallowed sepulchre floor 5", 3 * 60 + 4.0);
    verify(configManager).setRSProfileConfiguration("personalbest", "hallowed sepulchre", 7 * 60 + 47.0);

    // Precise times
    chatMessage = new ChatMessage(null, GAMEMESSAGE, "",
        "Floor 5 time: <col=ff0000>3:10.00</col>. Personal best: 3:04.40<br>Overall time: <col=ff0000>7:47.20</col> (new personal best)<br>",
        null, 0);
    chatCommandsPlugin.onChatMessage(chatMessage);
    verify(configManager).setRSProfileConfiguration("personalbest", "hallowed sepulchre floor 5", 3 * 60 + 4.4);
    verify(configManager).setRSProfileConfiguration("personalbest", "hallowed sepulchre", 7 * 60 + 47.2);
}
public static String getConsumerAddressNum(ConsumerModel consumerModel) { int num = 0; Object object = consumerModel.getServiceMetadata().getAttribute(CommonConstants.CURRENT_CLUSTER_INVOKER_KEY); Map<Registry, MigrationInvoker<?>> invokerMap; List<String> nums = new LinkedList<>(); if (object instanceof Map) { invokerMap = (Map<Registry, MigrationInvoker<?>>) object; for (Map.Entry<Registry, MigrationInvoker<?>> entry : invokerMap.entrySet()) { URL registryUrl = entry.getKey().getUrl(); boolean isServiceDiscovery = UrlUtils.isServiceDiscoveryURL(registryUrl); String protocol = isServiceDiscovery ? registryUrl.getParameter(RegistryConstants.REGISTRY_KEY) : registryUrl.getProtocol(); MigrationInvoker<?> migrationInvoker = entry.getValue(); MigrationStep migrationStep = migrationInvoker.getMigrationStep(); String interfaceSize = Optional.ofNullable(migrationInvoker.getInvoker()) .map(ClusterInvoker::getDirectory) .map(Directory::getAllInvokers) .map(List::size) .map(String::valueOf) .orElse("-"); String applicationSize = Optional.ofNullable(migrationInvoker.getServiceDiscoveryInvoker()) .map(ClusterInvoker::getDirectory) .map(Directory::getAllInvokers) .map(List::size) .map(String::valueOf) .orElse("-"); String step; String size; switch (migrationStep) { case APPLICATION_FIRST: step = "AF"; size = "I-" + interfaceSize + ",A-" + applicationSize; break; case FORCE_INTERFACE: step = "I"; size = interfaceSize; break; default: step = "A"; size = applicationSize; break; } // zookeeper-AF(I-10,A-0) // zookeeper-I(10) // zookeeper-A(10) nums.add(protocol + "-" + step + "(" + size + ")"); } } // zookeeper-AF(I-10,A-0)/nacos-I(10) return String.join("/", nums.toArray(new String[0])); }
@Test void testGetConsumerAddressNum() { ConsumerModel consumerModel = Mockito.mock(ConsumerModel.class); ServiceMetadata serviceMetadata = Mockito.mock(ServiceMetadata.class); Mockito.when(consumerModel.getServiceMetadata()).thenReturn(serviceMetadata); String registry1 = "service-discovery-registry://127.0.0.1:2181/org.apache.dubbo.registry.RegistryService?application=dubbo-demo-api-provider&dubbo=2.0.2&pid=66099&registry=zookeeper&timestamp=1654588337653"; String registry2 = "zookeeper://127.0.0.1:2181/org.apache.dubbo.registry.RegistryService?application=dubbo-demo-api-provider&dubbo=2.0.2&pid=66099&timestamp=1654588337653"; String registry3 = "nacos://127.0.0.1:8848/org.apache.dubbo.registry.RegistryService?application=dubbo-demo-api-provider&dubbo=2.0.2&pid=66099&timestamp=1654588337653"; Map<Registry, MigrationInvoker<?>> invokerMap = new LinkedHashMap<>(); { Registry registry = Mockito.mock(Registry.class); Mockito.when(registry.getUrl()).thenReturn(URL.valueOf(registry1)); MigrationInvoker<?> migrationInvoker = Mockito.mock(MigrationInvoker.class); Mockito.when(migrationInvoker.getMigrationStep()).thenReturn(MigrationStep.FORCE_APPLICATION); ClusterInvoker serviceDiscoveryInvoker = Mockito.mock(ClusterInvoker.class); Mockito.when(migrationInvoker.getServiceDiscoveryInvoker()).thenReturn(serviceDiscoveryInvoker); Directory<?> sdDirectory = Mockito.mock(Directory.class); Mockito.when(serviceDiscoveryInvoker.getDirectory()).thenReturn(sdDirectory); List sdInvokers = Mockito.mock(List.class); Mockito.when(sdDirectory.getAllInvokers()).thenReturn(sdInvokers); Mockito.when(sdInvokers.size()).thenReturn(5); invokerMap.put(registry, migrationInvoker); } { Registry registry = Mockito.mock(Registry.class); Mockito.when(registry.getUrl()).thenReturn(URL.valueOf(registry2)); MigrationInvoker<?> migrationInvoker = Mockito.mock(MigrationInvoker.class); Mockito.when(migrationInvoker.getMigrationStep()).thenReturn(MigrationStep.APPLICATION_FIRST); ClusterInvoker 
serviceDiscoveryInvoker = Mockito.mock(ClusterInvoker.class); Mockito.when(migrationInvoker.getServiceDiscoveryInvoker()).thenReturn(serviceDiscoveryInvoker); Directory<?> sdDirectory = Mockito.mock(Directory.class); Mockito.when(serviceDiscoveryInvoker.getDirectory()).thenReturn(sdDirectory); List sdInvokers = Mockito.mock(List.class); Mockito.when(sdDirectory.getAllInvokers()).thenReturn(sdInvokers); Mockito.when(sdInvokers.size()).thenReturn(0); ClusterInvoker invoker = Mockito.mock(ClusterInvoker.class); Mockito.when(migrationInvoker.getInvoker()).thenReturn(invoker); Directory<?> directory = Mockito.mock(Directory.class); Mockito.when(invoker.getDirectory()).thenReturn(directory); List invokers = Mockito.mock(List.class); Mockito.when(directory.getAllInvokers()).thenReturn(invokers); Mockito.when(invokers.size()).thenReturn(10); invokerMap.put(registry, migrationInvoker); } { Registry registry = Mockito.mock(Registry.class); Mockito.when(registry.getUrl()).thenReturn(URL.valueOf(registry3)); MigrationInvoker<?> migrationInvoker = Mockito.mock(MigrationInvoker.class); Mockito.when(migrationInvoker.getMigrationStep()).thenReturn(MigrationStep.FORCE_INTERFACE); ClusterInvoker invoker = Mockito.mock(ClusterInvoker.class); Mockito.when(migrationInvoker.getInvoker()).thenReturn(invoker); Directory<?> directory = Mockito.mock(Directory.class); Mockito.when(invoker.getDirectory()).thenReturn(directory); List invokers = Mockito.mock(List.class); Mockito.when(directory.getAllInvokers()).thenReturn(invokers); Mockito.when(invokers.size()).thenReturn(10); invokerMap.put(registry, migrationInvoker); } Mockito.when(serviceMetadata.getAttribute("currentClusterInvoker")).thenReturn(invokerMap); assertEquals( "zookeeper-A(5)/zookeeper-AF(I-10,A-0)/nacos-I(10)", ServiceCheckUtils.getConsumerAddressNum(consumerModel)); }
@Override public HttpServletRequest readRequest(AwsProxyRequest request, SecurityContext securityContext, Context lambdaContext, ContainerConfig config) throws InvalidRequestEventException { // Expect the HTTP method and context to be populated. If they are not, we are handling an // unsupported event type. if (request.getHttpMethod() == null || request.getHttpMethod().equals("") || request.getRequestContext() == null) { throw new InvalidRequestEventException(INVALID_REQUEST_ERROR); } request.setPath(stripBasePath(request.getPath(), config)); if (request.getMultiValueHeaders() != null && request.getMultiValueHeaders().getFirst(HttpHeaders.CONTENT_TYPE) != null) { String contentType = request.getMultiValueHeaders().getFirst(HttpHeaders.CONTENT_TYPE); // put single as we always expect to have one and only one content type in a request. request.getMultiValueHeaders().putSingle(HttpHeaders.CONTENT_TYPE, getContentTypeWithCharset(contentType, config)); } AwsProxyHttpServletRequest servletRequest = new AwsProxyHttpServletRequest(request, lambdaContext, securityContext, config); servletRequest.setServletContext(servletContext); servletRequest.setAttribute(API_GATEWAY_CONTEXT_PROPERTY, request.getRequestContext()); servletRequest.setAttribute(API_GATEWAY_STAGE_VARS_PROPERTY, request.getStageVariables()); servletRequest.setAttribute(API_GATEWAY_EVENT_PROPERTY, request); servletRequest.setAttribute(ALB_CONTEXT_PROPERTY, request.getRequestContext().getElb()); servletRequest.setAttribute(LAMBDA_CONTEXT_PROPERTY, lambdaContext); servletRequest.setAttribute(JAX_SECURITY_CONTEXT_PROPERTY, securityContext); return servletRequest; }
@Test void readRequest_contentCharset_doesNotOverrideRequestCharset() { String requestCharset = "application/json; charset=UTF-8"; AwsProxyRequest request = new AwsProxyRequestBuilder(ENCODED_REQUEST_PATH, "GET").header(HttpHeaders.CONTENT_TYPE, requestCharset).build(); try { HttpServletRequest servletRequest = reader.readRequest(request, null, null, ContainerConfig.defaultConfig()); assertNotNull(servletRequest); assertNotNull(servletRequest.getHeader(HttpHeaders.CONTENT_TYPE)); assertEquals(requestCharset, servletRequest.getHeader(HttpHeaders.CONTENT_TYPE)); assertEquals("UTF-8", servletRequest.getCharacterEncoding()); } catch (InvalidRequestEventException e) { e.printStackTrace(); fail("Could not read request"); } }
@Override public X process(T input, Context context) throws Exception { if (!this.initialized) { initialize(context); } // record must be PulsarFunctionRecord. Record<T> record = (Record<T>) context.getCurrentRecord(); // windows function processing semantics requires separate processing if (windowConfig.getProcessingGuarantees() == WindowConfig.ProcessingGuarantees.ATMOST_ONCE) { record.ack(); } if (isEventTime()) { long ts = this.timestampExtractor.extractTimestamp(record.getValue()); if (this.waterMarkEventGenerator.track(record.getTopicName().get(), ts)) { this.windowManager.add(record, ts, record); } else { if (this.windowConfig.getLateDataTopic() != null) { context.newOutputMessage(this.windowConfig.getLateDataTopic(), null).value(input).sendAsync(); } else { log.info(String.format( "Received a late tuple %s with ts %d. This will not be " + "processed" + ".", input, ts)); } } } else { this.windowManager.add(record, System.currentTimeMillis(), record); } return null; }
@Test public void testWindowFunctionWithAtmostOnce() throws Exception { windowConfig.setProcessingGuarantees(WindowConfig.ProcessingGuarantees.ATMOST_ONCE); doReturn(Optional.of(new Gson().fromJson(new Gson().toJson(windowConfig), Map.class))).when(context) .getUserConfigValue(WindowConfig.WINDOW_CONFIG_KEY); Record record = mock(Record.class); when(context.getCurrentRecord()).thenReturn(record); doReturn(Optional.of("test-topic")).when(record).getTopicName(); doReturn(record).when(context).getCurrentRecord(); doReturn(100l).when(record).getValue(); testWindowedPulsarFunction.process(10L, context); verify(record, times(1)).ack(); }
public FEELFnResult<BigDecimal> invoke(@ParameterName( "list" ) List list) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } FEELFnResult<BigDecimal> s = sum.invoke( list ); Function<FEELEvent, FEELFnResult<BigDecimal>> ifLeft = event -> FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "unable to sum the elements which is required to calculate the mean")); Function<BigDecimal, FEELFnResult<BigDecimal>> ifRight = (sum) -> { try { return FEELFnResult.ofResult( sum.divide( BigDecimal.valueOf( list.size() ), MathContext.DECIMAL128 ) ); } catch (Exception e) { return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to perform division to calculate the mean", e) ); } }; return s.cata(ifLeft, ifRight); }
@Test void invokeArrayTypeHeterogenous() { FunctionTestUtil.assertResultError(meanFunction.invoke(new Object[]{1, "test"}), InvalidParametersEvent.class); }
@Override public KTable<Windowed<K>, Long> count() { return count(NamedInternal.empty()); }
@Test public void shouldCountSlidingWindows() { final MockApiProcessorSupplier<Windowed<String>, Long, Void, Void> supplier = new MockApiProcessorSupplier<>(); windowedStream .count() .toStream() .process(supplier); try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) { processData(driver); } assertThat( supplier.theCapturedProcessor().lastValueAndTimestampPerKey() .get(new Windowed<>("1", new TimeWindow(0L, 100L))), equalTo(ValueAndTimestamp.make(1L, 100L))); assertThat( supplier.theCapturedProcessor().lastValueAndTimestampPerKey() .get(new Windowed<>("1", new TimeWindow(101L, 201L))), equalTo(ValueAndTimestamp.make(1L, 150L))); assertThat( supplier.theCapturedProcessor().lastValueAndTimestampPerKey() .get(new Windowed<>("1", new TimeWindow(50L, 150L))), equalTo(ValueAndTimestamp.make(2L, 150L))); assertThat( supplier.theCapturedProcessor().lastValueAndTimestampPerKey() .get(new Windowed<>("1", new TimeWindow(400L, 500L))), equalTo(ValueAndTimestamp.make(1L, 500L))); assertThat( supplier.theCapturedProcessor().lastValueAndTimestampPerKey() .get(new Windowed<>("2", new TimeWindow(100L, 200L))), equalTo(ValueAndTimestamp.make(2L, 200L))); assertThat( supplier.theCapturedProcessor().lastValueAndTimestampPerKey() .get(new Windowed<>("2", new TimeWindow(50L, 150L))), equalTo(ValueAndTimestamp.make(1L, 150L))); assertThat( supplier.theCapturedProcessor().lastValueAndTimestampPerKey() .get(new Windowed<>("2", new TimeWindow(151L, 251L))), equalTo(ValueAndTimestamp.make(1L, 200L))); }
public long readIntLenenc() { int firstByte = readInt1(); if (firstByte < 0xfb) { return firstByte; } if (0xfb == firstByte) { return 0L; } if (0xfc == firstByte) { return readInt2(); } if (0xfd == firstByte) { return readInt3(); } return byteBuf.readLongLE(); }
@Test void assertReadIntLenencWithThreeBytes() { when(byteBuf.readUnsignedByte()).thenReturn((short) 0xfd); when(byteBuf.readUnsignedMediumLE()).thenReturn(99999); assertThat(new MySQLPacketPayload(byteBuf, StandardCharsets.UTF_8).readIntLenenc(), is(99999L)); }
public static Integer parseRestBindPortFromWebInterfaceUrl(String webInterfaceUrl) { if (webInterfaceUrl != null) { final int lastColon = webInterfaceUrl.lastIndexOf(':'); if (lastColon == -1) { return -1; } else { try { return Integer.parseInt(webInterfaceUrl.substring(lastColon + 1)); } catch (NumberFormatException e) { return -1; } } } else { return -1; } }
@Test void testParseRestBindPortFromWebInterfaceUrlWithNullUrl() { assertThat(ResourceManagerUtils.parseRestBindPortFromWebInterfaceUrl(null)).isEqualTo(-1); }
@Override public void execute(final CommandLine commandLine, final Options options, RPCHook rpcHook) throws SubCommandException { DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook); defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis())); try { defaultMQAdminExt.start(); String topic = commandLine.getOptionValue('t').trim(); TopicStatsTable topicStatsTable = defaultMQAdminExt.examineTopicStats(topic); List<MessageQueue> mqList = new LinkedList<>(); mqList.addAll(topicStatsTable.getOffsetTable().keySet()); Collections.sort(mqList); System.out.printf("%-32s %-4s %-20s %-20s %s%n", "#Broker Name", "#QID", "#Min Offset", "#Max Offset", "#Last Updated" ); for (MessageQueue mq : mqList) { TopicOffset topicOffset = topicStatsTable.getOffsetTable().get(mq); String humanTimestamp = ""; if (topicOffset.getLastUpdateTimestamp() > 0) { humanTimestamp = UtilAll.timeMillisToHumanString2(topicOffset.getLastUpdateTimestamp()); } System.out.printf("%-32s %-4d %-20d %-20d %s%n", UtilAll.frontStringAtLeast(mq.getBrokerName(), 32), mq.getQueueId(), topicOffset.getMinOffset(), topicOffset.getMaxOffset(), humanTimestamp ); } } catch (Exception e) { throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e); } finally { defaultMQAdminExt.shutdown(); } }
@Test public void testExecute() { TopicStatusSubCommand cmd = new TopicStatusSubCommand(); Options options = ServerUtil.buildCommandlineOptions(new Options()); String[] subargs = new String[] {"-t unit-test"}; final CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new DefaultParser()); assertThat(commandLine.getOptionValue('t').trim()).isEqualTo("unit-test"); }
static int[] findMinMaxLengthsInSymbols(String[] symbols) { int min = Integer.MAX_VALUE; int max = 0; for (String symbol : symbols) { int len = symbol.length(); // some SENTINEL values can be empty strings, the month at index 12 or the weekday at index 0 if(len == 0) continue; min = Math.min(min, len); max = Math.max(max, len); } return new int[]{min, max}; }
@Test public void emptyStringValuesShouldBeIgnoredByFindMinMaxLengthsInSymbols() { String[] symbols = new String[]{"aaa", ""}; int[] results = CharSequenceToRegexMapper.findMinMaxLengthsInSymbols(symbols); assertEquals(3, results[0]); assertEquals(3, results[1]); }
@SuppressWarnings({"rawtypes", "unchecked"}) @Override public Object getValue(final int columnIndex, final Class<?> type) throws SQLException { Optional<ColumnProjection> columnProjection = selectStatementContext.getProjectionsContext().findColumnProjection(columnIndex); if (!columnProjection.isPresent()) { return mergedResult.getValue(columnIndex, type); } Optional<MaskTable> maskTable = maskRule.findMaskTable(columnProjection.get().getOriginalTable().getValue()); if (!maskTable.isPresent()) { return mergedResult.getValue(columnIndex, type); } Optional<MaskAlgorithm> maskAlgorithm = maskTable.get().findAlgorithm(columnProjection.get().getName().getValue()); if (!maskAlgorithm.isPresent()) { return mergedResult.getValue(columnIndex, type); } Object originalValue = mergedResult.getValue(columnIndex, Object.class); return null == originalValue ? null : maskAlgorithm.get().mask(originalValue); }
@Test void assertGetValueWithoutMaskAlgorithm() throws SQLException { when(mergedResult.getValue(1, String.class)).thenReturn("VALUE"); assertThat(new MaskMergedResult(mockMaskAlgorithmAbsent(), mockSelectStatementContext(), mergedResult).getValue(1, String.class), is("VALUE")); }
@Override public void addAll(long[] hashes) { for (long hash : hashes) { add(hash); } }
@Test public void addAll() { hyperLogLog.addAll(new long[]{1L, 1L, 2000L, 3000L, 40000L}); assertEquals(4L, hyperLogLog.estimate()); }
public static void print(final Options options) { print(options, new TerminalHelpFormatter()); }
@Test public void testPrintWidth100() { TerminalHelpPrinter.print(TerminalOptionsBuilder.options(), new TerminalHelpFormatter(100)); }
public RowExpression extract(PlanNode node) { return node.accept(new Visitor(domainTranslator, functionAndTypeManager), null); }
@Test public void testInnerJoinPropagatesPredicatesViaEquiConditions() { Map<VariableReferenceExpression, ColumnHandle> leftAssignments = Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(AV, BV, CV))); TableScanNode leftScan = tableScanNode(leftAssignments); Map<VariableReferenceExpression, ColumnHandle> rightAssignments = Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(DV, EV, FV))); TableScanNode rightScan = tableScanNode(rightAssignments); FilterNode left = filter(leftScan, equals(AV, bigintLiteral(10))); // predicates on "a" column should be propagated to output symbols via join equi conditions PlanNode node = new JoinNode( Optional.empty(), newId(), JoinType.INNER, left, rightScan, ImmutableList.of(new EquiJoinClause(AV, DV)), ImmutableList.<VariableReferenceExpression>builder() .addAll(rightScan.getOutputVariables()) .build(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), ImmutableMap.of()); RowExpression effectivePredicate = effectivePredicateExtractor.extract(node); assertEquals( normalizeConjuncts(effectivePredicate), normalizeConjuncts(equals(DV, bigintLiteral(10)))); }
public static String trimToEmpty(String str) { return str == null ? EMPTY : str.trim(); }
@Test public void testTrimToEmpty() { Assert.assertEquals("", StringUtil.trimToEmpty("")); Assert.assertEquals("", StringUtil.trimToEmpty(null)); }
@Override public Collection<JobID> getJobIds() throws Exception { LOG.debug("Retrieving all stored job ids from {}.", jobGraphStateHandleStore); final Collection<String> names; try { names = jobGraphStateHandleStore.getAllHandles(); } catch (Exception e) { throw new Exception( "Failed to retrieve all job ids from " + jobGraphStateHandleStore + ".", e); } final List<JobID> jobIds = new ArrayList<>(names.size()); for (String name : names) { try { jobIds.add(jobGraphStoreUtil.nameToJobID(name)); } catch (Exception exception) { LOG.warn( "Could not parse job id from {}. This indicates a malformed name.", name, exception); } } LOG.info("Retrieved job ids {} from {}", jobIds, jobGraphStateHandleStore); return jobIds; }
@Test public void testGetJobIds() throws Exception { final List<JobID> existingJobIds = Arrays.asList(new JobID(0, 0), new JobID(0, 1)); final TestingStateHandleStore<JobGraph> stateHandleStore = builder.setGetAllHandlesSupplier( () -> existingJobIds.stream() .map(AbstractID::toString) .collect(Collectors.toList())) .build(); final JobGraphStore jobGraphStore = createAndStartJobGraphStore(stateHandleStore); final Collection<JobID> jobIds = jobGraphStore.getJobIds(); assertThat(jobIds, contains(existingJobIds.toArray())); }
@GET @Produces(MediaType.APPLICATION_JSON) @Path("{networkId}/hosts") public Response getVirtualHosts(@PathParam("networkId") long networkId) { NetworkId nid = NetworkId.networkId(networkId); Set<VirtualHost> vhosts = vnetService.getVirtualHosts(nid); return ok(encodeArray(VirtualHost.class, "hosts", vhosts)).build(); }
@Test public void testGetVirtualHostsArray() { NetworkId networkId = networkId3; final Set<VirtualHost> vhostSet = ImmutableSet.of(vhost1, vhost2); expect(mockVnetService.getVirtualHosts(networkId)).andReturn(vhostSet).anyTimes(); replay(mockVnetService); WebTarget wt = target(); String location = "vnets/" + networkId.toString() + "/hosts"; String response = wt.path(location).request().get(String.class); assertThat(response, containsString("{\"hosts\":[")); final JsonObject result = Json.parse(response).asObject(); assertThat(result, notNullValue()); assertThat(result.names(), hasSize(1)); assertThat(result.names().get(0), is("hosts")); final JsonArray vnetJsonArray = result.get("hosts").asArray(); assertThat(vnetJsonArray, notNullValue()); assertEquals("Virtual hosts array is not the correct size.", vhostSet.size(), vnetJsonArray.size()); vhostSet.forEach(vhost -> assertThat(vnetJsonArray, hasVhost(vhost))); verify(mockVnetService); }
@Override public String getClusterInstanceId() throws BookieException { try { return store.get(ledgersRootPath + "/" + INSTANCEID) .get(BLOCKING_CALL_TIMEOUT, MILLISECONDS) .map(res -> new String(res.getValue(), UTF_8)) .orElseThrow( () -> new BookieException.MetadataStoreException("BookKeeper cluster not initialized")); } catch (ExecutionException | InterruptedException | TimeoutException e) { throw new BookieException.MetadataStoreException("Failed to get cluster instance id", e); } }
@Test(dataProvider = "impl") public void testGetClusterInstanceIdIfClusterNotInitialized(String provider, Supplier<String> urlSupplier) throws Exception { methodSetup(urlSupplier); try { registrationManager.getClusterInstanceId(); fail("Should fail getting cluster instance id if cluster not initialized"); } catch (BookieException.MetadataStoreException e) { assertTrue(e.getMessage().contains("BookKeeper cluster not initialized")); } }
public static KiePMMLLocalTransformations getKiePMMLLocalTransformations(final LocalTransformations localTransformations, final List<Field<?>> fields) { final List<KiePMMLDerivedField> kiePMMLDerivedFields = localTransformations.getDerivedFields().stream() .map(derivedField -> getKiePMMLDerivedField(derivedField, fields)) .collect(Collectors.toList()); return KiePMMLLocalTransformations.builder(UUID.randomUUID().toString(), Collections.emptyList()) .withDerivedFields(kiePMMLDerivedFields) .build(); }
@Test void getKiePMMLLocalTransformations() { final LocalTransformations toConvert = getRandomLocalTransformations(); KiePMMLLocalTransformations retrieved = KiePMMLLocalTransformationsInstanceFactory.getKiePMMLLocalTransformations(toConvert, Collections.emptyList()); assertThat(retrieved).isNotNull(); List<DerivedField> derivedFields = toConvert.getDerivedFields(); List<KiePMMLDerivedField> derivedFieldsToVerify = retrieved.getDerivedFields(); assertThat(derivedFieldsToVerify).hasSameSizeAs(derivedFields); derivedFields.forEach(derivedFieldSource -> { Optional<KiePMMLDerivedField> derivedFieldToVerify = derivedFieldsToVerify.stream().filter(param -> param.getName().equals(derivedFieldSource.getName())) .findFirst(); assertThat(derivedFieldToVerify).isPresent(); commonVerifyKiePMMLDerivedField(derivedFieldToVerify.get(), derivedFieldSource); }); }
@Override public void initialize(Map<String, Object> config) { if (config != null) { RuntimeOpts opts = ObjectMapperFactory.getMapper().getObjectMapper().convertValue(config, RuntimeOpts.class); if (opts != null) { runtimeOpts = opts; } } else { log.warn("initialize with null config"); } }
@Test public void TestInitializeWithNullData() { BasicKubernetesManifestCustomizer customizer = new BasicKubernetesManifestCustomizer(); customizer.initialize(null); assertNotEquals(customizer.getRuntimeOpts(), null); assertNull(customizer.getRuntimeOpts().getExtraLabels()); assertNull(customizer.getRuntimeOpts().getExtraAnnotations()); assertNull(customizer.getRuntimeOpts().getNodeSelectorLabels()); assertNull(customizer.getRuntimeOpts().getTolerations()); assertNull(customizer.getRuntimeOpts().getResourceRequirements()); }
public static AnnotateImagesFromGcsUri annotateImagesFromGcsUri( PCollectionView<Map<String, ImageContext>> contextSideInput, List<Feature> features, long batchSize, int desiredRequestParallelism) { return new AnnotateImagesFromGcsUri( contextSideInput, features, batchSize, desiredRequestParallelism); }
@Test public void shouldConvertStringToRequest() { CloudVision.AnnotateImagesFromGcsUri annotateImagesFromGcsUri = CloudVision.annotateImagesFromGcsUri(null, features, 1, 1); AnnotateImageRequest request = annotateImagesFromGcsUri.mapToRequest(TEST_URI, null); assertEquals(1, request.getFeaturesCount()); assertEquals(TEST_URI, request.getImage().getSource().getGcsImageUri()); }
@Override public @NotNull INode enrich(@NotNull INode node) { if (node instanceof Signature signature) { return enrich(signature); } return node; }
@Test void shaAndRSA() { DetectionLocation testDetectionLocation = new DetectionLocation("testfile", 1, 1, List.of("test"), () -> "SSL"); final RSA rsa = new RSA(Signature.class, testDetectionLocation); rsa.put(new SHA2(224, new SHA2(512, testDetectionLocation), testDetectionLocation)); this.logBefore(rsa); final SignatureEnricher signatureEnricher = new SignatureEnricher(); final INode enriched = signatureEnricher.enrich(rsa); this.logAfter(enriched); assertThat(rsa.hasChildOfType(Oid.class)).isPresent(); assertThat(rsa.hasChildOfType(Oid.class).get().asString()) .isEqualTo("1.2.840.113549.1.1.15"); }
public static DnsServerAddresses singleton(final InetSocketAddress address) { checkNotNull(address, "address"); if (address.isUnresolved()) { throw new IllegalArgumentException("cannot use an unresolved DNS server address: " + address); } return new SingletonDnsServerAddresses(address); }
@Test public void testSingleton() { DnsServerAddresses seq = DnsServerAddresses.singleton(ADDR1); // Should return the same iterator instance for least possible footprint. assertThat(seq.stream(), is(sameInstance(seq.stream()))); DnsServerAddressStream i = seq.stream(); assertNext(i, ADDR1); assertNext(i, ADDR1); assertNext(i, ADDR1); }
public Analysis analyze(Statement statement) { return analyze(statement, false); }
@Test public void testAnalyze() { analyze("ANALYZE t1"); analyze("ANALYZE t1 WITH (p1 = 'p1')"); assertFails(DUPLICATE_PROPERTY, ".* Duplicate property: p1", "ANALYZE t1 WITH (p1 = 'p1', p2 = 2, p1 = 'p3')"); assertFails(DUPLICATE_PROPERTY, ".* Duplicate property: p1", "ANALYZE t1 WITH (p1 = 'p1', \"p1\" = 'p2')"); }
public static boolean sendTimeoutNowAndStop(final ThreadId id, final int timeoutMs) { final Replicator r = (Replicator) id.lock(); if (r == null) { return false; } // id unlock in sendTimeoutNow r.sendTimeoutNow(true, true, timeoutMs); return true; }
@Test public void testSendTimeoutNowAndStop() { final Replicator r = getReplicator(); this.id.unlock(); r.setHasSucceeded(); assertEquals(0, r.getTimeoutNowIndex()); assertNull(r.getTimeoutNowInFly()); assertTrue(Replicator.sendTimeoutNowAndStop(this.id, 10)); assertEquals(0, r.getTimeoutNowIndex()); assertNull(r.getTimeoutNowInFly()); final RpcRequests.TimeoutNowRequest request = createTimeoutnowRequest(); Mockito.verify(this.rpcService).timeoutNow(Matchers.eq(this.opts.getPeerId().getEndpoint()), eq(request), eq(10), Mockito.any()); }
@Override public void define(Context context) { NewController controller = context.createController(MeasuresWsParameters.CONTROLLER_MEASURES) .setSince("5.4") .setDescription("Get components or children with specified measures."); for (MeasuresWsAction action : actions) { action.define(controller); } controller.done(); }
@Test public void define_ws() { WebService.Context context = new WebService.Context(); underTest.define(context); WebService.Controller controller = context.controller("api/measures"); assertThat(controller).isNotNull(); assertThat(controller.since()).isEqualTo("5.4"); }
public int validate( final ServiceContext serviceContext, final List<ParsedStatement> statements, final SessionProperties sessionProperties, final String sql ) { requireSandbox(serviceContext); final KsqlExecutionContext ctx = requireSandbox(snapshotSupplier.apply(serviceContext)); final Injector injector = injectorFactory.apply(ctx, serviceContext); final KsqlConfig ksqlConfig = ctx.getKsqlConfig(); int numPersistentQueries = 0; for (final ParsedStatement parsed : statements) { final PreparedStatement<?> prepared = ctx.prepare( parsed, (isVariableSubstitutionEnabled(sessionProperties, ksqlConfig) ? sessionProperties.getSessionVariables() : Collections.emptyMap()) ); final ConfiguredStatement<?> configured = ConfiguredStatement.of(prepared, SessionConfig.of(ksqlConfig, sessionProperties.getMutableScopedProperties()) ); final int currNumPersistentQueries = validate( serviceContext, configured, sessionProperties, ctx, injector ); numPersistentQueries += currNumPersistentQueries; if (currNumPersistentQueries > 0 && QueryCapacityUtil.exceedsPersistentQueryCapacity(ctx, ksqlConfig)) { QueryCapacityUtil.throwTooManyActivePersistentQueriesException(ctx, ksqlConfig, sql); } } return numPersistentQueries; }
@Test public void shouldThrowIfNoValidatorAvailable() { // Given: final List<ParsedStatement> statements = givenParsed("EXPLAIN X;"); // When: final Exception e = assertThrows( KsqlStatementException.class, () -> validator.validate(serviceContext, statements, sessionProperties, "sql") ); // Then: assertThat(e.getMessage(), containsString( "Do not know how to validate statement")); }
public RuntimeOptionsBuilder parse(Map<String, String> properties) { return parse(properties::get); }
@Test void should_parse_filter_name() { properties.put(Constants.FILTER_NAME_PROPERTY_NAME, "Test.*"); RuntimeOptions options = cucumberPropertiesParser.parse(properties).build(); assertThat(options.getNameFilters().get(0).pattern(), equalTo( "Test.*")); }
public CruiseConfig deserializeConfig(String content) throws Exception { String md5 = md5Hex(content); Element element = parseInputStream(new ByteArrayInputStream(content.getBytes())); LOGGER.debug("[Config Save] Updating config cache with new XML"); CruiseConfig configForEdit = classParser(element, BasicCruiseConfig.class, configCache, new GoCipher(), registry, new ConfigReferenceElements()).parse(); setMd5(configForEdit, md5); configForEdit.setOrigins(new FileConfigOrigin()); return configForEdit; }
@Test void shouldLoadMaterialNameIfPresent() throws Exception { CruiseConfig config = xmlLoader.deserializeConfig(MATERIAL_WITH_NAME); MaterialConfigs materialConfigs = config.pipelineConfigByName(new CaseInsensitiveString("pipeline")).materialConfigs(); assertThat(materialConfigs.get(0).getName()).isEqualTo(new CaseInsensitiveString("svn")); assertThat(materialConfigs.get(1).getName()).isEqualTo(new CaseInsensitiveString("hg")); }
public static IpPrefix valueOf(int address, int prefixLength) { return new IpPrefix(IpAddress.valueOf(address), prefixLength); }
@Test(expected = IllegalArgumentException.class) public void testInvalidValueOfByteArrayTooLongPrefixLengthIPv6() { IpPrefix ipPrefix; byte[] value; value = new byte[] {0x11, 0x11, 0x22, 0x22, 0x33, 0x33, 0x44, 0x44, 0x55, 0x55, 0x66, 0x66, 0x77, 0x77, (byte) 0x88, (byte) 0x88}; ipPrefix = IpPrefix.valueOf(IpAddress.Version.INET6, value, 129); }
@Override public Timer timer(String name) { return NoopTimer.INSTANCE; }
@Test public void accessingATimerRegistersAndReusesIt() { final Timer timer1 = registry.timer("thing"); final Timer timer2 = registry.timer("thing"); assertThat(timer1).isExactlyInstanceOf(NoopMetricRegistry.NoopTimer.class); assertThat(timer2).isExactlyInstanceOf(NoopMetricRegistry.NoopTimer.class); assertThat(timer1).isSameAs(timer2); verify(listener, never()).onTimerAdded("thing", timer1); }
public static <T> Interner<T> createWeakInterner(){ return new WeakInterner<>(); }
@SuppressWarnings("StringOperationCanBeSimplified") @Test public void weakTest(){ final Interner<String> interner = InternUtil.createWeakInterner(); String a1 = RandomUtil.randomString(RandomUtil.randomInt(100)); String a2 = new String(a1); assertNotSame(a1, a2); assertSame(interner.intern(a1), interner.intern(a2)); }
public static boolean isValidCard(String idCard) { if (StrUtil.isBlank(idCard)) { return false; } //idCard = idCard.trim(); int length = idCard.length(); switch (length) { case 18:// 18位身份证 return isValidCard18(idCard); case 15:// 15位身份证 return isValidCard15(idCard); case 10: {// 10位身份证,港澳台地区 String[] cardVal = isValidCard10(idCard); return null != cardVal && "true".equals(cardVal[2]); } default: return false; } }
@Test public void isValidCardTest() { boolean valid = IdcardUtil.isValidCard(ID_18); assertTrue(valid); boolean valid15 = IdcardUtil.isValidCard(ID_15); assertTrue(valid15); assertTrue(IdcardUtil.isValidCard(FOREIGN_ID_18)); // 无效 String idCard = "360198910283844"; assertFalse(IdcardUtil.isValidCard(idCard)); // 生日无效 idCard = "201511221897205960"; assertFalse(IdcardUtil.isValidCard(idCard)); // 生日无效 idCard = "815727834224151"; assertFalse(IdcardUtil.isValidCard(idCard)); }
public static void verifyKafkaBrokers(Properties props) { //ensure bootstrap.servers is assigned String brokerList = getString(BOOTSTRAP_SERVERS_CONFIG, props); //usually = "bootstrap.servers" String[] brokers = brokerList.split(","); for (String broker : brokers) { checkArgument( broker.contains(":"), "Proper broker formatting requires a \":\" between the host and the port (input=" + broker + ")" ); String host = broker.substring(0, broker.indexOf(":")); //we could validate the host is we wanted to String port = broker.substring(broker.indexOf(":") + 1); parseInt(port);//every port should be an integer } }
/** A well-formed single-broker list passes validation without throwing. */
@Test public void verifyKafkaBrokers_happyPath() { Properties props = new Properties(); props.setProperty("bootstrap.servers", "localhost:9092"); //does nothing when input is valid verifyKafkaBrokers(props); }
/**
 * Looks up the latest active release for the given app/cluster/namespace by
 * delegating to the release service. The {@code clientMessages} parameter is
 * intentionally unused in this implementation (no message-based lookup here).
 */
@Override protected Release findLatestActiveRelease(String configAppId, String configClusterName, String configNamespace, ApolloNotificationMessages clientMessages) { return releaseService.findLatestActiveRelease(configAppId, configClusterName, configNamespace); }
@Test
public void testLoadConfigWithDefaultClusterWithDataCenterRelease() throws Exception {
    // Stub a release for the data-center cluster.
    when(releaseService.findLatestActiveRelease(someConfigAppId, someDataCenter, defaultNamespaceName))
        .thenReturn(someRelease);

    Release loaded = configService.loadConfig(someClientAppId, someClientIp, someClientLabel,
        someConfigAppId, defaultClusterName, defaultNamespaceName, someDataCenter,
        someNotificationMessages);

    // The data-center cluster is consulted exactly once and its release is returned.
    verify(releaseService, times(1))
        .findLatestActiveRelease(someConfigAppId, someDataCenter, defaultNamespaceName);
    assertEquals(someRelease, loaded);
}
/**
 * Parses a duration of the form {@code "<number> <unit>"} (exactly two
 * whitespace-separated tokens), e.g. {@code "98 Day"}.
 *
 * @param text the text to parse
 * @return the parsed duration
 * @throws IllegalArgumentException if the text is not a valid duration;
 *         the cause and message of the underlying failure are preserved
 */
public static Duration parse(final String text) {
    try {
        final String[] tokens = text.split("\\s");
        if (tokens.length != 2) {
            throw new IllegalArgumentException("Expected 2 tokens, got: " + tokens.length);
        }
        return buildDuration(parseNumeric(tokens[0]), tokens[1]);
    } catch (final Exception e) {
        // Wrap every failure so callers get a uniform error with full context.
        throw new IllegalArgumentException("Invalid duration: '" + text + "'. " + e.getMessage(), e);
    }
}
/** The singular unit spelling "Day" is accepted and mapped to days. */
@Test public void shouldSupportDays() { assertThat(DurationParser.parse("98 Day"), is(Duration.ofDays(98))); }
/**
 * Validates an expiration claim value: it must be present and non-negative.
 *
 * @param claimName  the claim's name, used in error messages
 * @param claimValue the claim's value, may be {@code null}
 * @return the validated value as a primitive {@code long}
 * @throws ValidateException if the value is {@code null} or negative
 */
public static long validateExpiration(String claimName, Long claimValue) throws ValidateException {
    if (claimValue == null) {
        throw new ValidateException(String.format("%s value must be non-null", claimName));
    }
    if (claimValue < 0) {
        throw new ValidateException(String.format("%s value must be non-negative; value given was \"%s\"", claimName, claimValue));
    }
    return claimValue;
}
/** A negative expiration value must be rejected with a ValidateException. */
@Test public void testValidateExpirationDisallowsNegatives() { assertThrows(ValidateException.class, () -> ClaimValidationUtils.validateExpiration("exp", -1L)); }
/**
 * Logs the message at ERROR level via the delegate logger and also forwards
 * it to the JobRunr job dashboard.
 */
@Override public void error(String msg) { logger.error(msg); logErrorToJobDashboard(msg); }
/** Without a job in context, the message still reaches the delegate SLF4J logger. */
@Test void testErrorLoggingWithoutJob() { jobRunrDashboardLogger.error("simple message"); verify(slfLogger).error("simple message"); }
/**
 * Targets a Feign client, wiring in circuit-breaker fallbacks when configured.
 * Non-circuit-breaker builders are targeted directly; otherwise a fallback
 * class, then a fallback factory, then a plain circuit-breaker build is tried,
 * in that order.
 */
@Override
public <T> T target(FeignClientFactoryBean factory, Feign.Builder feign, FeignClientFactory context, Target.HardCodedTarget<T> target) {
    if (!(feign instanceof PolarisFeignCircuitBreaker.Builder)) {
        return feign.target(target);
    }
    PolarisFeignCircuitBreaker.Builder cbBuilder = (PolarisFeignCircuitBreaker.Builder) feign;
    // Prefer the context id as the circuit-breaker name, falling back to the client name.
    String name = StringUtils.hasText(factory.getContextId()) ? factory.getContextId() : factory.getName();
    Class<?> fallback = factory.getFallback();
    if (fallback != void.class) {
        return targetWithFallback(name, context, target, cbBuilder, fallback);
    }
    Class<?> fallbackFactory = factory.getFallbackFactory();
    if (fallbackFactory != void.class) {
        return targetWithFallbackFactory(name, context, target, cbBuilder, fallbackFactory);
    }
    return builder(name, cbBuilder).target(target);
}
@Test
public void testTarget2() {
    // A fallback class whose instance cannot be resolved from the client
    // factory must cause targeting to fail with an IllegalStateException.
    FeignClientFactoryBean factoryBean = mock(FeignClientFactoryBean.class);
    doReturn(TestApi.class).when(factoryBean).getFallback();
    doReturn("test").when(factoryBean).getName();
    FeignClientFactory clientFactory = mock(FeignClientFactory.class);
    doReturn(null).when(clientFactory).getInstance("test", TestApi.class);
    PolarisFeignCircuitBreakerTargeter targeter =
        new PolarisFeignCircuitBreakerTargeter(circuitBreakerFactory, circuitBreakerNameResolver);
    assertThatThrownBy(() -> targeter.target(factoryBean, new PolarisFeignCircuitBreaker.Builder(),
        clientFactory, new Target.HardCodedTarget<>(TestApi.class, "/test")))
        .isInstanceOf(IllegalStateException.class);
}
/**
 * Analyzes a predicate tree, computing its total size, per-subtree sizes,
 * and the minimum feature count (rounded up, plus one when the tree contains
 * a negation predicate).
 */
public static PredicateTreeAnalyzerResult analyzePredicateTree(Predicate predicate) {
    AnalyzerContext context = new AnalyzerContext();
    int treeSize = aggregatePredicateStatistics(predicate, false, context);
    double rawMinFeature = findMinFeature(predicate, false, context);
    int minFeature = (int) Math.ceil(rawMinFeature);
    if (context.hasNegationPredicate) {
        minFeature += 1; // a negation adds one extra required feature
    }
    return new PredicateTreeAnalyzerResult(minFeature, treeSize, context.subTreeSizes);
}
@Test
void require_that_minfeature_is_sum_for_and() {
    Predicate conjunction = and(
        feature("foo").inSet("bar"),
        feature("baz").inSet("qux"),
        feature("quux").inSet("corge"));
    PredicateTreeAnalyzerResult result = PredicateTreeAnalyzer.analyzePredicateTree(conjunction);
    // An AND of three single-feature children needs all three features.
    assertEquals(3, result.minFeature);
    assertEquals(3, result.treeSize);
    assertEquals(3, result.sizeMap.size());
    // Each child subtree has size 1.
    assertSizeMapContains(result, pred(conjunction).child(0), 1);
    assertSizeMapContains(result, pred(conjunction).child(1), 1);
    assertSizeMapContains(result, pred(conjunction).child(2), 1);
}
public synchronized CryptoKey getOrCreateCryptoKey(String keyRingId, String keyName) { // Get the keyring, creating it if it does not already exist if (keyRing == null) { maybeCreateKeyRing(keyRingId); } try (KeyManagementServiceClient client = clientFactory.getKMSClient()) { // Build the symmetric key to create. CryptoKey keyToCreate = CryptoKey.newBuilder() .setPurpose(CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT) .setVersionTemplate( CryptoKeyVersionTemplate.newBuilder() .setAlgorithm( CryptoKeyVersion.CryptoKeyVersionAlgorithm.GOOGLE_SYMMETRIC_ENCRYPTION)) .build(); LOG.info("Checking if symmetric key {} already exists in KMS.", keyName); // Loop through the existing keys in the given keyring to see if the // key already exists. String newKeyName = CryptoKeyName.of(projectId, region, keyRingId, keyName).toString(); Optional<CryptoKey> existingKey = StreamSupport.stream( client.listCryptoKeys(keyRing.getName()).iterateAll().spliterator(), false) .filter(kRing -> kRing.getName().equals(newKeyName)) .findFirst(); // Create the symmetric key if it does not exist, otherwise, return the found key. CryptoKey cryptoKey; if (!existingKey.isPresent()) { LOG.info("Symmetric key {} does not exist. Creating the key in KMS.", keyName); cryptoKey = client.createCryptoKey(keyRing.getName(), keyName, keyToCreate); LOG.info("Created symmetric key {}.", cryptoKey.getName()); } else { LOG.info("Symmetric key {} already exists. Retrieving the key from KMS.", keyName); cryptoKey = existingKey.get(); LOG.info("Retrieved symmetric key {}.", cryptoKey.getName()); } return cryptoKey; } }
/** A client-factory failure must surface as a KMSResourceManagerException. */
@Test public void testGetOrCreateCryptoKeyShouldThrowErrorWhenClientFailsToConnect() { when(kmsClientFactory.getKMSClient()).thenThrow(KMSResourceManagerException.class); assertThrows( KMSResourceManagerException.class, () -> testManager.getOrCreateCryptoKey(KEYRING_ID, KEY_ID)); }
public Span nextSpan(ConsumerRecord<?, ?> record) { // Even though the type is ConsumerRecord, this is not a (remote) consumer span. Only "poll" // events create consumer spans. Since this is a processor span, we use the normal sampler. TraceContextOrSamplingFlags extracted = extractAndClearTraceIdHeaders(processorExtractor, record.headers(), record.headers()); Span result = tracer.nextSpan(extracted); if (extracted.context() == null && !result.isNoop()) { addTags(record, result); } return result; }
/** With no incoming trace context, the span carries only topic and key tags. */
@Test void nextSpan_should_tag_topic_and_key_when_no_incoming_context() { kafkaTracing.nextSpan(consumerRecord).start().finish(); assertThat(spans.get(0).tags()) .containsOnly(entry("kafka.topic", TEST_TOPIC), entry("kafka.key", TEST_KEY)); }
/**
 * Executes the scenario's steps in order, recording each step result and
 * guaranteeing post-run cleanup even when a step crashes.
 */
@Override
public void run() {
    try {
        // make sure we call afterRun() even on crashes
        // and operate countdown latches, else we may hang the parallel runner
        if (steps == null) {
            // NOTE(review): beforeRun() presumably initializes 'steps' and may
            // set 'skipped' — confirm against its definition elsewhere.
            beforeRun();
        }
        if (skipped) {
            return;
        }
        int count = steps.size();
        int index = 0;
        // nextStepIndex() drives iteration (supports debug step-back/forward)
        while ((index = nextStepIndex()) < count) {
            currentStep = steps.get(index);
            execute(currentStep);
            if (currentStepResult != null) { // can be null if debug step-back or hook skip
                result.addStepResult(currentStepResult);
            }
        }
    } catch (Exception e) {
        // preserve the partial result of the step that crashed, then record a synthetic failure
        if (currentStepResult != null) {
            result.addStepResult(currentStepResult);
        }
        logError("scenario [run] failed\n" + StringUtils.throwableToString(e));
        currentStepResult = result.addFakeStepResult("scenario [run] failed", e);
    } finally {
        if (!skipped) {
            afterRun();
            // optionally abort the whole suite on the first failed scenario
            if (isFailed() && engine.getConfig().isAbortSuiteOnFailure()) {
                featureRuntime.suite.abort();
            }
        }
        if (caller.isNone()) {
            logAppender.close(); // reclaim memory
        }
    }
}
/** karate.eval of an arrow function yields a callable JS function variable. */
@Test void testEval() { run( "def foo = karate.eval('() => 1 + 2')", "def bar = foo()" ); assertTrue(sr.engine.vars.get("foo").isJsFunction()); matchVar("bar", 3); }
/**
 * Transforms the value of each input record via the supplied value
 * transformer, with access to the named state stores.
 *
 * @deprecated retained for backward compatibility with the old
 *             ValueTransformerSupplier API
 * @throws NullPointerException if {@code valueTransformerSupplier} is null
 */
@Override @Deprecated public <VR> KStream<K, VR> transformValues(final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, ? extends VR> valueTransformerSupplier, final String... stateStoreNames) { Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null"); return doTransformValues( toValueTransformerWithKeySupplier(valueTransformerSupplier), NamedInternal.empty(), stateStoreNames); }
/** A null transformer supplier (with store names) triggers an NPE with a clear message. */
@Test @SuppressWarnings("deprecation") public void shouldNotAllowNullValueTransformerSupplierOnTransformValuesWithStores() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.transformValues( (org.apache.kafka.streams.kstream.ValueTransformerSupplier<Object, Object>) null, "storeName")); assertThat(exception.getMessage(), equalTo("valueTransformerSupplier can't be null")); }
/**
 * Publishes the node's SQL service counters (submitted queries and executed
 * streaming queries) into the metrics collection context.
 */
@Override
public void provideMetrics(Node node, MetricsCollectionContext context) {
    InternalSqlService sql = node.getNodeEngine().getSqlService();
    context.collect(SQL_QUERIES_SUBMITTED, sql.getSqlQueriesSubmittedCount());
    context.collect(SQL_STREAMING_QUERIES_EXECUTED, sql.getSqlStreamingQueriesExecutedCount());
}
/** Both SQL counters are forwarded to the context and nothing else is collected. */
@Test public void test_SqlQueries() { // given when(sqlService.getSqlQueriesSubmittedCount()).thenReturn(5L); when(sqlService.getSqlStreamingQueriesExecutedCount()).thenReturn(3L); // when sqlMetricsProvider.provideMetrics(node, context); // then verify(context).collect(SQL_QUERIES_SUBMITTED, 5L); verify(context).collect(SQL_STREAMING_QUERIES_EXECUTED, 3L); verifyNoMoreInteractions(context); }
/**
 * Parses the optional Jetty options element from the given config node.
 *
 * @param node the slave-server config node
 * @return a map from Kettle Carte Jetty option keys to values, or
 *         {@code null} when the options element is absent entirely
 *         (callers distinguish "absent" from "present but empty")
 */
protected Map<String, String> parseJettyOptions( Node node ) {
  Node jettyOptionsNode = XMLHandler.getSubNode( node, XML_TAG_JETTY_OPTIONS );
  if ( jettyOptionsNode == null ) {
    return null;
  }
  Map<String, String> jettyOptions = new HashMap<>();
  addJettyOption( jettyOptions, jettyOptionsNode, XML_TAG_ACCEPTORS, Const.KETTLE_CARTE_JETTY_ACCEPTORS );
  addJettyOption( jettyOptions, jettyOptionsNode, XML_TAG_ACCEPT_QUEUE_SIZE,
    Const.KETTLE_CARTE_JETTY_ACCEPT_QUEUE_SIZE );
  addJettyOption( jettyOptions, jettyOptionsNode, XML_TAG_LOW_RES_MAX_IDLE_TIME,
    Const.KETTLE_CARTE_JETTY_RES_MAX_IDLE_TIME );
  return jettyOptions;
}

/** Reads the tag once and, when present, stores its value under the given option key. */
private static void addJettyOption( Map<String, String> options, Node optionsNode, String tag, String optionKey ) {
  String value = XMLHandler.getTagValue( optionsNode, tag );
  if ( value != null ) {
    options.put( optionKey, value );
  }
}
@Test
public void testParseJettyOption_EmptyOptionsNode() throws KettleXMLException {
  Node configNode = getConfigNode( getConfigWithEmptyOptionsNode() );
  // An options element with no recognized tags yields an empty, non-null map.
  Map<String, String> jettyOptions = slServerConfig.parseJettyOptions( configNode );
  assertNotNull( jettyOptions );
  assertEquals( 0, jettyOptions.size() );
}
/**
 * Builds the root file name for the given VFS connection, keyed by its name.
 *
 * @param details the connection details; must have a non-empty name
 * @return the connection's root file name
 * @throws IllegalArgumentException if the connection name is empty or null
 */
@NonNull public ConnectionFileName getConnectionRootFileName( @NonNull VFSConnectionDetails details ) { String connectionName = details.getName(); if ( StringUtils.isEmpty( connectionName ) ) { throw new IllegalArgumentException( "Unnamed connection" ); } return new ConnectionFileName( connectionName ); }
/** An empty connection name must be rejected with an IllegalArgumentException. */
@Test( expected = IllegalArgumentException.class ) public void testGetConnectionRootFileNameThrowsIllegalArgumentGivenConnectionHasEmptyName() { when( vfsConnectionDetails.getName() ).thenReturn( "" ); vfsConnectionManagerHelper.getConnectionRootFileName( vfsConnectionDetails ); }
/** Returns a fresh {@link Builder} for constructing instances. */
public static Builder builder() { return new Builder(); }
/**
 * Exercises the BtcFormat Builder end-to-end: equivalence with the factory
 * methods, pattern/style interactions, custom symbols/codes, quoted pattern
 * prefixes, and immutability of built formatters.
 * Ignored because locale formatting output differs across OpenJDK versions.
 */
@Ignore("non-determinism between OpenJDK versions")
@Test
public void builderTest() {
    // Pick a locale different from the default so locale-sensitive cases are meaningful.
    Locale locale;
    if (Locale.getDefault().equals(GERMANY)) locale = FRANCE;
    else locale = GERMANY;

    // A default-configured builder matches the default coin instance.
    assertEquals(BtcFormat.builder().build(), BtcFormat.getCoinInstance());

    // scale() and style() are mutually exclusive, in either order.
    try {
        BtcFormat.builder().scale(0).style(CODE);
        fail("Invoking both scale() and style() on a Builder should raise exception");
    } catch (IllegalStateException e) {}
    try {
        BtcFormat.builder().style(CODE).scale(0);
        fail("Invoking both style() and scale() on a Builder should raise exception");
    } catch (IllegalStateException e) {}

    // Builder output matches each corresponding factory method.
    BtcFormat built = BtcFormat.builder().style(BtcAutoFormat.Style.CODE).fractionDigits(4).build();
    assertEquals(built, BtcFormat.getCodeInstance(4));
    built = BtcFormat.builder().style(BtcAutoFormat.Style.SYMBOL).fractionDigits(4).build();
    assertEquals(built, BtcFormat.getSymbolInstance(4));
    built = BtcFormat.builder().scale(0).build();
    assertEquals(built, BtcFormat.getCoinInstance());
    built = BtcFormat.builder().scale(3).build();
    assertEquals(built, BtcFormat.getMilliInstance());
    built = BtcFormat.builder().scale(6).build();
    assertEquals(built, BtcFormat.getMicroInstance());
    built = BtcFormat.builder().locale(locale).scale(0).build();
    assertEquals(built, BtcFormat.getCoinInstance(locale));
    built = BtcFormat.builder().locale(locale).scale(3).build();
    assertEquals(built, BtcFormat.getMilliInstance(locale));
    built = BtcFormat.builder().locale(locale).scale(6).build();
    assertEquals(built, BtcFormat.getMicroInstance(locale));
    built = BtcFormat.builder().minimumFractionDigits(3).scale(0).build();
    assertEquals(built, BtcFormat.getCoinInstance(3));
    built = BtcFormat.builder().minimumFractionDigits(3).scale(3).build();
    assertEquals(built, BtcFormat.getMilliInstance(3));
    built = BtcFormat.builder().minimumFractionDigits(3).scale(6).build();
    assertEquals(built, BtcFormat.getMicroInstance(3));
    built = BtcFormat.builder().fractionGroups(3,4).scale(0).build();
    assertEquals(built, BtcFormat.getCoinInstance(2,3,4));
    built = BtcFormat.builder().fractionGroups(3,4).scale(3).build();
    assertEquals(built, BtcFormat.getMilliInstance(2,3,4));
    built = BtcFormat.builder().fractionGroups(3,4).scale(6).build();
    assertEquals(built, BtcFormat.getMicroInstance(2,3,4));

    // Custom patterns, localized patterns, and pattern/style combinations.
    built = BtcFormat.builder().pattern("#,####.#").scale(6).locale(GERMANY).build();
    assertEquals("100.0000,00", built.format(COIN));
    built = BtcFormat.builder().pattern("#,####.#").scale(6).locale(GERMANY).build();
    assertEquals("-100.0000,00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().localizedPattern("#.####,#").scale(6).locale(GERMANY).build();
    assertEquals("100.0000,00", built.format(COIN));
    built = BtcFormat.builder().pattern("¤#,####.#").style(CODE).locale(GERMANY).build();
    assertEquals("฿-1,00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().pattern("¤¤ #,####.#").style(SYMBOL).locale(GERMANY).build();
    assertEquals("BTC -1,00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().pattern("¤¤##,###.#").scale(3).locale(US).build();
    assertEquals("mBTC1,000.00", built.format(COIN));
    built = BtcFormat.builder().pattern("¤ ##,###.#").scale(3).locale(US).build();
    assertEquals("₥฿ 1,000.00", built.format(COIN));
    try {
        BtcFormat.builder().pattern("¤¤##,###.#").scale(4).locale(US).build().format(COIN);
        fail("Pattern with currency sign and non-standard denomination should raise exception");
    } catch (IllegalStateException e) {}
    try {
        BtcFormat.builder().localizedPattern("¤¤##,###.#").scale(4).locale(US).build().format(COIN);
        fail("Localized pattern with currency sign and non-standard denomination should raise exception");
    } catch (IllegalStateException e) {}

    // Custom symbol and code, including denomination prefixes (m/µ).
    built = BtcFormat.builder().style(SYMBOL).symbol("B\u20e6").locale(US).build();
    assertEquals("B⃦1.00", built.format(COIN));
    built = BtcFormat.builder().style(CODE).code("XBT").locale(US).build();
    assertEquals("XBT 1.00", built.format(COIN));
    built = BtcFormat.builder().style(SYMBOL).symbol("$").locale(GERMANY).build();
    assertEquals("1,00 $", built.format(COIN));
    // Setting the currency code on a DecimalFormatSymbols object can affect the currency symbol.
    built = BtcFormat.builder().style(SYMBOL).code("USD").locale(US).build();
    assertEquals("฿1.00", built.format(COIN));
    built = BtcFormat.builder().style(SYMBOL).symbol("B\u20e6").locale(US).build();
    assertEquals("₥B⃦1.00", built.format(COIN.divide(1000)));
    built = BtcFormat.builder().style(CODE).code("XBT").locale(US).build();
    assertEquals("mXBT 1.00", built.format(COIN.divide(1000)));
    built = BtcFormat.builder().style(SYMBOL).symbol("B\u20e6").locale(US).build();
    assertEquals("µB⃦1.00", built.format(valueOf(100)));
    built = BtcFormat.builder().style(CODE).code("XBT").locale(US).build();
    assertEquals("µXBT 1.00", built.format(valueOf(100)));

    /* The prefix of a pattern can have number symbols in quotes.
     * Make sure our custom negative-subpattern creator handles this. */
    built = BtcFormat.builder().pattern("'#'¤#0").scale(0).locale(US).build();
    assertEquals("#฿-1.00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().pattern("'#0'¤#0").scale(0).locale(US).build();
    assertEquals("#0฿-1.00", built.format(COIN.multiply(-1)));
    // this is an escaped quote between two hash marks in one set of quotes, not
    // two adjacent quote-enclosed hash-marks:
    built = BtcFormat.builder().pattern("'#''#'¤#0").scale(0).locale(US).build();
    assertEquals("#'#฿-1.00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().pattern("'#0''#'¤#0").scale(0).locale(US).build();
    assertEquals("#0'#฿-1.00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().pattern("'#0#'¤#0").scale(0).locale(US).build();
    assertEquals("#0#฿-1.00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().pattern("'#0'E'#'¤#0").scale(0).locale(US).build();
    assertEquals("#0E#฿-1.00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().pattern("E'#0''#'¤#0").scale(0).locale(US).build();
    assertEquals("E#0'#฿-1.00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().pattern("E'#0#'¤#0").scale(0).locale(US).build();
    assertEquals("E#0#฿-1.00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().pattern("E'#0''''#'¤#0").scale(0).locale(US).build();
    assertEquals("E#0''#฿-1.00", built.format(COIN.multiply(-1)));
    built = BtcFormat.builder().pattern("''#0").scale(0).locale(US).build();
    assertEquals("'-1.00", built.format(COIN.multiply(-1)));

    // immutability check for fixed-denomination formatters, w/ & w/o custom pattern
    BtcFormat a = BtcFormat.builder().scale(3).build();
    BtcFormat b = BtcFormat.builder().scale(3).build();
    assertEquals(a, b);
    assertEquals(a.hashCode(), b.hashCode());
    a.format(COIN.multiply(1000000));
    assertEquals(a, b);
    assertEquals(a.hashCode(), b.hashCode());
    b.format(COIN.divide(1000000));
    assertEquals(a, b);
    assertEquals(a.hashCode(), b.hashCode());
    a = BtcFormat.builder().scale(3).pattern("¤#.#").build();
    b = BtcFormat.builder().scale(3).pattern("¤#.#").build();
    assertEquals(a, b);
    assertEquals(a.hashCode(), b.hashCode());
    a.format(COIN.multiply(1000000));
    assertEquals(a, b);
    assertEquals(a.hashCode(), b.hashCode());
    b.format(COIN.divide(1000000));
    assertEquals(a, b);
    assertEquals(a.hashCode(), b.hashCode());
}
/**
 * GitHub webhook sessions never hold any global permission.
 *
 * @return always {@code false}, regardless of the permission queried
 */
@Override protected boolean hasPermissionImpl(GlobalPermission permission) { return false; }
@Test
public void hasPermissionImpl() {
    // A GitHub webhook session must be denied every global permission.
    for (GlobalPermission permission : GlobalPermission.values()) {
        assertThat(githubWebhookUserSession.hasPermissionImpl(permission)).isFalse();
    }
}
/**
 * Queries which consumer groups consume the given topic on the given broker.
 *
 * @param address       the broker address to query
 * @param requestHeader the query request header (carries the topic)
 * @param timeoutMillis remoting timeout in milliseconds
 * @return a future completing with the group list, or exceptionally with an
 *         {@code MQClientException} (non-success response) or the underlying
 *         transport failure
 */
@Override
public CompletableFuture<GroupList> queryTopicConsumeByWho(String address, QueryTopicConsumeByWhoRequestHeader requestHeader, long timeoutMillis) {
    CompletableFuture<GroupList> future = new CompletableFuture<>();
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.QUERY_TOPIC_CONSUME_BY_WHO, requestHeader);
    remotingClient.invoke(address, request, timeoutMillis).whenComplete((response, throwable) -> {
        if (throwable != null) {
            // Propagate transport failures; with the previous thenAccept the
            // returned future never completed when invoke failed exceptionally.
            future.completeExceptionally(throwable);
        } else if (response.getCode() == ResponseCode.SUCCESS) {
            GroupList groupList = GroupList.decode(response.getBody(), GroupList.class);
            future.complete(groupList);
        } else {
            log.warn("queryTopicConsumeByWho getResponseCommand failed, {} {}", response.getCode(), response.getRemark());
            future.completeExceptionally(new MQClientException(response.getCode(), response.getRemark()));
        }
    });
    return future;
}
@Test
public void assertQueryTopicConsumeByWhoWithSuccess() throws Exception {
    // Arrange a successful response carrying an empty group list.
    GroupList responseBody = new GroupList();
    setResponseSuccess(RemotingSerializable.encode(responseBody));
    QueryTopicConsumeByWhoRequestHeader requestHeader = mock(QueryTopicConsumeByWhoRequestHeader.class);

    GroupList decoded = mqClientAdminImpl
        .queryTopicConsumeByWho(defaultBrokerAddr, requestHeader, defaultTimeout)
        .get();

    // The decoded body is returned as-is: present but empty.
    assertNotNull(decoded);
    assertEquals(0, decoded.getGroupList().size());
}
/**
 * Orders virtual files by name, ignoring case.
 * NOTE(review): case-insensitive ordering may be inconsistent with equals()
 * if equality compares names case-sensitively — confirm against the equals
 * implementation of this class.
 */
@Override public final int compareTo(VirtualFile o) { return getName().compareToIgnoreCase(o.getName()); }
@Test
public void testCompareTo_GreaterThan() throws IOException {
    // Two siblings under the same parent; "child2" must sort after "child1".
    File parentFile = tmp.newFolder("parentFolder");
    VirtualFile smaller = new VirtualFileMinimalImplementation(new File(parentFile, "child1"));
    VirtualFile larger = new VirtualFileMinimalImplementation(new File(parentFile, "child2"));
    assertThat(larger.compareTo(smaller), greaterThan(0));
}