focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// InvocationHandler for a decorated Feign proxy: java.lang.Object methods (equals/hashCode/
// toString) are answered by this handler itself so proxy identity semantics hold; every other
// method is routed through the decorated dispatch table built at construction time.
//
// Fix: some proxy mechanisms pass a null args array for zero-argument methods, so args is
// null-checked before it is indexed (the original `args.length > 0` would NPE on a null array).
@Override
public Object invoke(final Object proxy, final Method method, final Object[] args) throws Throwable {
    switch (method.getName()) {
        case "equals":
            // Compare against the other proxy, or null when no argument was supplied.
            return equals(args != null && args.length > 0 ? args[0] : null);
        case "hashCode":
            return hashCode();
        case "toString":
            return toString();
        default:
            break;
    }
    // Delegate to the decorated function registered for this method.
    return decoratedDispatch.get(method).apply(args);
}
// Verifies that when the decorator supplies an alternative function, invoke() returns its
// result and the real method handler is never called.
@Test public void testDecorator() throws Throwable { feignDecorator.setAlternativeFunction(fnArgs -> "AlternativeFunction"); testSubject = new DecoratorInvocationHandler(target, dispatch, feignDecorator); final Object result = testSubject.invoke(testService, greetingMethod, new Object[0]); verify(methodHandler, times(0)).invoke(any()); assertThat(feignDecorator.isCalled()) .describedAs("FeignDecorator is called") .isTrue(); assertThat(result) .describedAs("Return of invocation") .isEqualTo("AlternativeFunction"); }
static JarFileWithEntryClass findOnlyEntryClass(Iterable<File> jarFiles) throws IOException { List<JarFileWithEntryClass> jarsWithEntryClasses = new ArrayList<>(); for (File jarFile : jarFiles) { findEntryClass(jarFile) .ifPresent( entryClass -> jarsWithEntryClasses.add( new JarFileWithEntryClass(jarFile, entryClass))); } int size = jarsWithEntryClasses.size(); if (size == 0) { throw new NoSuchElementException("No JAR with manifest attribute for entry class"); } if (size == 1) { return jarsWithEntryClasses.get(0); } // else: size > 1 throw new IllegalArgumentException( "Multiple JARs with manifest attribute for entry class: " + jarsWithEntryClasses); }
// Verifies findOnlyEntryClass picks the single JAR that declares an entry class,
// ignoring JARs whose manifest has no such attribute.
@Test void testFindOnlyEntryClassMultipleJarsWithSingleManifestEntry() throws IOException { File jarWithNoManifest = createJarFileWithManifest(ImmutableMap.of()); File jarFile = TestJob.getTestJobJar(); JarManifestParser.JarFileWithEntryClass jarFileWithEntryClass = JarManifestParser.findOnlyEntryClass(ImmutableList.of(jarWithNoManifest, jarFile)); assertThat(jarFileWithEntryClass.getEntryClass()) .isEqualTo(TestJob.class.getCanonicalName()); }
// Intentional no-op: cancellation is ignored in this state (the companion test confirms the
// job status is left untouched — presumably because the job is already terminal).
@Override public void cancel() {}
// Verifies cancel() on the finished state leaves the archived execution graph's status unchanged.
@Test void testCancelIgnored() { MockFinishedContext ctx = new MockFinishedContext(); createFinishedState(ctx).cancel(); assertThat(ctx.getArchivedExecutionGraph().getState()).isEqualTo(testJobStatus); }
@Override public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { int nextValue = nextValue(topic); List<PartitionInfo> availablePartitions = cluster.availablePartitionsForTopic(topic); if (!availablePartitions.isEmpty()) { int part = Utils.toPositive(nextValue) % availablePartitions.size(); return availablePartitions.get(part).partition(); } else { // no partitions are available, give a non-available partition int numPartitions = cluster.partitionsForTopic(topic).size(); return Utils.toPositive(nextValue) % numPartitions; } }
// Verifies round-robin spreads 30 sends evenly (10/10/10) over topicA's three partitions even
// when key bytes are supplied, and that interleaved sends to topicB do not perturb topicA's
// rotation (per-topic counters).
@Test public void testRoundRobinWithKeyBytes() { final String topicA = "topicA"; final String topicB = "topicB"; List<PartitionInfo> allPartitions = asList(new PartitionInfo(topicA, 0, NODES[0], NODES, NODES), new PartitionInfo(topicA, 1, NODES[1], NODES, NODES), new PartitionInfo(topicA, 2, NODES[2], NODES, NODES), new PartitionInfo(topicB, 0, NODES[0], NODES, NODES)); Cluster testCluster = new Cluster("clusterId", asList(NODES[0], NODES[1], NODES[2]), allPartitions, Collections.emptySet(), Collections.emptySet()); final Map<Integer, Integer> partitionCount = new HashMap<>(); final byte[] keyBytes = "key".getBytes(); Partitioner partitioner = new RoundRobinPartitioner(); for (int i = 0; i < 30; ++i) { int partition = partitioner.partition(topicA, null, keyBytes, null, null, testCluster); Integer count = partitionCount.get(partition); if (null == count) count = 0; partitionCount.put(partition, count + 1); if (i % 5 == 0) { partitioner.partition(topicB, null, keyBytes, null, null, testCluster); } } assertEquals(10, partitionCount.get(0).intValue()); assertEquals(10, partitionCount.get(1).intValue()); assertEquals(10, partitionCount.get(2).intValue()); }
// Adds one cluster's connector configuration under the given prefix: alias, bootstrap servers,
// TLS settings, then SASL authentication (TLS-client / PLAIN / SCRAM-SHA-256|512 / OAUTHBEARER),
// the resulting security protocol, the user-supplied config and any additional properties.
// NOTE(review): secrets are never inlined — passwords are referenced through
// "${file:...}" config-provider placeholders resolved at runtime.
static void addClusterToMirrorMaker2ConnectorConfig(Map<String, Object> config, KafkaMirrorMaker2ClusterSpec cluster, String configPrefix) { config.put(configPrefix + "alias", cluster.getAlias()); config.put(configPrefix + AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers()); String securityProtocol = addTLSConfigToMirrorMaker2ConnectorConfig(config, cluster, configPrefix); if (cluster.getAuthentication() != null) { if (cluster.getAuthentication() instanceof KafkaClientAuthenticationTls) { config.put(configPrefix + SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "PKCS12"); config.put(configPrefix + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, STORE_LOCATION_ROOT + cluster.getAlias() + KEYSTORE_SUFFIX); config.put(configPrefix + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "${file:" + CONNECTORS_CONFIG_FILE + ":" + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG + "}"); } else if (cluster.getAuthentication() instanceof KafkaClientAuthenticationPlain plainAuthentication) { securityProtocol = cluster.getTls() != null ? "SASL_SSL" : "SASL_PLAINTEXT"; config.put(configPrefix + SaslConfigs.SASL_MECHANISM, "PLAIN"); config.put(configPrefix + SaslConfigs.SASL_JAAS_CONFIG, AuthenticationUtils.jaasConfig("org.apache.kafka.common.security.plain.PlainLoginModule", Map.of("username", plainAuthentication.getUsername(), "password", "${file:" + CONNECTORS_CONFIG_FILE + ":" + cluster.getAlias() + ".sasl.password}"))); } else if (cluster.getAuthentication() instanceof KafkaClientAuthenticationScram scramAuthentication) { securityProtocol = cluster.getTls() != null ? "SASL_SSL" : "SASL_PLAINTEXT"; config.put(configPrefix + SaslConfigs.SASL_MECHANISM, scramAuthentication instanceof KafkaClientAuthenticationScramSha256 ? 
"SCRAM-SHA-256" : "SCRAM-SHA-512"); config.put(configPrefix + SaslConfigs.SASL_JAAS_CONFIG, AuthenticationUtils.jaasConfig("org.apache.kafka.common.security.scram.ScramLoginModule", Map.of("username", scramAuthentication.getUsername(), "password", "${file:" + CONNECTORS_CONFIG_FILE + ":" + cluster.getAlias() + ".sasl.password}"))); } else if (cluster.getAuthentication() instanceof KafkaClientAuthenticationOAuth oauthAuthentication) { securityProtocol = cluster.getTls() != null ? "SASL_SSL" : "SASL_PLAINTEXT"; config.put(configPrefix + SaslConfigs.SASL_MECHANISM, "OAUTHBEARER"); config.put(configPrefix + SaslConfigs.SASL_JAAS_CONFIG, oauthJaasConfig(cluster, oauthAuthentication)); config.put(configPrefix + SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"); } } // Security protocol config.put(configPrefix + AdminClientConfig.SECURITY_PROTOCOL_CONFIG, securityProtocol); config.putAll(cluster.getConfig().entrySet().stream() .collect(Collectors.toMap(entry -> configPrefix + entry.getKey(), Map.Entry::getValue))); config.putAll(cluster.getAdditionalProperties()); }
// Verifies the generated connector config for a cluster with no authentication:
// PLAINTEXT security protocol plus the prefixed alias/bootstrap/user-config entries.
@Test public void testAddClusterToMirrorMaker2ConnectorConfigWithoutAuthWithClusterConfig() { Map<String, Object> config = new HashMap<>(); KafkaMirrorMaker2ClusterSpec cluster = new KafkaMirrorMaker2ClusterSpecBuilder() .withAlias("sourceClusterAlias") .withBootstrapServers("sourceClusterAlias.sourceNamespace.svc:9092") .withConfig(Map.of("config.storage.replication.factor", "-1")) .build(); KafkaMirrorMaker2Connectors.addClusterToMirrorMaker2ConnectorConfig(config, cluster, PREFIX); assertThat(new TreeMap<>(config), is(new TreeMap<>(Map.of("prefix.alias", "sourceClusterAlias", "prefix.security.protocol", "PLAINTEXT", "prefix.bootstrap.servers", "sourceClusterAlias.sourceNamespace.svc:9092", "prefix.config.storage.replication.factor", "-1")))); }
// True when the connection URL targets the Oracle driver (case-insensitive prefix match;
// StringUtils handles a null URL by returning false).
@Override
public boolean matchesJdbcUrl(String jdbcConnectionURL) {
    final String oraclePrefix = "jdbc:oracle:";
    return StringUtils.startsWithIgnoreCase(jdbcConnectionURL, oraclePrefix);
}
// Verifies the dialect accepts jdbc:oracle: URLs and rejects other drivers.
@Test void matchesJdbcURL() { assertThat(underTest.matchesJdbcUrl("jdbc:oracle:thin:@localhost/XE")).isTrue(); assertThat(underTest.matchesJdbcUrl("jdbc:hsql:foo")).isFalse(); }
// Renders the view with the first renderer that claims it, timing the work under a
// per-view-class "rendering" metric. If no renderer matches (or a renderer fails with
// ViewRenderException) the error is surfaced as a WebApplicationException. The timer
// context is stopped in all cases via finally.
@Override public void writeTo(View t, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException { final Timer.Context context = metricRegistry.timer(name(t.getClass(), "rendering")).time(); try { for (ViewRenderer renderer : renderers) { if (renderer.isRenderable(t)) { renderer.render(t, detectLocale(requireNonNull(headers)), entityStream); return; } } throw new ViewRenderException("Unable to find a renderer for " + t.getTemplateName()); } catch (ViewRenderException e) { throw new WebApplicationException(e); } finally { context.stop(); } }
// Verifies that with no renderers registered, writeTo throws WebApplicationException caused
// exactly by ViewRenderException, and that the metric timer is still stopped.
@Test void writeToShouldThrowWhenNoValidRendererFound() { final ViewMessageBodyWriter writer = new ViewMessageBodyWriter(metricRegistry, Collections.emptyList()); when(metricRegistry.timer(anyString())).thenReturn(timer); when(timer.time()).thenReturn(timerContext); assertThatExceptionOfType(WebApplicationException.class) .isThrownBy(() -> writer.writeTo(view, Class.class, Class.class, new Annotation[]{}, new MediaType(), new MultivaluedHashMap<>(), stream)).withCauseExactlyInstanceOf(ViewRenderException.class); verify(timerContext).stop(); }
// Decompresses into a freshly created temporary directory (prefix "decompress").
// NOTE(review): nothing here deletes the temp directory — presumably the caller owns cleanup; confirm.
File decompress() throws IOException { return decompress(uncheck(() -> java.nio.file.Files.createTempDirectory("decompress")).toFile()); }
// Verifies a valid tar.gz application archive decompresses into the expected app layout.
@Test public void require_that_valid_tar_application_can_be_unpacked() throws IOException { File outFile = createTarFile(temporaryFolder.getRoot().toPath()); try (CompressedApplicationInputStream unpacked = streamFromTarGz(outFile)) { File outApp = unpacked.decompress(); assertTestApp(outApp); } }
// Returns only the built-in artifact configs from this collection, filtering out
// pluggable (and any other non-built-in) artifact types. Order is preserved.
public List<BuiltinArtifactConfig> getBuiltInArtifactConfigs() {
    final List<BuiltinArtifactConfig> builtins = new ArrayList<>();
    for (ArtifactTypeConfig typeConfig : this) {
        if (!(typeConfig instanceof BuiltinArtifactConfig)) {
            continue;
        }
        builtins.add((BuiltinArtifactConfig) typeConfig);
    }
    return builtins;
}
// Verifies getBuiltInArtifactConfigs keeps the two build-artifact entries and drops the
// pluggable ones.
@Test public void getArtifactConfigs_shouldReturnBuiltinArtifactConfigs() { ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs(); allConfigs.add(new BuildArtifactConfig("src", "dest")); allConfigs.add(new BuildArtifactConfig("java", null)); allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3")); allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker")); final List<BuiltinArtifactConfig> artifactConfigs = allConfigs.getBuiltInArtifactConfigs(); assertThat(artifactConfigs, hasSize(2)); assertThat(artifactConfigs, containsInAnyOrder( new BuildArtifactConfig("src", "dest"), new BuildArtifactConfig("java", null) )); }
// True when no services outside the group are down AND no services are missing.
// NOTE(review): the declared HostStateChangeDeniedException presumably originates inside
// servicesDownAndNotInGroup() (e.g. on unknown service status) — confirm against that helper.
@Override public boolean noServicesOutsideGroupIsDown() throws HostStateChangeDeniedException { return servicesDownAndNotInGroup().size() + missingServices == 0; }
// Verifies that an UNKNOWN service status outside the group is rejected with the
// UNKNOWN_SERVICE_STATUS constraint.
@Test public void testUnknownServiceStatusOutsideGroup() { ClusterApi clusterApi = makeClusterApiWithUnknownStatus(ServiceStatus.UP, ServiceStatus.UNKNOWN, ServiceStatus.UP); try { clusterApi.noServicesOutsideGroupIsDown(); fail(); } catch (HostStateChangeDeniedException e) { assertEquals(HostedVespaPolicy.UNKNOWN_SERVICE_STATUS, e.getConstraintName()); } }
// Point lookup against the timestamped key-value state store for the given partition:
// an absent key yields an empty row iterator, a present key yields exactly one Row built
// from the stored value and timestamp. Any store failure is wrapped in
// MaterializationException. NOTE(review): the position parameter is accepted but unused here.
@Override public KsMaterializedQueryResult<Row> get( final GenericKey key, final int partition, final Optional<Position> position ) { try { final ReadOnlyKeyValueStore<GenericKey, ValueAndTimestamp<GenericRow>> store = stateStore .store(QueryableStoreTypes.timestampedKeyValueStore(), partition); final ValueAndTimestamp<GenericRow> row = store.get(key); if (row == null) { return KsMaterializedQueryResult.rowIterator(Collections.emptyIterator()); } else { return KsMaterializedQueryResult.rowIterator(ImmutableList.of(Row.of( stateStore.schema(), key, row.value(), row.timestamp())).iterator()); } } catch (final Exception e) { throw new MaterializationException("Failed to get value from materialized table", e); } }
// Verifies a present key yields exactly one Row built from the stored value and row time.
@Test public void shouldReturnValueIfKeyPresent() { // Given: final GenericRow value = GenericRow.genericRow("col0"); final long rowTime = 2343553L; when(tableStore.get(any())).thenReturn(ValueAndTimestamp.make(value, rowTime)); // When: final Iterator<Row> rowIterator = table.get(A_KEY, PARTITION).rowIterator; // Then: assertThat(rowIterator.hasNext(), is(true)); assertThat(rowIterator.next(), is(Row.of(SCHEMA, A_KEY, value, rowTime))); }
// Resolves the host to its first matching address for the requested address types;
// NOTE(review): returns null when nothing matches — inferred from the companion test, confirm
// against addresses()/firstAddress().
@Override public InetAddress address(String inetHost, ResolvedAddressTypes resolvedAddressTypes) { return firstAddress(addresses(inetHost, resolvedAddressTypes)); }
// Regression test: when the hosts file only contains IPv4 mappings, a lookup restricted to
// IPV6_ONLY must not resolve anything.
// Fix: the original assertNull failure message ("Should pick an IPv6 address") contradicted
// the test's intent — the assertion expects NO address to be resolved.
@Test
public void shouldntFindWhenAddressTypeDoesntMatch() {
    HostsFileEntriesProvider.Parser parser = givenHostsParserWith(
            LOCALHOST_V4_ADDRESSES,
            Collections.<String, List<InetAddress>>emptyMap()
    );
    DefaultHostsFileEntriesResolver resolver = new DefaultHostsFileEntriesResolver(parser, ENTRIES_TTL);
    InetAddress address = resolver.address("localhost", ResolvedAddressTypes.IPV6_ONLY);
    assertNull(address, "Should not resolve an address when only IPv4 entries are present");
}
// Builds a PostgreSQL-style "ON CONFLICT (<unique cols>) DO UPDATE SET <col>=EXCLUDED.<col>,..."
// clause from the record's columns. Returns empty when the record has no unique key (see TODO:
// in that case upsert cannot be expressed and duplication is possible after an interrupted job).
@Override public Optional<String> buildInsertOnDuplicateClause(final DataRecord dataRecord) { // TODO without unique key, job has been interrupted, which may lead to data duplication if (dataRecord.getUniqueKeyValue().isEmpty()) { return Optional.empty(); } StringBuilder result = new StringBuilder("ON CONFLICT ("); PipelineSQLSegmentBuilder sqlSegmentBuilder = new PipelineSQLSegmentBuilder(getType()); result.append(dataRecord.getColumns().stream().filter(Column::isUniqueKey).map(each -> sqlSegmentBuilder.getEscapedIdentifier(each.getName())).collect(Collectors.joining(","))); result.append(") DO UPDATE SET "); result.append(dataRecord.getColumns().stream() .filter(each -> !each.isUniqueKey()).map(each -> sqlSegmentBuilder.getEscapedIdentifier(each.getName()) + "=EXCLUDED." + sqlSegmentBuilder.getEscapedIdentifier(each.getName())) .collect(Collectors.joining(","))); return Optional.of(result.toString()); }
// Verifies the clause lists the unique-key column in the conflict target and updates all
// remaining columns from EXCLUDED.
@Test void assertBuildInsertSQLOnDuplicateClause() { String actual = sqlBuilder.buildInsertOnDuplicateClause(mockDataRecord()).orElse(null); assertThat(actual, is("ON CONFLICT (order_id) DO UPDATE SET user_id=EXCLUDED.user_id,status=EXCLUDED.status")); }
// Orders hosts by protocol, then port, then hostname, then credentials, then default path.
// The first four comparisons are normalised to -1/0/1 (Integer.signum) so the historical
// return values asserted by callers are preserved; the final path comparison is returned raw,
// exactly as before.
@Override
public int compareTo(final Host o) {
    int result = Integer.signum(protocol.compareTo(o.protocol));
    if(result != 0) {
        return result;
    }
    result = Integer.signum(port.compareTo(o.port));
    if(result != 0) {
        return result;
    }
    result = Integer.signum(hostname.compareTo(o.hostname));
    if(result != 0) {
        return result;
    }
    result = Integer.signum(credentials.compareTo(o.credentials));
    if(result != 0) {
        return result;
    }
    return StringUtils.compare(defaultpath, o.defaultpath);
}
// Verifies Host ordering across protocol/port/credentials/default-path/hostname combinations,
// including the exact -1/0/1 return values.
@Test public void testCompare() { assertEquals(0, new Host(new TestProtocol(Scheme.ftp), "a", 33) .compareTo(new Host(new TestProtocol(Scheme.ftp), "a", 33))); assertEquals(-1, new Host(new TestProtocol(Scheme.ftp), "a", 22) .compareTo(new Host(new TestProtocol(Scheme.ftp), "a", 33))); assertEquals(1, new Host(new TestProtocol(Scheme.ftp), "a", 33) .compareTo(new Host(new TestProtocol(Scheme.ftp), "a", 22))); assertEquals(0, new Host(new TestProtocol(Scheme.sftp), "a", 22, new Credentials("u")) .compareTo(new Host(new TestProtocol(Scheme.sftp), "a", 22, new Credentials("u")))); assertEquals(-1, new Host(new TestProtocol(Scheme.sftp), "a", 22, new Credentials("u")) .compareTo(new Host(new TestProtocol(Scheme.sftp), "a", 22, "/path", new Credentials("u")))); assertEquals(-1, new Host(new TestProtocol(Scheme.sftp), "a", 22, new Credentials()) .compareTo(new Host(new TestProtocol(Scheme.sftp), "a", 22, new Credentials("u")))); assertEquals(1, new Host(new TestProtocol(Scheme.sftp), "a", 22, new Credentials("u")) .compareTo(new Host(new TestProtocol(Scheme.sftp), "a", 22, new Credentials()))); assertEquals(0, new Host(new TestProtocol(Scheme.ftp), "a") .compareTo((new Host(new TestProtocol(Scheme.ftp), "a")))); assertEquals(-1, new Host(new TestProtocol(Scheme.ftp), "a") .compareTo((new Host(new TestProtocol(Scheme.ftp), "b")))); assertEquals(1, new Host(new TestProtocol(Scheme.ftp), "b") .compareTo((new Host(new TestProtocol(Scheme.ftp), "a")))); }
// Blocking facade: synchronously waits on availablePermitsAsync() via get(...).
@Override public int availablePermits() { return get(availablePermitsAsync()); }
// Verifies a semaphore that was never initialised reports zero permits instead of failing.
@Test public void testNotExistent() { RPermitExpirableSemaphore semaphore = redisson.getPermitExpirableSemaphore("testSemaphoreForNPE"); Assertions.assertEquals(0, semaphore.availablePermits()); }
// A transaction counts as running while it is in PREPARE, PREPARED or COMMITTED —
// i.e. not yet VISIBLE, ABORTED or UNKNOWN (see the companion test for the full mapping).
public boolean isRunning() {
    switch (transactionStatus) {
        case PREPARE:
        case PREPARED:
        case COMMITTED:
            return true;
        default:
            return false;
    }
}
// Iterates every TransactionStatus and checks isRunning() is false exactly for
// UNKNOWN, VISIBLE and ABORTED.
@Test public void testIsRunning() { Set<TransactionStatus> nonRunningStatus = new HashSet<>(); nonRunningStatus.add(TransactionStatus.UNKNOWN); nonRunningStatus.add(TransactionStatus.VISIBLE); nonRunningStatus.add(TransactionStatus.ABORTED); UUID uuid = UUID.randomUUID(); for (TransactionStatus status : TransactionStatus.values()) { TransactionState transactionState = new TransactionState(1000L, Lists.newArrayList(20000L, 20001L), 3000, "label123", new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()), LoadJobSourceType.BACKEND_STREAMING, new TxnCoordinator(TxnSourceType.BE, "127.0.0.1"), 50000L, 60 * 1000L); transactionState.setTransactionStatus(status); Assert.assertEquals(nonRunningStatus.contains(status), !transactionState.isRunning()); } }
// One receiver duty cycle: updates clocks and the duty-cycle tracker, drains conductor
// commands, polls transports for inbound bytes, services each still-connected publication
// image (EOS/drain check, pending status messages, loss handling, RTT measurements), evicts
// disconnected images from the array (copy-on-remove so concurrent readers see a stable
// snapshot), processes pending setup messages, and periodically triggers endpoint
// re-resolution. Returns the total work count plus bytes received.
public int doWork() { final long nowNs = nanoClock.nanoTime(); cachedNanoClock.update(nowNs); dutyCycleTracker.measureAndUpdate(nowNs); int workCount = commandQueue.drain(CommandProxy.RUN_TASK, Configuration.COMMAND_DRAIN_LIMIT); final int bytesReceived = dataTransportPoller.pollTransports(); totalBytesReceived.getAndAddOrdered(bytesReceived); final PublicationImage[] publicationImages = this.publicationImages; for (int lastIndex = publicationImages.length - 1, i = lastIndex; i >= 0; i--) { final PublicationImage image = publicationImages[i]; if (image.isConnected(nowNs)) { image.checkEosForDrainTransition(nowNs); workCount += image.sendPendingStatusMessage(nowNs); workCount += image.processPendingLoss(); workCount += image.initiateAnyRttMeasurements(nowNs); } else { this.publicationImages = 1 == this.publicationImages.length ? EMPTY_IMAGES : ArrayUtil.remove(this.publicationImages, i); image.removeFromDispatcher(); image.receiverRelease(); } } checkPendingSetupMessages(nowNs); if (reResolutionCheckIntervalNs > 0 && (reResolutionDeadlineNs - nowNs) < 0) { reResolutionDeadlineNs = nowNs + reResolutionCheckIntervalNs; dataTransportPoller.checkForReResolutions(nowNs, conductorProxy); } return workCount + bytesReceived; }
// Verifies that a real data frame arriving after a heartbeat at the same term offset
// overwrites the heartbeat, so the term buffer yields exactly one readable data frame
// with the expected header fields.
@Test void shouldOverwriteHeartbeatWithDataFrame() { receiverProxy.registerReceiveChannelEndpoint(receiveChannelEndpoint); receiver.doWork(); receiverProxy.addSubscription(receiveChannelEndpoint, STREAM_ID); receiver.doWork(); fillSetupFrame(setupHeader); receiveChannelEndpoint.onSetupMessage(setupHeader, setupBuffer, SetupFlyweight.HEADER_LENGTH, senderAddress, 0); final int commandsRead = drainConductorQueue( (e) -> { final PublicationImage image = new PublicationImage( CORRELATION_ID, ctx, receiveChannelEndpoint, 0, senderAddress, SESSION_ID, STREAM_ID, INITIAL_TERM_ID, ACTIVE_TERM_ID, INITIAL_TERM_OFFSET, (short)0, rawLog, mockFeedbackDelayGenerator, POSITIONS, mockHighestReceivedPosition, mockRebuildPosition, SOURCE_IDENTITY, congestionControl); receiverProxy.newPublicationImage(receiveChannelEndpoint, image); }); assertThat(commandsRead, is(1)); receiver.doWork(); fillDataFrame(dataHeader, 0); // heartbeat with same term offset receiveChannelEndpoint.onDataPacket(dataHeader, dataBuffer, dataHeader.frameLength(), senderAddress, 0); fillDataFrame(dataHeader, 0); // initial data frame receiveChannelEndpoint.onDataPacket(dataHeader, dataBuffer, dataHeader.frameLength(), senderAddress, 0); final int readOutcome = TermReader.read( termBuffers[ACTIVE_INDEX], INITIAL_TERM_OFFSET, (buffer, offset, length, header) -> { assertThat(header.type(), is(HeaderFlyweight.HDR_TYPE_DATA)); assertThat(header.termId(), is(ACTIVE_TERM_ID)); assertThat(header.streamId(), is(STREAM_ID)); assertThat(header.sessionId(), is(SESSION_ID)); assertThat(header.termOffset(), is(0)); assertThat(header.frameLength(), is(DataHeaderFlyweight.HEADER_LENGTH + FAKE_PAYLOAD.length)); }, Integer.MAX_VALUE, header, mockErrorHandler, 0, mockSubscriberPosition); assertThat(readOutcome, is(1)); }
// Returns PartitionInfo for the requested partition names, served from the partition cache.
// Empty/null name lists and cache misses yield an empty list. Matching is done by comparing
// each cached partition's spec string against the requested names.
// NOTE(review): toString(false, true) flag semantics (quoting/key ordering) come from the
// ODPS SDK — confirm they line up with how listPartitionNames formats names.
@Override public List<PartitionInfo> getPartitions(Table table, List<String> partitionNames) { if (partitionNames == null || partitionNames.isEmpty()) { return Collections.emptyList(); } OdpsTable odpsTable = (OdpsTable) table; List<Partition> partitions = get(partitionCache, OdpsTableName.of(odpsTable.getDbName(), odpsTable.getTableName())); if (partitions == null || partitions.isEmpty()) { return Collections.emptyList(); } Set<String> filter = new HashSet<>(partitionNames); return partitions.stream() .filter(partition -> filter.contains(partition.getPartitionSpec().toString(false, true))) .map(OdpsPartition::new).collect(Collectors.toList()); }
// Verifies partition metadata round-trip: names listed from the catalog resolve to one
// partition with a positive modification time.
@Test public void testGetPartitions() { Table table = odpsMetadata.getTable("db", "tbl"); List<String> partitionNames = odpsMetadata.listPartitionNames("db", "tbl", TableVersionRange.empty()); List<PartitionInfo> partitions = odpsMetadata.getPartitions(table, partitionNames); Assert.assertEquals(1, partitions.size()); PartitionInfo partitionInfo = partitions.get(0); Assert.assertTrue(partitionInfo.getModifiedTime() > 0); }
// Event-bus hook: translates terminal data-node lifecycle events (REMOVED/STOPPED) into the
// corresponding follow-up action (REMOVE/STOP) for the next queued node; other triggers are
// ignored. (@SuppressWarnings("unused"): invoked reflectively by the event bus.)
@Subscribe
@SuppressWarnings("unused")
public void handleDataNodeLifeCycleEvent(DataNodeLifecycleEvent event) {
    final DataNodeLifecycleTrigger trigger = event.trigger();
    if (trigger == DataNodeLifecycleTrigger.REMOVED) {
        handleNextNode(DataNodeLifecycleTrigger.REMOVE);
    } else if (trigger == DataNodeLifecycleTrigger.STOPPED) {
        handleNextNode(DataNodeLifecycleTrigger.STOP);
    }
}
// Verifies a REMOVED lifecycle event triggers exactly one follow-up action (one cluster-event
// post) for the next node queued for removal.
@Test public void removedLifecycleEventRemovesNextNode() { DataNodeDto node1 = buildTestNode("node1", DataNodeStatus.REMOVING); nodeService.registerServer(node1); DataNodeDto node2 = buildTestNode("node2", DataNodeStatus.AVAILABLE); nodeService.registerServer(node2); DataNodeDto node3 = buildTestNode("node3", DataNodeStatus.AVAILABLE); nodeService.registerServer(node3); nodeService.update(node2.toBuilder().setActionQueue(DataNodeLifecycleTrigger.REMOVE).build()); nodeService.update(node3.toBuilder().setActionQueue(DataNodeLifecycleTrigger.REMOVE).build()); classUnderTest.handleDataNodeLifeCycleEvent(DataNodeLifecycleEvent.create("node1", DataNodeLifecycleTrigger.REMOVED)); verify(clusterEventBus, times(1)).post(any()); }
// For every audit log on the trace, builds a details DTO pairing the log (as DTO) with the
// data influences recorded under that log's span — one findBySpanId call per log.
@Override public List<ApolloAuditLogDetailsDTO> queryTraceDetails(String traceId) { List<ApolloAuditLogDetailsDTO> detailsDTOList = new ArrayList<>(); logService.findByTraceId(traceId).forEach(log -> { detailsDTOList.add(new ApolloAuditLogDetailsDTO(ApolloAuditUtil.logToDTO(log), ApolloAuditUtil.dataInfluenceListToDTOList( dataInfluenceService.findBySpanId(log.getSpanId())))); }); return detailsDTOList; }
// Verifies queryTraceDetails issues one findBySpanId call per log on the trace and assembles
// the expected log/data-influence counts.
@Test public void testQueryTraceDetails() { final String traceId = "query-trace-id"; final int traceDetailsLength = 3; final int dataInfluenceOfEachLog = 3; { List<ApolloAuditLog> logList = MockBeanFactory.mockAuditLogListByLength(traceDetailsLength); Mockito.when(logService.findByTraceId(Mockito.eq(traceId))) .thenReturn(logList); List<ApolloAuditLogDataInfluence> dataInfluenceList = MockBeanFactory.mockDataInfluenceListByLength(dataInfluenceOfEachLog); Mockito.when(dataInfluenceService.findBySpanId(Mockito.any())) .thenReturn(dataInfluenceList); } List<ApolloAuditLogDetailsDTO> detailsDTOList = api.queryTraceDetails(traceId); Mockito.verify(logService, Mockito.times(1)) .findByTraceId(Mockito.eq(traceId)); Mockito.verify(dataInfluenceService, Mockito.times(3)) .findBySpanId(Mockito.any()); assertEquals(traceDetailsLength, detailsDTOList.size()); assertEquals(dataInfluenceOfEachLog, detailsDTOList.get(0).getDataInfluenceDTOList().size()); }
// Lazily creates and caches one MetadataReport per service-store key (URL minus export/refer
// params) using check-then-lock-then-recheck so only one instance is ever created per key.
// When check is effectively disabled (check=false parameter or port 0), creation failures are
// logged and swallowed (returning null); otherwise failures propagate and a null report is
// rejected with IllegalStateException.
@Override public MetadataReport getMetadataReport(URL url) { url = url.setPath(MetadataReport.class.getName()).removeParameters(EXPORT_KEY, REFER_KEY); String key = url.toServiceString(NAMESPACE_KEY); MetadataReport metadataReport = serviceStoreMap.get(key); if (metadataReport != null) { return metadataReport; } // Lock the metadata access process to ensure a single instance of the metadata instance lock.lock(); try { metadataReport = serviceStoreMap.get(key); if (metadataReport != null) { return metadataReport; } boolean check = url.getParameter(CHECK_KEY, true) && url.getPort() != 0; try { metadataReport = createMetadataReport(url); } catch (Exception e) { if (!check) { logger.warn(PROXY_FAILED_EXPORT_SERVICE, "", "", "The metadata reporter failed to initialize", e); } else { throw e; } } if (check && metadataReport == null) { throw new IllegalStateException("Can not create metadata Report " + url); } if (metadataReport != null) { serviceStoreMap.put(key, metadataReport); } return metadataReport; } finally { // Release the lock lock.unlock(); } }
// Verifies the factory caches: two lookups for the same URL return the same instance.
@Test void testGetOneMetadataReport() { URL url = URL.valueOf("zookeeper://" + NetUtils.getLocalAddress().getHostName() + ":4444/org.apache.dubbo.TestService?version=1.0.0&application=vic"); MetadataReport metadataReport1 = metadataReportFactory.getMetadataReport(url); MetadataReport metadataReport2 = metadataReportFactory.getMetadataReport(url); Assertions.assertEquals(metadataReport1, metadataReport2); }
// Copies authentication fields from the request body onto the session, validates the result
// with AdSessionValidator (throwing AdValidationException when the binding result has errors),
// then persists and returns the session.
// NOTE(review): the Integer/String casts assume the body map carries exactly those types —
// a mistyped body would fail with ClassCastException before validation; confirm callers.
public AdSession updateAdSession(AdSession session, Map<String, Object> body) throws AdValidationException { session.setAuthenticationLevel((Integer) body.get("authentication_level")); session.setAuthenticationStatus((String) body.get("authentication_status")); session.setBsn((String) body.get("bsn")); session.setPolymorphIdentity(valueToStringOrNull(body, "polymorph_identity")); session.setPolymorphPseudonym(valueToStringOrNull(body, "polymorph_pseudonym")); BeanPropertyBindingResult result = new BeanPropertyBindingResult(session, "adSession"); ValidationUtils.invokeValidator(new AdSessionValidator(), session, result); if (result.hasErrors()) { throw new AdValidationException("AdSession validation error", result); } adSessionRepository.save(session); return session; }
// Verifies updateAdSession copies the body fields onto the session and passes validation.
// Fix: JUnit's assertEquals takes (expected, actual) — the original calls had the arguments
// reversed, which produces misleading failure messages on a regression.
@Test
public void updateAdSession() throws AdValidationException {
    HashMap<String, Object> body = new HashMap<>();
    body.put("authentication_level", 10);
    body.put("authentication_status", AdAuthenticationStatus.STATUS_SUCCESS.label);
    body.put("bsn", "PPPPPPPPP");
    AdSession result = adService.updateAdSession(new AdSession(), body);
    assertNotNull(result);
    assertEquals(10, result.getAuthenticationLevel());
    assertEquals("success", result.getAuthenticationStatus());
    assertEquals("PPPPPPPPP", result.getBsn());
}
// Replaces the per-tier usage map with a defensive copy of the given snapshot and
// recomputes the aggregate used-byte total from it.
public void updateUsedBytes(Map<String, Long> usedBytesOnTiers) {
    mUsage.mUsedBytesOnTiers = new HashMap<>(usedBytesOnTiers);
    mUsage.mUsedBytes = mUsage.mUsedBytesOnTiers.values().stream()
        .mapToLong(Long::longValue)
        .sum();
}
// Verifies updateUsedBytes replaces the per-tier map and recomputes the aggregate total.
@Test public void updateUsedBytes() { assertEquals(Constants.KB * 2L, mInfo.getUsedBytes()); Map<String, Long> usedBytesOnTiers = ImmutableMap.of(Constants.MEDIUM_MEM, Constants.KB * 2L, Constants.MEDIUM_SSD, (long) Constants.KB); mInfo.updateUsedBytes(usedBytesOnTiers); assertEquals(usedBytesOnTiers, mInfo.getUsedBytesOnTiers()); assertEquals(Constants.KB * 3L, mInfo.getUsedBytes()); }
// Serialises the descriptor as DDL-like text: schema, COMMENT, DISTRIBUTED clause (from the
// distribution's own toString), optional PARTITIONED BY with escaped identifiers, and a
// WITH (...) block with single-quote-escaped option keys/values — exact layout matters to
// the golden-string tests.
@Override public String toString() { final String escapedPartitionKeys = partitionKeys.stream() .map(EncodingUtils::escapeIdentifier) .collect(Collectors.joining(", ")); final String distributedBy = distribution == null ? "" : distribution.toString(); final String partitionedBy = !partitionKeys.isEmpty() ? String.format("PARTITIONED BY (%s)", escapedPartitionKeys) : ""; final String serializedOptions = options.entrySet().stream() .map( entry -> String.format( "  '%s' = '%s'", EncodingUtils.escapeSingleQuotes(entry.getKey()), EncodingUtils.escapeSingleQuotes(entry.getValue()))) .collect(Collectors.joining(String.format(",%n"))); return String.format( "%s%nCOMMENT '%s'%n%s%s%nWITH (%n%s%n)", schema != null ? schema : "", comment != null ? comment : "", distributedBy, partitionedBy, serializedOptions); }
// Verifies every distribution flavour (hash/range/unspecified algorithm, with and without a
// bucket count) renders the expected DISTRIBUTED clause in toString().
@Test void testDistributedBy() { assertThat(getTableDescriptorBuilder().distributedByHash(3, "f0").build().toString()) .contains("DISTRIBUTED BY HASH(`f0`) INTO 3 BUCKETS\n"); assertThat(getTableDescriptorBuilder().distributedByHash("f0").build().toString()) .contains("DISTRIBUTED BY HASH(`f0`)\n"); assertThat(getTableDescriptorBuilder().distributedByRange(3, "f0").build().toString()) .contains("DISTRIBUTED BY RANGE(`f0`) INTO 3 BUCKETS\n"); assertThat(getTableDescriptorBuilder().distributedByRange("f0").build().toString()) .contains("DISTRIBUTED BY RANGE(`f0`)\n"); assertThat(getTableDescriptorBuilder().distributedBy(3, "f0").build().toString()) .contains("DISTRIBUTED BY (`f0`) INTO 3 BUCKETS\n"); assertThat(getTableDescriptorBuilder().distributedBy("f0").build().toString()) .contains("DISTRIBUTED BY (`f0`)\n"); assertThat(getTableDescriptorBuilder().distributedInto(3).build().toString()) .contains("DISTRIBUTED INTO 3 BUCKETS\n"); }
// Thin getter: the job manager memory is whatever the cluster specification configured
// for the master.
public int getJobManagerMemoryMB() { return clusterSpecification.getMasterMemoryMB(); }
// Verifies the job manager memory is taken from the cluster specification.
@Test void testGetJobManagerMemoryMB() { assertThat(kubernetesJobManagerParameters.getJobManagerMemoryMB()) .isEqualTo(JOB_MANAGER_MEMORY); }
// Adds an app to a user's favorites. Rejects unknown users and adding on behalf of anyone
// other than the logged-in user; returns the existing favorite if the (user, app) pair is
// already favorited (idempotent), otherwise persists a new one at the default position with
// audit fields set to the user.
public Favorite addFavorite(Favorite favorite) { UserInfo user = userService.findByUserId(favorite.getUserId()); if (user == null) { throw BadRequestException.userNotExists(favorite.getUserId()); } UserInfo loginUser = userInfoHolder.getUser(); //user can only add himself favorite app if (!loginUser.equals(user)) { throw new BadRequestException("add favorite fail. " + "because favorite's user is not current login user."); } Favorite checkedFavorite = favoriteRepository.findByUserIdAndAppId(loginUser.getUserId(), favorite.getAppId()); if (checkedFavorite != null) { return checkedFavorite; } favorite.setPosition(POSITION_DEFAULT); favorite.setDataChangeCreatedBy(user.getUserId()); favorite.setDataChangeLastModifiedBy(user.getUserId()); return favoriteRepository.save(favorite); }
// Verifies adding a favorite for a non-existent user is rejected with BadRequestException.
@Test(expected = BadRequestException.class) @Sql(scripts = "/sql/cleanup.sql", executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD) public void testAddFavoriteErrorUser() { String testApp = "testApp"; Favorite favorite = instanceOfFavorite("errorUser", testApp); favoriteService.addFavorite(favorite); }
/**
 * Dispatches an inbound request command to its registered processor.
 *
 * <p>Flow: look up the processor pair by request code (falling back to the default
 * processor), then in order: reply REQUEST_CODE_NOT_SUPPORTED if no processor exists;
 * reply GO_AWAY to sufficiently new clients while shutting down; reply SYSTEM_BUSY if
 * the processor is flow-controlling; otherwise submit the work to the processor's
 * executor and return immediately (processing is asynchronous).
 *
 * @param ctx channel context of the sender; used to write responses
 * @param cmd the inbound request command
 */
public void processRequestCommand(final ChannelHandlerContext ctx, final RemotingCommand cmd) {
    final Pair<NettyRequestProcessor, ExecutorService> matched = this.processorTable.get(cmd.getCode());
    // Fall back to the default processor when no specific one is registered.
    final Pair<NettyRequestProcessor, ExecutorService> pair = null == matched ? this.defaultRequestProcessorPair : matched;
    final int opaque = cmd.getOpaque();

    if (pair == null) {
        String error = " request type " + cmd.getCode() + " not supported";
        final RemotingCommand response =
            RemotingCommand.createResponseCommand(RemotingSysResponseCode.REQUEST_CODE_NOT_SUPPORTED, error);
        response.setOpaque(opaque);
        writeResponse(ctx.channel(), cmd, response);
        log.error(RemotingHelper.parseChannelRemoteAddr(ctx.channel()) + error);
        return;
    }

    Runnable run = buildProcessRequestHandler(ctx, cmd, pair, opaque);

    if (isShuttingDown.get()) {
        // Only clients newer than V5_1_4 understand GO_AWAY; older clients fall through
        // and are processed normally during shutdown.
        if (cmd.getVersion() > MQVersion.Version.V5_1_4.ordinal()) {
            final RemotingCommand response =
                RemotingCommand.createResponseCommand(ResponseCode.GO_AWAY, "please go away");
            response.setOpaque(opaque);
            writeResponse(ctx.channel(), cmd, response);
            return;
        }
    }

    if (pair.getObject1().rejectRequest()) {
        final RemotingCommand response = RemotingCommand.createResponseCommand(RemotingSysResponseCode.SYSTEM_BUSY,
            "[REJECTREQUEST]system busy, start flow control for a while");
        response.setOpaque(opaque);
        writeResponse(ctx.channel(), cmd, response);
        return;
    }

    try {
        final RequestTask requestTask = new RequestTask(run, ctx.channel(), cmd);
        //async execute task, current thread return directly
        pair.getObject2().submit(requestTask);
    } catch (RejectedExecutionException e) {
        // Rate-limit the busy-pool warning to roughly once per 10s window.
        if ((System.currentTimeMillis() % 10000) == 0) {
            log.warn(RemotingHelper.parseChannelRemoteAddr(ctx.channel())
                + ", too many requests and system thread pool busy, RejectedExecutionException "
                + pair.getObject2().toString()
                + " request code: " + cmd.getCode());
        }
        final RemotingCommand response = RemotingCommand.createResponseCommand(RemotingSysResponseCode.SYSTEM_BUSY,
            "[OVERLOAD]system busy, start flow control for a while");
        response.setOpaque(opaque);
        writeResponse(ctx.channel(), cmd, response);
    } catch (Throwable e) {
        // Record the failure in RPC latency metrics; no response is written here.
        AttributesBuilder attributesBuilder = RemotingMetricsManager.newAttributesBuilder()
            .put(LABEL_REQUEST_CODE, RemotingHelper.getRequestCodeDesc(cmd.getCode()))
            .put(LABEL_RESULT, RESULT_PROCESS_REQUEST_FAILED);
        RemotingMetricsManager.rpcLatency.record(cmd.getProcessTimer().elapsed(TimeUnit.MILLISECONDS), attributesBuilder.build());
    }
}
/**
 * NOTE(review): despite its name, this test exercises processResponseCommand — it
 * registers a ResponseFuture and verifies the callback path releases the semaphore
 * exactly once. Consider renaming; confirm against the focal method under test.
 */
@Test
public void testProcessRequestCommand() throws InterruptedException {
    final Semaphore semaphore = new Semaphore(0);
    RemotingCommand request = RemotingCommand.createRequestCommand(1, null);
    ResponseFuture responseFuture = new ResponseFuture(null, 1, request, 3000, new InvokeCallback() {
        @Override
        public void operationComplete(ResponseFuture responseFuture) {
        }

        @Override
        public void operationSucceed(RemotingCommand response) {
            // Callback runs before the semaphore is released.
            assertThat(semaphore.availablePermits()).isEqualTo(0);
        }

        @Override
        public void operationFail(Throwable throwable) {
        }
    }, new SemaphoreReleaseOnlyOnce(semaphore));
    remotingAbstract.responseTable.putIfAbsent(1, responseFuture);
    RemotingCommand response = RemotingCommand.createResponseCommand(0, "Foo");
    response.setOpaque(1);
    remotingAbstract.processResponseCommand(null, response);
    // Acquire the release permit after call back
    semaphore.acquire(1);
    assertThat(semaphore.availablePermits()).isEqualTo(0);
}
/**
 * Human-readable description of this staged scan: table, expected schema (rendered as
 * its struct type under the "type" label), task-set id and case sensitivity.
 */
@Override
public String toString() {
    return String.format(
        "IcebergStagedScan(table=%s, type=%s, taskSetID=%s, caseSensitive=%s)",
        table(), expectedSchema().asStruct(), taskSetId, caseSensitive());
}
/**
 * Stages the table's file-scan tasks under a random set id, reads them back through the
 * SCAN_TASK_SET_ID read option, appends the result to the table (duplicating every row),
 * and asserts the duplicated contents.
 */
@Test
public void testTaskSetLoading() throws NoSuchTableException, IOException {
    sql("CREATE TABLE %s (id INT, data STRING) USING iceberg", tableName);
    List<SimpleRecord> records = ImmutableList.of(new SimpleRecord(1, "a"), new SimpleRecord(2, "b"));
    Dataset<Row> df = spark.createDataFrame(records, SimpleRecord.class);
    df.writeTo(tableName).append();
    Table table = validationCatalog.loadTable(tableIdent);
    Assert.assertEquals("Should produce 1 snapshot", 1, Iterables.size(table.snapshots()));
    try (CloseableIterable<FileScanTask> fileScanTasks = table.newScan().planFiles()) {
        ScanTaskSetManager taskSetManager = ScanTaskSetManager.get();
        String setID = UUID.randomUUID().toString();
        taskSetManager.stageTasks(table, setID, ImmutableList.copyOf(fileScanTasks));
        // load the staged file set
        Dataset<Row> scanDF =
            spark.read()
                .format("iceberg")
                .option(SparkReadOptions.SCAN_TASK_SET_ID, setID)
                .load(tableName);
        // write the records back essentially duplicating data
        scanDF.writeTo(tableName).append();
    }
    assertEquals(
        "Should have expected rows",
        ImmutableList.of(row(1, "a"), row(1, "a"), row(2, "b"), row(2, "b")),
        sql("SELECT * FROM %s ORDER BY id", tableName));
}
/**
 * Computes the row-range intersection of two RowRanges.
 *
 * <p>Both inputs hold their ranges in ascending order, so a single forward sweep
 * suffices: for each range on the left, right-hand ranges that end before it are
 * skipped permanently (they cannot overlap any later left range either), and the
 * scan of the right side resumes from that point for the next left range.
 */
public static RowRanges intersection(RowRanges left, RowRanges right) {
    RowRanges merged = new RowRanges();
    int resumeFrom = 0;
    for (Range outer : left.ranges) {
        int idx = resumeFrom;
        while (idx < right.ranges.size()) {
            Range candidate = right.ranges.get(idx);
            if (outer.isBefore(candidate)) {
                // Remaining right ranges start even later; move to the next left range.
                break;
            }
            if (outer.isAfter(candidate)) {
                // This right range is exhausted for all future left ranges too.
                resumeFrom = ++idx;
                continue;
            }
            merged.add(Range.intersection(outer, candidate));
            ++idx;
        }
    }
    return merged;
}
/**
 * Exercises RowRanges.intersection over overlapping, identical and empty operands,
 * asserting the exact row numbers produced and that the operation is symmetric.
 */
@Test
public void testIntersection() {
    RowRanges ranges1 = buildRanges(
        2, 5,
        7, 9,
        14, 14,
        20, 24);
    RowRanges ranges2 = buildRanges(
        1, 2,
        6, 7,
        9, 9,
        11, 12,
        14, 15,
        21, 22);
    RowRanges empty = buildRanges();
    assertAllRowsEqual(intersection(ranges1, ranges2).iterator(), 2, 7, 9, 14, 21, 22);
    assertAllRowsEqual(intersection(ranges2, ranges1).iterator(), 2, 7, 9, 14, 21, 22);
    // Intersection with itself reproduces all contained rows.
    assertAllRowsEqual(intersection(ranges1, ranges1).iterator(), 2, 3, 4, 5, 7, 8, 9, 14, 20, 21, 22, 23, 24);
    assertAllRowsEqual(intersection(ranges1, empty).iterator());
    assertAllRowsEqual(intersection(empty, ranges1).iterator());
    assertAllRowsEqual(intersection(ranges2, ranges2).iterator(), 1, 2, 6, 7, 9, 11, 12, 14, 15, 21, 22);
    assertAllRowsEqual(intersection(ranges2, empty).iterator());
    assertAllRowsEqual(intersection(empty, ranges2).iterator());
    assertAllRowsEqual(intersection(empty, empty).iterator());
}
/**
 * UDF: converts an epoch-millisecond value to a SQL TIMESTAMP.
 *
 * @param epochMilli milliseconds since the Unix epoch; null yields null
 * @return the corresponding Timestamp, or null for null input
 */
@Udf(description = "Converts the number of milliseconds since 1970-01-01 00:00:00 UTC/GMT into "
    + "a TIMESTAMP value.")
public Timestamp fromUnixTime(
    @UdfParameter(
        description = "Milliseconds since"
            + " January 1, 1970, 00:00:00 GMT.") final Long epochMilli
) {
    return epochMilli == null ? null : new Timestamp(epochMilli);
}
/** A non-null epoch-millisecond value converts to the equivalent Timestamp. */
@Test
public void shouldConvertToTimestamp() {
    // When:
    final Object result = udf.fromUnixTime(100L);

    // Then:
    assertThat(result, is(new Timestamp(100L)));
}
/**
 * Resolves a resource by consulting the class-loading sources in the order dictated
 * by this name's loading strategy (application classpath, the plugin itself, or its
 * dependencies). The first source that yields a URL wins.
 *
 * @param name resource name to locate
 * @return the resource URL, or null if no configured source provides it
 */
@Override
public URL getResource(String name) {
    ClassLoadingStrategy strategy = getClassLoadingStrategy(name);
    log.trace("Received request to load resource '{}'", name);
    for (ClassLoadingStrategy.Source source : strategy.getSources()) {
        URL located;
        if (source == ClassLoadingStrategy.Source.APPLICATION) {
            located = super.getResource(name);
        } else if (source == ClassLoadingStrategy.Source.PLUGIN) {
            located = findResource(name);
        } else if (source == ClassLoadingStrategy.Source.DEPENDENCIES) {
            located = findResourceFromDependencies(name);
        } else {
            located = null;
        }
        if (located != null) {
            log.trace("Found resource '{}' in {} classpath", name, source);
            return located;
        }
        log.trace("Couldn't find resource '{}' in {}", name, source);
    }
    return null;
}
/**
 * With a parent-last class loader, the extensions index present in parent, dependency
 * and plugin must resolve to the plugin's copy first.
 */
@Test
void parentLastGetExtensionsIndexExistsInParentAndDependencyAndPlugin() throws URISyntaxException, IOException {
    URL resource = parentLastPluginClassLoader.getResource(LegacyExtensionFinder.EXTENSIONS_RESOURCE);
    assertFirstLine("plugin", resource);
}
/**
 * Combines two metrics reporters into one.
 *
 * <p>Null operands and identical operands collapse to a single reporter; otherwise the
 * result is a CompositeMetricsReporter whose members are the flattened, identity-deduped
 * union of both sides (composites contribute their member reporters, not themselves).
 */
public static MetricsReporter combine(MetricsReporter first, MetricsReporter second) {
    if (first == null) {
        return second;
    }
    if (second == null || first == second) {
        return first;
    }

    // Identity semantics: two distinct-but-equal reporters are kept separately.
    Set<MetricsReporter> flattened = Sets.newIdentityHashSet();
    for (MetricsReporter reporter : new MetricsReporter[] {first, second}) {
        if (reporter instanceof CompositeMetricsReporter) {
            flattened.addAll(((CompositeMetricsReporter) reporter).reporters());
        } else {
            flattened.add(reporter);
        }
    }

    return new CompositeMetricsReporter(flattened);
}
/**
 * Combining a composite with a plain reporter, or two composites, flattens their members
 * and de-duplicates identical instances.
 */
@Test
public void combineComposites() {
    MetricsReporter one = report -> {};
    MetricsReporter two = report -> {};
    MetricsReporter firstComposite = MetricsReporters.combine(one, LoggingMetricsReporter.instance());
    MetricsReporter secondComposite = MetricsReporters.combine(two, LoggingMetricsReporter.instance());

    MetricsReporter combined = MetricsReporters.combine(firstComposite, two);
    assertThat(combined).isInstanceOf(MetricsReporters.CompositeMetricsReporter.class);
    assertThat(((MetricsReporters.CompositeMetricsReporter) combined).reporters())
        .hasSize(3)
        .containsExactlyInAnyOrder(one, two, LoggingMetricsReporter.instance());

    combined = MetricsReporters.combine(firstComposite, secondComposite);
    assertThat(combined).isInstanceOf(MetricsReporters.CompositeMetricsReporter.class);
    assertThat(((MetricsReporters.CompositeMetricsReporter) combined).reporters())
        .hasSize(3)
        .containsExactlyInAnyOrder(one, two, LoggingMetricsReporter.instance());
}
/**
 * Records values against the default recorder using the current monotonic time;
 * convenience overload of {@code record(name, time, values)}.
 */
public synchronized LogAction record(double... values) {
    return record(DEFAULT_RECORDER_NAME, timer.monotonicNow(), values);
}
/**
 * Named recorders throttle independently within a log period, but a period boundary is
 * shared: "bar" at exactly 2*LOG_PERIOD does not log until "foo" (recorded after it in
 * that instant) opens the new period, after which "bar" logs again.
 */
@Test
public void testNamedLoggersWithoutSpecifiedPrimary() {
    assertTrue(helper.record("foo", 0).shouldLog());
    assertTrue(helper.record("bar", 0).shouldLog());
    assertFalse(helper.record("foo", LOG_PERIOD / 2).shouldLog());
    assertFalse(helper.record("bar", LOG_PERIOD / 2).shouldLog());
    assertTrue(helper.record("foo", LOG_PERIOD).shouldLog());
    assertTrue(helper.record("bar", LOG_PERIOD).shouldLog());
    assertFalse(helper.record("foo", (LOG_PERIOD * 3) / 2).shouldLog());
    assertFalse(helper.record("bar", (LOG_PERIOD * 3) / 2).shouldLog());
    assertFalse(helper.record("bar", LOG_PERIOD * 2).shouldLog());
    assertTrue(helper.record("foo", LOG_PERIOD * 2).shouldLog());
    assertTrue(helper.record("bar", LOG_PERIOD * 2).shouldLog());
}
@Override public void close() { try { if (this.response != null) { HttpClientUtils.closeQuietly(response); } } catch (Exception ex) { // ignore } }
/** close() must swallow exceptions raised while releasing the underlying response. */
@Test
void testCloseResponseWithException() {
    when(response.getEntity()).thenThrow(new RuntimeException("test"));
    clientHttpResponse.close();
}
/**
 * Builds empty (zero-value) column statistics for the subtree of the ORC type tree
 * rooted at {@code nodeIndex}, keyed by node index.
 *
 * <p>The tree is walked iteratively with an explicit stack; for each node a fresh
 * statistics builder is created and immediately finalized without any values, yielding
 * the "empty" statistics for that column.
 *
 * @param orcTypes            flat list of ORC types indexed by node id
 * @param nodeIndex           root node of the subtree; must be non-negative
 * @param columnWriterOptions options that influence which builder is created per type
 * @return immutable map from node index to its empty statistics
 */
public static Map<Integer, ColumnStatistics> createEmptyColumnStatistics(List<OrcType> orcTypes, int nodeIndex, ColumnWriterOptions columnWriterOptions)
{
    requireNonNull(orcTypes, "orcTypes is null");
    checkArgument(nodeIndex >= 0, "Invalid nodeIndex value: %s", nodeIndex);
    ImmutableMap.Builder<Integer, ColumnStatistics> result = ImmutableMap.builder();
    LinkedList<Integer> pending = new LinkedList<>();
    pending.add(nodeIndex);
    while (!pending.isEmpty()) {
        int current = pending.removeLast();
        OrcType currentType = orcTypes.get(current);
        // Children are visited later; traversal order does not affect the result map.
        pending.addAll(currentType.getFieldTypeIndexes());
        result.put(current, createStatisticsBuilderSupplier(currentType, columnWriterOptions).get().buildColumnStatistics());
    }
    return result.build();
}
/**
 * Builds a nested row type covering 18 ORC nodes and checks that empty statistics are
 * produced for every node, each reporting zero values.
 */
@Test
public void testCreateEmptyColumnStatistics() {
    ColumnWriterOptions columnWriterOptions = ColumnWriterOptions.builder().setCompressionKind(CompressionKind.ZSTD).build();
    Type rootType = rowType(// node index 0
        TINYINT, // 1
        mapType(TINYINT, SMALLINT), // 2-4,
        REAL, // 5
        mapType(INTEGER, arrayType(BIGINT)), // 6-9
        DOUBLE, // 10
        arrayType(VARCHAR), // 11-12
        TIMESTAMP, // 13
        rowType(VARBINARY, rowType(BOOLEAN))); // 14-17
    List<OrcType> orcTypes = OrcType.toOrcType(0, rootType);
    // all CountStatisticsBuilders would usually return ColumnStatistics when no values have been provided
    Map<Integer, ColumnStatistics> columnStatistics = createEmptyColumnStatistics(orcTypes, 0, columnWriterOptions);
    assertEquals(columnStatistics.size(), 18);
    for (int i = 0; i < 18; i++) {
        ColumnStatistics emptyColumnStatistics = columnStatistics.get(i);
        assertNotNull(emptyColumnStatistics);
        assertEquals(emptyColumnStatistics.getNumberOfValues(), 0);
    }
}
/**
 * Migration: encrypts all plaintext access tokens in the access-token collection.
 *
 * <p>Tokens lacking the "token_type" field are taken to be unencrypted; each is
 * re-written with its value encrypted via {@code accessTokenCipher} and tagged with
 * the AES_SIV token type. Empty token values are skipped with a warning. Already
 * encrypted tokens (token_type present) are left untouched.
 */
@Override
public void upgrade() {
    final MongoCollection<Document> collection = mongoConnection.getMongoDatabase().getCollection(AccessTokenImpl.COLLECTION_NAME);

    // We use the absence of the "token_type" field as an indicator to select access tokens that need to be encrypted
    // If we should change the encryption method in the future, we need to adjust the query
    for (final Document document : collection.find(Filters.exists(AccessTokenImpl.TOKEN_TYPE, false))) {
        final String tokenId = document.getObjectId("_id").toHexString();
        final String tokenName = document.getString(AccessTokenImpl.NAME);
        final String tokenUsername = document.getString(AccessTokenImpl.USERNAME);
        final String tokenValue = document.getString(AccessTokenImpl.TOKEN);

        if (isNullOrEmpty(tokenValue)) {
            log.warn("Couldn't encrypt empty value for access token <{}/{}> of user <{}>", tokenId, tokenName, tokenUsername);
            continue;
        }

        final Bson query = Filters.eq("_id", document.getObjectId("_id"));
        final Bson updates = Updates.combine(
            Updates.set(AccessTokenImpl.TOKEN_TYPE, AccessTokenImpl.Type.AES_SIV.getIntValue()),
            Updates.set(AccessTokenImpl.TOKEN, accessTokenCipher.encrypt(tokenValue))
        );

        LOG.info("Encrypting access token <{}/{}> for user <{}>", tokenId, tokenName, tokenUsername);
        final UpdateResult result = collection.updateOne(query, updates);
        if (result.getModifiedCount() != 1) {
            LOG.warn("Expected to modify one access token, but <{}> have been updated", result.getModifiedCount());
        }
    }
}
/**
 * Runs the encryption migration against a fixture with two plaintext tokens and one
 * already-encrypted token: the plaintext ones must change (and carry the AES_SIV type
 * and expected ciphertext), the pre-encrypted one must be left byte-identical.
 */
@Test
@MongoDBFixtures("V20200226181600_EncryptAccessTokensMigrationTest.json")
public void upgrade() {
    final Document plainToken1 = collection.find(Filters.eq("_id", new ObjectId("54e3deadbeefdeadbeef0001"))).first();
    final Document plainToken2 = collection.find(Filters.eq("_id", new ObjectId("54e3deadbeefdeadbeef0002"))).first();
    final Document plainToken3 = collection.find(Filters.eq("_id", new ObjectId("54e3deadbeefdeadbeef0003"))).first();

    assertThat(plainToken1).isNotNull();
    assertThat(plainToken2).isNotNull();
    assertThat(plainToken3).isNotNull();

    migration.upgrade();

    final Document encryptedToken1 = collection.find(Filters.eq("_id", new ObjectId("54e3deadbeefdeadbeef0001"))).first();
    final Document encryptedToken2 = collection.find(Filters.eq("_id", new ObjectId("54e3deadbeefdeadbeef0002"))).first();
    final Document encryptedToken3 = collection.find(Filters.eq("_id", new ObjectId("54e3deadbeefdeadbeef0003"))).first();

    assertThat(plainToken1).isNotEqualTo(encryptedToken1); // Must be encrypted, so not equal
    assertThat(plainToken2).isEqualTo(encryptedToken2); // Already was encrypted, migration shouldn't touch it
    assertThat(plainToken3).isNotEqualTo(encryptedToken3); // Must be encrypted, so not equal

    // Newly encrypted token
    assertThat(encryptedToken1).satisfies(t -> {
        final Document token = (Document) t;
        assertThat(token.getString(NAME)).isEqualTo("cli-access");
        assertThat(token.getString(USERNAME)).isEqualTo("jane");
        assertThat(token.getString(TOKEN)).isEqualTo("cc21d0e8fcbf8c28f8fd56c30e81e5f92cf8bcbf846c69cdcc4eec5ffe64f592bf604141be77e46c819a8997d9d245f1bc9f5f60dc44e490ca6ad07b25d45338efb3bad5");
        assertThat(token.getInteger(TOKEN_TYPE)).isEqualTo(AccessTokenImpl.Type.AES_SIV.getIntValue());
        assertThat(token.get(LAST_ACCESS)).isEqualTo(DateTime.parse("2020-02-26T21:50:12.454Z").toDate());
    });

    // Already encrypted token
    assertThat(encryptedToken2).satisfies(t -> {
        final Document token = (Document) t;
        assertThat(token.getString(NAME)).isEqualTo("test-1");
        assertThat(token.getString(USERNAME)).isEqualTo("john");
        assertThat(token.getString(TOKEN)).isEqualTo("d5f6fc27206946c15f183764c32526674108f9f4");
        assertThat(token.getInteger(TOKEN_TYPE)).isEqualTo(AccessTokenImpl.Type.AES_SIV.getIntValue());
        assertThat(token.get(LAST_ACCESS)).isEqualTo(DateTime.parse("2020-01-27T16:23:02.758Z").toDate());
    });

    // Newly encrypted token
    assertThat(encryptedToken3).satisfies(t -> {
        final Document token = (Document) t;
        assertThat(token.getString(NAME)).isEqualTo("test-2");
        assertThat(token.getString(USERNAME)).isEqualTo("john");
        assertThat(token.getString(TOKEN)).isEqualTo("55acb1b25c787ae1bc91e7eb1694b39277881b4e4643369590c49afd18ac745b4b60ad4aab058dfb4b6f00eba8d30a2c4c188cc9b5832cd9d9620aab82281651797ff3");
        assertThat(token.getInteger(TOKEN_TYPE)).isEqualTo(AccessTokenImpl.Type.AES_SIV.getIntValue());
        assertThat(token.get(LAST_ACCESS)).isEqualTo(DateTime.parse("2020-01-27T18:23:02.758Z").toDate());
    });
}
/**
 * Creates a Read transform with defaults: the default database and no partition
 * columns. Callers configure table, properties etc. via the returned builder-style
 * withX methods.
 */
public static Read read() {
    return new AutoValue_HCatalogIO_Read.Builder()
        .setDatabase(DEFAULT_DATABASE)
        .setPartitionCols(new ArrayList<>())
        .build();
}
/** Expanding a read without withTable() must fail validation with a clear message. */
@Test
public void testReadFailureValidationTable() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("withTable() is required");
    HCatalogIO.read()
        .withConfigProperties(getConfigPropertiesAsMap(service.getHiveConf()))
        .expand(null);
}
/**
 * Asynchronously closes (completes) the given LRA at the coordinator.
 *
 * <p>Sends an empty-body PUT to the LRA's close endpoint; any non-200 status is
 * surfaced as a failed future wrapping a RuntimeCamelException.
 *
 * @param lra      the LRA coordinator URL for the saga being completed
 * @param exchange current exchange, passed through to request preparation
 * @return a future completing with null on success, failing otherwise
 */
public CompletableFuture<Void> complete(URL lra, Exchange exchange) {
    URI closeUri = URI.create(lra.toString() + COORDINATOR_PATH_CLOSE);
    HttpRequest closeRequest = prepareRequest(closeUri, exchange)
            .setHeader(CONTENT_TYPE, TEXT_PLAIN_CONTENT)
            .PUT(HttpRequest.BodyPublishers.ofString(""))
            .build();

    return client.sendAsync(closeRequest, HttpResponse.BodyHandlers.ofString())
            .thenApply(response -> {
                if (response.statusCode() != HttpURLConnection.HTTP_OK) {
                    throw new RuntimeCamelException("Cannot complete LRA");
                }
                return null;
            });
}
/**
 * complete() must route its HTTP request through prepareRequest with the caller's
 * exchange; the overridden prepareRequest throws to prove it was reached.
 */
@DisplayName("Tests whether LRAClient is calling prepareRequest with exchange from complete()")
@Test
void testCallsPrepareRequestWithExchangeInComplete() throws MalformedURLException {
    LRASagaService sagaService = new LRASagaService();
    applyMockProperties(sagaService);
    LRAClient client = new LRAClient(sagaService) {
        protected HttpRequest.Builder prepareRequest(URI uri, Exchange exchange) {
            throw new ExchangeRuntimeException(exchange);
        }
    };
    Exchange exchange = Mockito.mock(Exchange.class);
    Assertions.assertThrows(ExchangeRuntimeException.class,
        () -> client.complete(new URL("https://localhost/saga"), exchange));
}
public static List<ACL> parseACLs(String aclString) throws BadAclFormatException { List<ACL> acl = Lists.newArrayList(); if (aclString == null) { return acl; } List<String> aclComps = Lists.newArrayList( Splitter.on(',').omitEmptyStrings().trimResults() .split(aclString)); for (String a : aclComps) { // from ZooKeeperMain private method int firstColon = a.indexOf(':'); int lastColon = a.lastIndexOf(':'); if (firstColon == -1 || lastColon == -1 || firstColon == lastColon) { throw new BadAclFormatException( "ACL '" + a + "' not of expected form scheme:id:perm"); } ACL newAcl = new ACL(); newAcl.setId(new Id(a.substring(0, firstColon), a.substring( firstColon + 1, lastColon))); newAcl.setPerms(getPermFromString(a.substring(lastColon + 1))); acl.add(newAcl); } return acl; }
/** A null ACL string parses to an empty list rather than failing. */
@Test
public void testNullACL() {
    assertTrue(ZKUtil.parseACLs(null).isEmpty());
}
/**
 * Reads the entry for {@code key} from the map-file reader that owns its partition.
 *
 * <p>With a single reader the partitioner is deliberately not consulted; otherwise the
 * partitioner selects which reader holds the key.
 *
 * @return the stored value for the key, or null if absent
 * @throws IOException on read failure
 */
public static <K extends WritableComparable, V extends Writable> Writable getEntry(MapFile.Reader[] readers, Partitioner<K, V> partitioner, K key, V value) throws IOException {
    // One reducer output: everything lives in readers[0]; skip the partitioner call.
    int part = readers.length <= 1 ? 0 : partitioner.getPartition(key, value, readers.length);
    return readers[part].get(key, value);
}
/** With a single reader, getEntry must not invoke the partitioner at all. */
@SuppressWarnings("static-access")
@Test
public void testPartitionerShouldNotBeCalledWhenOneReducerIsPresent() throws Exception {
    MapFileOutputFormat outputFormat = new MapFileOutputFormat();
    Reader reader = Mockito.mock(Reader.class);
    Reader[] readers = new Reader[]{reader};
    outputFormat.getEntry(readers, new MyPartitioner(), new Text(), new Text());
    assertTrue(!MyPartitioner.isGetPartitionCalled());
}
/**
 * Atomically swaps in a new task configuration and flags the task for
 * reconfiguration, but only if the config actually differs from the current one.
 *
 * @param taskConfig the new configuration map; compared to the current reference
 *                   with Objects.equals (null-safe)
 */
public void updateTaskConfig(Map<String, String> taskConfig) {
    // Fix: acquire the lock BEFORE entering try. With lock() inside the try, a
    // failed lock() would still run the finally and call unlock() on a lock this
    // thread does not hold, raising IllegalMonitorStateException and masking the
    // original failure.
    taskLifecycleLock.lock();
    try {
        if (!Objects.equals(this.taskConfigReference, taskConfig)) {
            logger.info("Updating task '" + name + "' configuration");
            taskConfigReference = taskConfig;
            reconfigurationNeeded = true;
        }
    } finally {
        taskLifecycleLock.unlock();
    }
}
/**
 * Changing the connector property alone must not affect a running task; only after
 * updateTaskConfig() does the task poll with the new item count.
 */
@Test
public void should_reconfigure_task() {
    assertPolledRecordsSize(CONFIGURED_ITEMS_SIZE);
    connector.setProperty(ITEMS_SIZE, String.valueOf(5));
    // Not yet applied: the task still runs with its old config.
    assertPolledRecordsSize(CONFIGURED_ITEMS_SIZE);
    taskRunner.updateTaskConfig(dummyTaskConfig());
    assertPolledRecordsSize(5);
}
/**
 * Converts an arbitrary JDBC/Java column value into a protobuf wrapper message.
 *
 * <p>Mapping summary (as implemented below): null → Empty; integral types narrower
 * than long → Int32Value; long → Int64Value; BigInteger/BigDecimal → StringValue of
 * their decimal text; float/double → Float/DoubleValue; boolean → BoolValue;
 * byte[]/Blob → BytesValue; String/Clob → StringValue; time-of-day types
 * (Time, LocalTime, OffsetTime) → Int64Value of nanos-of-day; date-only types
 * (java.sql.Date, LocalDate) → Int64Value of epoch-day; date-time types
 * (java.util.Date, LocalDateTime, OffsetDateTime, ZonedDateTime, Instant) →
 * protobuf Timestamp; anything else → StringValue of toString().
 *
 * <p>NOTE(review): OffsetDateTime/ZonedDateTime are converted via toLocalDateTime(),
 * discarding the offset/zone — presumably intentional, but worth confirming against
 * callers' expectations.
 */
@SuppressWarnings("deprecation")
@SneakyThrows(SQLException.class)
public static Message convertToProtobufMessage(final Object object) {
    if (null == object) {
        return Empty.getDefaultInstance();
    }
    if (object instanceof Integer) {
        return Int32Value.of((int) object);
    }
    if (object instanceof Short) {
        return Int32Value.of(((Short) object).intValue());
    }
    if (object instanceof Byte) {
        return Int32Value.of(((Byte) object).intValue());
    }
    if (object instanceof Long) {
        return Int64Value.of((long) object);
    }
    if (object instanceof BigInteger) {
        return StringValue.of(object.toString());
    }
    if (object instanceof Float) {
        return FloatValue.of((float) object);
    }
    if (object instanceof Double) {
        return DoubleValue.of((double) object);
    }
    if (object instanceof BigDecimal) {
        return StringValue.of(object.toString());
    }
    if (object instanceof String) {
        return StringValue.of(object.toString());
    }
    if (object instanceof Boolean) {
        return BoolValue.of((boolean) object);
    }
    if (object instanceof byte[]) {
        return BytesValue.of(ByteString.copyFrom((byte[]) object));
    }
    if (object instanceof Time) {
        // Deprecated getHours/getMinutes/getSeconds are used deliberately (see the
        // class-level @SuppressWarnings); nanos are recovered via Timestamp.
        Time time = (Time) object;
        LocalTime localTime = LocalTime.of(time.getHours(), time.getMinutes(), time.getSeconds(), new Timestamp(time.getTime()).getNanos());
        return Int64Value.of(localTime.toNanoOfDay());
    }
    if (object instanceof java.sql.Date) {
        return Int64Value.of(((java.sql.Date) object).toLocalDate().toEpochDay());
    }
    if (object instanceof Date) {
        return converToProtobufTimestamp((Date) object);
    }
    if (object instanceof LocalDateTime) {
        return converToProtobufTimestamp(Timestamp.valueOf((LocalDateTime) object));
    }
    if (object instanceof LocalDate) {
        return Int64Value.of(((LocalDate) object).toEpochDay());
    }
    if (object instanceof LocalTime) {
        return Int64Value.of(((LocalTime) object).toNanoOfDay());
    }
    if (object instanceof OffsetDateTime) {
        LocalDateTime localDateTime = ((OffsetDateTime) object).toLocalDateTime();
        return converToProtobufTimestamp(Timestamp.valueOf(localDateTime));
    }
    if (object instanceof OffsetTime) {
        return Int64Value.of(((OffsetTime) object).toLocalTime().toNanoOfDay());
    }
    if (object instanceof ZonedDateTime) {
        return converToProtobufTimestamp(Timestamp.valueOf(((ZonedDateTime) object).toLocalDateTime()));
    }
    if (object instanceof Instant) {
        Instant instant = (Instant) object;
        return com.google.protobuf.Timestamp.newBuilder().setSeconds(instant.getEpochSecond()).setNanos(instant.getNano()).build();
    }
    if (object instanceof Clob) {
        Clob clob = (Clob) object;
        return StringValue.of(clob.getSubString(1L, (int) clob.length()));
    }
    if (object instanceof Blob) {
        Blob blob = (Blob) object;
        return BytesValue.of(ByteString.copyFrom(blob.getBytes(1L, (int) blob.length())));
    }
    // Fallback for unrecognized types: their string representation.
    return StringValue.newBuilder().setValue(object.toString()).build();
}
/**
 * Drives convertToProtobufMessage through every supported input type and asserts the
 * expected wrapper message type and payload for each.
 */
@Test
void assertConvertToProtobufMessage() {
    Message actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(null);
    assertTrue(actualMessage instanceof Empty);
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(1);
    assertTrue(actualMessage instanceof Int32Value);
    assertThat(((Int32Value) actualMessage).getValue(), is(1));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage((byte) 1);
    assertTrue(actualMessage instanceof Int32Value);
    assertThat(((Int32Value) actualMessage).getValue(), is(1));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage((short) 1);
    assertTrue(actualMessage instanceof Int32Value);
    assertThat(((Int32Value) actualMessage).getValue(), is(1));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(1L);
    assertTrue(actualMessage instanceof Int64Value);
    assertThat(((Int64Value) actualMessage).getValue(), is(1L));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(new BigInteger("1234"));
    assertTrue(actualMessage instanceof StringValue);
    assertThat(new BigInteger(((StringValue) actualMessage).getValue()), is(new BigInteger("1234")));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(1.0F);
    assertTrue(actualMessage instanceof FloatValue);
    assertThat(((FloatValue) actualMessage).getValue(), is(1.0F));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(1.23);
    assertTrue(actualMessage instanceof DoubleValue);
    assertThat(((DoubleValue) actualMessage).getValue(), is(1.23));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(new BigDecimal("100"));
    assertTrue(actualMessage instanceof StringValue);
    assertThat(((StringValue) actualMessage).getValue(), is("100"));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage("abcd");
    assertTrue(actualMessage instanceof StringValue);
    assertThat(((StringValue) actualMessage).getValue(), is("abcd"));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(true);
    assertTrue(actualMessage instanceof BoolValue);
    assertTrue(((BoolValue) actualMessage).getValue());
    // Date-time family: all variants of "now" should collapse to the same epoch second.
    Timestamp now = new Timestamp(System.currentTimeMillis());
    long epochSecond = now.toInstant().getEpochSecond();
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(now.toLocalDateTime());
    assertTrue(actualMessage instanceof com.google.protobuf.Timestamp);
    assertThat(((com.google.protobuf.Timestamp) actualMessage).getSeconds(), is(epochSecond));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(now);
    assertTrue(actualMessage instanceof com.google.protobuf.Timestamp);
    assertThat(((com.google.protobuf.Timestamp) actualMessage).getSeconds(), is(epochSecond));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(new Date(now.getTime()));
    assertTrue(actualMessage instanceof com.google.protobuf.Timestamp);
    assertThat(((com.google.protobuf.Timestamp) actualMessage).getSeconds(), is(epochSecond));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(now.toInstant());
    assertTrue(actualMessage instanceof com.google.protobuf.Timestamp);
    assertThat(((com.google.protobuf.Timestamp) actualMessage).getNanos(), is(now.toInstant().getNano()));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(now.toLocalDateTime().toLocalTime());
    assertTrue(actualMessage instanceof Int64Value);
    assertThat(((Int64Value) actualMessage).getValue(), is(now.toLocalDateTime().toLocalTime().toNanoOfDay()));
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage("123456".getBytes());
    assertTrue(actualMessage instanceof BytesValue);
    assertThat(((BytesValue) actualMessage).getValue().toByteArray(), is("123456".getBytes()));
    OffsetTime offsetTime = OffsetTime.now();
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(offsetTime);
    assertTrue(actualMessage instanceof Int64Value);
    assertThat(((Int64Value) actualMessage).getValue(), is(offsetTime.toLocalTime().toNanoOfDay()));
    OffsetDateTime offsetDateTime = OffsetDateTime.now();
    actualMessage = ColumnValueConvertUtils.convertToProtobufMessage(offsetDateTime);
    assertTrue(actualMessage instanceof com.google.protobuf.Timestamp);
    assertThat(((com.google.protobuf.Timestamp) actualMessage).getSeconds(), is(offsetDateTime.toEpochSecond()));
    assertThat(((com.google.protobuf.Timestamp) actualMessage).getNanos(), is(offsetDateTime.getNano()));
}
/**
 * Command-line entry point that creates the JobRunr tables for a JDBC URL.
 *
 * <p>Arguments: jdbcUrl, userName, password, and an optional table prefix. Prints a
 * usage message when fewer than three arguments are given; any migration failure is
 * printed (with stack trace) rather than rethrown.
 */
public static void main(String[] args) {
    if (args.length < 3) {
        System.out.println("Error: insufficient arguments");
        System.out.println();
        System.out.println("usage: java -cp jobrunr-${jobrunr.version}.jar org.jobrunr.storage.sql.common.DatabaseCreator {jdbcUrl} {userName} {password} ({tablePrefix})");
        return;
    }

    String jdbcUrl = args[0];
    String jdbcUser = args[1];
    String jdbcPassword = args[2];
    String prefix = args.length > 3 ? args[3] : null;

    try {
        System.out.println("==========================================================");
        System.out.println("================== JobRunr Table Creator =================");
        System.out.println("==========================================================");
        new DatabaseCreator(
                () -> DriverManager.getConnection(jdbcUrl, jdbcUser, jdbcPassword),
                prefix,
                new SqlStorageProviderFactory().getStorageProviderClassByJdbcUrl(jdbcUrl))
                .runMigrations();
        System.out.println("Successfully created all tables!");
    } catch (Exception e) {
        System.out.println("An error occurred: ");
        StringWriter stackTrace = new StringWriter();
        e.printStackTrace(new PrintWriter(stackTrace));
        System.out.println(stackTrace.toString());
    }
}
/** The CLI entry point must run SQLite migrations end-to-end without throwing. */
@Test
void testSqlLiteMigrationsUsingMainMethod() {
    assertThatCode(() -> DatabaseCreator.main(new String[]{"jdbc:sqlite:" + SQLITE_DB1, "", ""})).doesNotThrowAnyException();
}
/**
 * Creates a ParameterizedType for {@code raw} with the given type arguments, using
 * raw's enclosing class (if any) as the owner type.
 *
 * @param raw           the raw class to parameterize
 * @param typeArguments one argument per type variable of {@code raw}; validated first
 * @return the parameterized type
 */
public static ParameterizedType parameterize(final Class<?> raw, final Type... typeArguments) {
    // Validates non-null raw and that argument count matches raw's type parameters.
    checkParameterizeMethodParameter(raw, typeArguments);
    return new ParameterizedTypeImpl(raw, raw.getEnclosingClass(), typeArguments);
}
/**
 * Parameterizing List (one type variable) with two arguments must be rejected
 * with IllegalArgumentException.
 */
@Test
void testParameterizeForDiffLength() {
    assertThrows(IllegalArgumentException.class, () -> {
        TypeUtils.parameterize(List.class, String.class, Integer.class);
    });
}
/**
 * Asserts that the actual string is empty. A null actual fails with
 * "expected an empty string"; a non-empty actual fails with "expected to be empty".
 */
public void isEmpty() {
    if (actual == null) {
        failWithActual(simpleFact("expected an empty string"));
        return;
    }
    if (!actual.isEmpty()) {
        failWithActual(simpleFact("expected to be empty"));
    }
}
/** isEmpty() on a null subject reports the null-specific failure message. */
@Test
public void stringIsEmptyFailNull() {
    expectFailureWhenTestingThat(null).isEmpty();
    assertFailureKeys("expected an empty string", "but was");
}
/**
 * Renders a description template by substituting each {@code <name>} placeholder with
 * its value from {@code params}.
 *
 * <p>The template's placeholders and the provided params are validated against each
 * other first; a mismatch raises before any substitution happens.
 *
 * @param descriptionTemplate template containing {@code <name>}-style placeholders
 * @param params              placeholder name to replacement value
 * @return the fully substituted description
 */
public static String getDescription(String descriptionTemplate, Map<String, String> params) {
    assertParamsMatchWithDescription(descriptionTemplate, params);
    String rendered = descriptionTemplate;
    for (String placeholder : getParams(descriptionTemplate)) {
        rendered = rendered.replace("<" + placeholder + ">", params.get(placeholder));
    }
    return rendered;
}
/**
 * All placeholders substitute correctly when every param is supplied; removing one
 * param makes validation fail with IllegalArgumentException.
 */
@Test
void testGetDescriptionForTemplate() {
    String description = "test description with param <key1>, <key2> and <key3>.";
    Map<String, String> params = new HashMap<>();
    params.put("key1", "value1");
    params.put("key2", "value2");
    params.put("key3", "value3");
    Assertions.assertEquals(
        "test description with param value1, value2 and value3.",
        ExceptionParamsUtil.getDescription(description, params));
    params.remove("key2");
    Assertions.assertThrows(
        IllegalArgumentException.class,
        () -> ExceptionParamsUtil.getDescription(description, params));
}
/**
 * Returns a transform computing the multiset union (keeping duplicates) of the input
 * collection with {@code rightCollection}.
 *
 * @param rightCollection the right-hand side of the union; must not be null
 */
public static <T> PTransform<PCollection<T>, PCollection<T>> unionAll(
    PCollection<T> rightCollection) {
    checkNotNull(rightCollection, "rightCollection argument is null");
    return new SetImpl<>(rightCollection, unionAll());
}
/**
 * unionAll keeps every occurrence from both inputs (multiset semantics), for both the
 * plain-string and Row-based pipelines, and preserves the Row schema.
 */
@Test
@Category(NeedsRunner.class)
public void testUnionAll() {
    PAssert.that(first.apply("strings", Sets.unionAll(second)))
        .containsInAnyOrder(
            "a", "a", "a", "a", "a",
            "b", "b", "b", "b", "b",
            "c", "c",
            "d", "d", "d", "d",
            "e", "e",
            "f", "f",
            "g", "g",
            "h", "h");
    PCollection<Row> results = firstRows.apply("rows", Sets.unionAll(secondRows));
    PAssert.that(results)
        .containsInAnyOrder(
            toRows(
                "a", "a", "a", "a", "a",
                "b", "b", "b", "b", "b",
                "c", "c",
                "d", "d", "d", "d",
                "e", "e",
                "f", "f",
                "g", "g",
                "h", "h"));
    assertEquals(schema, results.getSchema());
    p.run();
}
/**
 * Lists KV-store keys in the (rendered) namespace that start with the rendered prefix.
 *
 * <p>Access is validated first: the flow's tenant/namespace must be allowed to read
 * the target namespace.
 *
 * @param runContext execution context supplying rendering, tenant and KV access
 * @return output holding the matching key names
 * @throws Exception on rendering, authorization or KV-store failure
 */
@Override
public Output run(RunContext runContext) throws Exception {
    String renderedNamespace = runContext.render(this.namespace);

    FlowService flowService = ((DefaultRunContext) runContext).getApplicationContext().getBean(FlowService.class);
    flowService.checkAllowedNamespace(runContext.tenantId(), renderedNamespace, runContext.tenantId(), runContext.flowInfo().namespace());

    String renderedPrefix = runContext.render(this.prefix);

    List<String> keys = runContext.namespaceKv(renderedNamespace).list().stream()
        .map(KVEntry::key)
        .filter(key -> key.startsWith(renderedPrefix))
        .toList();

    return Output.builder()
        .keys(keys)
        .build();
}
/** Only keys starting with the rendered prefix are returned; others are filtered out. */
@Test
void shouldGetKeysGivenMatchingPrefix() throws Exception {
    // Given
    String namespace = IdUtils.create();
    RunContext runContext = this.runContextFactory.of(Map.of(
        "flow", Map.of("namespace", namespace),
        "inputs", Map.of(
            "prefix", TEST_KEY_PREFIX_TEST
        )
    ));

    GetKeys getKeys = GetKeys.builder()
        .id(GetKeys.class.getSimpleName())
        .type(GetKeys.class.getName())
        .prefix("{{ inputs.prefix }}")
        .build();

    final KVStore kv = runContext.namespaceKv(namespace);
    kv.put(TEST_KEY_PREFIX_TEST + "-key", new KVValueAndMetadata(null, "value"));
    kv.put(TEST_KEY_PREFIX_TEST + "-second-key", new KVValueAndMetadata(null, "value"));
    kv.put("another-key", new KVValueAndMetadata(null, "value"));

    // When
    GetKeys.Output run = getKeys.run(runContext);

    // Then
    assertThat(run.getKeys(), containsInAnyOrder(TEST_KEY_PREFIX_TEST + "-key", TEST_KEY_PREFIX_TEST + "-second-key"));
}
/**
 * Reconstructs the {@link Throwable} described by this serialized proto,
 * picking the narrowest supported supertype (YarnException / IOException /
 * RuntimeException / Throwable) and recursively deserializing the cause chain.
 *
 * @return the rebuilt exception with message and cause restored
 * @throws YarnRuntimeException if the recorded class cannot be loaded
 */
@SuppressWarnings("unchecked")
@Override
public Throwable deSerialize() {
    SerializedException cause = getCause();
    SerializedExceptionProtoOrBuilder p = viaProto ? proto : builder;
    Class<?> realClass;
    try {
        realClass = Class.forName(p.getClassName());
    } catch (ClassNotFoundException e) {
        throw new YarnRuntimeException(e);
    }
    // A bounded wildcard replaces the former raw Class, keeping asSubclass()
    // type-safe while preserving identical runtime behavior.
    final Class<? extends Throwable> classType;
    if (YarnException.class.isAssignableFrom(realClass)) {
        classType = YarnException.class;
    } else if (IOException.class.isAssignableFrom(realClass)) {
        classType = IOException.class;
    } else if (RuntimeException.class.isAssignableFrom(realClass)) {
        classType = RuntimeException.class;
    } else {
        classType = Throwable.class;
    }
    return instantiateException(realClass.asSubclass(classType), getMessage(),
        cause == null ? null : cause.deSerialize());
}
// deSerialize on an uninitialised PB must wrap the ClassNotFoundException in a
// YarnRuntimeException; after init(ex), the round-tripped exception must match.
@Test
void testDeserialize() throws Exception {
    Exception ex = new Exception("test exception");
    SerializedExceptionPBImpl pb = new SerializedExceptionPBImpl();

    try {
        pb.deSerialize();
        fail("deSerialize should throw YarnRuntimeException");
    } catch (YarnRuntimeException e) {
        assertEquals(ClassNotFoundException.class, e.getCause().getClass());
    }

    pb.init(ex);
    assertEquals(ex.toString(), pb.deSerialize().toString());
}
/**
 * Returns {@code true} if this member object represents the local cluster
 * member (this JVM), {@code false} for a remote member.
 */
@Override
public boolean localMember() {
    return localMember;
}
// A member built with localMember(true) and liteMember(true) must report both
// flags alongside the common field invariants.
@Test
public void testConstructor_withLiteMember_isTrue() {
    MemberImpl member = new MemberImpl.Builder(address)
            .version(MemberVersion.of("3.8.0"))
            .localMember(true)
            .uuid(newUnsecureUUID())
            .liteMember(true)
            .build();

    assertBasicMemberImplFields(member);
    assertTrue(member.localMember());
    assertTrue(member.isLiteMember());
}
/**
 * Creates and pre-configures a FEEL 1.1 ANTLR parser for the given source:
 * installs the symbol table (including additional functions), the custom error
 * handler/listener wired to the events manager, and pre-defines the input
 * variables so they can be resolved during parsing.
 *
 * @return a parser ready to produce the AST for {@code source}
 */
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) {
    CharStream input = CharStreams.fromString(source);
    FEEL_1_1Lexer lexer = new FEEL_1_1Lexer( input );
    CommonTokenStream tokens = new CommonTokenStream( lexer );
    FEEL_1_1Parser parser = new FEEL_1_1Parser( tokens );
    ParserHelper parserHelper = new ParserHelper(eventsManager);
    // Expose extra functions (e.g. contributed by profiles) in the built-in scope.
    additionalFunctions.forEach(f -> parserHelper.getSymbolTable().getBuiltInScope().define(f.getSymbol()));
    parser.setHelper(parserHelper);
    parser.setErrorHandler( new FEELErrorHandler() );
    parser.removeErrorListeners(); // removes the error listener that prints to the console
    parser.addErrorListener( new FEELParserErrorListener( eventsManager ) );
    // pre-loads the parser with symbols
    defineVariables( inputVariableTypes, inputVariables, parser );
    if (typeRegistry != null) {
        parserHelper.setTypeRegistry(typeRegistry);
    }
    return parser;
}
// Parses a FEEL if/then/else and checks the AST node type, the reconstructed
// source text of each branch, and the inferred STRING result type.
@Test
void ifExpression() {
    String inputExpression = "if applicant.age < 18 then \"declined\" else \"accepted\"";
    BaseNode ifBase = parse( inputExpression );

    assertThat( ifBase).isInstanceOf(IfExpressionNode.class);
    assertThat( ifBase.getText()).isEqualTo(inputExpression);
    assertThat( ifBase.getResultType()).isEqualTo(BuiltInType.STRING);

    IfExpressionNode ifExpr = (IfExpressionNode) ifBase;
    assertThat( ifExpr.getCondition().getText()).isEqualTo( "applicant.age < 18");
    assertThat( ifExpr.getThenExpression().getText()).isEqualTo( "\"declined\"");
    assertThat( ifExpr.getElseExpression().getText()).isEqualTo( "\"accepted\"");
}
/**
 * Returns the distinct elements present in both input arrays, preserving the
 * order of first appearance in {@code left}. Null on either side yields null.
 */
@Udf
public <T> List<T> intersect(
    @UdfParameter(description = "First array of values") final List<T> left,
    @UdfParameter(description = "Second array of values") final List<T> right) {
  if (left == null) {
    return null;
  }
  if (right == null) {
    return null;
  }
  // Seed with the distinct elements of `left` (insertion order kept), then
  // drop everything that does not also occur in `right`.
  final Set<T> common = Sets.newLinkedHashSet(left);
  common.retainAll(Sets.newHashSet(right));
  return Lists.newArrayList(common);
}
// intersect must be null-safe: null inputs yield null, not an empty list.
@Test
public void shouldReturnNullForNullInputs() {
    final List<Long> result = udf.intersect((List<Long>) null, (List<Long>) null);
    assertThat(result, is(nullValue()));
}
/**
 * Parses a mode string into a {@link Mode}, accepting either octal numeric
 * form (e.g. "755") or symbolic form (e.g. "u=rwx,go=rx").
 *
 * @param value the mode string to parse
 * @return the parsed mode
 * @throws IllegalArgumentException if the value is blank or malformed
 */
public static Mode parse(String value) {
    if (StringUtils.isBlank(value)) {
        throw new IllegalArgumentException(ExceptionMessage.INVALID_MODE.getMessage(value));
    }
    try {
        // Numeric parse is attempted first; a NumberFormatException means the
        // string is not octal and is interpreted symbolically instead.
        return parseNumeric(value);
    } catch (NumberFormatException e) {
        // Treat as symbolic
        return parseSymbolic(value);
    }
}
// Octal numeric mode strings must map to the expected owner/group/other bit triples.
@Test
public void numerics() {
    Mode parsed = ModeParser.parse("777");
    assertEquals(Mode.Bits.ALL, parsed.getOwnerBits());
    assertEquals(Mode.Bits.ALL, parsed.getGroupBits());
    assertEquals(Mode.Bits.ALL, parsed.getOtherBits());

    parsed = ModeParser.parse("755");
    assertEquals(Mode.Bits.ALL, parsed.getOwnerBits());
    assertEquals(Mode.Bits.READ_EXECUTE, parsed.getGroupBits());
    assertEquals(Mode.Bits.READ_EXECUTE, parsed.getOtherBits());

    parsed = ModeParser.parse("644");
    assertEquals(Mode.Bits.READ_WRITE, parsed.getOwnerBits());
    assertEquals(Mode.Bits.READ, parsed.getGroupBits());
    assertEquals(Mode.Bits.READ, parsed.getOtherBits());
}
/**
 * Adds the given user to the given non-default group, rejecting duplicate
 * memberships, and commits the change.
 *
 * @return the persisted membership link
 */
public UserGroupDto addMembership(String groupUuid, String userUuid) {
    try (DbSession session = dbClient.openSession(false)) {
        // Lookup order matters: a missing user must be reported before a missing group.
        UserDto user = findUserOrThrow(userUuid, session);
        GroupDto group = findNonDefaultGroupOrThrow(groupUuid, session);
        checkArgument(isNotInGroup(session, groupUuid, userUuid),
            "User '%s' is already a member of group '%s'", user.getLogin(), group.getName());
        UserGroupDto membership = new UserGroupDto().setGroupUuid(groupUuid).setUserUuid(userUuid);
        userGroupDao.insert(session, membership, group.getName(), user.getLogin());
        session.commit();
        return membership;
    }
}
// When neither user nor group exists, the user lookup fails first, so the
// NotFoundException must report the missing user.
@Test
public void addMembership_ifGroupAndUserNotFound_shouldThrow() {
    assertThatExceptionOfType(NotFoundException.class)
        .isThrownBy(() -> groupMembershipService.addMembership(GROUP_A, USER_1))
        .withMessage("User 'user_1' not found");
}
/**
 * Checks whether the given pattern can produce an empty match, i.e. whether a
 * final NFA state is reachable from the start state through PROCEED transitions
 * only (without consuming any event).
 *
 * @param pattern pattern to analyse, must not be null
 * @return true if an empty match is possible
 */
public static boolean canProduceEmptyMatches(final Pattern<?, ?> pattern) {
    NFAFactoryCompiler<?> compiler = new NFAFactoryCompiler<>(checkNotNull(pattern));
    compiler.compileFactory();

    State<?> startState =
            compiler.getStates().stream()
                    .filter(State::isStart)
                    .findFirst()
                    .orElseThrow(
                            () ->
                                    new IllegalStateException(
                                            "Compiler produced no start state. It is a bug. File a jira."));

    // Depth-first search restricted to PROCEED edges.
    Set<State<?>> visitedStates = new HashSet<>();
    final Stack<State<?>> statesToCheck = new Stack<>();
    statesToCheck.push(startState);
    while (!statesToCheck.isEmpty()) {
        final State<?> currentState = statesToCheck.pop();
        // Set#add returns false for already-seen states, replacing the former
        // contains()/add() pair with a single hash lookup.
        if (!visitedStates.add(currentState)) {
            continue;
        }
        for (StateTransition<?> transition : currentState.getStateTransitions()) {
            if (transition.getAction() == StateTransitionAction.PROCEED) {
                if (transition.getTargetState().isFinal()) {
                    return true;
                }
                statesToCheck.push(transition.getTargetState());
            }
        }
    }
    return false;
}
// Patterns whose start-to-final path consists solely of optional elements can
// match empty; any mandatory element along the way breaks that property.
@Test
public void testCheckingEmptyMatches() {
    assertThat(NFACompiler.canProduceEmptyMatches(Pattern.begin("a").optional()), is(true));
    assertThat(
            NFACompiler.canProduceEmptyMatches(Pattern.begin("a").oneOrMore().optional()),
            is(true));
    assertThat(
            NFACompiler.canProduceEmptyMatches(
                    Pattern.begin("a").oneOrMore().optional().next("b").optional()),
            is(true));
    assertThat(NFACompiler.canProduceEmptyMatches(Pattern.begin("a")), is(false));
    assertThat(NFACompiler.canProduceEmptyMatches(Pattern.begin("a").oneOrMore()), is(false));
    assertThat(
            NFACompiler.canProduceEmptyMatches(
                    Pattern.begin("a").oneOrMore().next("b").optional()),
            is(false));
}
/**
 * Signs the given data with the configured private key using SHA512withRSA.
 *
 * @param data the bytes to sign
 * @return the RSA signature over {@code data}
 * @throws IllegalStateException if no private-key key store was configured
 * @throws UnrecoverableKeyException if the key cannot be recovered from the store
 * @throws InvalidKeyException if the recovered key is unusable for signing
 */
public byte[] signDataWithPrivateKey(byte[] data) throws UnrecoverableKeyException,
        KeyStoreException,
        NoSuchAlgorithmException,
        InvalidKeyException,
        SignatureException {
    if (pvtKeyStore == null) {
        // IllegalStateException (a RuntimeException subclass) describes a
        // mis-configured helper more precisely than a bare RuntimeException,
        // while staying compatible with callers catching RuntimeException.
        throw new IllegalStateException(
            "Key store with private key not configured. Please configure it properly before using signed serialization.");
    }
    PrivateKey pvtkey = (PrivateKey) pvtKeyStore.getKey(pvtKeyAlias, pvtKeyPassword);
    Signature sig = Signature.getInstance(SHA512WITH_RSA);
    sig.initSign(pvtkey);
    sig.update(data);
    return sig.sign();
}
// End-to-end signing round trip: a "server" helper signs with the private key
// from its key store, a "client" helper verifies with the public certificate
// from a separate store, and a tampered payload must fail verification.
@Test
public void testSignDataWithPrivateKey() throws UnsupportedEncodingException,
        UnrecoverableKeyException,
        InvalidKeyException,
        KeyStoreException,
        NoSuchAlgorithmException,
        SignatureException {
    // The server signs the data with the private key
    // Set properties to simulate the server
    final URL serverKeyStoreURL = getClass().getResource(KEYSTORE_SERVER_RESOURCE_NAME);
    System.setProperty( KeyStoreConstants.PROP_SIGN, Boolean.TRUE.toString() );
    System.setProperty( KeyStoreConstants.PROP_PVT_KS_URL, serverKeyStoreURL.toExternalForm() );
    System.setProperty(KeyStoreConstants.PROP_PVT_KS_PWD, KEYSTORE_SERVER_PASSWORD);
    System.setProperty( KeyStoreConstants.PROP_PVT_ALIAS, KEY_ALIAS );
    System.setProperty( KeyStoreConstants.PROP_PVT_PWD, KEY_PASSWORD );
    final KeyStoreHelper serverHelper = new KeyStoreHelper();

    // get some data to sign
    final byte[] data = "Hello World".getBytes("UTF8" );

    // sign the data
    final byte[] signature = serverHelper.signDataWithPrivateKey(data );

    // now, initialise the client helper
    // Set properties to simulate the client
    final URL clientKeyStoreURL = getClass().getResource(KEYSTORE_CLIENT_RESOURCE_NAME );
    System.setProperty( KeyStoreConstants.PROP_SIGN, Boolean.TRUE.toString() );
    System.setProperty( KeyStoreConstants.PROP_PUB_KS_URL, clientKeyStoreURL.toExternalForm() );
    System.setProperty( KeyStoreConstants.PROP_PUB_KS_PWD, KEYSTORE_CLIENT_PASSWORD );
    // client needs no password to access the certificate and public key
    final KeyStoreHelper clientHelper = new KeyStoreHelper( );

    // check the signature against the data
    assertThat(clientHelper.checkDataWithPublicKey(KEY_ALIAS, data, signature)).isTrue();

    // check some fake data
    assertThat(clientHelper.checkDataWithPublicKey(KEY_ALIAS, "fake".getBytes("UTF8"), signature)).isFalse();
}
/**
 * Returns the Hamming distance between two bit strings: the number of positions
 * at which the corresponding bits differ.
 *
 * <p>The previous implementation compared {@link BitSet#size()}, which reports
 * the internally allocated capacity (a multiple of 64 words) rather than the
 * logical content, so logically comparable inputs could be spuriously rejected.
 * XOR-ing the sets and counting the set bits is correct regardless of each
 * set's allocation size, and also generalizes to inputs of different capacity.
 *
 * @param x first bit set
 * @param y second bit set
 * @return the number of differing bit positions
 */
@Override
public double d(BitSet x, BitSet y) {
    // xor leaves exactly the differing positions set; cardinality counts them.
    BitSet diff = (BitSet) x.clone();
    diff.xor(y);
    return diff.cardinality();
}
// Hamming distance between 0x5D (0101_1101) and 0x49 (0100_1001): the values
// differ in exactly two bit positions (bits 2 and 4). Exercises the int overload.
@Test
public void testDistance() {
    System.out.println("distance");
    int x = 0x5D;
    int y = 0x49;
    assertEquals(2, HammingDistance.d(x, y));
}
/**
 * Returns the cached {@link SelectorData} for the given plugin and path, or
 * {@code null} when nothing is cached for that pair.
 *
 * <p>Replaces {@code Optional.ofNullable(..).orElse(Maps.newHashMap())}, which
 * eagerly allocated a throw-away HashMap on every call — even on cache hits —
 * because {@code orElse} always evaluates its argument.
 *
 * @param pluginName plugin the selector belongs to
 * @param path cache key (request path)
 * @return cached selector data, or null if absent
 */
public SelectorData obtainSelectorData(final String pluginName, final String path) {
    final Map<String, SelectorData> lruMap = SELECTOR_DATA_MAP.get(pluginName);
    return lruMap == null ? null : lruMap.get(path);
}
// Seeds the selector cache via reflection, verifies obtainSelectorData returns
// the cached entry for the plugin/path pair, then clears the shared map so
// other tests are unaffected.
@Test
public void testObtainSelectorData() throws NoSuchFieldException, IllegalAccessException {
    SelectorData firstSelectorData = SelectorData.builder().id("1").pluginName(mockPluginName1).sort(1).build();
    ConcurrentHashMap<String, WindowTinyLFUMap<String, SelectorData>> selectorMap = getFieldByName(selectorMapStr);
    selectorMap.put(mockPluginName1, new WindowTinyLFUMap<>(100, 100, Boolean.FALSE));
    selectorMap.get(mockPluginName1).put(path1, firstSelectorData);
    SelectorData firstSelectorDataCache = MatchDataCache.getInstance().obtainSelectorData(mockPluginName1, path1);
    assertEquals(firstSelectorData, firstSelectorDataCache);
    selectorMap.clear();
}
/**
 * Renders the 20-byte NetFlow v9 header (version, count, sysUptime, unixSecs,
 * sequence, sourceId) as a human-readable hex dump for debugging/logging.
 */
public String prettyHexDump() {
    final ByteBuf buffer = Unpooled.buffer(20);
    try {
        buffer.writeShort(version());
        buffer.writeShort(count());
        buffer.writeInt(Math.toIntExact(sysUptime()));
        buffer.writeInt(Math.toIntExact(unixSecs()));
        buffer.writeInt(Math.toIntExact(sequence()));
        buffer.writeInt(Math.toIntExact(sourceId()));
        return ByteBufUtil.prettyHexDump(buffer);
    } finally {
        // Always release the temporary buffer to avoid a Netty buffer leak.
        ReferenceCountUtil.release(buffer);
    }
}
// Smoke test: a populated header must render a non-empty hex dump.
@Test
public void prettyHexDump() {
    final NetFlowV9Header header = NetFlowV9Header.create(5, 23, 42L, 1000L, 1L, 1L);

    assertThat(header.prettyHexDump()).isNotEmpty();
}
/**
 * Returns the JDBC URL prefixes this database type recognises; P6Spy wraps the
 * underlying MySQL driver, hence the composite prefix.
 */
@Override
public Collection<String> getJdbcUrlPrefixes() {
    return Collections.singletonList("jdbc:p6spy:mysql:");
}
// The SPI-loaded P6spyMySQL type must expose exactly the composite p6spy/mysql prefix.
@Test
void assertGetJdbcUrlPrefixes() {
    assertThat(TypedSPILoader.getService(DatabaseType.class, "P6spyMySQL").getJdbcUrlPrefixes(), is(Collections.singletonList("jdbc:p6spy:mysql:")));
}
/**
 * Validates the given string as a URL: it must parse as a URI and then pass
 * scheme, authority, path, query and fragment validation in that order.
 *
 * @param value the URL to validate; null is invalid
 * @return true if the value is a valid URL
 */
public boolean isValid(String value) {
    if (value == null) {
        return false;
    }

    URI uri; // ensure value is a valid URI
    try {
        uri = new URI(value);
    } catch (URISyntaxException e) {
        return false;
    }
    // OK, perform additional validation

    String scheme = uri.getScheme();
    if (!isValidScheme(scheme)) {
        return false;
    }

    String authority = uri.getRawAuthority();
    if ("file".equals(scheme) && (authority == null || "".equals(authority))) {
        // Special case - file: allows an empty authority
        return true; // this is a local file - nothing more to do here
    } else if ("file".equals(scheme) && authority != null && authority.contains(":")) {
        // file: authority must not contain a colon (no port/drive component allowed)
        return false;
    } else {
        // Validate the authority
        if (!isValidAuthority(authority)) {
            return false;
        }
    }

    if (!isValidPath(uri.getRawPath())) {
        return false;
    }

    if (!isValidQuery(uri.getRawQuery())) {
        return false;
    }

    if (!isValidFragment(uri.getRawFragment())) {
        return false;
    }

    return true;
}
// ALLOW_LOCAL_URLS must accept bare hostnames (e.g. "hostname", "localhost")
// while still rejecting unknown TLD-style names; with the flag off, only fully
// qualified hosts validate.
@Test
public void testValidator288() {
    UrlValidator validator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS);

    assertTrue("hostname should validate", validator.isValid("http://hostname"));
    assertTrue("hostname with path should validate", validator.isValid("http://hostname/test/index.html"));
    assertTrue("localhost URL should validate", validator.isValid("http://localhost/test/index.html"));
    assertFalse("first.my-testing should not validate", validator.isValid("http://first.my-testing/test/index.html"));
    assertFalse("broke.hostname should not validate", validator.isValid("http://broke.hostname/test/index.html"));
    assertTrue("www.apache.org should still validate", validator.isValid("http://www.apache.org/test/index.html"));

    // Turn it off, and check
    validator = new UrlValidator(0);

    assertFalse("hostname should no longer validate", validator.isValid("http://hostname"));
    assertFalse("localhost URL should no longer validate", validator.isValid("http://localhost/test/index.html"));
    assertTrue("www.apache.org should still validate", validator.isValid("http://www.apache.org/test/index.html"));
}
protected static VplsOperation getOptimizedVplsOperation(Deque<VplsOperation> operations) { if (operations.isEmpty()) { return null; } // no need to optimize if the queue contains only one operation if (operations.size() == 1) { return operations.getFirst(); } final VplsOperation firstOperation = operations.peekFirst(); final VplsOperation lastOperation = operations.peekLast(); final VplsOperation.Operation firstOp = firstOperation.op(); final VplsOperation.Operation lastOp = lastOperation.op(); if (firstOp.equals(VplsOperation.Operation.REMOVE)) { if (lastOp.equals(VplsOperation.Operation.REMOVE)) { // case 1: both first and last operation are REMOVE; do remove return firstOperation; } else if (lastOp.equals(VplsOperation.Operation.ADD)) { // case 2: if first is REMOVE, and last is ADD; do update return VplsOperation.of(lastOperation.vpls(), VplsOperation.Operation.UPDATE); } else { // case 3: first is REMOVE, last is UPDATE; do update return lastOperation; } } else if (firstOp.equals(VplsOperation.Operation.ADD)) { if (lastOp.equals(VplsOperation.Operation.REMOVE)) { // case 4: first is ADD, last is REMOVE; nothing to do return null; } else if (lastOp.equals(VplsOperation.Operation.ADD)) { // case 5: both first and last are ADD, do add return VplsOperation.of(lastOperation.vpls(), VplsOperation.Operation.ADD); } else { // case 6: first is ADD and last is update, do add return VplsOperation.of(lastOperation.vpls(), VplsOperation.Operation.ADD); } } else { if (lastOp.equals(VplsOperation.Operation.REMOVE)) { // case 7: last is remove, do remove return lastOperation; } else if (lastOp.equals(VplsOperation.Operation.ADD)) { // case 8: do update only return VplsOperation.of(lastOperation.vpls(), VplsOperation.Operation.UPDATE); } else { // case 9: from UPDATE to UPDATE // only need last UPDATE operation return VplsOperation.of(lastOperation.vpls(), VplsOperation.Operation.UPDATE); } } }
// Two queued UPDATEs for the same VPLS must collapse into a single UPDATE
// carrying the state of the most recent operation.
@Test
public void testOptimizeOperationsUToU() {
    Deque<VplsOperation> operations = new ArrayDeque<>();
    VplsData vplsData = VplsData.of(VPLS1);
    vplsData.addInterfaces(ImmutableSet.of(V100H1));
    VplsOperation vplsOperation = VplsOperation.of(vplsData, VplsOperation.Operation.UPDATE);
    operations.add(vplsOperation);
    vplsData = VplsData.of(VPLS1, EncapsulationType.VLAN);
    vplsData.addInterfaces(ImmutableSet.of(V100H1, V100H2));
    vplsOperation = VplsOperation.of(vplsData, VplsOperation.Operation.UPDATE);
    operations.add(vplsOperation);

    vplsOperation = VplsOperationManager.getOptimizedVplsOperation(operations);
    assertEquals(VplsOperation.of(vplsData, VplsOperation.Operation.UPDATE), vplsOperation);
}
/**
 * Compresses the given items into an in-memory ZIP archive, one entry per item,
 * with each item's data encoded as UTF-8.
 *
 * @param source items to compress (entry name + string content)
 * @return the ZIP archive bytes; NOTE(review): returns {@code null} when an
 *     IOException occurs (only logged here) — callers must null-check
 */
public static byte[] zip(List<ZipItem> source) {
    byte[] result = null;
    try (ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
            ZipOutputStream zipOut = new ZipOutputStream(byteOut)) {
        for (ZipItem item : source) {
            // putNextEntry implicitly closes the previous entry
            zipOut.putNextEntry(new ZipEntry(item.getItemName()));
            zipOut.write(item.getItemData().getBytes(StandardCharsets.UTF_8));
        }
        zipOut.flush();
        zipOut.finish();
        result = byteOut.toByteArray();
    } catch (IOException e) {
        LOGGER.error("an error occurred while compressing data.", e);
    }
    return result;
}
// Zipping a single named item must produce a non-empty archive byte array.
@Test
void testZip() {
    List<ZipUtils.ZipItem> zipItemList = new ArrayList<>();
    zipItemList.add(new ZipUtils.ZipItem("test", "content"));
    byte[] zip = ZipUtils.zip(zipItemList);
    assertTrue(zip != null && zip.length > 0);
}
public static long fix(FileSystem fs, Path dir, Class<? extends Writable> keyClass, Class<? extends Writable> valueClass, boolean dryrun, Configuration conf) throws Exception { String dr = (dryrun ? "[DRY RUN ] " : ""); Path data = new Path(dir, DATA_FILE_NAME); Path index = new Path(dir, INDEX_FILE_NAME); int indexInterval = conf.getInt(Writer.INDEX_INTERVAL, 128); if (!fs.exists(data)) { // there's nothing we can do to fix this! throw new Exception(dr + "Missing data file in " + dir + ", impossible to fix this."); } if (fs.exists(index)) { // no fixing needed return -1; } SequenceFile.Reader dataReader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(data)); if (!dataReader.getKeyClass().equals(keyClass)) { throw new Exception(dr + "Wrong key class in " + dir + ", expected" + keyClass.getName() + ", got " + dataReader.getKeyClass().getName()); } if (!dataReader.getValueClass().equals(valueClass)) { throw new Exception(dr + "Wrong value class in " + dir + ", expected" + valueClass.getName() + ", got " + dataReader.getValueClass().getName()); } long cnt = 0L; Writable key = ReflectionUtils.newInstance(keyClass, conf); Writable value = ReflectionUtils.newInstance(valueClass, conf); SequenceFile.Writer indexWriter = null; if (!dryrun) { indexWriter = SequenceFile.createWriter(conf, SequenceFile.Writer.file(index), SequenceFile.Writer.keyClass(keyClass), SequenceFile.Writer.valueClass (LongWritable.class)); } try { /** What's the position (in bytes) we wrote when we got the last index */ long lastIndexPos = -1; /** * What was size when we last wrote an index. 
Set to MIN_VALUE to ensure * that we have an index at position zero - midKey will throw an exception * if this is not the case */ long lastIndexKeyCount = Long.MIN_VALUE; long pos = dataReader.getPosition(); LongWritable position = new LongWritable(); long nextBlock = pos; boolean blockCompressed = dataReader.isBlockCompressed(); while(dataReader.next(key, value)) { if (blockCompressed) { long curPos = dataReader.getPosition(); if (curPos > nextBlock) { pos = nextBlock; // current block position nextBlock = curPos; } } // Follow the same logic as in // {@link MapFile.Writer#append(WritableComparable, Writable)} if (cnt >= lastIndexKeyCount + indexInterval && pos > lastIndexPos) { position.set(pos); if (!dryrun) { indexWriter.append(key, position); } lastIndexPos = pos; lastIndexKeyCount = cnt; } if (!blockCompressed) { pos = dataReader.getPosition(); // next record position } cnt++; } } catch(Throwable t) { // truncated data file. swallow it. } dataReader.close(); if (!dryrun) indexWriter.close(); return cnt; }
// Creates a MapFile, deletes its index file, then verifies that MapFile.fix in
// dry-run mode reports the full number of records a rebuilt index would cover.
@Test
public void testFix() {
    final String INDEX_LESS_MAP_FILE = "testFix.mapfile";
    int PAIR_SIZE = 20;
    MapFile.Writer writer = null;
    try {
        FileSystem fs = FileSystem.getLocal(conf);
        Path dir = new Path(TEST_DIR, INDEX_LESS_MAP_FILE);
        writer = createWriter(INDEX_LESS_MAP_FILE, IntWritable.class, Text.class);
        for (int i = 0; i < PAIR_SIZE; i++)
            writer.append(new IntWritable(0), new Text("value"));
        writer.close();
        File indexFile = new File(".", "." + INDEX_LESS_MAP_FILE + "/index");
        boolean isDeleted = false;
        if (indexFile.exists())
            isDeleted = indexFile.delete();
        if (isDeleted)
            assertTrue("testFix error !!!",
                MapFile.fix(fs, dir, IntWritable.class, Text.class, true, conf) == PAIR_SIZE);
    } catch (Exception ex) {
        fail("testFix error !!!");
    } finally {
        IOUtils.cleanupWithLogger(LOG, writer);
    }
}
/**
 * Extracts the SQL statement Hibernate generates for the given JPA {@link Query}.
 *
 * <p>For SQM-based Hibernate 6 queries it resolves (or builds) the select query
 * plan, reaching into the cached interpretation via reflection to pull the final
 * JDBC SQL; otherwise it falls back to the raw query string.
 *
 * @param query the JPA query (JPQL/Criteria)
 * @return the generated SQL, or the query string when no plan could be inspected
 */
public static String from(Query query) {
    if (query instanceof SqmInterpretationsKey.InterpretationsKeySource &&
        query instanceof QueryImplementor &&
        query instanceof QuerySqmImpl) {
        QueryInterpretationCache.Key cacheKey = SqmInterpretationsKey.createInterpretationsKey((SqmInterpretationsKey.InterpretationsKeySource) query);
        QuerySqmImpl querySqm = (QuerySqmImpl) query;
        Supplier buildSelectQueryPlan = () -> ReflectionUtils.invokeMethod(querySqm, "buildSelectQueryPlan");
        // Prefer the cached plan when a cache key exists; otherwise build directly.
        SelectQueryPlan plan = cacheKey != null ?
            ((QueryImplementor) query).getSession().getFactory().getQueryEngine()
                .getInterpretationCache()
                .resolveSelectQueryPlan(cacheKey, buildSelectQueryPlan) :
            (SelectQueryPlan) buildSelectQueryPlan.get();
        if (plan instanceof ConcreteSqmSelectQueryPlan) {
            ConcreteSqmSelectQueryPlan selectQueryPlan = (ConcreteSqmSelectQueryPlan) plan;
            Object cacheableSqmInterpretation = ReflectionUtils.getFieldValueOrNull(selectQueryPlan, "cacheableSqmInterpretation");
            // The interpretation is created lazily; force-build it when absent.
            if (cacheableSqmInterpretation == null) {
                DomainQueryExecutionContext domainQueryExecutionContext = DomainQueryExecutionContext.class.cast(querySqm);
                cacheableSqmInterpretation = ReflectionUtils.invokeStaticMethod(
                    ReflectionUtils.getMethod(
                        ConcreteSqmSelectQueryPlan.class,
                        "buildCacheableSqmInterpretation",
                        SqmSelectStatement.class,
                        DomainParameterXref.class,
                        DomainQueryExecutionContext.class
                    ),
                    ReflectionUtils.getFieldValueOrNull(selectQueryPlan, "sqm"),
                    ReflectionUtils.getFieldValueOrNull(selectQueryPlan, "domainParameterXref"),
                    domainQueryExecutionContext
                );
            }
            if (cacheableSqmInterpretation != null) {
                JdbcSelect jdbcSelect = ReflectionUtils.getFieldValueOrNull(cacheableSqmInterpretation, "jdbcSelect");
                if (jdbcSelect != null) {
                    return jdbcSelect.getSql();
                }
            }
        }
    }
    return ReflectionUtils.invokeMethod(query, "getQueryString");
}
// SQLExtractor must return the SQL Hibernate would generate for a JPQL query
// without executing the query itself.
@Test
public void testJPQL() {
    doInJPA(entityManager -> {
        Query jpql = entityManager
            .createQuery(
                "select " +
                "   YEAR(p.createdOn) as year, " +
                "   count(p) as postCount " +
                "from " +
                "   Post p " +
                "group by " +
                "   YEAR(p.createdOn)", Tuple.class);
        String sql = SQLExtractor.from(jpql);
        assertNotNull(sql);
        LOGGER.info(
            "The JPQL query: [\n{}\n]\ngenerates the following SQL query: [\n{}\n]",
            jpql.unwrap(org.hibernate.query.Query.class).getQueryString(),
            sql
        );
    });
}
/**
 * Translates a list of BigQuery {@link TableFieldSchema} definitions into an
 * Avro record schema named {@code schemaName}, placing it in {@code namespace}
 * (or the default Beam BigQuery namespace when none is given).
 */
static Schema toGenericAvroSchema(
    String schemaName, List<TableFieldSchema> fieldSchemas, @Nullable String namespace) {
  // Child records are nested one namespace level deeper than their parent.
  String childNamespace =
      (namespace == null) ? null : String.format("%s.%s", namespace, schemaName);
  List<Field> fields = new ArrayList<>();
  for (TableFieldSchema fieldSchema : fieldSchemas) {
    fields.add(convertField(fieldSchema, childNamespace));
  }
  String recordNamespace =
      (namespace == null) ? "org.apache.beam.sdk.io.gcp.bigquery" : namespace;
  return Schema.createRecord(
      schemaName, "Translated Avro Schema for " + schemaName, recordNamespace, false, fields);
}
// Exhaustively checks the BigQuery -> Avro type mapping: every nullable scalar
// maps to a NULL union, logical types (timestamp-micros, decimal, geography)
// are attached, nested RECORDs become nested Avro records, and REPEATED fields
// become arrays.
@Test
public void testConvertBigQuerySchemaToAvroSchema() {
    TableSchema tableSchema = new TableSchema();
    tableSchema.setFields(fields);
    Schema avroSchema = BigQueryAvroUtils.toGenericAvroSchema("testSchema", tableSchema.getFields());

    // Scalar fields: required -> plain type, nullable -> union with NULL.
    assertThat(avroSchema.getField("number").schema(), equalTo(Schema.create(Type.LONG)));
    assertThat(
        avroSchema.getField("species").schema(),
        equalTo(Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.STRING))));
    assertThat(
        avroSchema.getField("quality").schema(),
        equalTo(Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.DOUBLE))));
    assertThat(
        avroSchema.getField("quantity").schema(),
        equalTo(Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.LONG))));
    // Logical types: timestamps and decimals carry their Avro logical type.
    assertThat(
        avroSchema.getField("birthday").schema(),
        equalTo(
            Schema.createUnion(
                Schema.create(Type.NULL),
                LogicalTypes.timestampMicros().addToSchema(Schema.create(Type.LONG)))));
    assertThat(
        avroSchema.getField("birthdayMoney").schema(),
        equalTo(
            Schema.createUnion(
                Schema.create(Type.NULL),
                LogicalTypes.decimal(38, 9).addToSchema(Schema.create(Type.BYTES)))));
    assertThat(
        avroSchema.getField("lotteryWinnings").schema(),
        equalTo(
            Schema.createUnion(
                Schema.create(Type.NULL),
                LogicalTypes.decimal(77, 38).addToSchema(Schema.create(Type.BYTES)))));
    assertThat(
        avroSchema.getField("flighted").schema(),
        equalTo(Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.BOOLEAN))));
    assertThat(
        avroSchema.getField("sound").schema(),
        equalTo(Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.BYTES))));
    // Civil date/time kinds map to nullable strings.
    assertThat(
        avroSchema.getField("anniversaryDate").schema(),
        equalTo(Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.STRING))));
    assertThat(
        avroSchema.getField("anniversaryDatetime").schema(),
        equalTo(Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.STRING))));
    assertThat(
        avroSchema.getField("anniversaryTime").schema(),
        equalTo(Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.STRING))));
    // GEOGRAPHY maps to a string carrying the geography_wkt logical type prop.
    Schema geoSchema = Schema.create(Type.STRING);
    geoSchema.addProp(LogicalType.LOGICAL_TYPE_PROP, "geography_wkt");
    assertThat(
        avroSchema.getField("geoPositions").schema(),
        equalTo(Schema.createUnion(Schema.create(Type.NULL), geoSchema)));
    // Nested RECORD -> nested Avro record under the default namespace.
    assertThat(
        avroSchema.getField("scion").schema(),
        equalTo(
            Schema.createUnion(
                Schema.create(Type.NULL),
                Schema.createRecord(
                    "scion",
                    "Translated Avro Schema for scion",
                    "org.apache.beam.sdk.io.gcp.bigquery",
                    false,
                    ImmutableList.of(
                        new Field(
                            "species",
                            Schema.createUnion(
                                Schema.create(Type.NULL), Schema.create(Type.STRING)),
                            null,
                            (Object) null))))));
    // REPEATED RECORD -> Avro array of records.
    assertThat(
        avroSchema.getField("associates").schema(),
        equalTo(
            Schema.createArray(
                Schema.createRecord(
                    "associates",
                    "Translated Avro Schema for associates",
                    "org.apache.beam.sdk.io.gcp.bigquery",
                    false,
                    ImmutableList.of(
                        new Field(
                            "species",
                            Schema.createUnion(
                                Schema.create(Type.NULL), Schema.create(Type.STRING)),
                            null,
                            (Object) null))))));
}
/**
 * Reads a MySQL TIME2 binlog value: a 3-byte big-endian integer whose low bits
 * pack hour/minute/second, followed by an optional fractional-seconds part
 * whose width is driven by the column meta (fsp).
 */
@Override
public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) {
    int time = payload.getByteBuf().readUnsignedMedium();
    if (0x800000 == time) {
        // Exactly 0x800000 (sign bit only, no time bits) encodes the special
        // zero value "00:00:00".
        return MySQLTimeValueUtils.ZERO_OF_TIME;
    }
    MySQLFractionalSeconds fractionalSeconds = new MySQLFractionalSeconds(columnDef.getColumnMeta(), payload);
    // Bit layout of the low 22 bits: hour = bits 12..21 (10 bits),
    // minute = bits 6..11 (6 bits), second = bits 0..5 (6 bits).
    int hour = (time >> 12) % (1 << 10);
    int minute = (time >> 6) % (1 << 6);
    int second = time % (1 << 6);
    return LocalTime.of(hour, minute, second).withNano(fractionalSeconds.getNanos());
}
// TIME2 value with fsp meta 3: the packed bits decode to 16:08:04 and the
// 2-byte fractional part (9000) scales to 900 ms = 900_000_000 ns.
@Test
void assertReadWithFraction3() {
    columnDef.setColumnMeta(3);
    when(payload.getByteBuf()).thenReturn(byteBuf);
    when(byteBuf.readUnsignedShort()).thenReturn(9000);
    when(byteBuf.readUnsignedMedium()).thenReturn(0x800000 | (0x10 << 12) | (0x08 << 6) | 0x04);
    assertThat(new MySQLTime2BinlogProtocolValue().read(columnDef, payload), is(LocalTime.of(16, 8, 4).withNano(900000000)));
}
/**
 * Destroys the source context via the user-supplied destroy function, but only
 * if initialization ran; otherwise there is nothing to clean up.
 */
@Override
public void close() {
    if (initialized) {
        destroyFn.accept(ctx);
    }
}
// A streaming source must never close the engine-owned buffer itself; doing so
// in fillBufferFn must make processor verification fail with a clear message.
@Test
public void when_streamingSourceClosesBuffer_then_fails() {
    StreamSource<Integer> source = SourceBuilder
            .stream("src", ctx -> null)
            .<Integer>fillBufferFn((src, buffer) -> buffer.close())
            .distributed(1) // we use this to avoid forceTotalParallelismOne
            .build();

    assertThatThrownBy(() -> TestSupport
            .verifyProcessor(((StreamSourceTransform<Integer>) source).metaSupplierFn.apply(noEventTime()))
            .expectOutput(Collections.emptyList()))
            .hasMessageContaining("streaming source must not close the buffer");
}
/**
 * Raises a push event for the given issue when it is a taint vulnerability or a
 * security hotspot; returns empty for any other issue type, or when the issue
 * has no component.
 */
public Optional<PushEventDto> raiseEventOnIssue(String projectUuid, DefaultIssue currentIssue) {
    var componentUuid = currentIssue.componentUuid();
    if (componentUuid == null) {
        return Optional.empty();
    }
    var issueComponent = treeRootHolder.getComponentByUuid(componentUuid);
    if (isTaintVulnerability(currentIssue)) {
        return raiseTaintVulnerabilityEvent(projectUuid, issueComponent, currentIssue);
    }
    return isSecurityHotspot(currentIssue)
        ? raiseSecurityHotspotEvent(projectUuid, issueComponent, currentIssue)
        : Optional.empty();
}
// A brand-new SECURITY_HOTSPOT issue must produce a SecurityHotspotRaised push
// event carrying the language, payload and project uuid.
@Test
public void raiseEventOnIssue_whenNewHotspot_shouldCreateRaisedEvent() {
    DefaultIssue defaultIssue = createDefaultIssue()
        .setType(RuleType.SECURITY_HOTSPOT)
        .setStatus(Issue.STATUS_TO_REVIEW)
        .setNew(true)
        .setRuleDescriptionContextKey(randomAlphabetic(6));

    assertThat(underTest.raiseEventOnIssue("some-project-uuid", defaultIssue))
        .isNotEmpty()
        .hasValueSatisfying(pushEventDto -> {
            assertThat(pushEventDto.getName()).isEqualTo(SecurityHotspotRaised.EVENT_NAME);
            verifyHotspotRaisedEventPayload(pushEventDto.getPayload(), defaultIssue);
            assertThat(pushEventDto.getLanguage()).isEqualTo("java");
            assertThat(pushEventDto.getProjectUuid()).isEqualTo("some-project-uuid");
        });
}
/**
 * Builds the effective workflow parameter map for a run by merging, in order of
 * increasing precedence: system defaults and workflow-definition params (fresh
 * runs only), the previous instance's params (restarts only, minus reserved
 * system params), the request's run params, and user-provided restart params.
 * Placeholder params are cleaned up before conversion.
 *
 * @param instance workflow instance being (re)started
 * @param request run request carrying policy, initiator and override params
 * @return merged parameter map ready for evaluation
 */
public Map<String, Parameter> generateMergedWorkflowParams(
    WorkflowInstance instance, RunRequest request) {
  Workflow workflow = instance.getRuntimeWorkflow();
  Map<String, ParamDefinition> allParamDefs = new LinkedHashMap<>();
  Map<String, ParamDefinition> defaultWorkflowParams =
      defaultParamManager.getDefaultWorkflowParams();

  // merge workflow params for start
  if (request.isFreshRun()) {
    // merge default workflow params
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        defaultWorkflowParams,
        ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.SYSTEM_DEFAULT, request));
    // merge defined workflow params
    if (workflow.getParams() != null) {
      ParamsMergeHelper.mergeParams(
          allParamDefs,
          workflow.getParams(),
          ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.DEFINITION, request));
    }
  }

  // merge workflow params from previous instance for restart
  if (!request.isFreshRun() && instance.getParams() != null) {
    Map<String, ParamDefinition> previousParamDefs =
        instance.getParams().entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toDefinition()));
    // remove reserved params, which should be injected again by the system.
    for (String paramName : Constants.RESERVED_PARAM_NAMES) {
      previousParamDefs.remove(paramName);
    }
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        previousParamDefs,
        ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.SYSTEM, false));
  }

  // merge run params
  if (request.getRunParams() != null) {
    ParamSource source = getParamSource(request.getInitiator(), request.isFreshRun());
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        request.getRunParams(),
        ParamsMergeHelper.MergeContext.workflowCreate(source, request));
  }

  // merge user provided restart run params
  getUserRestartParam(request)
      .ifPresent(
          userRestartParams -> {
            ParamSource source = getParamSource(request.getInitiator(), request.isFreshRun());
            ParamsMergeHelper.mergeParams(
                allParamDefs,
                userRestartParams,
                ParamsMergeHelper.MergeContext.workflowCreate(source, request));
          });

  // cleanup any placeholder params and convert to params
  return ParamsMergeHelper.convertToParameters(ParamsMergeHelper.cleanupParams(allParamDefs));
}
// A run param defined as a SEL expression must be evaluated against the
// instance wrapper, here resolving RUN_POLICY to the request's fresh-run policy
// alongside the standard default/owner params.
@Test
public void testCalculateUserDefinedSelParams() {
    paramsManager = new ParamsManager(defaultsManager);
    Step step = Mockito.mock(Step.class);
    when(step.getType()).thenReturn(StepType.TITUS);
    RunProperties runProperties = new RunProperties();
    runProperties.setOwner(User.builder().name("demo").build());
    workflowInstance.setRunProperties(runProperties);
    RunRequest request =
        RunRequest.builder()
            .initiator(new ManualInitiator())
            .currentPolicy(RunPolicy.START_FRESH_NEW_RUN)
            .runParams(
                Collections.singletonMap(
                    "workflow_run_policy",
                    StringParamDefinition.builder()
                        .expression("return params.getFromInstance('RUN_POLICY');")
                        .build()))
            .build();
    Map<String, Parameter> workflowParams =
        paramsManager.generateMergedWorkflowParams(workflowInstance, request);
    paramExtensionRepo.reset(
        Collections.emptyMap(),
        Collections.emptyMap(),
        InstanceWrapper.from(workflowInstance, request));
    paramEvaluator.evaluateWorkflowParameters(workflowParams, workflow.getId());
    paramExtensionRepo.clear();
    Assert.assertEquals("UTC", workflowParams.get("WORKFLOW_CRON_TIMEZONE").asString());
    Assert.assertEquals("UTC", workflowParams.get("DSL_DEFAULT_TZ").asString());
    Assert.assertEquals("demo", workflowParams.get("owner").asString());
    // get run policy
    Assert.assertEquals(
        "START_FRESH_NEW_RUN", workflowParams.get("workflow_run_policy").asString());
}
/**
 * Returns the services whose monitoring name matches {@code service} (case-insensitive),
 * or every known service when the special all-services name is given.
 */
public List<VespaService> getMonitoringServices(String service) {
    // The reserved name short-circuits to the full list.
    if (service.equalsIgnoreCase(ALL_SERVICES)) return services;

    List<VespaService> matches = new ArrayList<>();
    for (VespaService candidate : services) {
        log.log(FINE, () -> "getMonitoringServices. service=" + service + ", checking against " + candidate +
                ", which has monitoring name " + candidate.getMonitoringName());
        if (candidate.getMonitoringName().id.equalsIgnoreCase(service)) {
            matches.add(candidate);
        }
    }
    return matches;
}
/** The ALL_SERVICES sentinel must return every registered service. */
@Test
public void all_services_can_be_retrieved_by_using_special_name() {
    List<VespaService> dummyServices = List.of(
            new DummyService(0, "dummy/id/0"));
    VespaServices services = new VespaServices(dummyServices);
    assertEquals(1, services.getMonitoringServices(ALL_SERVICES).size());
}
// Publishes a long-valued metric by delegating to the shared numeric path with the LONG type tag.
@Override
public void publishLong(MetricDescriptor descriptor, long value) {
    publishNumber(descriptor, value, LONG);
}
/** A metric tagged with a module must be registered under a module-qualified JMX domain. */
@Test
public void when_singleMetricWithModule() throws Exception {
    MetricDescriptor descriptor = newDescriptor()
            .withMetric("c")
            .withTag("tag1", "a")
            .withTag("module", MODULE_NAME);
    jmxPublisher.publishLong(descriptor, 1L);
    // The module name becomes part of the domain; remaining tags are rendered as tagN properties.
    helper.assertMBeans(singletonList(
            metric(domainPrefix + "." + MODULE_NAME + ":type=Metrics,instance=inst1,tag0=\"tag1=a\"",
                    singletonList(longValue("c", 1L)))));
}
/**
 * Builds a {@link PlantUmlDiagram} from raw diagram lines: strips comments,
 * parses the declared components, then resolves the dependencies between them.
 */
private PlantUmlDiagram createDiagram(List<String> rawDiagramLines) {
    List<String> withoutComments = filterOutComments(rawDiagramLines);
    PlantUmlComponents parsedComponents =
            new PlantUmlComponents(parseComponents(withoutComments));
    List<ParsedDependency> parsedDependencies =
            parseDependencies(parsedComponents, withoutComments);
    return new PlantUmlDiagram.Builder(parsedComponents)
            .withDependencies(parsedDependencies)
            .build();
}
/** Text after ':' on a dependency arrow must be ignored, even when it contains a comment tick. */
@Test
public void does_not_include_dependency_descriptions() {
    PlantUmlDiagram diagram = createDiagram(TestDiagram.in(temporaryFolder)
            .component("component").withStereoTypes("..somePackage..")
            .component("otherComponent").withStereoTypes("..somePackage2..")
            .rawLine("[component] --> [otherComponent] : this part should be ignored, no matter the comment tick ' ")
            .write());
    PlantUmlComponent component = getComponentWithName("component", diagram);
    PlantUmlComponent targetOfDescribedDependency = getOnlyElement(component.getDependencies());
    assertThat(targetOfDescribedDependency.getComponentName())
            .as("target of dependency with description")
            .isEqualTo(new ComponentName("otherComponent"));
}
/**
 * Builds the {@link NotificationPluginInfo} for the given descriptor, attaching the
 * plugin's settings configuration and view fetched through the extension.
 */
@Override
public NotificationPluginInfo pluginInfoFor(GoPluginDescriptor descriptor) {
    return new NotificationPluginInfo(descriptor, getPluginSettingsAndView(descriptor, extension));
}
/** The builder must populate descriptor, extension name and pluggable settings on the plugin info. */
@Test
public void shouldBuildPluginInfo() {
    GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id("plugin1").build();

    NotificationPluginInfo pluginInfo = new NotificationPluginInfoBuilder(extension).pluginInfoFor(descriptor);

    // Expected settings mirror what the stubbed extension reports.
    List<PluginConfiguration> pluginConfigurations = List.of(
            new PluginConfiguration("username", new Metadata(true, false)),
            new PluginConfiguration("password", new Metadata(true, true))
    );
    PluginView pluginView = new PluginView("some-html");

    assertThat(pluginInfo.getDescriptor(), is(descriptor));
    assertThat(pluginInfo.getExtensionName(), is("notification"));
    assertThat(pluginInfo.getPluginSettings(), is(new PluggableInstanceSettings(pluginConfigurations, pluginView)));
}
// Convenience overload: navigates through the given property steps, creating
// missing intermediate steps by default (createIfNull = true).
public static ScenarioBeanWrapper<?> navigateToObject(Object rootObject, List<String> steps) {
    return navigateToObject(rootObject, steps, true);
}
/** With step creation disabled, navigating through an uninstantiated step must fail with ScenarioException. */
@Test
public void navigateToObjectNoStepCreationTest() {
    Dispute dispute = new Dispute();

    List<String> pathToProperty = List.of("creator", "firstName");

    String message = "Impossible to reach field firstName because a step is not instantiated";
    assertThatThrownBy(() -> ScenarioBeanUtil.navigateToObject(dispute, pathToProperty, false))
            .isInstanceOf(ScenarioException.class)
            .hasMessage(message);
}
/**
 * Reports whether this URI carries an explicit scheme component
 * (e.g. {@code file://...}); scheme-less paths return {@code false}.
 */
public boolean hasScheme() {
  return null != mUri.getScheme();
}
/** Scheme detection: absolute/authority-only paths have none; file:-prefixed URIs do. */
@Test
public void hasScheme() {
    assertFalse(new AlluxioURI("/").hasScheme());
    assertTrue(new AlluxioURI("file:/").hasScheme());
    assertTrue(new AlluxioURI("file://localhost/").hasScheme());
    assertTrue(new AlluxioURI("file://localhost:8080/").hasScheme());
    // Authority without a scheme should not count as having one.
    assertFalse(new AlluxioURI("//localhost:8080/").hasScheme());
}
/**
 * Serializes this TLV into a fixed-size byte array consisting of the
 * declared length plus padding, with the padding bytes written in place.
 */
@Override
public byte[] serialize() {
    final byte[] out = new byte[LENGTH + PADDING_LENGTH];
    ByteBuffer.wrap(out).put(PADDING);
    return out;
}
/** The terminator TLV must serialize to the expected fixed byte pattern. */
@Test
public void serialize() {
    assertArrayEquals(data, TERMINATOR_TLV.serialize());
}
// REST endpoint that creates a digital signature; delegates entirely to the RDW service.
// Exposed on both the legacy and current signature URLs; requires the caller IP via X-FORWARDED-FOR.
@Operation(summary = "Create the signature")
@PostMapping(value = { Constants.URL_OLD_RDW_SIGNATURE, Constants.URL_RDW_SIGNATURE },
        consumes = "application/json", produces = "application/json")
public SignatureResponse getDigitalSignatureRestService(@Valid @RequestBody SignatureRequest request,
        @RequestHeader(value = "X-FORWARDED-FOR") String clientIp) {
    return rdwService.getDigitalSignatureRestService(request, clientIp);
}
/** The controller must pass the request through to the service and return its response unchanged. */
@Test
public void getDigitalSignatureRestServiceTest() {
    SignatureResponse expectedResponse = new SignatureResponse();
    when(rdwServiceMock.getDigitalSignatureRestService(any(SignatureRequest.class), anyString())).thenReturn(expectedResponse);

    SignatureResponse actualResponse = rdwController.getDigitalSignatureRestService(new SignatureRequest(), "");

    assertEquals(expectedResponse, actualResponse);
}
/**
 * Runs SqlLine with the given args after normalizing connection arguments.
 * Output and error streams, when provided, are wrapped in non-auto-flushing
 * UTF-8 {@link PrintStream}s so output encoding is platform-independent.
 *
 * @param args command-line arguments for SqlLine
 * @param inputStream optional stdin replacement
 * @param outputStream optional stdout replacement
 * @param errorStream optional stderr replacement
 * @return the SqlLine completion status
 * @throws IOException if stream setup or execution fails
 */
@SuppressWarnings("argument")
static Status runSqlLine(
    String[] args,
    @Nullable InputStream inputStream,
    @Nullable OutputStream outputStream,
    @Nullable OutputStream errorStream)
    throws IOException {
  String[] modifiedArgs = checkConnectionArgs(args);
  SqlLine sqlLine = new SqlLine();

  if (outputStream != null) {
    sqlLine.setOutputStream(utf8PrintStream(outputStream));
  }

  if (errorStream != null) {
    sqlLine.setErrorStream(utf8PrintStream(errorStream));
  }

  return sqlLine.begin(modifiedArgs, inputStream, true);
}

/** Wraps {@code stream} in a non-auto-flushing UTF-8 {@link PrintStream}. */
private static PrintStream utf8PrintStream(OutputStream stream) throws IOException {
  return new PrintStream(stream, false, StandardCharsets.UTF_8.name());
}
/** A simple SELECT of literals must produce the literal values in the captured output. */
@Test
public void testSqlLine_select() throws Exception {
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    String[] args = buildArgs("SELECT 3, 'hello', DATE '2018-05-28';");
    BeamSqlLine.runSqlLine(args, null, byteArrayOutputStream, null);

    List<List<String>> lines = toLines(byteArrayOutputStream);
    assertThat(
        Arrays.asList(Arrays.asList("3", "hello", "2018-05-28")),
        everyItem(is(oneOf(lines.toArray()))));
}
/**
 * Writes a single byte, spilling the internal buffer to the wrapped
 * stream first when it is already full.
 */
@Override
public void write(int b) throws IOException {
    if (bufferIdx >= buffer.length) {
        flushInternalBuffer();
    }
    buffer[bufferIdx++] = (byte) b;
}
/** A failing secondary stream must be tolerated by the duplicating stream for offset array writes. */
@Test
void testFailingSecondaryWriteArrayOffsFail() throws Exception {
    DuplicatingCheckpointOutputStream duplicatingStream =
            createDuplicatingStreamWithFailingSecondary();
    testFailingSecondaryStream(
            duplicatingStream, () -> duplicatingStream.write(new byte[512], 20, 130));
}
// Fluent setter for the serialization protocol name; returns the concrete builder for chaining.
public B serialization(String serialization) {
    this.serialization = serialization;
    return getThis();
}
/** The builder must carry the serialization name through to the built config. */
@Test
void serialization() {
    ServiceBuilder builder = new ServiceBuilder();
    builder.serialization("serialization");
    Assertions.assertEquals("serialization", builder.build().getSerialization());
}
// Annotates the tracer's current span (a no-op customizer when no span is active).
@Override
public SpanCustomizer annotate(String value) {
    return tracer.currentSpanCustomizer().annotate(value);
}
/** Annotating with no active span must be a safe no-op (no exception thrown). */
@Test
void annotate_when_no_current_span() {
    spanCustomizer.annotate("foo");
}
// Translates one SARIF result into a SonarQube external issue: fixed type/clean-code
// attribute, severities mapped from the rule levels, then locations attached.
NewExternalIssue mapResult(String driverName, @Nullable Result.Level ruleSeverity,
    @Nullable Result.Level ruleSeverityForNewTaxonomy, Result result) {
    NewExternalIssue newExternalIssue = sensorContext.newExternalIssue();
    newExternalIssue.type(DEFAULT_TYPE);
    newExternalIssue.engineId(driverName);
    newExternalIssue.severity(toSonarQubeSeverity(ruleSeverity));
    // A missing ruleId is a hard error: the issue cannot be attributed to a rule.
    newExternalIssue.ruleId(requireNonNull(result.getRuleId(), "No ruleId found for issue thrown by driver " + driverName));
    newExternalIssue.cleanCodeAttribute(DEFAULT_CLEAN_CODE_ATTRIBUTE);
    newExternalIssue.addImpact(DEFAULT_SOFTWARE_QUALITY, toSonarQubeImpactSeverity(ruleSeverityForNewTaxonomy));
    mapLocations(result, newExternalIssue);
    return newExternalIssue;
}
/** Stack-frame locations without message text must produce flow locations with no message set. */
@Test
public void mapResult_whenStacksLocationExists_createsCodeFlowFileLocation_no_text_messages() {
    Location stackFrameLocationWithoutMessage = new Location().withMessage(new Message().withId("1"));
    result.withStacks(Set.of(new Stack().withFrames(List.of(new StackFrame().withLocation(stackFrameLocationWithoutMessage)))));
    var newIssueLocationCall2 = mock(NewIssueLocation.class);
    when(mockExternalIssue.newLocation()).thenReturn(newExternalIssueLocation, newIssueLocationCall2);

    NewExternalIssue newExternalIssue = resultMapper.mapResult(DRIVER_NAME, WARNING, WARNING, result);

    verify(newExternalIssue).addFlow(List.of(newIssueLocationCall2));
    // No message text present in the SARIF frame, so message(...) must never be called.
    verify(newIssueLocationCall2, never()).message(anyString());
}
// Returns the configured web context path (e.g. "/my_path").
public String getContextPath() {
    return contextPath;
}
/** The context path property must be surfaced through the accessor. */
@Test
public void context_path_is_configured() {
    settings.setProperty(CONTEXT_PROPERTY, "/my_path");

    assertThat(underTest().getContextPath()).isEqualTo("/my_path");
}
/**
 * Returns true if the given directory id falls in the reserved range:
 * zero most-significant bits and least-significant bits below 100.
 */
public static boolean reserved(Uuid uuid) {
    final long msb = uuid.getMostSignificantBits();
    final long lsb = uuid.getLeastSignificantBits();
    return msb == 0 && lsb < 100;
}
/** The LOST sentinel directory id must be recognized as reserved. */
@Test
void testLostIsReserved() {
    assertTrue(DirectoryId.reserved(DirectoryId.LOST));
}
// Creates a file at the chroot-relative path by rewriting it to the full
// underlying path and delegating to the parent implementation.
@Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
    final boolean overwrite, final int bufferSize, final short replication,
    final long blockSize, final Progressable progress) throws IOException {
  return super.create(fullPath(f), permission, overwrite, bufferSize,
      replication, blockSize, progress);
}
/** Constructing a ChRootedFileSystem from a URI with an empty path must not fail. */
@Test
public void testURIEmptyPath() throws IOException {
    Configuration conf = new Configuration();
    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);

    URI chrootUri = URI.create("mockfs://foo");
    new ChRootedFileSystem(chrootUri, conf);
}
// Builds a dotted metric name rooted at the class's fully-qualified name.
public static MetricName name(Class<?> klass, String... names) {
    return name(klass.getName(), names);
}
/** Empty name segments must be dropped rather than producing consecutive dots. */
@Test
public void elidesEmptyStringsFromNames() throws Exception {
    assertThat(name("one", "", "three"))
            .isEqualTo(MetricName.build("one.three"));
}
// Converts an underscore-delimited name to camelCase using the default underscore separator.
public static String toCamelCase(CharSequence name) {
    return toCamelCase(name, CharUtil.UNDERLINE);
}
/** Underscore names convert to camelCase; already-camelCase names pass through unchanged. */
@Test
public void toCamelCaseTest() {
    Dict.create()
            .set("Table_Test_Of_day","tableTestOfDay")
            .set("TableTestOfDay","TableTestOfDay")
            .set("abc_1d","abc1d")
            .forEach((key, value) -> assertEquals(value, NamingCase.toCamelCase(key)));
}
/**
 * Rejection policy that makes room by running the oldest queued task on the
 * caller's thread: poll the head of the queue, try to enqueue the rejected
 * task, run the evicted head, and resubmit if enqueueing failed.
 */
@Override
public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
    // Nothing to do once the executor is shutting down.
    if (executor.isShutdown()) {
        return;
    }
    final BlockingQueue<Runnable> queue = executor.getQueue();
    final Runnable evicted = queue.poll();
    final boolean enqueued = queue.offer(r);
    if (evicted != null) {
        // Run the displaced oldest task directly on the calling thread.
        evicted.run();
    }
    if (!enqueued) {
        // The queue filled up again between poll and offer; resubmit.
        executor.execute(r);
    }
}
/** When the queue rejects the offer, the evicted task runs inline and the new task is resubmitted. */
@Test
public void testRejectedExecutionWhenATaskIsInTheQueueTheExecutorShouldExecute() {
    when(threadPoolExecutor.isShutdown()).thenReturn(false);
    when(threadPoolExecutor.getQueue()).thenReturn(workQueue);
    when(workQueue.poll()).thenReturn(runnableInTheQueue);
    when(workQueue.offer(runnable)).thenReturn(false);

    runsOldestTaskPolicy.rejectedExecution(runnable, threadPoolExecutor);

    verify(runnableInTheQueue).run();
    verify(threadPoolExecutor).execute(runnable);
    // The rejected task must not be run inline; only resubmitted.
    verify(runnable, never()).run();
}
// Intentional no-op: this provider has no post-completion work to do.
@Override
public void notifyAfterCompleted() {
}
/** The no-op completion hook must be callable without side effects or exceptions. */
@Test
public void notifyAfterCompleted() {
    provider.notifyAfterCompleted();
}
// Thread-safe delegate: serializes access to the wrapped group's partitions.
@Override
synchronized Set<TopicPartition> partitions() {
    return wrapped.partitions();
}
/** partitions() must delegate exactly once to the wrapped group and return its result. */
@Test
public void testPartitions() {
    final Set<TopicPartition> partitions = Collections.singleton(new TopicPartition("topic", 0));
    when(wrapped.partitions()).thenReturn(partitions);

    final Set<TopicPartition> result = synchronizedPartitionGroup.partitions();

    assertEquals(partitions, result);
    verify(wrapped, times(1)).partitions();
}
/**
 * Renders the template against the given context object and returns the result.
 * The buffer is pre-sized to the template length as a cheap lower bound.
 */
public String render(Object o) {
    final StringBuilder out = new StringBuilder(template.length());
    render(o, out);
    return out.toString();
}
/** Section tags over a list property must render once per element. */
@Test
public void canSubstituteValuesFromLists() {
    Template template = new Template("Hello {{#getValues}}{{toString}},{{/getValues}} ");
    assertEquals("Hello 1,2,3, ", template.render(foo));
}
// Convenience overload: allocates an id batch using the current wall clock and this member's node id.
IdBatchAndWaitTime newIdBaseLocal(int batchSize) {
    return newIdBaseLocal(Clock.currentTimeMillis(), getNodeId(), batchSize);
}
/** Requesting one second of ids beyond the allowed-future window must impose a 1 ms wait. */
@Test
public void when_maximumAllowedFuturePlusOne_then_1msWaitTime() {
    int batchSize = (int) (IDS_PER_SECOND * DEFAULT_ALLOWED_FUTURE_MILLIS) + IDS_PER_SECOND;
    IdBatchAndWaitTime result = gen.newIdBaseLocal(1516028439000L, 1234, batchSize);
    assertEquals(1, result.waitTimeMillis);
}
@Override public Result invoke(Invocation invocation) throws RpcException { // When broadcasting, it should be called remotely. if (isBroadcast()) { if (logger.isDebugEnabled()) { logger.debug("Performing broadcast call for method: " + RpcUtils.getMethodName(invocation) + " of service: " + getUrl().getServiceKey()); } return invoker.invoke(invocation); } if (peerFlag) { if (logger.isDebugEnabled()) { logger.debug("Performing point-to-point call for method: " + RpcUtils.getMethodName(invocation) + " of service: " + getUrl().getServiceKey()); } // If it's a point-to-point direct connection, invoke the original Invoker return invoker.invoke(invocation); } if (isInjvmExported()) { if (logger.isDebugEnabled()) { logger.debug("Performing local JVM call for method: " + RpcUtils.getMethodName(invocation) + " of service: " + getUrl().getServiceKey()); } // If it's exported to the local JVM, invoke the corresponding Invoker return injvmInvoker.invoke(invocation); } if (logger.isDebugEnabled()) { logger.debug("Performing remote call for method: " + RpcUtils.getMethodName(invocation) + " of service: " + getUrl().getServiceKey()); } // Otherwise, delegate the invocation to the original Invoker return invoker.invoke(invocation); }
/** With no scope configured, a remote URL must route the call through the remote invoker. */
@Test
void testScopeNull_RemoteInvoke() {
    URL url = URL.valueOf("remote://1.2.3.4/" + DemoService.class.getName());
    url = url.addParameter(REFER_KEY, URL.encode(PATH_KEY + "=" + DemoService.class.getName()));
    url = url.setScopeModel(ApplicationModel.defaultModel().getDefaultModule());
    Invoker<DemoService> cluster = getClusterInvoker(url);
    invokers.add(cluster);

    // Configured with mock
    RpcInvocation invocation = new RpcInvocation();
    invocation.setMethodName("doSomething1");
    Result ret = cluster.invoke(invocation);
    Assertions.assertEquals("doSomething1", ret.getValue());
}
/**
 * Returns a copy of this bootstrap metadata containing only the last record
 * that carries a metadata.version feature level.
 *
 * @throws RuntimeException if no such record exists
 */
public BootstrapMetadata copyWithOnlyVersion() {
    ApiMessageAndVersion versionRecord = records.stream()
        .filter(record -> recordToMetadataVersion(record.message()).isPresent())
        .reduce((first, second) -> second) // keep the last matching record
        .orElseThrow(() -> new RuntimeException("No FeatureLevelRecord for " +
            MetadataVersion.FEATURE_NAME + " was found in " + source));
    return new BootstrapMetadata(Collections.singletonList(versionRecord), metadataVersion, source);
}
/** copyWithOnlyVersion must keep exactly the version record from the sample record list. */
@Test
public void testCopyWithOnlyVersion() {
    assertEquals(new BootstrapMetadata(SAMPLE_RECORDS1.subList(2, 3), IBP_3_3_IV2, "baz"),
            BootstrapMetadata.fromRecords(SAMPLE_RECORDS1, "baz").copyWithOnlyVersion());
}
// Removes the key-group registration for the named KvState, dropping the whole
// lookup entry once no key groups remain. Throws IllegalArgumentException on a
// vertex-id mismatch (duplicate name) or an unknown registration name.
public void notifyKvStateUnregistered(
        JobVertexID jobVertexId, KeyGroupRange keyGroupRange, String registrationName) {

    KvStateLocation location = lookupTable.get(registrationName);

    if (location != null) {
        // Duplicate name if vertex IDs don't match
        if (!location.getJobVertexId().equals(jobVertexId)) {
            throw new IllegalArgumentException(
                    "Another operator ("
                            + location.getJobVertexId()
                            + ") registered the KvState "
                            + "under '"
                            + registrationName
                            + "'.");
        }

        location.unregisterKvState(keyGroupRange);

        // Drop the entry entirely once the last key group is gone.
        if (location.getNumRegisteredKeyGroups() == 0) {
            lookupTable.remove(registrationName);
        }
    } else {
        throw new IllegalArgumentException(
                "Unknown registration name '"
                        + registrationName
                        + "'. "
                        + "Probably registration/unregistration race.");
    }
}
/** Unregistering a name that was never registered must fail with IllegalArgumentException. */
@Test
void testUnregisterBeforeRegister() throws Exception {
    ExecutionJobVertex vertex = createJobVertex(4);
    Map<JobVertexID, ExecutionJobVertex> vertexMap = createVertexMap(vertex);
    KvStateLocationRegistry registry = new KvStateLocationRegistry(new JobID(), vertexMap);

    assertThatThrownBy(
                    () ->
                            registry.notifyKvStateUnregistered(
                                    vertex.getJobVertexId(), new KeyGroupRange(0, 0), "any-name"))
            .withFailMessage(
                    "Did not throw expected Exception, because of missing registration")
            .isInstanceOf(IllegalArgumentException.class);
}
// Fetches windowed rows for a key from the timestamped window store, restricted
// to windows whose start AND end fall inside the given bounds. The fetch range
// is derived from both bound sets; rows outside either bound are filtered out.
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStartBounds,
    final Range<Instant> windowEndBounds,
    final Optional<Position> position
) {
  try {
    final ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>> store = stateStore
        .store(QueryableStoreTypes.timestampedWindowStore(), partition);

    // Effective fetch range combines the start bounds with end bounds shifted by the window size.
    final Instant lower = calculateLowerBound(windowStartBounds, windowEndBounds);
    final Instant upper = calculateUpperBound(windowStartBounds, windowEndBounds);

    try (WindowStoreIterator<ValueAndTimestamp<GenericRow>> it =
        cacheBypassFetcher.fetch(store, key, lower, upper)) {

      final Builder<WindowedRow> builder = ImmutableList.builder();

      while (it.hasNext()) {
        final KeyValue<Long, ValueAndTimestamp<GenericRow>> next = it.next();
        final Instant windowStart = Instant.ofEpochMilli(next.key);
        // Skip windows whose start is outside the requested start bounds.
        if (!windowStartBounds.contains(windowStart)) {
          continue;
        }

        final Instant windowEnd = windowStart.plus(windowSize);
        // Skip windows whose derived end is outside the requested end bounds.
        if (!windowEndBounds.contains(windowEnd)) {
          continue;
        }

        final TimeWindow window =
            new TimeWindow(windowStart.toEpochMilli(), windowEnd.toEpochMilli());

        final WindowedRow row = WindowedRow.of(
            stateStore.schema(),
            new Windowed<>(key, window),
            next.value.value(),
            next.value.timestamp()
        );

        builder.add(row);
      }

      return KsMaterializedQueryResult.rowIterator(builder.build().iterator());
    }
  } catch (final Exception e) {
    // Wrap all store/fetch failures in a single materialization-level exception.
    throw new MaterializationException("Failed to get value from materialized table", e);
  }
}
/** Windows whose starts land exactly on both endpoints of a closed start range must be returned. */
@Test
public void shouldReturnValuesForClosedStartBounds() {
    // Given:
    final Range<Instant> start = Range.closed(
        NOW,
        NOW.plusSeconds(10)
    );
    when(fetchIterator.hasNext())
        .thenReturn(true)
        .thenReturn(true)
        .thenReturn(false);
    when(fetchIterator.next())
        .thenReturn(new KeyValue<>(start.lowerEndpoint().toEpochMilli(), VALUE_1))
        .thenReturn(new KeyValue<>(start.upperEndpoint().toEpochMilli(), VALUE_2))
        .thenThrow(new AssertionError());
    when(cacheBypassFetcher.fetch(eq(tableStore), any(), any(), any())).thenReturn(fetchIterator);

    // When:
    final Iterator<WindowedRow> rowIterator =
        table.get(A_KEY, PARTITION, start, Range.all()).rowIterator;

    // Then:
    assertThat(rowIterator.hasNext(), is(true));
    final List<WindowedRow> resultList = Lists.newArrayList(rowIterator);
    assertThat(resultList, contains(
        WindowedRow.of(
            SCHEMA,
            windowedKey(start.lowerEndpoint()),
            VALUE_1.value(),
            VALUE_1.timestamp()
        ),
        WindowedRow.of(
            SCHEMA,
            windowedKey(start.upperEndpoint()),
            VALUE_2.value(),
            VALUE_2.timestamp()
        )
    ));
}
/**
 * Extracts the port from a web interface URL such as {@code "host:8080"}.
 * Returns {@code -1} when the URL is null, contains no colon, or the
 * trailing segment is not a valid integer.
 */
public static Integer parseRestBindPortFromWebInterfaceUrl(String webInterfaceUrl) {
    if (webInterfaceUrl == null) {
        return -1;
    }
    final int lastColon = webInterfaceUrl.lastIndexOf(':');
    if (lastColon < 0) {
        return -1;
    }
    try {
        return Integer.parseInt(webInterfaceUrl.substring(lastColon + 1));
    } catch (NumberFormatException e) {
        return -1;
    }
}
/** A well-formed "host:port" URL must yield its numeric port. */
@Test
void testParseRestBindPortFromWebInterfaceUrlWithValidPort() {
    assertThat(ResourceManagerUtils.parseRestBindPortFromWebInterfaceUrl("localhost:8080"))
            .isEqualTo(8080);
}
// Returns an iterable view of this double array that compares elements within the given tolerance.
public DoubleArrayAsIterable usingTolerance(double tolerance) {
    return new DoubleArrayAsIterable(tolerance(tolerance), iterableSubject());
}
/** containsNoneOf must succeed when no candidate is within tolerance of any array element. */
@Test
public void usingTolerance_containsNoneOf_primitiveDoubleArray_success() {
    assertThat(array(1.1, TOLERABLE_2POINT2, 3.3))
        .usingTolerance(DEFAULT_TOLERANCE)
        .containsNoneOf(array(99.99, 999.999));
}
// Decodes a raw JSON beat payload into a Message; returns null (and logs the
// failure) when the payload is not valid JSON, deferring the rest to parseEvent.
@Nullable
@Override
public Message decode(@Nonnull RawMessage rawMessage) {
    final byte[] payload = rawMessage.getPayload();
    final Map<String, Object> event;
    try {
        event = objectMapper.readValue(payload, TypeReferences.MAP_STRING_OBJECT);
    } catch (IOException e) {
        // Unparseable payload: log and drop rather than propagate.
        LOG.error("Couldn't decode raw message {}", rawMessage);
        return null;
    }

    return parseEvent(event);
}
/** Generic beat events with docker metadata must flatten docker fields under beat_docker_* keys. */
@Test
public void decodeMessagesHandlesGenericBeatWithDocker() throws Exception {
    final Message message = codec.decode(messageFromJson("generic-with-docker.json"));
    assertThat(message).isNotNull();
    assertThat(message.getMessage()).isEqualTo("null");
    assertThat(message.getSource()).isEqualTo("unknown");
    assertThat(message.getTimestamp()).isEqualTo(new DateTime(2016, 4, 1, 0, 0, DateTimeZone.UTC));
    assertThat(message.getField("facility")).isEqualTo("genericbeat");
    assertThat(message.getField("beat_foo")).isEqualTo("bar");
    assertThat(message.getField("beat_docker_id")).isEqualTo("123");
    assertThat(message.getField("beat_docker_name")).isEqualTo("container-1");
    // Docker labels are flattened with their key appended to the prefix.
    assertThat(message.getField("beat_docker_labels_docker-kubernetes-pod")).isEqualTo("hello");
}