Dataset columns: focal_method — string (length 13 to 60.9k); test_case — string (length 25 to 109k).
// Loads the value for a key by executing the configured single-key SQL query.
// Returns null when no row matches; throws IllegalStateException on multiple rows.
// When the result has exactly two columns and singleColumnAsValue is set, the
// second column is returned directly; otherwise the row becomes a GenericRecord.
@Override public V load(K key) { awaitSuccessfulInit(); try (SqlResult queryResult = sqlService.execute(queries.load(), key)) { Iterator<SqlRow> it = queryResult.iterator(); V value = null; if (it.hasNext()) { SqlRow sqlRow = it.next(); if (it.hasNext()) { throw new IllegalStateException("multiple matching rows for a key " + key); } // If there is a single column as the value, return that column as the value if (queryResult.getRowMetadata().getColumnCount() == 2 && genericMapStoreProperties.singleColumnAsValue) { value = sqlRow.getObject(1); } else { //noinspection unchecked value = (V) toGenericRecord(sqlRow, genericMapStoreProperties); } } return value; } }
// Verifies that load() returns a compact GenericRecord whose schema type name
// equals the TYPE_NAME_PROPERTY configured on the map loader.
@Test public void givenTypeName_whenLoad_thenReturnGenericRecordWithCorrectTypeName() { ObjectSpec spec = objectProvider.createObject(mapName, false); objectProvider.insertItems(spec, 1); Properties properties = new Properties(); properties.setProperty(DATA_CONNECTION_REF_PROPERTY, TEST_DATABASE_REF); properties.setProperty(TYPE_NAME_PROPERTY, "my.Person"); mapLoader = createMapLoader(properties, hz); CompactGenericRecord genericRecord = (CompactGenericRecord) mapLoader.load(0); assertThat(genericRecord.getSchema().getTypeName()).isEqualTo("my.Person"); }
// Static factory: validates the consumer configuration, then wraps it in a factory.
public static ConsumerCreationStrategyFactory create(PulsarConsumer pulsarConsumer) { validate(pulsarConsumer); return new ConsumerCreationStrategyFactory(pulsarConsumer); }
// Creating a factory from a mocked consumer must not throw and must yield a non-null factory.
@Test public void givenPulsarConsumerAndRetryPolicyNonNullwhenICreateFactoryverifyIllegalArgumentExceptionIsNotThrown() { ConsumerCreationStrategyFactory factory = ConsumerCreationStrategyFactory.create(mock(PulsarConsumer.class)); assertNotNull(factory); }
// Supplies the packet consumer stored in responseHandler.
@Override public Consumer<Packet> get() { return responseHandler; }
// With two response threads the supplier must return an AsyncMultithreadedResponseHandler.
@Test public void get_whenMultipleResponseThreads() { supplier = newSupplier(2); assertInstanceOf(AsyncMultithreadedResponseHandler.class, supplier.get()); }
// Encodes a non-null Instruction to JSON via the codec helper.
@Override public ObjectNode encode(Instruction instruction, CodecContext context) { checkNotNull(instruction, "Instruction cannot be null"); return new EncodeInstructionCodecHelper(instruction, context).encode(); }
// Encodes an IPv6 source-modification instruction and checks the JSON matches it.
@Test public void modIPv6SrcInstructionTest() { final Ip6Address ip = Ip6Address.valueOf("1111::2222"); final L3ModificationInstruction.ModIPInstruction instruction = (L3ModificationInstruction.ModIPInstruction) Instructions.modL3IPv6Src(ip); final ObjectNode instructionJson = instructionCodec.encode(instruction, context); assertThat(instructionJson, matchesInstruction(instruction)); }
// Advances to the next input file and loads its content. Two modes:
//  - file-in-fields: the filename comes from a field of the incoming row
//    (row metadata is initialized on the first row; the filename field index is cached);
//  - static list: files come from data.files, indexed by data.filenr.
// Returns false when there is nothing left to process or on error; missing paths
// (when isIgnoreMissingPath) and empty files (when isIgnoreEmptyFile) are skipped
// by recursing into openNextFile(). On success the optional extra fields
// (short name, path, hidden, extension, last-modified, URI, root URI) are captured
// and the file content is read.
boolean openNextFile() { try { if ( meta.getFileInFields() ) { data.readrow = getRow(); // Grab another row ... if ( data.readrow == null ) { // finished processing! if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.FinishedProcessing" ) ); } return false; } if ( first ) { first = false; data.inputRowMeta = getInputRowMeta(); data.outputRowMeta = data.inputRowMeta.clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); // Create convert meta-data objects that will contain Date & Number formatters // All non binary content is handled as a String. It would be converted to the target type after the processing. data.convertRowMeta = data.outputRowMeta.cloneToType( ValueMetaInterface.TYPE_STRING ); if ( meta.getFileInFields() ) { // Check is filename field is provided if ( Utils.isEmpty( meta.getDynamicFilenameField() ) ) { logError( BaseMessages.getString( PKG, "LoadFileInput.Log.NoField" ) ); throw new KettleException( BaseMessages.getString( PKG, "LoadFileInput.Log.NoField" ) ); } // cache the position of the field if ( data.indexOfFilenameField < 0 ) { data.indexOfFilenameField = data.inputRowMeta.indexOfValue( meta.getDynamicFilenameField() ); if ( data.indexOfFilenameField < 0 ) { // The field is unreachable ! logError( BaseMessages.getString( PKG, "LoadFileInput.Log.ErrorFindingField" ) + "[" + meta.getDynamicFilenameField() + "]" ); throw new KettleException( BaseMessages.getString( PKG, "LoadFileInput.Exception.CouldnotFindField", meta.getDynamicFilenameField() ) ); } } // Get the number of previous fields data.totalpreviousfields = data.inputRowMeta.size(); } } // end if first // get field value String Fieldvalue = data.inputRowMeta.getString( data.readrow, data.indexOfFilenameField ); if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.Stream", meta.getDynamicFilenameField(), Fieldvalue ) ); } try { // Source is a file. 
data.file = KettleVFS.getFileObject( Fieldvalue ); } catch ( Exception e ) { throw new KettleException( e ); } } else { if ( data.filenr >= data.files.nrOfFiles() ) { // finished processing! if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.FinishedProcessing" ) ); } return false; } // Is this the last file? data.last_file = ( data.filenr == data.files.nrOfFiles() - 1 ); data.file = data.files.getFile( data.filenr ); } // Check if file exists if ( meta.isIgnoreMissingPath() && !data.file.exists() ) { logBasic( BaseMessages.getString( PKG, "LoadFileInput.Error.FileNotExists", "" + data.file.getName() ) ); return openNextFile(); } // Check if file is empty data.fileSize = data.file.getContent().getSize(); // Move file pointer ahead! data.filenr++; if ( meta.isIgnoreEmptyFile() && data.fileSize == 0 ) { logError( BaseMessages.getString( PKG, "LoadFileInput.Error.FileSizeZero", "" + data.file.getName() ) ); return openNextFile(); } else { if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.OpeningFile", data.file.toString() ) ); } data.filename = KettleVFS.getFilename( data.file ); // Add additional fields? 
if ( meta.getShortFileNameField() != null && meta.getShortFileNameField().length() > 0 ) { data.shortFilename = data.file.getName().getBaseName(); } if ( meta.getPathField() != null && meta.getPathField().length() > 0 ) { data.path = KettleVFS.getFilename( data.file.getParent() ); } if ( meta.isHiddenField() != null && meta.isHiddenField().length() > 0 ) { data.hidden = data.file.isHidden(); } if ( meta.getExtensionField() != null && meta.getExtensionField().length() > 0 ) { data.extension = data.file.getName().getExtension(); } if ( meta.getLastModificationDateField() != null && meta.getLastModificationDateField().length() > 0 ) { data.lastModificationDateTime = new Date( data.file.getContent().getLastModifiedTime() ); } if ( meta.getUriField() != null && meta.getUriField().length() > 0 ) { data.uriName = Const.optionallyDecodeUriString( data.file.getName().getURI() ); } if ( meta.getRootUriField() != null && meta.getRootUriField().length() > 0 ) { data.rootUriName = data.file.getName().getRootURI(); } // get File content getFileContent(); addFileToResultFilesName( data.file ); if ( isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "LoadFileInput.Log.FileOpened", data.file.toString() ) ); } } } catch ( Exception e ) { logError( BaseMessages.getString( PKG, "LoadFileInput.Log.UnableToOpenFile", "" + data.filenr, data.file .toString(), e.toString() ) ); stopAll(); setErrors( 1 ); return false; } return true; }
// With no input files configured, openNextFile() must return false.
@Test public void testOpenNextFile_noFiles() { assertFalse( stepMetaInterface.isIgnoreEmptyFile() ); // ensure default value assertFalse( stepLoadFileInput.openNextFile() ); }
// Executes an INSERT VALUES statement: resolves the data source, validates the
// columns, builds a producer record and sends it. Kafka authorization failures
// are rewrapped as KsqlException with a more descriptive root cause; everything
// else is wrapped with a generic insert-failed message.
@SuppressWarnings("unused") // Part of required API. public void execute( final ConfiguredStatement<InsertValues> statement, final SessionProperties sessionProperties, final KsqlExecutionContext executionContext, final ServiceContext serviceContext ) { final InsertValues insertValues = statement.getStatement(); final MetaStore metaStore = executionContext.getMetaStore(); final KsqlConfig config = statement.getSessionConfig().getConfig(true); final DataSource dataSource = getDataSource(config, metaStore, insertValues); validateInsert(insertValues.getColumns(), dataSource); final ProducerRecord<byte[], byte[]> record = buildRecord(statement, metaStore, dataSource, serviceContext); try { producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps()); } catch (final TopicAuthorizationException e) { // TopicAuthorizationException does not give much detailed information about why it failed, // except which topics are denied. Here we just add the ACL to make the error message // consistent with other authorization error messages. final Exception rootCause = new KsqlTopicAuthorizationException( AclOperation.WRITE, e.unauthorizedTopics() ); throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause); } catch (final ClusterAuthorizationException e) { // ClusterAuthorizationException is thrown when using idempotent producers // and either a topic write permission or a cluster-level idempotent write // permission (only applicable for broker versions no later than 2.8) is // missing. In this case, we include additional context to help the user // distinguish this type of failure from other permissions exceptions // such as the ones thrown above when TopicAuthorizationException is caught. 
throw new KsqlException( createInsertFailedExceptionMessage(insertValues), createClusterAuthorizationExceptionRootCause(dataSource) ); } catch (final KafkaException e) { if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) { // The error message thrown when an idempotent producer is missing permissions // is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException, // as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException. // ksqlDB handles these two the same way, accordingly. // See https://issues.apache.org/jira/browse/KAFKA-14138 for more. throw new KsqlException( createInsertFailedExceptionMessage(insertValues), createClusterAuthorizationExceptionRootCause(dataSource) ); } else { throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e); } } catch (final Exception e) { throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e); } }
// A ClusterAuthorizationException wrapped in a KafkaException must surface as a
// KsqlException whose cause carries the detailed idempotent-write guidance.
@Test public void shouldThrowOnClusterAuthorizationExceptionWrappedInKafkaException() { // Given: final ConfiguredStatement<InsertValues> statement = givenInsertValues( allAndPseudoColumnNames(SCHEMA), ImmutableList.of( new LongLiteral(1L), new StringLiteral("str"), new StringLiteral("str"), new LongLiteral(2L)) ); doThrow(new KafkaException( "Cannot execute transactional method because we are in an error state", new ClusterAuthorizationException("Cluster authorization failed")) ).when(producer).send(any()); // When: final Exception e = assertThrows( KsqlException.class, () -> executor.execute(statement, mock(SessionProperties.class), engine, serviceContext) ); // Then: assertThat(e.getCause(), (hasMessage( containsString("Authorization denied to Write on topic(s): [" + TOPIC_NAME + "]. " + "Caused by: The producer is not authorized to do idempotent sends. " + "Check that you have write permissions to the specified topic, " + "and disable idempotent sends by setting 'enable.idempotent=false' " + " if necessary.")))); }
// Builds a Curator ServiceDiscovery for ZookeeperInstance payloads rooted at basePath.
public static ServiceDiscovery<ZookeeperInstance> buildServiceDiscovery( CuratorFramework curatorFramework, String basePath) { return ServiceDiscoveryBuilder.builder(ZookeeperInstance.class) .client(curatorFramework) .basePath(basePath) .build(); }
// Builds a discovery from a real CuratorFramework, asserts non-null, then closes both.
@Test void testBuildServiceDiscovery() throws Exception { CuratorFramework curatorFramework = CuratorFrameworkUtils.buildCuratorFramework(registryUrl, null); ServiceDiscovery<ZookeeperInstance> discovery = CuratorFrameworkUtils.buildServiceDiscovery(curatorFramework, ROOT_PATH.getParameterValue(registryUrl)); Assertions.assertNotNull(discovery); discovery.close(); curatorFramework.getZookeeperClient().close(); }
// Decrypts a base64-encoded ciphertext with the given KMS key and returns the
// UTF-8 plaintext. The KMS client is closed by try-with-resources.
public synchronized String decrypt(String keyRingId, String keyId, String ciphertext) { CryptoKeyName keyName = CryptoKeyName.of(projectId, region, keyRingId, keyId); LOG.info("Decrypting given ciphertext using key {}.", keyName.toString()); try (KeyManagementServiceClient client = clientFactory.getKMSClient()) { DecryptResponse response = client.decrypt( keyName, ByteString.copyFrom( Base64.getDecoder().decode(ciphertext.getBytes(StandardCharsets.UTF_8)))); LOG.info("Successfully decrypted ciphertext."); return response.getPlaintext().toStringUtf8(); } }
// decrypt() must base64-decode the input with UTF-8 before calling the KMS client.
@Test public void testDecryptShouldEncodeEncryptedMessageWithUTF8() { String ciphertext = "ciphertext"; DecryptResponse decryptedResponse = DecryptResponse.newBuilder().setPlaintext(ByteString.copyFromUtf8(ciphertext)).build(); String base64EncodedCiphertext = new String( Base64.getEncoder().encode(ciphertext.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8); when(kmsClientFactory.getKMSClient()).thenReturn(serviceClient); when(serviceClient.decrypt(any(CryptoKeyName.class), any(ByteString.class))) .thenReturn(decryptedResponse); String actual = testManager.decrypt(KEYRING_ID, KEY_ID, base64EncodedCiphertext); verify(serviceClient) .decrypt(any(CryptoKeyName.class), eq(ByteString.copyFromUtf8(ciphertext))); assertThat(actual).isEqualTo(ciphertext); }
/**
 * Assembles every watch key for a single namespace. Delegates to the
 * multi-namespace overload with a one-element namespace set, then flattens
 * the returned multimap entries for that namespace into a plain set.
 */
public Set<String> assembleAllWatchKeys(String appId, String clusterName, String namespace, String dataCenter) {
  Multimap<String, String> keysByNamespace =
      assembleAllWatchKeys(appId, clusterName, Sets.newHashSet(namespace), dataCenter);
  return Sets.newHashSet(keysByNamespace.get(namespace));
}
// The NO_APPID placeholder must produce no watch keys at all.
@Test public void testAssembleWatchKeysForNoAppIdPlaceHolder() throws Exception { Multimap<String, String> watchKeysMap = watchKeysUtil.assembleAllWatchKeys(ConfigConsts.NO_APPID_PLACEHOLDER, someCluster, Sets.newHashSet(someNamespace, anotherNamespace), someDC); assertTrue(watchKeysMap.isEmpty()); }
// One duty cycle of the sender agent: update clocks, drain queued commands,
// attempt a send, and poll control transports when nothing was sent, the duty
// cycle ratio is reached, the poll deadline passed, or a short send occurred.
// Periodically triggers endpoint re-resolution when the interval elapses.
public int doWork() { final long nowNs = nanoClock.nanoTime(); cachedNanoClock.update(nowNs); dutyCycleTracker.measureAndUpdate(nowNs); final int workCount = commandQueue.drain(CommandProxy.RUN_TASK, Configuration.COMMAND_DRAIN_LIMIT); final long shortSendsBefore = shortSends.get(); final int bytesSent = doSend(nowNs); int bytesReceived = 0; if (0 == bytesSent || ++dutyCycleCounter >= dutyCycleRatio || (controlPollDeadlineNs - nowNs < 0) || shortSendsBefore < shortSends.get()) { bytesReceived = controlTransportPoller.pollTransports(); dutyCycleCounter = 0; controlPollDeadlineNs = nowNs + statusMessageReadTimeoutNs; } if (reResolutionCheckIntervalNs > 0 && (reResolutionDeadlineNs - nowNs) < 0) { reResolutionDeadlineNs = nowNs + reResolutionCheckIntervalNs; controlTransportPoller.checkForReResolutions(nowNs, conductorProxy); } return workCount + bytesSent + bytesReceived; }
// A second SETUP frame is sent only after the publication setup timeout elapses
// without a status message; the frame's header fields are then verified.
@Test void shouldSendSetupFrameOnChannelWhenTimeoutWithoutStatusMessage() { sender.doWork(); assertThat(receivedFrames.size(), is(1)); nanoClock.advance(Configuration.PUBLICATION_SETUP_TIMEOUT_NS - 1); sender.doWork(); assertThat(receivedFrames.size(), is(1)); nanoClock.advance(10); sender.doWork(); assertThat(receivedFrames.size(), is(2)); setupHeader.wrap(new UnsafeBuffer(receivedFrames.remove())); assertThat(setupHeader.frameLength(), is(SetupFlyweight.HEADER_LENGTH)); assertThat(setupHeader.initialTermId(), is(INITIAL_TERM_ID)); assertThat(setupHeader.activeTermId(), is(INITIAL_TERM_ID)); assertThat(setupHeader.streamId(), is(STREAM_ID)); assertThat(setupHeader.sessionId(), is(SESSION_ID)); assertThat(setupHeader.headerType(), is(HeaderFlyweight.HDR_TYPE_SETUP)); assertThat(setupHeader.flags(), is((short)0)); assertThat(setupHeader.version(), is((short)HeaderFlyweight.CURRENT_VERSION)); }
/**
 * Prints or writes the schema of the given Parquet file(s).
 *
 * <p>Bug fix: the original precondition required {@code targets.size() == 1},
 * which made the multi-target branch below unreachable; the intended check is
 * simply that at least one target was supplied.
 *
 * @return 0 on success
 * @throws IOException if reading a source or writing the output fails
 */
@Override
@SuppressWarnings("unchecked")
public int run() throws IOException {
  Preconditions.checkArgument(targets != null && !targets.isEmpty(),
      "Parquet file is required.");
  if (targets.size() > 1) {
    // Multiple schemas cannot go to one output file; print each to the console.
    Preconditions.checkArgument(outputPath == null,
        "Cannot output multiple schemas to file %s", outputPath);
    for (String source : targets) {
      console.info("{}: {}", source, getSchema(source));
    }
  } else {
    String source = targets.get(0);
    if (outputPath != null) {
      // Honor the overwrite flag when writing the schema to a file.
      try (OutputStream out = overwrite ? create(outputPath) : createWithNoOverwrite(outputPath)) {
        out.write(getSchema(source).getBytes(StandardCharsets.UTF_8));
      }
    } else {
      console.info(getSchema(source));
    }
  }
  return 0;
}
// With overwrite=true, run() must replace an existing (empty) output file with the schema.
@Test public void testSchemaCommandOverwriteExistentFile() throws IOException { File inputFile = parquetFile(); File outputFile = new File(getTempFolder(), getClass().getSimpleName() + ".avsc"); FileUtils.touch(outputFile); Assert.assertEquals(0, outputFile.length()); SchemaCommand command = new SchemaCommand(createLogger()); command.targets = Arrays.asList(inputFile.getAbsolutePath()); command.outputPath = outputFile.getAbsolutePath(); command.overwrite = true; command.setConf(new Configuration()); Assert.assertEquals(0, command.run()); Assert.assertTrue(0 < outputFile.length()); }
// Returns the branch; throws IllegalStateException when it has not been initialized.
@Override public Branch getBranch() { checkState(branch.isInitialized(), BRANCH_NOT_SET); return branch.getProperty(); }
// getBranch() on an uninitialized holder must throw ISE with the expected message.
@Test public void getBranch_throws_ISE_when_holder_is_not_initialized() { assertThatThrownBy(() -> new AnalysisMetadataHolderImpl(editionProvider).getBranch()) .isInstanceOf(IllegalStateException.class) .hasMessage("Branch has not been set"); }
// CRC64 over a 16-byte buffer holding index (offset 0) and term (offset 8),
// serialized via Bits.putLong.
@Override public long checksum() { byte[] bs = new byte[16]; Bits.putLong(bs, 0, this.index); Bits.putLong(bs, 8, this.term); return CrcUtil.crc64(bs); }
// checksum() must be non-zero for (1, 2) and deterministic across calls.
@Test public void testChecksum() { LogId logId = new LogId(); logId.setIndex(1); logId.setTerm(2); long c = logId.checksum(); assertTrue(c != 0); assertEquals(c, logId.checksum()); }
// Instantiates the configured principal builder (defaulting to
// DefaultKafkaPrincipalBuilder when unset), rejects types that do not implement
// KafkaPrincipalBuilder, and configures Configurable implementations.
public static KafkaPrincipalBuilder createPrincipalBuilder(Map<String, ?> configs, KerberosShortNamer kerberosShortNamer, SslPrincipalMapper sslPrincipalMapper) { Class<?> principalBuilderClass = (Class<?>) configs.get(BrokerSecurityConfigs.PRINCIPAL_BUILDER_CLASS_CONFIG); final KafkaPrincipalBuilder builder; if (principalBuilderClass == null || principalBuilderClass == DefaultKafkaPrincipalBuilder.class) { builder = new DefaultKafkaPrincipalBuilder(kerberosShortNamer, sslPrincipalMapper); } else if (KafkaPrincipalBuilder.class.isAssignableFrom(principalBuilderClass)) { builder = (KafkaPrincipalBuilder) Utils.newInstance(principalBuilderClass); } else { throw new InvalidConfigurationException("Type " + principalBuilderClass.getName() + " is not " + "an instance of " + KafkaPrincipalBuilder.class.getName()); } if (builder instanceof Configurable) ((Configurable) builder).configure(configs); return builder; }
// A Configurable builder class must be instantiated and have configure() invoked.
@Test public void testCreateConfigurableKafkaPrincipalBuilder() { Map<String, Object> configs = new HashMap<>(); configs.put(BrokerSecurityConfigs.PRINCIPAL_BUILDER_CLASS_CONFIG, ConfigurableKafkaPrincipalBuilder.class); KafkaPrincipalBuilder builder = ChannelBuilders.createPrincipalBuilder(configs, null, null); assertInstanceOf(ConfigurableKafkaPrincipalBuilder.class, builder); assertTrue(((ConfigurableKafkaPrincipalBuilder) builder).configured); }
// Convenience overload: delegates to the full merge() with null for the omitted argument.
public WorkProcessor<Page> merge(List<Type> keyTypes, List<Type> allTypes, List<WorkProcessor<Page>> pages, DriverYieldSignal driverYieldSignal) { return merge(keyTypes, null, allTypes, pages, driverYieldSignal); }
// Merging an empty page with a single-row page must yield exactly that row.
@Test public void testBinaryMergeIteratorOverPageWith() { Page emptyPage = new Page(0, BIGINT.createFixedSizeBlockBuilder(0).build()); Page page = rowPagesBuilder(BIGINT).row(42).build().get(0); WorkProcessor<Page> mergedPage = new MergeHashSort(newSimpleAggregatedMemoryContext()).merge( ImmutableList.of(BIGINT), ImmutableList.of(BIGINT), ImmutableList.of(ImmutableList.of(emptyPage, page).iterator()).stream() .map(WorkProcessor::fromIterator) .collect(toImmutableList()), new DriverYieldSignal()); assertTrue(mergedPage.process()); Page actualPage = mergedPage.getResult(); assertEquals(actualPage.getPositionCount(), 1); assertEquals(actualPage.getChannelCount(), 1); assertEquals(actualPage.getBlock(0).getLong(0), 42); assertFinishes(mergedPage); }
// Builds a DescribedGroup snapshot of this consumer group as of committedOffset,
// falling back to defaultAssignor when no preferred server assignor is set, and
// describing each member with its target assignment.
public ConsumerGroupDescribeResponseData.DescribedGroup asDescribedGroup( long committedOffset, String defaultAssignor, TopicsImage topicsImage ) { ConsumerGroupDescribeResponseData.DescribedGroup describedGroup = new ConsumerGroupDescribeResponseData.DescribedGroup() .setGroupId(groupId) .setAssignorName(preferredServerAssignor(committedOffset).orElse(defaultAssignor)) .setGroupEpoch(groupEpoch.get(committedOffset)) .setGroupState(state.get(committedOffset).toString()) .setAssignmentEpoch(targetAssignmentEpoch.get(committedOffset)); members.entrySet(committedOffset).forEach( entry -> describedGroup.members().add( entry.getValue().asConsumerGroupDescribeMember( targetAssignment.get(entry.getValue().memberId(), committedOffset), topicsImage ) ) ); return describedGroup; }
// Builds a two-member group across snapshots and compares the full DescribedGroup.
@Test public void testAsDescribedGroup() { SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext()); ConsumerGroup group = new ConsumerGroup(snapshotRegistry, "group-id-1", mock(GroupCoordinatorMetricsShard.class)); snapshotRegistry.idempotentCreateSnapshot(0); assertEquals(ConsumerGroup.ConsumerGroupState.EMPTY.toString(), group.stateAsString(0)); group.updateMember(new ConsumerGroupMember.Builder("member1") .setSubscribedTopicNames(Collections.singletonList("foo")) .setServerAssignorName("assignorName") .build()); group.updateMember(new ConsumerGroupMember.Builder("member2") .build()); snapshotRegistry.idempotentCreateSnapshot(1); ConsumerGroupDescribeResponseData.DescribedGroup expected = new ConsumerGroupDescribeResponseData.DescribedGroup() .setGroupId("group-id-1") .setGroupState(ConsumerGroup.ConsumerGroupState.STABLE.toString()) .setGroupEpoch(0) .setAssignmentEpoch(0) .setAssignorName("assignorName") .setMembers(Arrays.asList( new ConsumerGroupDescribeResponseData.Member() .setMemberId("member1") .setSubscribedTopicNames(Collections.singletonList("foo")) .setSubscribedTopicRegex(""), new ConsumerGroupDescribeResponseData.Member().setMemberId("member2") .setSubscribedTopicRegex("") )); ConsumerGroupDescribeResponseData.DescribedGroup actual = group.asDescribedGroup(1, "", new MetadataImageBuilder().build().topics()); assertEquals(expected, actual); }
// Decodes MySQL negotiation packets: the first packet is the handshake; later
// packets are responses, and this decoder removes itself from the pipeline once
// an OK packet is seen (negotiation complete).
@Override protected void decode(final ChannelHandlerContext ctx, final ByteBuf in, final List<Object> out) { MySQLPacketPayload payload = new MySQLPacketPayload(in, ctx.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get()); if (handshakeReceived) { MySQLPacket responsePacket = decodeResponsePacket(payload); if (responsePacket instanceof MySQLOKPacket) { ctx.channel().pipeline().remove(this); } out.add(responsePacket); } else { out.add(decodeHandshakePacket(payload)); handshakeReceived = true; } }
// After the handshake (forced via reflection), an auth-switch request payload
// must decode to a MySQLAuthSwitchRequestPacket.
@Test void assertDecodeAuthSwitchRequestPacket() throws ReflectiveOperationException { MySQLNegotiatePackageDecoder negotiatePackageDecoder = new MySQLNegotiatePackageDecoder(); Plugins.getMemberAccessor().set(MySQLNegotiatePackageDecoder.class.getDeclaredField("handshakeReceived"), negotiatePackageDecoder, true); List<Object> actual = new LinkedList<>(); negotiatePackageDecoder.decode(channelHandlerContext, authSwitchRequestPacket(), actual); assertPacketByType(actual, MySQLAuthSwitchRequestPacket.class); }
// Requests download of a topology's blobs. First downloads the base blobs, then
// (per topology id) either starts a blob-download task or, if one is already
// pending, just adds references to it; failures adding references are rethrown
// as RuntimeException and the port/assignment handle is completed.
public CompletableFuture<Void> requestDownloadTopologyBlobs(final LocalAssignment assignment, final int port, final BlobChangingCallback cb) throws IOException { final PortAndAssignment pna = new TimePortAndAssignment(new PortAndAssignmentImpl(port, assignment), blobLocalizationDuration); final String topologyId = pna.getToplogyId(); LOG.info("requestDownloadTopologyBlobs for {}", pna); CompletableFuture<Void> baseBlobs = requestDownloadBaseTopologyBlobs(pna, cb); return baseBlobs.thenComposeAsync((v) -> blobPending.compute(topologyId, (tid, old) -> { CompletableFuture<Void> ret = old; if (ret == null) { ret = CompletableFuture.supplyAsync(new DownloadBlobs(pna, cb), taskExecService); } else { try { addReferencesToBlobs(pna, cb); } catch (Exception e) { LOG.error("Failed adding references to blobs for " + pna, e); throw new RuntimeException(e); } finally { pna.complete(); } } LOG.debug("Reserved blobs {} {}", topologyId, ret); return ret; })); }
// End-to-end localizer test: mocks ConfigUtils and the base-blob download, then
// verifies the user cache dir is created, blobs are fetched, and the local-name
// symlink appears under the topology directory. Restores ConfigUtils and closes
// the localizer in finally.
@Test public void testRequestDownloadTopologyBlobs() throws Exception { ConfigUtils mockedConfigUtils = mock(ConfigUtils.class); ConfigUtils previousConfigUtils = ConfigUtils.setInstance(mockedConfigUtils); AsyncLocalizer victim = null; try (TmpPath stormLocal = new TmpPath(); TmpPath localizerRoot = new TmpPath()) { Map<String, Object> conf = new HashMap<>(); conf.put(Config.STORM_LOCAL_DIR, stormLocal.getPath()); AdvancedFSOps ops = AdvancedFSOps.make(conf); StormMetricsRegistry metricsRegistry = new StormMetricsRegistry(); victim = spy(new AsyncLocalizer(conf, ops, localizerRoot.getPath(), metricsRegistry)); final String topoId = "TOPO-12345"; final String user = "user"; final Path userDir = Paths.get(stormLocal.getPath(), user); final Path topologyDirRoot = Paths.get(stormLocal.getPath(), topoId); final String simpleLocalName = "simple.txt"; final String simpleKey = "simple"; Map<String, Map<String, Object>> topoBlobMap = new HashMap<>(); Map<String, Object> simple = new HashMap<>(); simple.put("localname", simpleLocalName); simple.put("uncompress", false); topoBlobMap.put(simpleKey, simple); final int port = 8080; Map<String, Object> topoConf = new HashMap<>(conf); topoConf.put(Config.TOPOLOGY_BLOBSTORE_MAP, topoBlobMap); topoConf.put(Config.TOPOLOGY_NAME, "TOPO"); List<LocalizedResource> localizedList = new ArrayList<>(); LocalizedResource simpleLocal = new LocalizedResource(simpleKey, localizerRoot.getFile().toPath(), false, ops, conf, user, metricsRegistry); localizedList.add(simpleLocal); when(mockedConfigUtils.supervisorStormDistRootImpl(conf, topoId)).thenReturn(topologyDirRoot.toString()); when(mockedConfigUtils.readSupervisorStormConfImpl(conf, topoId)).thenReturn(topoConf); when(mockedConfigUtils.readSupervisorTopologyImpl(conf, topoId, ops)).thenReturn(constructEmptyStormTopology()); //Write the mocking backwards so the actual method is not called on the spy object doReturn(CompletableFuture.supplyAsync(() -> null)).when(victim) 
.requestDownloadBaseTopologyBlobs(any(), eq(null)); Files.createDirectories(topologyDirRoot); doReturn(userDir.toFile()).when(victim).getLocalUserFileCacheDir(user); doReturn(localizedList).when(victim).getBlobs(any(List.class), any(), any()); Future<Void> f = victim.requestDownloadTopologyBlobs(constructLocalAssignment(topoId, user), port, null); f.get(20, TimeUnit.SECONDS); // We should be done now... verify(victim).getLocalUserFileCacheDir(user); assertTrue(ops.fileExists(userDir)); verify(victim).getBlobs(any(List.class), any(), any()); // symlink was created assertTrue(Files.isSymbolicLink(topologyDirRoot.resolve(simpleLocalName))); } finally { ConfigUtils.setInstance(previousConfigUtils); if (victim != null) { victim.close(); } } }
/**
 * Computes the reverse-zone network address for the given base IP, range and index.
 *
 * <p>Fix: the index error message previously said "must be positive" although the
 * guard only rejects negative values (index 0 is accepted); the message now matches
 * the actual check and the wording used for the range message.
 *
 * @param baseIp base IP address to offset from
 * @param range  size of each zone range; must be non-negative
 * @param index  zero-based zone index; must be non-negative
 * @throws IllegalArgumentException if index or range is negative
 * @throws UnknownHostException if the base IP cannot be resolved
 */
protected static String getReverseZoneNetworkAddress(String baseIp, int range, int index)
    throws UnknownHostException {
  if (index < 0) {
    throw new IllegalArgumentException(
        String.format("Invalid index provided, cannot be negative: %d", index));
  }
  if (range < 0) {
    throw new IllegalArgumentException(
        String.format("Invalid range provided, cannot be negative: %d", range));
  }
  return calculateIp(baseIp, range, index);
}
// A negative range must raise IllegalArgumentException.
@Test public void testThrowIllegalArgumentExceptionIfRangeIsNegative() throws Exception { exception.expect(IllegalArgumentException.class); ReverseZoneUtils.getReverseZoneNetworkAddress(NET, -1, INDEX); }
// Static factory: derives the raw class from the TypeDescriptor and builds the coder.
public static <T extends Serializable> SerializableCoder<T> of(TypeDescriptor<T> type) { @SuppressWarnings("unchecked") Class<T> clazz = (Class<T>) type.getRawType(); return new SerializableCoder<>(clazz, type); }
// Pipeline round-trip with default coder inference (no explicit coder set).
@Test @Category(NeedsRunner.class) public void testDefaultCoder() throws Exception { p.enableAbandonedNodeEnforcement(true); // Use MyRecord as input and output types without explicitly specifying // a coder (this uses the default coders, which may not be // SerializableCoder). PCollection<String> output = p.apply(Create.of("Hello", "World")) .apply(ParDo.of(new StringToRecord())) .apply(ParDo.of(new RecordToString())); PAssert.that(output).containsInAnyOrder("Hello", "World"); p.run(); }
// Collects resources in the order dictated by the per-name class loading strategy:
// APPLICATION (parent loader, if any), PLUGIN (this loader), DEPENDENCIES.
@Override public Enumeration<URL> getResources(String name) throws IOException { List<URL> resources = new ArrayList<>(); ClassLoadingStrategy loadingStrategy = getClassLoadingStrategy(name); log.trace("Received request to load resources '{}'", name); for (ClassLoadingStrategy.Source classLoadingSource : loadingStrategy.getSources()) { switch (classLoadingSource) { case APPLICATION: if (getParent() != null) { resources.addAll(Collections.list(getParent().getResources(name))); } break; case PLUGIN: resources.addAll(Collections.list(findResources(name))); break; case DEPENDENCIES: resources.addAll(findResourcesFromDependencies(name)); break; } } return Collections.enumeration(resources); }
// Parent-first loading must find exactly one copy of a parent-only resource.
@Test void parentFirstGetResourcesExistsInParent() throws IOException, URISyntaxException { Enumeration<URL> resources = parentFirstPluginClassLoader.getResources("META-INF/file-only-in-parent"); assertNumberOfResourcesAndFirstLineOfFirstElement(1, "parent", resources); }
// Normalizes a hex id field to canonical length: 16 chars, or 32 for traceId
// values longer than 16. Null/empty input returns null when nullable, else throws.
// Over-padded 32-char trace ids are trimmed to their low 16 chars; short ids are
// left-padded with zeros.
@Nullable static String normalizeIdField(String field, @Nullable String id, boolean isNullable) { if (id == null) { if (isNullable) return null; throw new NullPointerException(field + " == null"); } int length = id.length(); if (length == 0) { if (isNullable) return null; throw new IllegalArgumentException(field + " is empty"); } int desiredLength = field.equals("traceId") && length > 16 ? 32 : 16; int existingPadding = validateHexAndReturnPadding(field, id, desiredLength); if (desiredLength == 32 && existingPadding >= 16) { // overly padded traceId return id.substring(16); } return length == desiredLength ? id : padLeft(id, desiredLength, existingPadding); }
// A 19-char traceId must be zero-padded on the left to the 32-char (128-bit) form.
@Test void normalizeIdField_padsTo128() { assertThat(normalizeIdField("traceId", "4d2000000000000162e", false)) .isEqualTo("00000000000004d2000000000000162e"); }
// Streams the pipeline of a job invocation back to the client. gRPC status
// errors pass through unchanged; unexpected exceptions are logged and mapped
// to Status.INTERNAL.
@Override public void getPipeline( GetJobPipelineRequest request, StreamObserver<GetJobPipelineResponse> responseObserver) { LOG.trace("{} {}", GetJobPipelineRequest.class.getSimpleName(), request); String invocationId = request.getJobId(); try { JobInvocation invocation = getInvocation(invocationId); RunnerApi.Pipeline pipeline = invocation.getPipeline(); GetJobPipelineResponse response = GetJobPipelineResponse.newBuilder().setPipeline(pipeline).build(); responseObserver.onNext(response); responseObserver.onCompleted(); } catch (StatusRuntimeException | StatusException e) { responseObserver.onError(e); } catch (Exception e) { String errMessage = String.format("Encountered Unexpected Exception for Invocation %s", invocationId); LOG.error(errMessage, e); responseObserver.onError(Status.INTERNAL.withCause(e).asException()); } }
// Requesting the pipeline of a job that has not run must fail with a StatusException.
@Test public void testGetPipelineFailure() { prepareJob(); JobApi.GetJobPipelineRequest request = JobApi.GetJobPipelineRequest.newBuilder().setJobId(TEST_JOB_ID).build(); RecordingObserver<JobApi.GetJobPipelineResponse> recorder = new RecordingObserver<>(); service.getPipeline(request, recorder); // job has not been run yet assertThat(recorder.isSuccessful(), is(false)); assertThat(recorder.error, isA(StatusException.class)); }
// Reads a single byte from the underlying UFS stream. Returns -1 at end of file;
// otherwise advances mPosition and increments the bytes-read-from-UFS metric.
@Override public int read() throws IOException { if (mPosition == mLength) { // at end of file return -1; } updateStreamIfNeeded(); int res = mUfsInStream.get().read(); if (res == -1) { return -1; } mPosition++; Metrics.BYTES_READ_FROM_UFS.inc(1); return res; }
// A full CHUNK_SIZE read must return the expected increasing-byte pattern.
@Test public void manyBytesRead() throws IOException, AlluxioException { AlluxioURI ufsPath = getUfsPath(); createFile(ufsPath, CHUNK_SIZE); try (FileInStream inStream = getStream(ufsPath)) { byte[] res = new byte[CHUNK_SIZE]; assertEquals(CHUNK_SIZE, inStream.read(res)); assertTrue(BufferUtils.equalIncreasingByteArray(CHUNK_SIZE, res)); } }
// Delegates XA start to the wrapped resource, translating XAExceptions via mapXAException.
@Override public void start(final Xid xid, final int flags) throws XAException { try { delegate.start(xid, flags); } catch (final XAException ex) { throw mapXAException(ex); } }
// start() must pass the xid and flags through to the underlying XAResource.
@Test void assertStart() throws XAException { singleXAResource.start(xid, 1); verify(xaResource).start(xid, 1); }
/**
 * Triggers an on-demand ONU firmware upgrade over NETCONF.
 *
 * <p>Expected {@code target} format: {@code image:ponlink-onu[,ponlink-onu...][:auto]}
 * (colon-separated; ONU entries are {@code pon-onu} pairs joined by hyphens). Returns the
 * raw RPC reply, or {@code null} on any validation failure, loss of mastership, or
 * NETCONF communication error (the cause is logged in each case).
 */
@Override
public String upgradeFirmwareOndemand(String target) {
    DriverHandler handler = handler();
    NetconfController controller = handler.get(NetconfController.class);
    MastershipService mastershipService = handler.get(MastershipService.class);
    DeviceId ncDeviceId = handler.data().deviceId();
    checkNotNull(controller, "Netconf controller is null");
    String reply = null;
    int count;
    // Only the mastership owner may push configuration to the device.
    if (!mastershipService.isLocalMaster(ncDeviceId)) {
        log.warn("Not master for {} Use {} to execute command",
                 ncDeviceId,
                 mastershipService.getMasterFor(ncDeviceId));
        return null;
    }
    // target = image ':' onu-list [':' reboot-mode]
    String[] data = target.split(COLON);
    if ((data.length < TWO) || (data.length > THREE)) {
        log.error("Invalid number of arguments");
        return null;
    }
    String[] onuList = data[SECOND_PART].split(COMMA);
    if (onuList.length == ZERO) {
        log.error("No ONU listed");
        return null;
    }
    // Optional third component must literally be the AUTO reboot mode.
    if ((data.length > TWO) && (!AUTO.equals(data[THIRD_PART]))) {
        log.error("Invalid reboot-mode {}", data[THIRD_PART]);
        return null;
    }
    try {
        // Build the <ondemand-firmware-upgrade> RPC payload piece by piece.
        StringBuilder request = new StringBuilder();
        request.append(ANGLE_LEFT + ONDEMAND_FIRMWARE_UPGRADE + SPACE);
        request.append(VOLT_NE_NAMESPACE + ANGLE_RIGHT + NEW_LINE);
        request.append(buildStartTag(PARTICIPANT_LIST));
        for (count = ZERO; count < onuList.length; count++) {
            // Each participant is "ponlinkId-onuId"; both must be positive integers.
            String[] onuId = onuList[count].split(HYPHEN);
            if (onuId.length != TWO) {
                log.error("Invalid ONU identifier");
                return null;
            }
            try {
                int pon;
                pon = Integer.parseInt(onuId[FIRST_PART]);
                if (pon <= ZERO) {
                    log.error("Invalid integer for ponlink-id:{}", onuId[FIRST_PART]);
                    return null;
                }
                int onu;
                onu = Integer.parseInt(onuId[SECOND_PART]);
                if (onu <= ZERO) {
                    log.error("Invalid integer for onu-id:{}", onuId[SECOND_PART]);
                    return null;
                }
            } catch (NumberFormatException e) {
                log.error("Non-number input");
                return null;
            }
            request.append(buildStartTag(MEMBER))
                .append(buildStartTag(PONLINK_ID))
                .append(onuId[FIRST_PART])
                .append(buildEndTag(PONLINK_ID))
                .append(buildStartTag(ONU_ID))
                .append(onuId[SECOND_PART])
                .append(buildEndTag(ONU_ID))
                .append(buildEndTag(MEMBER));
        }
        request.append(buildEndTag(PARTICIPANT_LIST))
            .append(buildStartTag(IMAGE_NAME))
            .append(data[FIRST_PART])
            .append(buildEndTag(IMAGE_NAME));
        if (data.length == THREE) {
            request.append(buildStartTag(REBOOT_MODE))
                .append(data[THIRD_PART])
                .append(buildEndTag(REBOOT_MODE));
        }
        request.append(buildEndTag(ONDEMAND_FIRMWARE_UPGRADE));
        reply = controller
            .getDevicesMap()
            .get(ncDeviceId)
            .getSession()
            .doWrappedRpc(request.toString());
    } catch (NetconfException e) {
        // Communication failures are logged and surface as a null reply.
        log.error("Cannot communicate to device {} exception {}", ncDeviceId, e);
    }
    return reply;
}
/** Every malformed on-demand firmware-upgrade target string must be rejected with a null reply. */
@Test
public void testInvalidOndemandFirmwareUpgradeInput() throws Exception {
    for (String target : INVALID_ONDEMAND_FWDL_TCS) {
        String reply = voltConfig.upgradeFirmwareOndemand(target);
        assertNull("Incorrect response for INVALID_ONDEMAND_FWDL_TCS", reply);
    }
}
@Override public long offset() { if (recordContext == null) { // This is only exposed via the deprecated ProcessorContext, // in which case, we're preserving the pre-existing behavior // of returning dummy values when the record context is undefined. // For offset, the dummy value is `-1L`. return -1L; } else { return recordContext.offset(); } }
/** offset() must be read straight from the attached record context. */
@Test
public void shouldReturnOffsetFromRecordContext() {
    assertThat(context.offset(), equalTo(recordContext.offset()));
}
/**
 * FEEL {@code contains(string, match)}: reports whether {@code string} contains
 * {@code match} as a substring.
 *
 * <p>Parameters are validated one at a time so the returned error names the first
 * offending argument.
 */
public FEELFnResult<Boolean> invoke(@ParameterName("string") String string, @ParameterName("match") String match) {
    if (string == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "string", "cannot be null"));
    }
    if (match == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "match", "cannot be null"));
    }
    final boolean found = string.contains(match);
    return FEELFnResult.ofResult(found);
}
/** contains() must return false for substrings that do not occur in the input. */
@Test
void invokeNotContains() {
    FunctionTestUtil.assertResult(containsFunction.invoke("test", "ex"), false);
    FunctionTestUtil.assertResult(containsFunction.invoke("test", "u"), false);
    // Partial overlap at the end is still not a containment match.
    FunctionTestUtil.assertResult(containsFunction.invoke("test", "esty"), false);
}
/**
 * Cluster-aware RENAMENX: renames a key only if the new name does not already exist.
 *
 * <p>When source and destination hash to the same cluster slot the plain Redis command is
 * used. Otherwise the key is emulated cross-slot via DUMP + RESTORE (carrying over the
 * remaining TTL) followed by deletion of the original key, since Redis cannot rename
 * across slots natively.
 */
@Override
public Flux<ReactiveRedisConnection.BooleanResponse<RenameCommand>> renameNX(Publisher<RenameCommand> commands) {
    return execute(commands, command -> {

        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getNewName(), "New name must not be null!");

        byte[] keyBuf = toByteArray(command.getKey());
        byte[] newKeyBuf = toByteArray(command.getNewName());

        // Same slot: the native RENAMENX works, no emulation needed.
        if (executorService.getConnectionManager().calcSlot(keyBuf) == executorService.getConnectionManager().calcSlot(newKeyBuf)) {
            return super.renameNX(commands);
        }

        return exists(command.getNewName())
                .zipWith(read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.DUMP, keyBuf))
                // Proceed only if the destination is absent and the source dump succeeded.
                .filter(newKeyExistsAndDump -> !newKeyExistsAndDump.getT1()
                        && Objects.nonNull(newKeyExistsAndDump.getT2()))
                .map(Tuple2::getT2)
                // Pair the dumped value with the source key's remaining TTL (0 = no expiry).
                .zipWhen(value ->
                        pTtl(command.getKey())
                                .filter(Objects::nonNull)
                                .map(ttl -> Math.max(0, ttl))
                                .switchIfEmpty(Mono.just(0L))
                )
                .flatMap(valueAndTtl -> write(newKeyBuf, StringCodec.INSTANCE, RedisCommands.RESTORE, newKeyBuf, valueAndTtl.getT2(), valueAndTtl.getT1())
                            .then(Mono.just(true)))
                .switchIfEmpty(Mono.just(false))
                .doOnSuccess(didRename -> {
                    // Only remove the source key after a successful RESTORE.
                    if (didRename) {
                        del(command.getKey());
                    }
                })
                .map(didRename -> new BooleanResponse<>(command, didRename));
    });
}
/**
 * renameNX must succeed for an absent destination (preserving value and TTL) and must
 * return false once the destination exists.
 */
@Test
public void testRenameNX() {
    connection.stringCommands().set(originalKey, value).block();
    if (hasTtl) {
        connection.keyCommands().expire(originalKey, Duration.ofSeconds(1000)).block();
    }

    // Force the destination into the slot the test parameterisation asks for
    // (same-slot vs cross-slot paths are both exercised by the suite).
    Integer originalSlot = getSlotForKey(originalKey);
    newKey = getNewKeyForSlot(new String(originalKey.array()), getTargetSlot(originalSlot));

    Boolean result = connection.keyCommands().renameNX(originalKey, newKey).block();
    assertThat(result).isTrue();
    assertThat(connection.stringCommands().get(newKey).block()).isEqualTo(value);

    if (hasTtl) {
        assertThat(connection.keyCommands().ttl(newKey).block()).isGreaterThan(0);
    } else {
        assertThat(connection.keyCommands().ttl(newKey).block()).isEqualTo(-1);
    }

    // Second attempt: destination now exists, so renameNX must refuse.
    connection.stringCommands().set(originalKey, value).block();

    result = connection.keyCommands().renameNX(originalKey, newKey).block();
    assertThat(result).isFalse();
}
/**
 * Returns a copy of this rule whose description text is replaced by {@code newDescription};
 * the rule's checking logic is unchanged.
 */
@Override
@PublicAPI(usage = ACCESS)
public SliceRule as(String newDescription) {
    return copyWithTransformation(new As(newDescription));
}
/**
 * When the cycle limit equals the total number of cycles, the report must state the full
 * violation count rather than a truncated one.
 */
@Test
public void reports_number_of_violations_if_all_cycles_are_reported() {
    // A complete graph on 7 nodes has a known number of elementary cycles.
    int expectedNumberOfCycles = getNumberOfCyclesInCompleteGraph(7);

    String failureReport = evaluateCompleteGraphCycleFreeWithCycleLimit(expectedNumberOfCycles);

    assertThat(failureReport).as("failure report").contains("(" + expectedNumberOfCycles + " times)");
}
/**
 * Returns the custom event properties attached to this exposure.
 * May be {@code null} when no properties were ever set — confirm with callers.
 */
public JSONObject getProperties() {
    return properties;
}
/** After setProperties, getProperties must return a non-null object. */
@Test
public void getProperties() {
    SAExposureData exposureData = new SAExposureData("ExposeEvent");
    exposureData.setProperties(new JSONObject());
    Assert.assertNotNull(exposureData.getProperties());
}
/**
 * Queries Redis Sentinel (SENTINEL SLAVES) for the replicas of the given master and
 * converts the raw attribute maps into {@link RedisServer} descriptors.
 */
@Override
public Collection<RedisServer> slaves(NamedNode master) {
    List<Map<String, String>> slaves = connection.sync(StringCodec.INSTANCE, RedisCommands.SENTINEL_SLAVES, master.getName());
    return toRedisServersList(slaves);
}
/** The sentinel setup of this suite exposes exactly two replicas for the first master. */
@Test
public void testSlaves() {
    Collection<RedisServer> masters = connection.masters();
    Collection<RedisServer> slaves = connection.slaves(masters.iterator().next());
    assertThat(slaves).hasSize(2);
}
/**
 * Parses a multi-value config-server parameter string (e.g. "host1:123,host2") into an
 * array of {@code ConfigServer} entries, one per value.
 */
static ConfigServer[] toConfigServers(String configserversString) {
    return multiValueParameterStream(configserversString)
            .map(value -> toConfigServer(value))
            .toArray(ConfigServer[]::new);
}
/** A "host:port" entry must carry the explicit port through parsing. */
@Test
public void port_can_be_configured() {
    CloudConfigOptions.ConfigServer[] parsed = toConfigServers("myhost:123");
    int port = parsed[0].port.get();
    assertEquals(123, port);
}
/**
 * Decides whether a Triple (gRPC-over-Dubbo) method needs its arguments/return value
 * wrapped, i.e. whether it mixes plain Java types with protobuf messages.
 *
 * <p>Pure-protobuf signatures go unwrapped; pure-Java signatures get wrapped; invalid
 * mixtures (both kinds present, multiple protobuf params, misplaced stream params, ...)
 * throw {@link IllegalStateException}.
 *
 * @return true when wrapping is required, false when the raw protobuf path can be used
 */
static boolean needWrap(MethodDescriptor methodDescriptor, Class<?>[] parameterClasses, Class<?> returnClass) {
    String methodName = methodDescriptor.getMethodName();
    // generic call must be wrapped
    if (CommonConstants.$INVOKE.equals(methodName) || CommonConstants.$INVOKE_ASYNC.equals(methodName)) {
        return true;
    }
    // echo must be wrapped
    if ($ECHO.equals(methodName)) {
        return true;
    }
    boolean returnClassProtobuf = isProtobufClass(returnClass);
    // Response foo() — wrapping depends only on the return type.
    if (parameterClasses.length == 0) {
        return !returnClassProtobuf;
    }
    int protobufParameterCount = 0;
    int javaParameterCount = 0;
    int streamParameterCount = 0;
    boolean secondParameterStream = false;
    // count normal and protobuf param
    for (int i = 0; i < parameterClasses.length; i++) {
        Class<?> parameterClass = parameterClasses[i];
        if (isProtobufClass(parameterClass)) {
            protobufParameterCount++;
        } else {
            if (isStreamType(parameterClass)) {
                if (i == 1) {
                    secondParameterStream = true;
                }
                streamParameterCount++;
            } else {
                javaParameterCount++;
            }
        }
    }
    // more than one stream param
    if (streamParameterCount > 1) {
        throw new IllegalStateException("method params error: more than one Stream params. method=" + methodName);
    }
    // protobuf only support one param
    if (protobufParameterCount >= 2) {
        throw new IllegalStateException("method params error: more than one protobuf params. method=" + methodName);
    }
    // server stream supports exactly one request param plus one stream param
    if (streamParameterCount == 1) {
        if (javaParameterCount + protobufParameterCount > 1) {
            throw new IllegalStateException(
                "method params error: server stream does not support more than one normal param."
                    + " method=" + methodName);
        }
        // server stream: void foo(Request, StreamObserver<Response>)
        if (!secondParameterStream) {
            throw new IllegalStateException(
                "method params error: server stream's second param must be StreamObserver."
                    + " method=" + methodName);
        }
    }
    if (methodDescriptor.getRpcType() != MethodDescriptor.RpcType.UNARY) {
        // NOTE: re-checks the server-stream shape already validated above.
        if (MethodDescriptor.RpcType.SERVER_STREAM == methodDescriptor.getRpcType()) {
            if (!secondParameterStream) {
                throw new IllegalStateException(
                    "method params error:server stream's second param must be StreamObserver."
                        + " method=" + methodName);
            }
        }
        // param type must be consistent with the (protobuf vs plain) return type
        if (returnClassProtobuf) {
            if (javaParameterCount > 0) {
                throw new IllegalStateException(
                    "method params error: both normal and protobuf param found. method=" + methodName);
            }
        } else {
            if (protobufParameterCount > 0) {
                throw new IllegalStateException("method params error method=" + methodName);
            }
        }
    } else {
        if (streamParameterCount > 0) {
            throw new IllegalStateException(
                "method params error: unary method should not contain any StreamObserver."
                    + " method=" + methodName);
        }
        // Pure protobuf in and out: no wrapping needed.
        if (protobufParameterCount > 0 && returnClassProtobuf) {
            return false;
        }
        // handler reactor or rxjava only consider gen by proto
        if (isMono(returnClass) || isRx(returnClass)) {
            return false;
        }
        // Pure plain-Java in and out: wrap.
        if (protobufParameterCount <= 0 && !returnClassProtobuf) {
            return true;
        }
        // handle grpc stub only consider gen by proto
        if (GRPC_ASYNC_RETURN_CLASS.equalsIgnoreCase(returnClass.getName()) && protobufParameterCount == 1) {
            return false;
        }
        // handle dubbo generated method: inspect CompletableFuture's type argument
        if (TRI_ASYNC_RETURN_CLASS.equalsIgnoreCase(returnClass.getName())) {
            Class<?> actualReturnClass =
                (Class<?>) ((ParameterizedType) methodDescriptor.getMethod().getGenericReturnType())
                    .getActualTypeArguments()[0];
            boolean actualReturnClassProtobuf = isProtobufClass(actualReturnClass);
            if (actualReturnClassProtobuf && protobufParameterCount == 1) {
                return false;
            }
            if (!actualReturnClassProtobuf && protobufParameterCount == 0) {
                return true;
            }
        }
        // todo remove this in future
        boolean ignore = checkNeedIgnore(returnClass);
        if (ignore) {
            return protobufParameterCount != 1;
        }
        throw new IllegalStateException("method params error method=" + methodName);
    }
    // java param should be wrapped
    return javaParameterCount > 0;
}
/** A no-arg method returning a plain Java class must require wrapping. */
@Test
void testMethodWithNoParametersAndReturnJava() throws Exception {
    Method method = DescriptorService.class.getMethod("noParameterAndReturnJavaClassMethod");
    MethodDescriptor descriptor = new ReflectionMethodDescriptor(method);
    // No parameters means an empty param descriptor string.
    assertEquals("", descriptor.getParamDesc());
    Assertions.assertEquals(0, descriptor.getParameterClasses().length);
    assertTrue(needWrap(descriptor));
}
/**
 * Entry point for reading a Bigtable change stream. Configuration (project, instance,
 * table, ...) is supplied through the returned transform's {@code with*} methods.
 */
public static ReadChangeStream readChangeStream() {
    return ReadChangeStream.create();
}
/** withoutValidation() must suppress resource checks during validate(). */
@Test
public void testReadChangeStreamPassWithoutValidation() {
    BigtableIO.ReadChangeStream readChangeStream =
        BigtableIO.readChangeStream()
            .withProjectId("project")
            .withInstanceId("instance")
            .withTableId("table")
            .withoutValidation();
    // No error is thrown because we skip validation
    readChangeStream.validate(TestPipeline.testingPipelineOptions());
}
/**
 * Validates the consumer configuration and returns a strategy factory bound to it.
 *
 * @throws IllegalArgumentException if {@code pulsarConsumer} is null (enforced by
 *         {@code validate}; see the accompanying tests)
 */
public static ConsumerCreationStrategyFactory create(PulsarConsumer pulsarConsumer) {
    validate(pulsarConsumer);
    return new ConsumerCreationStrategyFactory(pulsarConsumer);
}
/** Passing a null consumer must be rejected during validation. */
@Test
public void givenPulsarConsumerIsNullwhenICreateFactoryverifyIllegalArgumentExceptionIsThrown() {
    assertThrows(IllegalArgumentException.class, () -> ConsumerCreationStrategyFactory.create(null));
}
/**
 * Opens an iRODS download stream for the given file, skipping {@code status.getOffset()}
 * bytes when resuming an interrupted transfer.
 *
 * @throws NotfoundException   if the remote file does not exist
 * @throws BackgroundException for any Jargon (iRODS client) failure, mapped to the
 *                             Cyberduck exception hierarchy
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    try {
        try {
            final IRODSFileSystemAO fs = session.getClient();
            final IRODSFileFactory factory = fs.getIRODSFileFactory();
            final IRODSFile f = factory.instanceIRODSFile(file.getAbsolute());
            if(f.exists()) {
                final InputStream in = new PackingIrodsInputStream(factory.instanceIRODSFileInputStream(f));
                if(status.isAppend()) {
                    // Resume: discard the bytes that were already transferred.
                    return StreamCopier.skip(in, status.getOffset());
                }
                return in;
            }
            else {
                throw new NotfoundException(file.getAbsolute());
            }
        }
        catch(JargonRuntimeException e) {
            // Jargon wraps checked failures in runtime exceptions; unwrap before mapping.
            if(e.getCause() instanceof JargonException) {
                throw (JargonException) e.getCause();
            }
            throw new DefaultExceptionMappingService().map(e);
        }
    }
    catch(JargonException e) {
        throw new IRODSExceptionMappingService().map("Download {0} failed", e, file);
    }
}
/** Reading a random, non-existent path must raise NotfoundException. */
@Test(expected = NotfoundException.class)
public void testReadNotFound() throws Exception {
    final ProtocolFactory factory = new ProtocolFactory(new HashSet<>(Collections.singleton(new IRODSProtocol())));
    final Profile profile = new ProfilePlistReader(factory).read(
        this.getClass().getResourceAsStream("/iRODS (iPlant Collaborative).cyberduckprofile"));
    final Host host = new Host(profile, profile.getDefaultHostname(), new Credentials(
        PROPERTIES.get("irods.key"), PROPERTIES.get("irods.secret")
    ));
    final IRODSSession session = new IRODSSession(host);
    session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback());
    session.login(new DisabledLoginCallback(), new DisabledCancelCallback());
    // Random UUID filename is assumed not to exist on the server.
    final Path test = new Path(new IRODSHomeFinderService(session).find(), UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    assertFalse(session.getFeature(Find.class).find(test));
    new IRODSReadFeature(session).read(test, new TransferStatus(), new DisabledConnectionCallback());
}
/**
 * Forwards a remoting command to the broker named in the request's ext fields.
 *
 * <p>SEND_MESSAGE_V2 uses the abbreviated field name {@code n}; all other codes use
 * {@code bname}. One-way RPCs are fired and forgotten; otherwise the broker response
 * (or error) is written back to the channel asynchronously, so this method itself
 * always returns {@code null} for the dispatched cases.
 */
protected RemotingCommand request(ChannelHandlerContext ctx, RemotingCommand request,
    ProxyContext context, long timeoutMillis) throws Exception {
    String brokerName;
    if (request.getCode() == RequestCode.SEND_MESSAGE_V2) {
        if (request.getExtFields().get(BROKER_NAME_FIELD_FOR_SEND_MESSAGE_V2) == null) {
            return RemotingCommand.buildErrorResponse(ResponseCode.VERSION_NOT_SUPPORTED,
                "Request doesn't have field bname");
        }
        brokerName = request.getExtFields().get(BROKER_NAME_FIELD_FOR_SEND_MESSAGE_V2);
    } else {
        if (request.getExtFields().get(BROKER_NAME_FIELD) == null) {
            return RemotingCommand.buildErrorResponse(ResponseCode.VERSION_NOT_SUPPORTED,
                "Request doesn't have field bname");
        }
        brokerName = request.getExtFields().get(BROKER_NAME_FIELD);
    }
    if (request.isOnewayRPC()) {
        // Fire-and-forget: no response will be written to the channel.
        messagingProcessor.requestOneway(context, brokerName, request, timeoutMillis);
        return null;
    }
    messagingProcessor.request(context, brokerName, request, timeoutMillis)
        .thenAccept(r -> writeResponse(ctx, context, request, r))
        .exceptionally(t -> {
            writeErrResponse(ctx, context, request, t);
            return null;
        });
    return null;
}
/** A brokered request must be proxied and its response flushed back on the channel. */
@Test
public void testRequest() throws Exception {
    String brokerName = "broker";
    RemotingCommand response = RemotingCommand.createResponseCommand(ResponseCode.SUCCESS, "remark");
    when(messagingProcessorMock.request(any(), eq(brokerName), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(
        response
    ));
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.PULL_MESSAGE, null);
    request.addExtField(AbstractRemotingActivity.BROKER_NAME_FIELD, brokerName);
    // Dispatched asynchronously, so the direct return value is null.
    RemotingCommand remotingCommand = remotingActivity.request(ctx, request, null, 10000);
    assertThat(remotingCommand).isNull();
    verify(ctx, times(1)).writeAndFlush(response);
}
/**
 * Tokenizes {@code text} using the configured {@link BreakIterator} boundaries,
 * trimming each segment and dropping segments that are purely whitespace.
 */
@Override
public String[] split(String text) {
    boundary.setText(text);
    List<String> tokens = new ArrayList<>();
    int begin = boundary.first();
    for (int finish = boundary.next(); finish != BreakIterator.DONE; finish = boundary.next()) {
        String token = text.substring(begin, finish).trim();
        if (!token.isEmpty()) {
            tokens.add(token);
        }
        begin = finish;
    }
    return tokens.toArray(new String[0]);
}
/** Hyphenated compounds ("write-down") and currency amounts must stay single tokens. */
@Test
public void testSplitHyphen() {
    System.out.println("tokenize hyphen");
    String text = "On a noncash basis for the quarter, the bank reported a "
            + "loss of $7.3 billion because of a $10.4 billion write-down "
            + "in the value of its credit card unit, attributed to federal "
            + "regulations that limit debit fees and other charges.";

    String[] expResult = {"On", "a", "noncash", "basis", "for", "the", "quarter",
        ",", "the", "bank", "reported", "a", "loss", "of", "$7.3", "billion",
        "because", "of", "a", "$10.4", "billion", "write-down", "in", "the",
        "value", "of", "its", "credit", "card", "unit", ",", "attributed",
        "to", "federal", "regulations", "that", "limit", "debit", "fees",
        "and", "other", "charges", "."};

    BreakIteratorTokenizer instance = new BreakIteratorTokenizer();
    String[] result = instance.split(text);
    assertEquals(expResult.length, result.length);
    for (int i = 0; i < result.length; i++) {
        assertEquals(expResult[i], result[i]);
    }
}
@VisibleForTesting static boolean isBrokenPipe(IOException original) { Throwable exception = original; while (exception != null) { String message = exception.getMessage(); if (message != null && message.toLowerCase(Locale.US).contains("broken pipe")) { return true; } exception = exception.getCause(); if (exception == original) { // just in case if there's a circular chain return false; } } return false; }
/** A "Broken pipe" buried two causes deep must still be detected. */
@Test
public void testIsBrokenPipe_nestedBrokenPipe() {
    IOException exception = new IOException(new SSLException(new SocketException("Broken pipe")));
    Assert.assertTrue(RegistryEndpointCaller.isBrokenPipe(exception));
}
/**
 * Runs the maintenance work (and the optional extra {@code task}) under the eviction lock.
 *
 * <p>{@code rescheduleCleanUpIfIncomplete()} is deliberately invoked <em>after</em> the
 * lock is released, so a follow-up cleanup cannot deadlock or contend with this pass.
 */
void performCleanUp(@Nullable Runnable task) {
  evictionLock.lock();
  try {
    maintenance(task);
  } finally {
    evictionLock.unlock();
  }
  rescheduleCleanUpIfIncomplete();
}
/** A maintenance failure must be swallowed by the task (exec() == false) and logged once at ERROR. */
@Test
@CheckMaxLogLevel(ERROR)
public void cleanupTask_exception() {
  var expected = new RuntimeException();
  BoundedLocalCache<?, ?> cache = Mockito.mock();
  doThrow(expected).when(cache).performCleanUp(any());
  var task = new PerformCleanupTask(cache);
  assertThat(task.exec()).isFalse();

  assertThat(logEvents()
      .withMessage("Exception thrown when performing the maintenance task")
      .withThrowable(expected)
      .withLevel(ERROR)
      .exclusively())
      .hasSize(1);
}
/**
 * Dispatches the options-menu selection: navigates to the About/Tweaks screens or shows
 * the backup/restore dialogs; anything else falls through to the superclass.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
  final int itemId = item.getItemId();
  if (itemId == R.id.about_menu_option) {
    Navigation.findNavController(requireView())
        .navigate(MainFragmentDirections.actionMainFragmentToAboutAnySoftKeyboardFragment());
    return true;
  } else if (itemId == R.id.tweaks_menu_option) {
    Navigation.findNavController(requireView())
        .navigate(MainFragmentDirections.actionMainFragmentToMainTweaksFragment());
    return true;
  } else if (itemId == R.id.backup_prefs) {
    mDialogController.showDialog(R.id.backup_prefs);
    return true;
  } else if (itemId == R.id.restore_prefs) {
    mDialogController.showDialog(R.id.restore_prefs);
    return true;
  }
  return super.onOptionsItemSelected(item);
}
/** On pre-KitKat SDKs the restore action must show the "not supported" dialog. */
@Test
@Config(sdk = Build.VERSION_CODES.JELLY_BEAN_MR2)
@Ignore("Robolectric does not support this API level")
public void testRestoreMenuItemNotSupportedPreKitKat() throws Exception {
    final MainFragment fragment = startFragment();
    final FragmentActivity activity = fragment.getActivity();
    Menu menu = Shadows.shadowOf(activity).getOptionsMenu();
    Assert.assertNotNull(menu);
    final MenuItem item = menu.findItem(R.id.restore_prefs);

    fragment.onOptionsItemSelected(item);
    TestRxSchedulers.foregroundFlushAllJobs();
    final AlertDialog dialog = GeneralDialogTestUtil.getLatestShownDialog();
    Assert.assertNotSame(GeneralDialogTestUtil.NO_DIALOG, dialog);
    Assert.assertEquals(
        getApplicationContext().getText(R.string.backup_restore_not_support_before_kitkat),
        GeneralDialogTestUtil.getTitleFromDialog(dialog));
}
/**
 * Parses a raw User-Agent header string into a structured {@link UserAgent}
 * (browser, engine, OS, platform). Thin delegation to {@code UserAgentParser}.
 */
public static UserAgent parse(String userAgentString) {
    return UserAgentParser.parse(userAgentString);
}
/** The QQ mobile browser UA string must resolve browser, engine, OS and mobile flag. */
@Test
public void parseQQTest() {
    final String uaString = "User-Agent: MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1";
    final UserAgent ua = UserAgentUtil.parse(uaString);
    assertEquals("QQBrowser", ua.getBrowser().toString());
    assertEquals("26", ua.getVersion());
    assertEquals("Webkit", ua.getEngine().toString());
    assertEquals("533.1", ua.getEngineVersion());
    assertEquals("Android", ua.getOs().toString());
    assertEquals("2.3.7", ua.getOsVersion());
    assertEquals("Android", ua.getPlatform().toString());
    assertTrue(ua.isMobile());
}
protected boolean shouldAllowPreemptiveResponse(Channel channel) { // If the request timed-out while being read, then there won't have been any LastContent, but thats ok because // the connection will have to be discarded anyway. StatusCategory status = StatusCategoryUtils.getStatusCategory(ClientRequestReceiver.getRequestFromChannel(channel)); return status == ZuulStatusCategory.FAILURE_CLIENT_TIMEOUT; }
/** A non-timeout failure (FAILURE_LOCAL) must not allow a preemptive response. */
@Test
void flagResponseBeforeRequestRead() {
    final ClientResponseWriter responseWriter = new ClientResponseWriter(new BasicRequestCompleteHandler());
    final EmbeddedChannel channel = new EmbeddedChannel();
    final SessionContext context = new SessionContext();
    StatusCategoryUtils.setStatusCategory(context, ZuulStatusCategory.FAILURE_LOCAL);
    final HttpRequestMessage request = new HttpRequestBuilder(context).withDefaults();
    channel.attr(ClientRequestReceiver.ATTR_ZUUL_REQ).set(request);
    assertThat(responseWriter.shouldAllowPreemptiveResponse(channel)).isFalse();
}
/**
 * Maps an InterSystems IRIS column definition onto the SeaTunnel type system.
 *
 * <p>Numeric families pick precision/scale (falling back to defaults), character and
 * binary families carry a length (defaulting to 1 when unspecified), and LOB-like types
 * are capped at {@code Integer.MAX_VALUE}. Unknown IRIS types raise a conversion error.
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    Long typeDefineLength = typeDefine.getLength();
    PhysicalColumn.PhysicalColumnBuilder builder =
        PhysicalColumn.builder()
            .name(typeDefine.getName())
            .sourceType(typeDefine.getColumnType())
            .columnLength(typeDefineLength)
            .scale(typeDefine.getScale())
            .nullable(typeDefine.isNullable())
            .defaultValue(typeDefine.getDefaultValue())
            .comment(typeDefine.getComment());
    String irisDataType = typeDefine.getDataType().toUpperCase();
    // Char/binary lengths default to 1 when the DDL did not specify one.
    long charOrBinaryLength =
        Objects.nonNull(typeDefineLength) && typeDefineLength > 0 ? typeDefineLength : 1;
    switch (irisDataType) {
        case IRIS_NULL:
            builder.dataType(BasicType.VOID_TYPE);
            break;
        case IRIS_BIT:
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        // Exact numerics: preserve declared precision/scale, else use defaults.
        case IRIS_NUMERIC:
        case IRIS_MONEY:
        case IRIS_SMALLMONEY:
        case IRIS_NUMBER:
        case IRIS_DEC:
        case IRIS_DECIMAL:
            DecimalType decimalType;
            if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
                decimalType =
                    new DecimalType(
                        typeDefine.getPrecision().intValue(), typeDefine.getScale());
            } else {
                decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
            }
            builder.dataType(decimalType);
            builder.columnLength(Long.valueOf(decimalType.getPrecision()));
            builder.scale(decimalType.getScale());
            break;
        case IRIS_INT:
        case IRIS_INTEGER:
        case IRIS_MEDIUMINT:
            builder.dataType(BasicType.INT_TYPE);
            break;
        case IRIS_ROWVERSION:
        case IRIS_BIGINT:
        case IRIS_SERIAL:
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case IRIS_TINYINT:
            builder.dataType(BasicType.BYTE_TYPE);
            break;
        case IRIS_SMALLINT:
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case IRIS_FLOAT:
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case IRIS_DOUBLE:
        case IRIS_REAL:
        case IRIS_DOUBLE_PRECISION:
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        // Bounded character types keep their declared (or defaulted) length.
        case IRIS_CHAR:
        case IRIS_CHAR_VARYING:
        case IRIS_CHARACTER_VARYING:
        case IRIS_NATIONAL_CHAR:
        case IRIS_NATIONAL_CHAR_VARYING:
        case IRIS_NATIONAL_CHARACTER:
        case IRIS_NATIONAL_CHARACTER_VARYING:
        case IRIS_NATIONAL_VARCHAR:
        case IRIS_NCHAR:
        case IRIS_SYSNAME:
        case IRIS_VARCHAR2:
        case IRIS_VARCHAR:
        case IRIS_NVARCHAR:
        case IRIS_UNIQUEIDENTIFIER:
        case IRIS_GUID:
        case IRIS_CHARACTER:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(charOrBinaryLength);
            break;
        // Unbounded text (CLOB-like): advertise the maximum representable length.
        case IRIS_NTEXT:
        case IRIS_CLOB:
        case IRIS_LONG_VARCHAR:
        case IRIS_LONG:
        case IRIS_LONGTEXT:
        case IRIS_MEDIUMTEXT:
        case IRIS_TEXT:
        case IRIS_LONGVARCHAR:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(Long.valueOf(Integer.MAX_VALUE));
            break;
        case IRIS_DATE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case IRIS_TIME:
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            break;
        case IRIS_DATETIME:
        case IRIS_DATETIME2:
        case IRIS_SMALLDATETIME:
        case IRIS_TIMESTAMP:
        case IRIS_TIMESTAMP2:
        case IRIS_POSIXTIME:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            break;
        case IRIS_BINARY:
        case IRIS_BINARY_VARYING:
        case IRIS_RAW:
        case IRIS_VARBINARY:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(charOrBinaryLength);
            break;
        // Unbounded binary (BLOB-like).
        case IRIS_LONGVARBINARY:
        case IRIS_BLOB:
        case IRIS_IMAGE:
        case IRIS_LONG_BINARY:
        case IRIS_LONG_RAW:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(Long.valueOf(Integer.MAX_VALUE));
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                DatabaseIdentifier.IRIS, irisDataType, typeDefine.getName());
    }
    return builder.build();
}
/** An IRIS "date" column must map to SeaTunnel's LOCAL_DATE type, preserving name and source type. */
@Test
public void testConvertDate() {
    BasicTypeDefine<Object> typeDefine =
        BasicTypeDefine.builder().name("test").columnType("date").dataType("date").build();
    Column column = IrisTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(LocalTimeType.LOCAL_DATE_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
}
/**
 * Translates a web {@link SearchRequest} into an {@link IssueQuery}.
 *
 * <p>Copies every filter facet onto the builder, resolves rule keys to rule uuids, and
 * applies component/date handling. When some requested rule keys could not be resolved,
 * a sentinel uuid is added so the query matches nothing for the missing rules instead of
 * silently widening the result.
 */
public IssueQuery create(SearchRequest request) {
    try (DbSession dbSession = dbClient.openSession(false)) {
        // Fall back to the server's clock zone when the request has no explicit zone.
        final ZoneId timeZone = parseTimeZone(request.getTimeZone()).orElse(clock.getZone());

        Collection<RuleDto> ruleDtos = ruleKeysToRuleId(dbSession, request.getRules());
        Collection<String> ruleUuids = ruleDtos.stream().map(RuleDto::getUuid).collect(Collectors.toSet());
        Collection<String> issueKeys = collectIssueKeys(dbSession, request);

        // Some requested rule keys did not resolve: inject a non-matching uuid so those
        // filters exclude rather than ignore the unknown rules.
        if (request.getRules() != null && request.getRules().stream().collect(Collectors.toSet()).size() != ruleDtos.size()) {
            ruleUuids.add("non-existing-uuid");
        }

        IssueQuery.Builder builder = IssueQuery.builder()
            .issueKeys(issueKeys)
            .severities(request.getSeverities())
            .cleanCodeAttributesCategories(request.getCleanCodeAttributesCategories())
            .impactSoftwareQualities(request.getImpactSoftwareQualities())
            .impactSeverities(request.getImpactSeverities())
            .statuses(request.getStatuses())
            .resolutions(request.getResolutions())
            .issueStatuses(request.getIssueStatuses())
            .resolved(request.getResolved())
            .prioritizedRule(request.getPrioritizedRule())
            .rules(ruleDtos)
            .ruleUuids(ruleUuids)
            .assigneeUuids(request.getAssigneeUuids())
            .authors(request.getAuthors())
            .scopes(request.getScopes())
            .languages(request.getLanguages())
            .tags(request.getTags())
            .types(request.getTypes())
            .pciDss32(request.getPciDss32())
            .pciDss40(request.getPciDss40())
            .owaspAsvs40(request.getOwaspAsvs40())
            .owaspAsvsLevel(request.getOwaspAsvsLevel())
            .owaspTop10(request.getOwaspTop10())
            .owaspTop10For2021(request.getOwaspTop10For2021())
            .stigAsdR5V3(request.getStigAsdV5R3())
            .casa(request.getCasa())
            .sansTop25(request.getSansTop25())
            .cwe(request.getCwe())
            .sonarsourceSecurity(request.getSonarsourceSecurity())
            .assigned(request.getAssigned())
            .createdAt(parseStartingDateOrDateTime(request.getCreatedAt(), timeZone))
            .createdBefore(parseEndingDateOrDateTime(request.getCreatedBefore(), timeZone))
            .facetMode(request.getFacetMode())
            .timeZone(timeZone)
            .codeVariants(request.getCodeVariants());

        List<ComponentDto> allComponents = new ArrayList<>();
        boolean effectiveOnComponentOnly = mergeDeprecatedComponentParameters(dbSession, request, allComponents);
        addComponentParameters(builder, dbSession, effectiveOnComponentOnly, allComponents, request);

        setCreatedAfterFromRequest(dbSession, builder, request, allComponents, timeZone);
        String sort = request.getSort();
        if (!isNullOrEmpty(sort)) {
            builder.sort(sort);
            builder.asc(request.getAsc());
        }
        return builder.build();
    }
}
/** Unknown component keys must yield the sentinel uuid so the query matches nothing. */
@Test
public void add_unknown_when_no_component_found() {
    SearchRequest request = new SearchRequest()
        .setComponentKeys(asList("does_not_exist"));
    IssueQuery query = underTest.create(request);
    assertThat(query.componentUuids()).containsOnly("<UNKNOWN>");
}
/**
 * Returns the ids of all nodes that have at least one in-flight request which has
 * exceeded its timeout as of {@code now}.
 */
public List<String> nodesWithTimedOutRequests(long now) {
    List<String> expiredNodes = new ArrayList<>();
    for (Map.Entry<String, Deque<NetworkClient.InFlightRequest>> entry : requests.entrySet()) {
        if (hasExpiredRequest(now, entry.getValue())) {
            expiredNodes.add(entry.getKey());
        }
    }
    return expiredNodes;
}
/** Nodes must appear in the timed-out list exactly when their oldest request expires. */
@Test
public void testTimedOutNodes() {
    Time time = new MockTime();

    // A expires at +50ms; B's requests expire at +200ms and +100ms.
    addRequest("A", time.milliseconds(), 50);
    addRequest("B", time.milliseconds(), 200);
    addRequest("B", time.milliseconds(), 100);

    time.sleep(50);
    assertEquals(Collections.emptyList(), inFlightRequests.nodesWithTimedOutRequests(time.milliseconds()));
    time.sleep(25);
    assertEquals(Collections.singletonList("A"), inFlightRequests.nodesWithTimedOutRequests(time.milliseconds()));
    time.sleep(50);
    assertEquals(Arrays.asList("A", "B"), inFlightRequests.nodesWithTimedOutRequests(time.milliseconds()));
}
/**
 * Sets the named bean property via its discovered setter.
 *
 * <p>Null values are silently ignored; a missing setter or a failing assignment is
 * reported as a warning rather than an error.
 */
public void setProperty(String name, String value) {
    if (value == null) {
        return;
    }
    final Method setter = aggregationAssessor.findSetterMethod(name);
    if (setter == null) {
        addWarn("No setter for property [" + name + "] in " + objClass.getName() + ".");
        return;
    }
    try {
        setProperty(setter, value);
    } catch (PropertySetterException ex) {
        addWarn("Failed to set property [" + name + "] to value \"" + value + "\". ", ex);
    }
}
/**
 * Valid charset names must be applied; an invalid name must leave the property null and
 * record an {@link UnsupportedCharsetException} in the context status.
 */
@Test
public void charset() {
    setter.setProperty("charset", "UTF-8");
    assertEquals(Charset.forName("UTF-8"), house.getCharset());
    house.setCharset(null);
    setter.setProperty("charset", "UTF");
    assertNull(house.getCharset());
    StatusChecker checker = new StatusChecker(context);
    // BUGFIX: the boolean result was previously discarded, so this check asserted nothing.
    assertTrue(checker.containsException(UnsupportedCharsetException.class));
}
public void cancelTimer(IPollEvents sink, int id) { assert (Thread.currentThread() == worker); TimerInfo copy = new TimerInfo(sink, id); // Complexity of this operation is O(n). We assume it is rarely used. TimerInfo timerInfo = timers.find(copy); if (timerInfo != null) { // let's defer the removal during the loop timerInfo.cancelled = true; } }
/** A cancelled timer must stop contributing to the poller's timeout and leave it empty. */
@Test
public void testCancelTimer() {
    final PollerBaseTested poller = new PollerBaseTested();

    poller.addTimer(1000, sink, 1);
    long timeout = poller.executeTimers();
    assertThat(timeout, is(1000L));
    assertThat(poller.isEmpty(), is(false));

    poller.cancelTimer(sink, 1);
    timeout = poller.executeTimers();
    // No pending timers: zero timeout and an empty poller.
    assertThat(timeout, is(0L));
    assertThat(poller.isEmpty(), is(true));
}
/**
 * Wraps {@code is} so that at most {@code limit} bytes — additionally capped by the bytes
 * currently {@link InputStream#available() available} — can be consumed through the
 * returned stream. Supports mark/reset when the underlying stream does.
 *
 * <p>Fixes: the bulk {@code read} and {@code skip} previously ignored how many bytes the
 * underlying stream actually delivered, over-advancing the position and reporting bytes
 * that were never read.
 */
public static InputStream limitedInputStream(final InputStream is, final int limit) throws IOException {
    return new InputStream() {
        // Logical position within the limited window.
        private int mPosition = 0;
        private int mMark = 0;
        private final int mLimit = Math.min(limit, is.available());

        @Override
        public int read() throws IOException {
            if (mPosition < mLimit) {
                mPosition++;
                return is.read();
            }
            return -1;
        }

        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            if (b == null) {
                throw new NullPointerException();
            }

            if (off < 0 || len < 0 || len > b.length - off) {
                throw new IndexOutOfBoundsException();
            }

            if (mPosition >= mLimit) {
                return -1;
            }

            // Clamp the request to the remaining window.
            if (mPosition + len > mLimit) {
                len = mLimit - mPosition;
            }

            if (len <= 0) {
                return 0;
            }

            // BUGFIX: honour the count actually read; the underlying stream may deliver
            // fewer than len bytes (or EOF), and the position must only advance by that much.
            int read = is.read(b, off, len);
            if (read < 0) {
                return -1;
            }
            mPosition += read;
            return read;
        }

        @Override
        public long skip(long len) throws IOException {
            if (mPosition + len > mLimit) {
                len = mLimit - mPosition;
            }

            if (len <= 0) {
                return 0;
            }

            // BUGFIX: the underlying stream may skip fewer bytes than requested.
            long skipped = is.skip(len);
            mPosition += skipped;
            return skipped;
        }

        @Override
        public int available() {
            return mLimit - mPosition;
        }

        @Override
        public boolean markSupported() {
            return is.markSupported();
        }

        @Override
        public synchronized void mark(int readlimit) {
            is.mark(readlimit);
            mMark = mPosition;
        }

        @Override
        public synchronized void reset() throws IOException {
            is.reset();
            mPosition = mMark;
        }

        @Override
        public void close() throws IOException {
            is.close();
        }
    };
}
@Test void testReadEmptyByteArray() { Assertions.assertThrows(NullPointerException.class, () -> { InputStream is = StreamUtilsTest.class.getResourceAsStream("/StreamUtilsTest.txt"); try { is = StreamUtils.limitedInputStream(is, 2); is.read(null, 0, 1); } finally { if (is != null) { is.close(); } } }); }
public static <@NonNull E> CompletableSource resolveScopeFromLifecycle( final LifecycleScopeProvider<E> provider) throws OutsideScopeException { return resolveScopeFromLifecycle(provider, true); }
@Test public void resolveScopeFromLifecycle_error() { PublishSubject<Integer> lifecycle = PublishSubject.create(); TestObserver<?> o = testSource(resolveScopeFromLifecycle(lifecycle, 3)); lifecycle.onNext(0); o.assertNoErrors().assertNotComplete(); lifecycle.onNext(1); o.assertNoErrors().assertNotComplete(); // Now we end RuntimeException expected = new RuntimeException("Expected"); lifecycle.onError(expected); o.assertError(expected); }
public static Range<Integer> integerRange(String range) { return ofString(range, Integer::parseInt, Integer.class); }
@Test public void testUnboundedRangeStringIsRejected() { PostgreSQLGuavaRangeType instance = PostgreSQLGuavaRangeType.INSTANCE; assertEquals(Range.all(), instance.integerRange("(,)")); }
public boolean updateTenantCapacity(String tenant, Integer quota, Integer maxSize, Integer maxAggrCount, Integer maxAggrSize) { List<Object> argList = CollectionUtils.list(); List<String> columns = new ArrayList<>(); if (quota != null) { columns.add("quota"); argList.add(quota); } if (maxSize != null) { columns.add("max_size"); argList.add(maxSize); } if (maxAggrCount != null) { columns.add("max_aggr_count"); argList.add(maxAggrCount); } if (maxAggrSize != null) { columns.add("max_aggr_size"); argList.add(maxAggrSize); } columns.add("gmt_modified"); argList.add(TimeUtils.getCurrentTime()); List<String> where = new ArrayList<>(); where.add("tenant_id"); argList.add(tenant); TenantCapacityMapper tenantCapacityMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(), TableConstant.TENANT_CAPACITY); String sql = tenantCapacityMapper.update(columns, where); try { return jdbcTemplate.update(sql, argList.toArray()) == 1; } catch (CannotGetJdbcConnectionException e) { FATAL_LOG.error("[db-error]", e); throw e; } }
@Test void testUpdateTenantCapacity() { final MockedStatic<TimeUtils> timeUtilsMockedStatic = Mockito.mockStatic(TimeUtils.class); List<Object> argList = CollectionUtils.list(); Integer quota = 1; argList.add(quota); Integer maxSize = 2; argList.add(maxSize); Integer maxAggrCount = 3; argList.add(maxAggrCount); Integer maxAggrSize = 4; argList.add(maxAggrSize); Timestamp timestamp = new Timestamp(System.currentTimeMillis()); timeUtilsMockedStatic.when(TimeUtils::getCurrentTime).thenReturn(timestamp); argList.add(timestamp); String tenant = "test"; argList.add(tenant); when(jdbcTemplate.update(anyString(), any(Object.class))).thenAnswer((Answer<Integer>) invocationOnMock -> { if (invocationOnMock.getArgument(1).equals(quota) && invocationOnMock.getArgument(2).equals(maxSize) && invocationOnMock.getArgument(3).equals(maxAggrCount) && invocationOnMock.getArgument(4).equals(maxAggrSize) && invocationOnMock.getArgument(5).equals(timestamp) && invocationOnMock.getArgument(6).equals(tenant)) { return 1; } return 0; }); assertTrue(service.updateTenantCapacity(tenant, quota, maxSize, maxAggrCount, maxAggrSize)); timeUtilsMockedStatic.close(); }
@Override public boolean equals(Object o) { if (!(o instanceof Path)) { return false; } Path that = (Path)o; return this.uri.equals(that.uri); }
@Test (timeout = 30000) public void testEquals() { assertFalse(new Path("/").equals(new Path("/foo"))); }
public static ShenyuAdminResult success() { return success(""); }
@Test public void testSuccessWithMsg() { final ShenyuAdminResult result = ShenyuAdminResult.success("msg"); assertEquals(CommonErrorCode.SUCCESSFUL, result.getCode().intValue()); assertEquals("msg", result.getMessage()); assertNull(result.getData()); assertEquals(3582918, result.hashCode()); assertEquals("ShenyuAdminResult{code=200, message='msg', data=null}", result.toString()); }
public static Split split(String regex) { return split(Pattern.compile(regex), false); }
@Test @Category(NeedsRunner.class) public void testSplitsWithoutEmpty() { PCollection<String> output = p.apply(Create.of("The quick brown fox jumps over the lazy dog")) .apply(Regex.split("\\s", false)); PAssert.that(output) .containsInAnyOrder("The", "quick", "brown", "fox", "jumps", "over", "the", "lazy", "dog"); p.run(); }
@Override public List<TransferItem> list(final Session<?> session, final Path directory, final Local local, final ListProgressListener listener) throws BackgroundException { if(log.isDebugEnabled()) { log.debug(String.format("List children for %s", directory)); } if(directory.isSymbolicLink() && new DownloadSymlinkResolver(roots).resolve(directory)) { if(log.isDebugEnabled()) { log.debug(String.format("Do not list children for symbolic link %s", directory)); } return Collections.emptyList(); } else { final AttributedList<Path> list; if(cache.isCached(directory)) { list = cache.get(directory); } else { list = session.getFeature(ListService.class).list(directory, listener); cache.put(directory, list); } final List<TransferItem> children = new ArrayList<>(); // Return copy with filtered result only for(Path f : new AttributedList<>(list.filter(comparator, filter))) { children.add(new TransferItem(f, LocalFactory.get(local, f.getName()))); } return children; } }
@Test public void testList() throws Exception { final Path root = new Path("/t", EnumSet.of(Path.Type.directory)); Transfer t = new DownloadTransfer(new Host(new TestProtocol()), root, new NullLocal("l")); final NullSession session = new NullSession(new Host(new TestProtocol())) { @Override public AttributedList<Path> list(final Path file, final ListProgressListener listener) { final AttributedList<Path> children = new AttributedList<>(); children.add(new Path("/t/c", EnumSet.of(Path.Type.file))); return children; } }; assertEquals(Collections.singletonList(new TransferItem(new Path("/t/c", EnumSet.of(Path.Type.file)), new NullLocal("t", "c"))), t.list(session, root, new NullLocal("t") { @Override public boolean exists() { return true; } }, new DisabledListProgressListener()) ); }
public static HollowSchema parseSchema(String schema) throws IOException { StreamTokenizer tokenizer = new StreamTokenizer(new StringReader(schema)); configureTokenizer(tokenizer); return parseSchema(tokenizer); }
@Test public void parsesSetSchemaWithKey() throws IOException { String listSchema = "SetOfTypeA Set<TypeA> @HashKey(id.value);\n"; HollowSetSchema schema = (HollowSetSchema) HollowSchemaParser.parseSchema(listSchema); Assert.assertEquals("SetOfTypeA", schema.getName()); Assert.assertEquals("TypeA", schema.getElementType()); Assert.assertEquals(new PrimaryKey("TypeA", "id.value"), schema.getHashKey()); Assert.assertEquals(schema, HollowSchemaParser.parseSchema(schema.toString())); }
public int run(String[] args) throws Exception { if (args.length == 0) { System.err.println("Too few arguments!"); printUsage(); return 1; } Path path = new Path(args[0]); FileSystem fs = path.getFileSystem(getConf()); if (fs.exists(path)) { System.err.println("given path exists already!"); return -1; } TypedBytesInput tbinput = new TypedBytesInput(new DataInputStream(System.in)); SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path, TypedBytesWritable.class, TypedBytesWritable.class); try { TypedBytesWritable key = new TypedBytesWritable(); TypedBytesWritable value = new TypedBytesWritable(); byte[] rawKey = tbinput.readRaw(); while (rawKey != null) { byte[] rawValue = tbinput.readRaw(); key.set(rawKey, 0, rawKey.length); value.set(rawValue, 0, rawValue.length); writer.append(key, value); rawKey = tbinput.readRaw(); } } finally { writer.close(); } return 0; }
@Test public void testLoading() throws Exception { Configuration conf = new Configuration(); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2) .build(); FileSystem fs = cluster.getFileSystem(); ByteArrayOutputStream out = new ByteArrayOutputStream(); TypedBytesOutput tboutput = new TypedBytesOutput(new DataOutputStream(out)); for (int i = 0; i < 100; i++) { tboutput.write(new Long(i)); // key tboutput.write("" + (10 * i)); // value } InputStream isBackup = System.in; ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()); System.setIn(in); LoadTypedBytes loadtb = new LoadTypedBytes(conf); try { Path root = new Path("/typedbytestest"); assertTrue(fs.mkdirs(root)); assertTrue(fs.exists(root)); String[] args = new String[1]; args[0] = "/typedbytestest/test.seq"; int ret = loadtb.run(args); assertEquals("Return value != 0.", 0, ret); Path file = new Path(root, "test.seq"); assertTrue(fs.exists(file)); SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf); int counter = 0; TypedBytesWritable key = new TypedBytesWritable(); TypedBytesWritable value = new TypedBytesWritable(); while (reader.next(key, value)) { assertEquals(Long.class, key.getValue().getClass()); assertEquals(String.class, value.getValue().getClass()); assertTrue("Invalid record.", Integer.parseInt(value.toString()) % 10 == 0); counter++; } assertEquals("Wrong number of records.", 100, counter); } finally { try { fs.close(); } catch (Exception e) { } System.setIn(isBackup); cluster.shutdown(); } }
public static <@NonNull E> CompletableSource resolveScopeFromLifecycle( final LifecycleScopeProvider<E> provider) throws OutsideScopeException { return resolveScopeFromLifecycle(provider, true); }
@Test public void lifecycleCheckEnd_shouldFailIfEndedWithNoHandler() { TestLifecycleScopeProvider lifecycle = TestLifecycleScopeProvider.createInitial(STOPPED); try { testSource(resolveScopeFromLifecycle(lifecycle, true)); throw new AssertionError("Lifecycle resolution should have failed due to it being ended."); } catch (LifecycleEndedException ignored) { } }
@Override public void run(DiagnosticsLogWriter writer) { writer.startSection("BuildInfo"); writer.writeKeyValueEntry("Build", buildInfo.getBuild()); // we convert to string to prevent formatting the number writer.writeKeyValueEntry("BuildNumber", "" + buildInfo.getBuildNumber()); writer.writeKeyValueEntry("Revision", buildInfo.getRevision()); BuildInfo upstreamBuildInfo = buildInfo.getUpstreamBuildInfo(); if (upstreamBuildInfo != null) { writer.writeKeyValueEntry("UpstreamRevision", upstreamBuildInfo.getRevision()); } writer.writeKeyValueEntry("Version", buildInfo.getVersion()); writer.writeKeyValueEntry("SerialVersion", buildInfo.getSerializationVersion()); writer.writeKeyValueEntry("Enterprise", buildInfo.isEnterprise()); writer.endSection(); }
@Test public void test() { plugin.run(logWriter); BuildInfo buildInfo = BuildInfoProvider.getBuildInfo(); assertContains("BuildNumber=" + buildInfo.getBuildNumber()); assertContains("Build=" + buildInfo.getBuild()); assertContains("Revision=" + buildInfo.getRevision()); assertContains("Version=" + buildInfo.getVersion()); assertContains("SerialVersion=" + buildInfo.getSerializationVersion()); assertContains("Enterprise=" + buildInfo.isEnterprise()); }
@Override public Long createMailAccount(MailAccountSaveReqVO createReqVO) { MailAccountDO account = BeanUtils.toBean(createReqVO, MailAccountDO.class); mailAccountMapper.insert(account); return account.getId(); }
@Test public void testCreateMailAccount_success() { // 准备参数 MailAccountSaveReqVO reqVO = randomPojo(MailAccountSaveReqVO.class, o -> o.setMail(randomEmail())) .setId(null); // 防止 id 被赋值 // 调用 Long mailAccountId = mailAccountService.createMailAccount(reqVO); // 断言 assertNotNull(mailAccountId); // 校验记录的属性是否正确 MailAccountDO mailAccount = mailAccountMapper.selectById(mailAccountId); assertPojoEquals(reqVO, mailAccount, "id"); }
@Override public boolean remove(long key1, long key2) { return super.remove0(key1, key2); }
@Test public void testRemove() { final long key1 = randomKey(); final long key2 = randomKey(); insert(key1, key2); assertTrue(hsa.remove(key1, key2)); assertFalse(hsa.remove(key1, key2)); }
public static void runBeforeProcessing(PipelineOptions options) { // We load the logger in the method to minimize the amount of class loading that happens // during class initialization. Logger logger = LoggerFactory.getLogger(JvmInitializers.class); for (JvmInitializer initializer : ReflectHelpers.loadServicesOrdered(JvmInitializer.class)) { logger.info("Running JvmInitializer#beforeProcessing for {}", initializer); initializer.beforeProcessing(options); logger.info("Completed JvmInitializer#beforeProcessing for {}", initializer); } }
@Test public void runBeforeProcessing_runsInitializersWithOptions() { PipelineOptions options = TestPipeline.testingPipelineOptions(); JvmInitializers.runBeforeProcessing(options); assertTrue(beforeProcessingRan); assertEquals(options, receivedOptions); expectedLogs.verifyInfo("Running JvmInitializer#beforeProcessing"); }
@Override public WidgetsBundle findWidgetsBundleByTenantIdAndAlias(UUID tenantId, String alias) { return DaoUtil.getData(widgetsBundleRepository.findWidgetsBundleByTenantIdAndAlias(tenantId, alias)); }
@Test public void testFindWidgetsBundleByTenantIdAndAlias() { createSystemWidgetBundles(1, "WB_"); WidgetsBundle widgetsBundle = widgetsBundleDao.findWidgetsBundleByTenantIdAndAlias( TenantId.SYS_TENANT_ID.getId(), "WB_" + 0); widgetsBundles = List.of(widgetsBundle); assertEquals("WB_" + 0, widgetsBundle.getAlias()); }
public List<NacosServiceInstance> getInstances(String serviceId) { try { return Optional.of(nacosServiceDiscovery.getInstances(serviceId)) .map(instances -> { ServiceCache.setInstances(serviceId, instances); return instances; }).get(); } catch (NacosException e) { LOGGER.log(Level.SEVERE, String.format(Locale.ENGLISH, "getInstances failed from nacos," + "serviceId={%s},isFailureToleranceEnabled={%s}", serviceId, nacosRegisterConfig.isFailureToleranceEnabled()), e); if (nacosRegisterConfig.isFailureToleranceEnabled()) { return ServiceCache.getInstances(serviceId); } return Collections.emptyList(); } }
@Test public void testGetInstances() throws NacosException { mockNamingService(); Assert.assertNotNull(nacosClient.getInstances("test")); }
@Override public void doSubscribe(URL url, NotifyListener listener) { url = addRegistryClusterKey(url); serviceDiscovery.subscribe(url, listener); Set<String> mappingByUrl = ServiceNameMapping.getMappingByUrl(url); String key = ServiceNameMapping.buildMappingKey(url); if (mappingByUrl == null) { Lock mappingLock = serviceNameMapping.getMappingLock(key); try { mappingLock.lock(); mappingByUrl = serviceNameMapping.getMapping(url); try { MappingListener mappingListener = new DefaultMappingListener(url, mappingByUrl, listener); mappingByUrl = serviceNameMapping.getAndListen(this.getUrl(), url, mappingListener); synchronized (mappingListeners) { mappingListeners .computeIfAbsent(url.getProtocolServiceKey(), (k) -> new ConcurrentHashSet<>()) .add(mappingListener); } } catch (Exception e) { logger.warn( INTERNAL_ERROR, "", "", "Cannot find app mapping for service " + url.getServiceInterface() + ", will not migrate.", e); } if (CollectionUtils.isEmpty(mappingByUrl)) { logger.info( "No interface-apps mapping found in local cache, stop subscribing, will automatically wait for mapping listener callback: " + url); // if (check) { // throw new IllegalStateException("Should has at least one way to know which // services this interface belongs to, subscription url: " + url); // } return; } } finally { mappingLock.unlock(); } } subscribeURLs(url, listener, mappingByUrl); }
@Test void testDoSubscribe() { ApplicationModel applicationModel = spy(ApplicationModel.defaultModel()); when(applicationModel.getDefaultExtension(ServiceNameMapping.class)).thenReturn(mapping); // Exceptional case, no interface-app mapping found when(mapping.getAndListen(any(), any(), any())).thenReturn(Collections.emptySet()); // when check = false try { registryURL = registryURL.setScopeModel(applicationModel); serviceDiscoveryRegistry = new ServiceDiscoveryRegistry(registryURL, serviceDiscovery, mapping); serviceDiscoveryRegistry.doSubscribe(url, testServiceListener); } finally { registryURL = registryURL.setScopeModel(null); serviceDiscoveryRegistry.unsubscribe(url, testServiceListener); } // // when check = true URL checkURL = url.addParameter(CHECK_KEY, true); checkURL.setScopeModel(url.getApplicationModel()); // Exception exceptionShouldHappen = null; // try { // serviceDiscoveryRegistry.doSubscribe(checkURL, testServiceListener); // } catch (IllegalStateException e) { // exceptionShouldHappen = e; // } finally { // serviceDiscoveryRegistry.unsubscribe(checkURL, testServiceListener); // } // if (exceptionShouldHappen == null) { // fail(); // } // Normal case Set<String> singleApp = new HashSet<>(); singleApp.add(APP_NAME1); when(mapping.getAndListen(any(), any(), any())).thenReturn(singleApp); try { serviceDiscoveryRegistry.doSubscribe(checkURL, testServiceListener); } finally { serviceDiscoveryRegistry.unsubscribe(checkURL, testServiceListener); } // test provider case checkURL = url.addParameter(PROVIDED_BY, APP_NAME1); try { serviceDiscoveryRegistry.doSubscribe(checkURL, testServiceListener); } finally { serviceDiscoveryRegistry.unsubscribe(checkURL, testServiceListener); } }
protected String getCurrentReleaseVersion() { HttpURLConnection conn = null; try { final String str = settings.getString(Settings.KEYS.ENGINE_VERSION_CHECK_URL, "https://jeremylong.github.io/DependencyCheck/current.txt"); final URL url = new URL(str); final URLConnectionFactory factory = new URLConnectionFactory(settings); conn = factory.createHttpURLConnection(url); conn.connect(); if (conn.getResponseCode() != 200) { return null; } try (InputStream is = conn.getInputStream()) { final String releaseVersion = new String(IOUtils.toByteArray(is), StandardCharsets.UTF_8); return releaseVersion.trim(); } } catch (MalformedURLException ex) { LOGGER.debug("Unable to retrieve current release version of dependency-check - malformed url?"); } catch (URLConnectionFailureException ex) { LOGGER.debug("Unable to retrieve current release version of dependency-check - connection failed"); } catch (IOException ex) { LOGGER.debug("Unable to retrieve current release version of dependency-check - i/o exception"); } finally { if (conn != null) { conn.disconnect(); } } return null; }
@Test public void testGetCurrentReleaseVersion() { EngineVersionCheck instance = new EngineVersionCheck(getSettings()); DependencyVersion minExpResult = new DependencyVersion("1.2.6"); String release = instance.getCurrentReleaseVersion(); DependencyVersion result = new DependencyVersion(release); assertTrue(minExpResult.compareTo(result) <= 0); }
@POST @ZeppelinApi public Response createNote(String message) throws IOException { String user = authenticationService.getPrincipal(); LOGGER.info("Creating new note by JSON {}", message); NewNoteRequest request = GSON.fromJson(message, NewNoteRequest.class); String defaultInterpreterGroup = request.getDefaultInterpreterGroup(); if (StringUtils.isBlank(defaultInterpreterGroup)) { defaultInterpreterGroup = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT); } String noteId = notebookService.createNote( request.getName(), defaultInterpreterGroup, request.getAddingEmptyParagraph(), getServiceContext(), new RestServiceCallback<>()); return notebook.processNote(noteId, note -> { AuthenticationInfo subject = new AuthenticationInfo(authenticationService.getPrincipal()); if (request.getParagraphs() != null) { for (NewParagraphRequest paragraphRequest : request.getParagraphs()) { Paragraph p = note.addNewParagraph(subject); initParagraph(p, paragraphRequest, user); } } return new JsonResponse<>(Status.OK, "", note.getId()).build(); }); }
@Test void testGetReloadNote() throws IOException { LOG.info("Running testGetNote"); String note1Id = null; try { note1Id = notebook.createNote("note1", anonymous); notebook.processNote(note1Id, note1 -> { note1.addNewParagraph(AuthenticationInfo.ANONYMOUS); notebook.saveNote(note1, anonymous); return null; }); CloseableHttpResponse get = httpGet("/notebook/" + note1Id); assertThat(get, isAllowed()); Map<String, Object> resp = gson.fromJson(EntityUtils.toString(get.getEntity(), StandardCharsets.UTF_8), new TypeToken<Map<String, Object>>() {}.getType()); Map<String, Object> noteObject = (Map<String, Object>) resp.get("body"); assertEquals(1, ((List)noteObject.get("paragraphs")).size()); // add one new paragraph, but don't save it and reload it again notebook.processNote(note1Id, note1 -> { note1.addNewParagraph(AuthenticationInfo.ANONYMOUS); return null; }); get = httpGet("/notebook/" + note1Id + "?reload=true"); assertThat(get, isAllowed()); resp = gson.fromJson(EntityUtils.toString(get.getEntity(), StandardCharsets.UTF_8), new TypeToken<Map<String, Object>>() {}.getType()); noteObject = (Map<String, Object>) resp.get("body"); assertEquals(1, ((List)noteObject.get("paragraphs")).size()); get.close(); } finally { // cleanup if (null != note1Id) { notebook.removeNote(note1Id, anonymous); } } }
@PutMapping(value = "/log") @Secured(action = ActionTypes.WRITE, resource = "nacos/admin", signType = SignType.CONSOLE) public RestResult<Void> updateLog(@RequestBody LogUpdateRequest logUpdateRequest) { Loggers.setLogLevel(logUpdateRequest.getLogName(), logUpdateRequest.getLogLevel()); return RestResultUtils.success(); }
@Test void testSetLogLevel() { LogUpdateRequest request = new LogUpdateRequest(); request.setLogName("core"); request.setLogLevel("debug"); RestResult<?> res = coreOpsV2Controller.updateLog(request); assertTrue(res.ok()); assertTrue(Loggers.CORE.isDebugEnabled()); }
@SuppressWarnings("unchecked") public static <T> AgentServiceLoader<T> getServiceLoader(final Class<T> service) { return (AgentServiceLoader<T>) LOADERS.computeIfAbsent(service, AgentServiceLoader::new); }
@Test void assertGetServiceLoaderWithImplementSPI() { AgentServiceLoader<AgentServiceSPIFixture> actual = AgentServiceLoader.getServiceLoader(AgentServiceSPIFixture.class); assertThat(actual.getServices().size(), is(1)); AgentServiceSPIFixture actualInstance = actual.getServices().iterator().next(); assertThat(actualInstance, instanceOf(AgentServiceSPIFixtureImpl.class)); assertThat(actualInstance, is(AgentServiceLoader.getServiceLoader(AgentServiceSPIFixture.class).getServices().iterator().next())); }
public Map<String, DataSourceConfiguration> loadDataSourceConfigurations(final String databaseName) { return dataSourceUnitService.load(databaseName).entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.createConfiguration(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); }
@Test void assertLoadDataSourceConfigurations() { assertTrue(metaDataPersistService.loadDataSourceConfigurations("foo_db").isEmpty()); }
@Override public boolean isEnhanced() { return true; }
@Test void testIsEnhancedAlwaysTrueAsTiered() { assertTrue(service.isEnhanced()); }
@Nonnull @Override public Optional<Signature> parse( @Nullable String str, @Nonnull DetectionLocation detectionLocation) { if (str == null) { return Optional.empty(); } final String generalizedStr = str.toLowerCase().trim(); if (!generalizedStr.contains("with")) { return map(str, detectionLocation); } int hashEndPos = generalizedStr.indexOf("with"); String digestStr = str.substring(0, hashEndPos); JcaMessageDigestMapper jcaMessageDigestMapper = new JcaMessageDigestMapper(); final Optional<MessageDigest> messageDigestOptional = jcaMessageDigestMapper.parse(digestStr, detectionLocation); int encryptStartPos = hashEndPos + 4; String signatureStr = str.substring(encryptStartPos); final String format; if (generalizedStr.contains("in") && generalizedStr.contains("format")) { int inStartPos = generalizedStr.indexOf("in"); int inEndPos = inStartPos + 2; signatureStr = str.substring(encryptStartPos, inStartPos); format = str.substring(inEndPos); } else { format = null; } return map(signatureStr, detectionLocation) .map( signature -> { messageDigestOptional.ifPresent(signature::put); if (format != null) { signature.put(new OutputFormat(format, detectionLocation)); } return signature; }); }
@Test void SHA384withDSA() { DetectionLocation testDetectionLocation = new DetectionLocation("testfile", 1, 1, List.of("test"), () -> "SSL"); JcaSignatureMapper jcaSignatureMapper = new JcaSignatureMapper(); Optional<Signature> signatureOptional = jcaSignatureMapper.parse("SHA384withDSA", testDetectionLocation); assertThat(signatureOptional).isPresent(); assertThat(signatureOptional.get()).isInstanceOf(DSA.class); assertThat(signatureOptional.get().getFormat()).isEmpty(); assertThat(signatureOptional.get().getDigest()).isPresent(); MessageDigest messageDigest = signatureOptional.get().getDigest().get(); assertThat(messageDigest).isInstanceOf(SHA2.class); assertThat(messageDigest.getName()).isEqualTo("SHA384"); assertThat(messageDigest.getDigestSize()).isPresent(); assertThat(messageDigest.getDigestSize().get().getValue()).isEqualTo(384); }
public SqlType getExpressionSqlType(final Expression expression) { return getExpressionSqlType(expression, Collections.emptyMap()); }
@Test public void shouldEvaluateBooleanSchemaForNotLikeExpression() { final Expression expression = new NotExpression(new LikePredicate(COL1, new StringLiteral("%foo"), Optional.empty())); final SqlType exprType0 = expressionTypeManager.getExpressionSqlType(expression); assertThat(exprType0, is(SqlTypes.BOOLEAN)); }
@Override public String getGroupKeyString(int rowIndex, int groupKeyColumnIndex) { throw new AssertionError("No group key string for result table"); }
@Test(expectedExceptions = AssertionError.class) public void testGetGroupKeyString() { // Run the test _resultTableResultSetUnderTest.getGroupKeyString(0, 0); }
@Override public List<JreInfoRestResponse> getJresMetadata(@Nullable String os, @Nullable String arch) { Predicate<JreInfoRestResponse> osFilter = isBlank(os) ? jre -> true : (jre -> OS.from(jre.os()) == OS.from(os)); Predicate<JreInfoRestResponse> archFilter = isBlank(arch) ? jre -> true : (jre -> Arch.from(jre.arch()) == Arch.from(arch)); return metadata.values().stream() .filter(osFilter) .filter(archFilter) .toList(); }
@Test void getJresMetadata_shouldReturnEmptyList_whenNoMetadata() { List<JreInfoRestResponse> result = jresHandler.getJresMetadata("windows", "x64"); assertThat(result).isEmpty(); }
public static DynamicVoter parse(String input) { input = input.trim(); int atIndex = input.indexOf("@"); if (atIndex < 0) { throw new IllegalArgumentException("No @ found in dynamic voter string."); } if (atIndex == 0) { throw new IllegalArgumentException("Invalid @ at beginning of dynamic voter string."); } String idString = input.substring(0, atIndex); int nodeId; try { nodeId = Integer.parseInt(idString); } catch (NumberFormatException e) { throw new IllegalArgumentException("Failed to parse node id in dynamic voter string.", e); } if (nodeId < 0) { throw new IllegalArgumentException("Invalid negative node id " + nodeId + " in dynamic voter string."); } input = input.substring(atIndex + 1); if (input.isEmpty()) { throw new IllegalArgumentException("No hostname found after node id."); } String host; if (input.startsWith("[")) { int endBracketIndex = input.indexOf("]"); if (endBracketIndex < 0) { throw new IllegalArgumentException("Hostname began with left bracket, but no right " + "bracket was found."); } host = input.substring(1, endBracketIndex); input = input.substring(endBracketIndex + 1); } else { int endColonIndex = input.indexOf(":"); if (endColonIndex < 0) { throw new IllegalArgumentException("No colon following hostname could be found."); } host = input.substring(0, endColonIndex); input = input.substring(endColonIndex); } if (!input.startsWith(":")) { throw new IllegalArgumentException("Port section must start with a colon."); } input = input.substring(1); int endColonIndex = input.indexOf(":"); if (endColonIndex < 0) { throw new IllegalArgumentException("No colon following port could be found."); } String portString = input.substring(0, endColonIndex); int port; try { port = Integer.parseInt(portString); } catch (NumberFormatException e) { throw new IllegalArgumentException("Failed to parse port in dynamic voter string.", e); } if (port < 0 || port > 65535) { throw new IllegalArgumentException("Invalid port " + port + " in dynamic voter string."); } 
String directoryIdString = input.substring(endColonIndex + 1); Uuid directoryId; try { directoryId = Uuid.fromString(directoryIdString); } catch (IllegalArgumentException e) { throw new IllegalArgumentException("Failed to parse directory ID in dynamic voter string.", e); } return new DynamicVoter(directoryId, nodeId, host, port); }
@Test public void testParseDynamicVoterWithUnbalancedBrackets() { assertEquals("Hostname began with left bracket, but no right bracket was found.", assertThrows(IllegalArgumentException.class, () -> DynamicVoter.parse("5@[2001:4860:4860::8888:8020:__0IZ-0DRNazJ49kCZ1EMQ")). getMessage()); }
public void start() throws Exception { this.producerNameGenerator = new DistributedIdGenerator(pulsar.getCoordinationService(), PRODUCER_NAME_GENERATOR_PATH, pulsar.getConfiguration().getClusterName()); ServiceConfiguration serviceConfig = pulsar.getConfiguration(); List<BindAddress> bindAddresses = BindAddressValidator.validateBindAddresses(serviceConfig, Arrays.asList("pulsar", "pulsar+ssl")); String internalListenerName = serviceConfig.getInternalListenerName(); // create a channel for each bind address if (bindAddresses.size() == 0) { throw new IllegalArgumentException("At least one broker bind address must be configured"); } for (BindAddress a : bindAddresses) { InetSocketAddress addr = new InetSocketAddress(a.getAddress().getHost(), a.getAddress().getPort()); boolean isTls = "pulsar+ssl".equals(a.getAddress().getScheme()); PulsarChannelInitializer.PulsarChannelOptions opts = PulsarChannelInitializer.PulsarChannelOptions.builder() .enableTLS(isTls) .listenerName(a.getListenerName()).build(); ServerBootstrap b = defaultServerBootstrap.clone(); b.childHandler( pulsarChannelInitFactory.newPulsarChannelInitializer(pulsar, opts)); try { Channel ch = b.bind(addr).sync().channel(); listenChannels.add(ch); // identify the primary channel. Note that the legacy bindings appear first and have no listener. if (StringUtils.isBlank(a.getListenerName()) || StringUtils.equalsIgnoreCase(a.getListenerName(), internalListenerName)) { if (this.listenChannel == null && !isTls) { this.listenChannel = ch; } if (this.listenChannelTls == null && isTls) { this.listenChannelTls = ch; } } log.info("Started Pulsar Broker service on {}, TLS: {}, listener: {}", ch.localAddress(), isTls ? 
SslContext.defaultServerProvider().toString() : "(none)", StringUtils.defaultString(a.getListenerName(), "(none)")); } catch (Exception e) { throw new IOException("Failed to bind Pulsar broker on " + addr, e); } } // start other housekeeping functions this.startStatsUpdater( serviceConfig.getStatsUpdateInitialDelayInSecs(), serviceConfig.getStatsUpdateFrequencyInSecs()); this.startInactivityMonitor(); this.startMessageExpiryMonitor(); this.startCompactionMonitor(); this.startConsumedLedgersMonitor(); this.startBacklogQuotaChecker(); this.updateBrokerPublisherThrottlingMaxRate(); this.updateBrokerDispatchThrottlingMaxRate(); this.startCheckReplicationPolicies(); this.startDeduplicationSnapshotMonitor(); this.startClearInvalidateTopicNameCacheTask(); }
@Test public void shouldNotPreventCreatingTopicWhenNonexistingTopicIsCached() throws Exception { // run multiple iterations to increase the chance of reproducing a race condition in the topic cache for (int i = 0; i < 100; i++) { final String topicName = "persistent://prop/ns-abc/topic-caching-test-topic" + i; CountDownLatch latch = new CountDownLatch(1); Thread getStatsThread = new Thread(() -> { try { latch.countDown(); // create race condition with a short delay // the bug might not reproduce in all environments, this works at least on i7-10750H CPU Thread.sleep(1); admin.topics().getStats(topicName); fail("The topic should not exist yet."); } catch (PulsarAdminException.NotFoundException e) { // expected exception } catch (PulsarAdminException | InterruptedException e) { log.error("Exception in {}", Thread.currentThread().getName(), e); } }, "getStatsThread#" + i); getStatsThread.start(); latch.await(); @Cleanup Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create(); assertNotNull(producer); getStatsThread.join(); } }
/**
 * Persists a new floating IP in the router store after validating that the
 * object and its identifier are present.
 *
 * @param floatingIp floating IP to create; must be non-null with a non-empty id
 */
@Override
public void createFloatingIp(KubevirtFloatingIp floatingIp) {
    // Validate presence of the object and its identifier before storing.
    checkNotNull(floatingIp, ERR_NULL_FLOATING_IP);
    checkArgument(!Strings.isNullOrEmpty(floatingIp.id()), ERR_NULL_FLOATING_IP_ID);

    kubevirtRouterStore.createFloatingIp(floatingIp);

    final String message = String.format(MSG_FLOATING_IP, floatingIp.floatingIp(), MSG_CREATED);
    log.info(message);
}
// Creating two floating IPs with the same identifier must be rejected with
// IllegalArgumentException (the two fixtures share an id).
@Test(expected = IllegalArgumentException.class)
public void testCreateDuplicateFloatingIp() {
    target.createFloatingIp(FLOATING_IP_ASSOCIATED);
    target.createFloatingIp(FLOATING_IP_DISASSOCIATED);
}
/**
 * Resolves the {@code startAt} stream option into a BsonTimestamp.
 * Accepts "now", an epoch-milliseconds long, or an ISO-8601 instant string.
 *
 * @param options connector options map
 * @return resolved start timestamp
 * @throws com.hazelcast.jet.JetException-style QueryException if the option
 *         is missing or unparseable
 */
static BsonTimestamp startAtTimestamp(Map<String, String> options) {
    final String startAtValue = options.get(START_AT_OPTION);
    if (isNullOrEmpty(startAtValue)) {
        throw QueryException.error("startAt property is required for MongoDB stream. " + POSSIBLE_VALUES);
    }
    // Special literal: start from the current wall-clock time.
    if ("now".equalsIgnoreCase(startAtValue)) {
        return MongoUtilities.bsonTimestampFromTimeMillis(System.currentTimeMillis());
    }
    // Try epoch millis first; fall back to an ISO instant string.
    try {
        return MongoUtilities.bsonTimestampFromTimeMillis(Long.parseLong(startAtValue));
    } catch (NumberFormatException e) {
        try {
            return MongoUtilities.bsonTimestampFromTimeMillis(Instant.parse(startAtValue).toEpochMilli());
        } catch (DateTimeParseException ex) {
            throw QueryException.error("Invalid startAt value: '" + startAtValue + "'. " + POSSIBLE_VALUES);
        }
    }
}
// Verifies that an ISO date-time string with a trailing 'Z' is parsed into a
// BsonTimestamp matching the source time (second precision).
@Test
public void parses_dateTimeString_startAt() {
    // given
    long time = System.currentTimeMillis();
    LocalDateTime timeDate = LocalDateTime.ofEpochSecond(time / 1000, 0, UTC);
    String dateAsString = timeDate.format(DateTimeFormatter.ISO_DATE_TIME) + "Z";

    // when
    BsonTimestamp startAt = Options.startAtTimestamp(ImmutableMap.of(Options.START_AT_OPTION, dateAsString));

    // then
    LocalDateTime instant = LocalDateTime.ofEpochSecond(startAt.getTime(), 0, UTC);
    assertThat(instant).isEqualToIgnoringNanos(timeDate);
}
/**
 * Returns whether this URI is absolute, delegating to the wrapped
 * {@code mUri} (i.e. whether it specifies a scheme).
 */
public boolean isAbsolute() {
    return mUri.isAbsolute();
}
// Absolute means "has a scheme": file:-URIs are absolute, scheme-less
// authority or path-only URIs are not.
@Test
public void isAbsoluteTests() {
    assertTrue(new AlluxioURI("file:/a").isAbsolute());
    assertTrue(new AlluxioURI("file://localhost/a").isAbsolute());
    assertFalse(new AlluxioURI("//localhost/a").isAbsolute());
    assertFalse(new AlluxioURI("//localhost/").isAbsolute());
    assertFalse(new AlluxioURI("/").isAbsolute());
}
/**
 * Creates the IdentifiedDataSerializable registered for the given type id.
 *
 * @param typeId registry index of the requested type
 * @return a new instance, or {@code null} when the id is out of range or has
 *         no registered constructor
 */
@Override
@Nullable
public IdentifiedDataSerializable create(int typeId) {
    // Reject ids outside the constructor table bounds.
    if (typeId < 0 || typeId >= len) {
        return null;
    }
    Supplier<IdentifiedDataSerializable> ctor = constructors[typeId];
    return ctor == null ? null : ctor.get();
}
// Out-of-range ids (negative or >= table length) yield null; a registered id
// yields a fresh instance of the registered type.
@Test
public void testCreate() {
    Supplier<IdentifiedDataSerializable>[] constructorFunctions = new Supplier[1];
    constructorFunctions[0] = () -> new SampleIdentifiedDataSerializable();
    ArrayDataSerializableFactory factory = new ArrayDataSerializableFactory(constructorFunctions);
    assertNull(factory.create(-1));
    assertNull(factory.create(1));
    assertThat(factory.create(0)).isInstanceOf(SampleIdentifiedDataSerializable.class);
}
/**
 * Sets the user-defined Avro schema used when writing records.
 *
 * @param schema schema to use for output
 */
public void setSchema(Schema schema) {
    this.userDefinedSchema = schema;
}
@Test void testGenericRecord() throws IOException { final Path outputPath = new Path(File.createTempFile("avro-output-file", "generic.avro").getAbsolutePath()); final AvroOutputFormat<GenericRecord> outputFormat = new AvroOutputFormat<>(outputPath, GenericRecord.class); Schema schema = new Schema.Parser() .parse( "{\"type\":\"record\", \"name\":\"user\", \"fields\": [{\"name\":\"user_name\", \"type\":\"string\"}, {\"name\":\"favorite_number\", \"type\":\"int\"}, {\"name\":\"favorite_color\", \"type\":\"string\"}]}"); outputFormat.setWriteMode(FileSystem.WriteMode.OVERWRITE); outputFormat.setSchema(schema); output(outputFormat, schema); GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema); DataFileReader<GenericRecord> dataFileReader = new DataFileReader<>(new File(outputPath.getPath()), reader); while (dataFileReader.hasNext()) { GenericRecord record = dataFileReader.next(); assertThat(record.get("user_name").toString()).isEqualTo("testUser"); assertThat(record.get("favorite_number")).isEqualTo(1); assertThat(record.get("favorite_color").toString()).isEqualTo("blue"); } // cleanup FileSystem fs = FileSystem.getLocalFileSystem(); fs.delete(outputPath, false); }
/**
 * Checks whether the given file resolves under one of the allowed paths.
 * With no allowed paths configured, every file is permitted; a file whose
 * real path cannot be resolved is rejected.
 *
 * @param path candidate file path
 * @return true if allowed, false otherwise
 */
public boolean fileIsInAllowedPath(Path path) {
    // No restriction configured: everything is permitted.
    if (allowedPaths.isEmpty()) {
        return true;
    }
    final Path candidate = resolveRealPath(path);
    if (candidate == null) {
        // Unresolvable (e.g. nonexistent) paths are never allowed.
        return false;
    }
    for (Path allowed : allowedPaths) {
        final Path resolvedAllowed = resolveRealPath(allowed);
        if (resolvedAllowed == null) {
            continue;
        }
        if (candidate.startsWith(resolvedAllowed)) {
            return true;
        }
    }
    return false;
}
// An empty allowed-path set disables checking entirely: any file is accepted.
@Test
public void noPathsFileLocationOkNoChecksRequired() throws IOException {
    pathChecker = new AllowedAuxiliaryPathChecker(new TreeSet<>(Collections.emptySet()));
    assertTrue(pathChecker.fileIsInAllowedPath(permittedTempDir.newFile(FILE).toPath()));
}
/**
 * Converts a Gherkin DocString to the requested target type using the
 * registered docstring types.
 *
 * @param docString  the docstring to convert (returned as-is when the target
 *                   type is DocString itself)
 * @param targetType requested result type
 * @return the converted value
 * @throws CucumberDocStringException when no converter is registered, or when
 *         multiple converters match ambiguously
 */
@SuppressWarnings("unchecked")
public <T> T convert(DocString docString, Type targetType) {
    // Identity conversion: caller wants the raw DocString.
    if (DocString.class.equals(targetType)) {
        return (T) docString;
    }

    // Look up converters matching both content type and target type.
    List<DocStringType> docStringTypes = docStringTypeRegistry.lookup(docString.getContentType(), targetType);

    if (docStringTypes.isEmpty()) {
        // No converter found; the message differs when no content type was given.
        if (docString.getContentType() == null) {
            throw new CucumberDocStringException(format(
                "It appears you did not register docstring type for %s",
                targetType.getTypeName()));
        }
        throw new CucumberDocStringException(format(
            "It appears you did not register docstring type for '%s' or %s",
            docString.getContentType(),
            targetType.getTypeName()));
    }
    if (docStringTypes.size() > 1) {
        // Ambiguous match: tell the user which content types would disambiguate.
        List<String> suggestedContentTypes = suggestedContentTypes(docStringTypes);
        if (docString.getContentType() == null) {
            throw new CucumberDocStringException(format(
                "Multiple converters found for type %s, add one of the following content types to your docstring %s",
                targetType.getTypeName(),
                suggestedContentTypes));
        }
        throw new CucumberDocStringException(format(
            "Multiple converters found for type %s, and the content type '%s' did not match any of the registered types %s. Change the content type of the docstring or register a docstring type for '%s'",
            targetType.getTypeName(),
            docString.getContentType(),
            suggestedContentTypes,
            docString.getContentType()));
    }

    // Exactly one converter: apply it to the docstring content.
    return (T) docStringTypes.get(0).transform(docString.getContent());
}
// Two converters target String but neither matches content type 'json':
// the exception must list the registered content types and a remediation hint.
@Test
void throws_if_converter_type_conflicts_with_type() {
    registry.defineDocStringType(jsonNodeForJson);
    registry.defineDocStringType(stringForText);
    DocString docString = DocString.create("hello world", "json");
    CucumberDocStringException exception = assertThrows(
        CucumberDocStringException.class,
        () -> converter.convert(docString, String.class));
    assertThat(exception.getMessage(),
        is("Multiple converters found for type java.lang.String, and the content type 'json' " +
                "did not match any of the registered types [[anonymous], text]. Change the content type of the docstring " +
                "or register a docstring type for 'json'"));
}
/**
 * Deep-clones the given object, choosing the cheapest safe strategy:
 * strings are immutable and returned as-is; collections/maps whose elements
 * are not Serializable are round-tripped through JSON with a reconstructed
 * parametric type; JsonNodes use deepCopy; Serializable objects use Java
 * serialization, falling back to JSON when the object graph is not actually
 * serializable.
 *
 * @param object object to clone (may contain non-Serializable elements)
 * @return a deep copy of the object
 */
@Override
public <T> T clone(T object) {
    if (object instanceof String) {
        // Strings are immutable; no copy needed.
        return object;
    } else if (object instanceof Collection) {
        // Inspect the first non-null element to recover the element type.
        Object firstElement = findFirstNonNullElement((Collection) object);
        if (firstElement != null && !(firstElement instanceof Serializable)) {
            // Non-serializable elements: clone via JSON with an explicit parametric type.
            JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass());
            return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
        }
    } else if (object instanceof Map) {
        // Same strategy for maps, using the first non-null entry's key/value types.
        Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object);
        if (firstEntry != null) {
            Object key = firstEntry.getKey();
            Object value = firstEntry.getValue();
            if (!(key instanceof Serializable) || !(value instanceof Serializable)) {
                JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass());
                return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
            }
        }
    } else if (object instanceof JsonNode) {
        // Jackson trees provide their own deep copy.
        return (T) ((JsonNode) object).deepCopy();
    }

    if (object instanceof Serializable) {
        try {
            return (T) SerializationHelper.clone((Serializable) object);
        } catch (SerializationException e) {
            //it is possible that object itself implements java.io.Serializable, but underlying structure does not
            //in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization
        }
    }

    // Last resort: generic JSON-based clone.
    return jsonClone(object);
}
// Cloning a list of Serializable elements must yield an equal but distinct
// list instance (deep copy, not the same reference).
@Test
public void should_clone_collection_of_serializable_object() {
    List<SerializableObject> original = new ArrayList<>();
    original.add(new SerializableObject("value"));
    List<SerializableObject> cloned = serializer.clone(original);
    assertEquals(original, cloned);
    assertNotSame(original, cloned);
}
/**
 * Evaluates a field-reference template against the given event. Only the
 * concrete {@code Event} implementation is supported; any other
 * implementation of the API interface is rejected.
 *
 * @param event    the event providing field values
 * @param template the template string to interpolate
 * @return the interpolated string
 * @throws JsonProcessingException if template serialization fails
 * @throws IllegalStateException   if the event is not the concrete Event class
 */
public static String evaluate(final co.elastic.logstash.api.Event event, final String template) throws JsonProcessingException {
    // Guard clause: only the concrete Event implementation can be interpolated.
    if (!(event instanceof Event)) {
        throw new IllegalStateException("Unknown event concrete class: " + event.getClass().getName());
    }
    return evaluate((Event) event, template);
}
// A list-valued field interpolated via %{...} is rendered as its elements
// joined with commas.
@Test
public void TestValueIsArray() throws IOException {
    ArrayList<String> l = new ArrayList<>();
    l.add("Hello");
    l.add("world");

    Event event = getTestEvent();
    event.setField("message", l);

    String path = "%{message}";
    assertEquals("Hello,world", StringInterpolation.evaluate(event, path));
}
/**
 * Returns the bucket-type "location" of the container holding the given
 * file. The root path has no container and maps to the unknown location.
 *
 * @param file path whose containing bucket's type is looked up
 * @return bucket type name, or the unknown placeholder for the root
 * @throws BackgroundException if bucket attributes cannot be fetched
 */
@Override
public Name getLocation(final Path file) throws BackgroundException {
    final Path bucket = containerService.getContainer(file);
    if (bucket.isRoot()) {
        return unknown;
    }
    // The bucket type is stored as the region attribute of the bucket.
    final String region = new B2AttributesFinderFeature(session, fileid).find(bucket).getRegion();
    return new B2BucketTypeName(BucketType.valueOf(region));
}
// A freshly created bucket defaults to the "allPrivate" type; clean up the
// bucket afterwards.
@Test
public void testAllPrivate() throws Exception {
    final B2VersionIdProvider fileid = new B2VersionIdProvider(session);
    final Path bucket = new B2DirectoryFeature(session, fileid).mkdir(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
    assertEquals("allPrivate", new B2BucketTypeFeature(session, fileid).getLocation(bucket).getIdentifier());
    new B2DeleteFeature(session, fileid).delete(Collections.singletonList(bucket), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Returns this plugin's registered name, taken from the shared plugin enum.
 */
@Override
public String named() {
    return PluginEnum.REQUEST.getName();
}
/**
 * Verifies the plugin reports the name registered in PluginEnum.
 * Fixes: method-name typo (tesNamed -> testNamed) and swapped
 * expected/actual arguments in assertEquals (expected comes first).
 */
@Test
public void testNamed() {
    assertEquals(PluginEnum.REQUEST.getName(), this.requestPlugin.named());
}
/**
 * This scanner only handles {@link Field} elements.
 *
 * @param annotatedEle element to test
 * @return true when the element is a Field
 */
@Override
public boolean support(AnnotatedElement annotatedEle) {
    return annotatedEle instanceof Field;
}
// The field scanner supports Field elements and returns exactly the single
// annotation declared on Example.id.
@Test
public void getAnnotationsTest() {
    AnnotationScanner scanner = new FieldAnnotationScanner();
    Field field = ReflectUtil.getField(Example.class, "id");
    assertNotNull(field);
    assertTrue(scanner.support(field));
    List<Annotation> annotations = scanner.getAnnotations(field);
    assertEquals(1, annotations.size());
    assertEquals(AnnotationForScannerTest.class, CollUtil.getFirst(annotations).annotationType());
}
/**
 * Builds the dump file location for a project: the slugified project key
 * plus the dump extension, inside the export directory.
 *
 * @param descriptor project whose dump file is located
 * @return the export dump file
 */
@Override
public File exportDumpOf(ProjectDescriptor descriptor) {
    return new File(exportDir, slugify(descriptor.getKey()) + DUMP_FILE_EXTENSION);
}
// The export dump for a project lands under
// <data>/governance/project_dumps/export/<slugified-key>.zip.
@Test
public void exportDumpOf_is_located_in_governance_project_dump_out() {
    assertThat(underTest.exportDumpOf(projectDescriptor)).isEqualTo(new File(dataDir, "governance/project_dumps/export/project_key.zip"));
}
/**
 * Resolves public IPs for the given private addresses by calling the EC2
 * DescribeNetworkInterfaces API. On any failure the call degrades
 * gracefully: every private address is mapped to {@code null} (no public
 * IP), and a warning is logged only once per instance.
 *
 * @param privateAddresses private IPs of ECS tasks; empty input short-circuits
 * @param credentials      AWS credentials used to sign the request
 * @return map of private address to public address (values may be null)
 */
Map<String, String> describeNetworkInterfaces(List<String> privateAddresses, AwsCredentials credentials) {
    if (privateAddresses.isEmpty()) {
        return Collections.emptyMap();
    }
    try {
        // Build, sign, and issue the DescribeNetworkInterfaces request.
        Map<String, String> attributes = createAttributesDescribeNetworkInterfaces(privateAddresses);
        Map<String, String> headers = createHeaders(attributes, credentials);
        String response = callAwsService(attributes, headers);
        return parseDescribeNetworkInterfaces(response);
    } catch (Exception e) {
        LOGGER.finest(e);
        // Log warning only once.
        if (!isNoPublicIpAlreadyLogged) {
            LOGGER.warning("Cannot fetch the public IPs of ECS Tasks. You won't be able to use "
                + "Hazelcast Smart Client from outside of this VPC.");
            isNoPublicIpAlreadyLogged = true;
        }
        // Fall back to "no public IP known" for every requested address.
        Map<String, String> map = new HashMap<>();
        privateAddresses.forEach(k -> map.put(k, null));
        return map;
    }
}
// When the API response contains no association/public-IP data, every
// requested private address must still appear in the result, mapped to null.
@Test
public void describeNetworkInterfacesNoPublicIp() {
    // given
    List<String> privateAddresses = asList("10.0.1.207", "10.0.1.82");
    String requestUrl = "/?Action=DescribeNetworkInterfaces"
        + "&Filter.1.Name=addresses.private-ip-address"
        + "&Filter.1.Value.1=10.0.1.207"
        + "&Filter.1.Value.2=10.0.1.82"
        + "&Version=2016-11-15";
    //language=XML
    String response = """
            <?xml version="1.0" encoding="UTF-8"?>
            <DescribeNetworkInterfacesResponse xmlns="http://ec2.amazonaws.com/doc/2016-11-15/">
                <networkInterfaceSet>
                    <item>
                        <privateIpAddress>10.0.1.207</privateIpAddress>
                    </item>
                    <item>
                        <privateIpAddress>10.0.1.82</privateIpAddress>
                    </item>
                </networkInterfaceSet>
            </DescribeNetworkInterfacesResponse>""";
    stubFor(get(urlEqualTo(requestUrl))
        .withHeader("X-Amz-Date", equalTo("20200403T102518Z"))
        .withHeader("Authorization", equalTo(AUTHORIZATION_HEADER))
        .withHeader("X-Amz-Security-Token", equalTo(TOKEN))
        .willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK).withBody(response)));

    // when
    Map<String, String> result = awsEc2Api.describeNetworkInterfaces(privateAddresses, CREDENTIALS);

    // then
    assertEquals(2, result.size());
    assertTrue(result.containsKey("10.0.1.207"));
    assertNull(result.get("10.0.1.207"));
    assertTrue(result.containsKey("10.0.1.82"));
    assertNull(result.get("10.0.1.82"));
}
/**
 * Orders optionals with empty values first; two present values compare by
 * their natural String order.
 *
 * @param a first optional
 * @param b second optional
 * @return negative/zero/positive per Comparator contract
 */
@Override
public int compare(Optional<String> a, Optional<String> b) {
    final boolean aPresent = a.isPresent();
    final boolean bPresent = b.isPresent();
    if (aPresent && bPresent) {
        return a.get().compareTo(b.get());
    }
    // Empty sorts before present: false < true under Boolean.compare.
    return Boolean.compare(aPresent, bPresent);
}
// Exercises all comparator branches: equal values, natural order, and the
// empty-sorts-first convention (empty < present; empty == empty).
@Test
public void testComparisons() {
    assertEquals(0, INSTANCE.compare(Optional.of("foo"), Optional.of("foo")));
    assertEquals(-1, INSTANCE.compare(Optional.of("a"), Optional.of("b")));
    assertEquals(1, INSTANCE.compare(Optional.of("b"), Optional.of("a")));
    assertEquals(-1, INSTANCE.compare(Optional.empty(), Optional.of("a")));
    assertEquals(1, INSTANCE.compare(Optional.of("a"), Optional.empty()));
    assertEquals(0, INSTANCE.compare(Optional.empty(), Optional.empty()));
}
/**
 * Config setter mapping the "function-implementation-type" property to the
 * enum, case-insensitively.
 *
 * Fix: use Locale.ROOT for the upper-casing so parsing is stable regardless
 * of the JVM default locale (e.g. the Turkish locale upper-cases 'i' to a
 * dotted capital, which would break {@code valueOf}).
 *
 * @param implementationType enum constant name, any case
 * @return this config for chaining
 */
@Config("function-implementation-type")
public SqlFunctionLanguageConfig setFunctionImplementationType(String implementationType) {
    this.functionImplementationType =
            FunctionImplementationType.valueOf(implementationType.toUpperCase(java.util.Locale.ROOT));
    return this;
}
// The "CPP" property value maps onto the CPP implementation type.
@Test
public void testCPPType() {
    Map<String, String> properties = new ImmutableMap.Builder<String, String>()
        .put("function-implementation-type", "CPP")
        .build();
    SqlFunctionLanguageConfig expected = new SqlFunctionLanguageConfig()
        .setFunctionImplementationType("CPP");
    assertFullMapping(properties, expected);
}
/**
 * Builds the plain-text email sent when built-in quality profiles change.
 * Each profile (sorted by language then name) gets a changelog URL plus
 * counts of new/updated/removed rules; ends with a link to the profiles
 * page.
 *
 * @param notification notification to format
 * @return the email, or null when the notification type is not handled
 */
@Override
@CheckForNull
public EmailMessage format(Notification notification) {
    // This formatter only handles built-in QP change notifications.
    if (!BuiltInQPChangeNotification.TYPE.equals(notification.getType())) {
        return null;
    }
    BuiltInQPChangeNotificationBuilder profilesNotification = parse(notification);
    StringBuilder message = new StringBuilder("The following built-in profiles have been updated:\n\n");
    profilesNotification.getProfiles().stream()
        .sorted(Comparator.comparing(Profile::getLanguageName).thenComparing(Profile::getProfileName))
        .forEach(profile -> {
            // Changelog URL scoped by language, profile name, and the change window.
            message.append("\"")
                .append(profile.getProfileName())
                .append("\" - ")
                .append(profile.getLanguageName())
                .append(": ")
                .append(server.getPublicRootUrl()).append("/profiles/changelog?language=")
                .append(profile.getLanguageKey())
                .append("&name=")
                .append(encode(profile.getProfileName()))
                .append("&since=")
                .append(formatDate(new Date(profile.getStartDate())))
                .append("&to=")
                .append(formatDate(new Date(profile.getEndDate())))
                .append("\n");
            // Rule-change counts are only mentioned when non-zero.
            int newRules = profile.getNewRules();
            if (newRules > 0) {
                message.append(" ").append(newRules).append(" new rule")
                    .append(plural(newRules))
                    .append('\n');
            }
            int updatedRules = profile.getUpdatedRules();
            if (updatedRules > 0) {
                message.append(" ").append(updatedRules).append(" rule")
                    .append(updatedRules > 1 ? "s have been updated" : " has been updated")
                    .append("\n");
            }
            int removedRules = profile.getRemovedRules();
            if (removedRules > 0) {
                message.append(" ").append(removedRules).append(" rule")
                    .append(plural(removedRules))
                    .append(" removed\n");
            }
            message.append("\n");
        });
    message.append("This is a good time to review your quality profiles and update them to benefit from the latest evolutions: ");
    message.append(server.getPublicRootUrl()).append("/profiles");

    // And finally return the email that will be sent
    return new EmailMessage()
        .setMessageId(BuiltInQPChangeNotification.TYPE)
        .setSubject("Built-in quality profiles have been updated")
        .setPlainTextMessage(message.toString());
}
// The changelog link in the email must carry both the since and to dates of
// the profile's change window.
@Test
public void notification_contains_from_and_to_date() {
    String profileName = newProfileName();
    String languageKey = newLanguageKey();
    String languageName = newLanguageName();
    long startDate = 1_000_000_000_000L;
    long endDate = startDate + 1_100_000_000_000L;
    BuiltInQPChangeNotificationBuilder notification = new BuiltInQPChangeNotificationBuilder()
        .addProfile(Profile.newBuilder()
            .setProfileName(profileName)
            .setLanguageKey(languageKey)
            .setLanguageName(languageName)
            .setStartDate(startDate)
            .setEndDate(endDate)
            .build());

    EmailMessage emailMessage = underTest.format(notification.build());

    assertMessage(emailMessage,
        profileTitleText(profileName, languageKey, languageName, formatDate(new Date(startDate)), formatDate(new Date(endDate))));
}
/**
 * Encodes a decimal value to its Slice representation at exactly the given
 * scale. Rescaling uses RoundingMode.UNNECESSARY, so a value that cannot be
 * represented exactly at {@code scale} throws ArithmeticException.
 *
 * @param value decimal value to encode
 * @param scale target scale; must be non-negative
 * @return encoded slice
 */
public static Slice encodeScaledValue(BigDecimal value, int scale) {
    checkArgument(scale >= 0);
    final BigDecimal rescaled = value.setScale(scale, UNNECESSARY);
    return encodeScaledValue(rescaled);
}
// Checks little-endian byte layout of the encoded unscaled value at scale 2,
// including rescaling from lower scales and the sign bit in the top byte.
@Test
public void testEncodeScaledValue() {
    assertEquals(encodeScaledValue(new BigDecimal("2.00"), 2),
        sliceFromBytes(200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
    assertEquals(encodeScaledValue(new BigDecimal("2.13"), 2),
        sliceFromBytes(213, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
    assertEquals(encodeScaledValue(new BigDecimal("172.60"), 2),
        sliceFromBytes(108, 67, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
    // Values at a smaller scale are rescaled up before encoding.
    assertEquals(encodeScaledValue(new BigDecimal("2"), 2),
        sliceFromBytes(200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));
    assertEquals(encodeScaledValue(new BigDecimal("172.6"), 2),
        sliceFromBytes(108, 67, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0));

    // Negative values carry a sign flag in the most significant byte.
    int minus = 0x80;
    assertEquals(encodeScaledValue(new BigDecimal("-2.00"), 2),
        sliceFromBytes(200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, minus));
    assertEquals(encodeScaledValue(new BigDecimal("-2.13"), 2),
        sliceFromBytes(213, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, minus));
    assertEquals(encodeScaledValue(new BigDecimal("-2"), 2),
        sliceFromBytes(200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, minus));
    assertEquals(encodeScaledValue(new BigDecimal("-172.60"), 2),
        sliceFromBytes(108, 67, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, minus));
}
@Override public AppResponse process(Flow flow, AppSessionRequest request) { Map<String, Object> result = new HashMap<>(digidClient.getAccountRequestGbaStatus(appSession.getRegistrationId())); if (result.get(lowerUnderscore(STATUS)).equals("OK")) { return new OkResponse(); } else if (result.get(lowerUnderscore(STATUS)).equals("PENDING")) { setValid(false); // gba check in progress, do not transition to next step yet return new StatusResponse("PENDING"); } else { if (result.get(lowerUnderscore(STATUS)).equals("NOK") && result.get(ERROR) != null) { return new PollBrpResponse((String) result.get(ERROR), result); } return new NokResponse(); } }
// An "OK" status from the client must yield an OkResponse carrying "OK".
@Test
void processOKTest() {
    when(digidClientMock.getAccountRequestGbaStatus(1337L)).thenReturn(Map.of(
        lowerUnderscore(STATUS), "OK"
    ));

    AppResponse appResponse = pollBrp.process(flowMock, null);

    assertTrue(appResponse instanceof OkResponse);
    assertEquals("OK", ((OkResponse) appResponse).getStatus());
}
@Override public String addResource(String key, String fileName) { String interned = intern(key); synchronized (interned) { SharedCacheResource resource = cachedResources.get(interned); if (resource == null) { resource = new SharedCacheResource(fileName); cachedResources.put(interned, resource); } return resource.getFileName(); } }
// Races several threads adding different file names under one key; all
// callers must observe the same winning file name (first-registration-wins).
@Test
void testAddResourceConcurrency() throws Exception {
    startEmptyStore();
    final String key = "key1";
    int count = 5;
    ExecutorService exec = HadoopExecutors.newFixedThreadPool(count);
    List<Future<String>> futures = new ArrayList<Future<String>>(count);
    final CountDownLatch start = new CountDownLatch(1);

    for (int i = 0; i < count; i++) {
        final String fileName = "foo-" + i + ".jar";
        Callable<String> task = new Callable<String>() {
            public String call() throws Exception {
                // Block until all tasks are submitted, to maximize contention.
                start.await();
                String result = store.addResource(key, fileName);
                System.out.println("fileName: " + fileName + ", result: " + result);
                return result;
            }
        };
        futures.add(exec.submit(task));
    }
    // start them all at the same time
    start.countDown();
    // check the result; they should all agree with the value
    Set<String> results = new HashSet<String>();
    for (Future<String> future : futures) {
        results.add(future.get());
    }
    assertSame(1, results.size());
    exec.shutdown();
}
/**
 * Shows a long toast for the given string resource on the application's UI
 * thread. Accepts any Context and normalizes it to the Application context;
 * a null context is silently ignored.
 *
 * @param context any context (activity, service, application); may be null
 * @param message string resource id to display
 */
public static void toast(Context context, @StringRes int message) {
    // this is a static method so it is easier to call,
    // as the context checking and casting is done for you
    if (context == null) return;
    // Normalize to the Application context so the toast outlives the caller.
    if (!(context instanceof Application)) {
        context = context.getApplicationContext();
    }
    // Re-check: getApplicationContext() may not return an Application in
    // every environment; only show the toast when it does.
    if (context instanceof Application) {
        final Context c = context;
        final @StringRes int m = message;
        getInstance().runInApplicationThread(() -> Toast.makeText(c, m, Toast.LENGTH_LONG).show());
    }
}
// Posting a toast from the application thread must eventually surface the
// given text; idle the main looper and await the toast's appearance.
@Test
public void testToastWithString() {
    AppConfig.toast(ApplicationProvider.getApplicationContext(), "Hello world");
    shadowOf(getMainLooper()).idle();
    await().atMost(5, TimeUnit.SECONDS).until(() -> ShadowToast.getLatestToast() != null);
    assertEquals("Hello world", ShadowToast.getTextOfLatestToast());
}