focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/** Returns the hex-encoded MD5 digest of {@code data}; thin delegate to Commons Codec DigestUtils. */
public static String md5Hex(final String data) { return org.apache.commons.codec.digest.DigestUtils.md5Hex(data); }
// Test: md5Hex("123456") yields the well-known MD5 hex digest of that string.
@Test public void testMd5Hex() { final String md5 = "e10adc3949ba59abbe56e057f20f883e"; assertEquals(md5, DigestUtils.md5Hex("123456")); }
/**
 * Provider-side concurrency limiter: rejects the call with LIMIT_EXCEEDED_EXCEPTION when the
 * number of in-flight executions for this method has already reached the configured
 * "executes" maximum. On admission, records the start time in the invocation; the matching
 * endCount / elapsed-time bookkeeping is presumably handled by a response listener elsewhere
 * in the filter — TODO confirm. RuntimeExceptions from the downstream invoker are rethrown
 * as-is; any other Throwable is wrapped in an RpcException.
 */
@Override public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException { URL url = invoker.getUrl(); String methodName = RpcUtils.getMethodName(invocation); int max = url.getMethodParameter(methodName, EXECUTES_KEY, 0); if (!RpcStatus.beginCount(url, methodName, max)) { throw new RpcException( RpcException.LIMIT_EXCEEDED_EXCEPTION, "Failed to invoke method " + RpcUtils.getMethodName(invocation) + " in provider " + url + ", cause: The service using threads greater than <dubbo:service executes=\"" + max + "\" /> limited."); } invocation.put(EXECUTE_LIMIT_FILTER_START_TIME, System.currentTimeMillis()); try { return invoker.invoke(invocation); } catch (Throwable t) { if (t instanceof RuntimeException) { throw (RuntimeException) t; } else { throw new RpcException("unexpected exception when ExecuteLimitFilter", t); } } }
// Test: with executes=10 and no concurrent load, the invocation passes through and returns the invoker's result.
@Test void testExecuteLimitInvoke() { Invoker invoker = Mockito.mock(Invoker.class); when(invoker.invoke(any(Invocation.class))).thenReturn(new AppResponse("result")); when(invoker.getUrl()) .thenReturn(URL.valueOf("test://test:11/test?accesslog=true&group=dubbo&version=1.1&executes=10")); Invocation invocation = Mockito.mock(Invocation.class); when(invocation.getMethodName()).thenReturn("testExecuteLimitInvoke"); Result result = executeLimitFilter.invoke(invoker, invocation); Assertions.assertEquals("result", result.getValue()); }
/**
 * Sends {@code value} through the operation's response handler. When no handler is registered,
 * the value is only logged — with the stack trace when it is a Throwable — so a missing handler
 * never causes an NPE.
 */
public final void sendResponse(Object value) { OperationResponseHandler responseHandler = getOperationResponseHandler(); if (responseHandler == null) { if (value instanceof Throwable throwable) { // in case of a throwable, we want the stacktrace. getLogger().warning("Missing responseHandler for " + toString(), throwable); } else { getLogger().warning("Missing responseHandler for " + toString() + " value[" + value + "]"); } } else { responseHandler.sendResponse(this, value); } }
// Test: sending a non-Throwable value with no response handler must not throw (logged only).
@Test public void sendResponse_whenResponseHandlerIsNull_andNoThrowableValue_thenNoNPE() { Operation op = new DummyOperation(); op.sendResponse("foo"); }
/** Creates a KsqlTarget for {@code server} with no additional request properties. */
public KsqlTarget target(final URI server) { return target(server, Collections.emptyMap()); }
// Test: properties set via target(...).properties(props) are sent as config overrides on the KSQL request.
@Test public void shouldOverrideProperties() { // Given: setupExpectedResponse(); Map<String, Object> props = new HashMap<>(); props.put("enable.auto.commit", true); // When: KsqlTarget target = ksqlClient.target(serverUri).properties(props); target.postKsqlRequest("some ksql", Collections.emptyMap(), Optional.of(123L)); // Then: assertThat(getKsqlRequest().getConfigOverrides(), is(props)); }
/**
 * Reads a single byte. Returns -1 when the remote object is empty, the read position is at or
 * past EOF, or the prefetch buffer for the current position cannot be made available.
 * Otherwise advances the position, records one byte read in the statistics, and returns the
 * byte as an int in 0-255.
 * @throws IOException if the stream has been closed
 */
@Override public int read() throws IOException { throwIfClosed(); if (remoteObject.size() == 0 || nextReadPos >= remoteObject.size()) { return -1; } if (!ensureCurrentBuffer()) { return -1; } nextReadPos++; incrementBytesRead(1); return fpos.buffer().get() & 0xff; }
// Test: byte-by-byte read works for both the in-memory and the caching prefetch stream variants.
@Test public void testRead() throws Exception { S3ARemoteInputStream inputStream = S3APrefetchFakes.createS3InMemoryInputStream(futurePool, "bucket", "key", FILE_SIZE); testReadHelper(inputStream, FILE_SIZE); inputStream = S3APrefetchFakes.createS3CachingInputStream(futurePool, "bucket", "key", FILE_SIZE, 5, 2); testReadHelper(inputStream, 5); }
/**
 * Determines the output row metadata of this table-input step by asking the database for the
 * query fields of the (optionally variable-substituted) SQL.
 * Strategy: first try without connecting (fast path); if that yields nothing, connect and
 * retry, supplying parameter row metadata from the first info stream when one is configured.
 * When lazy conversion is active, every String field is switched to binary-string storage with
 * a cloned NORMAL-storage metadata attached as its storage metadata.
 * Returns silently when no database metadata is set or when a cached row meta is active
 * (the in-code TODO notes the silent return should arguably be an exception).
 * @throws KettleStepException when the query fields cannot be determined
 */
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { if ( databaseMeta == null ) { return; // TODO: throw an exception here } if ( cachedRowMetaActive ) { row.addRowMeta( cachedRowMeta ); return; } boolean param = false; Database db = getDatabase(); super.databases = new Database[] { db }; // keep track of it for canceling purposes... // First try without connecting to the database... (can be S L O W) String sNewSQL = sql; if ( isVariableReplacementActive() ) { sNewSQL = db.environmentSubstitute( sql ); if ( space != null ) { sNewSQL = space.environmentSubstitute( sNewSQL ); } } RowMetaInterface add = null; try { add = db.getQueryFields( sNewSQL, param ); } catch ( KettleDatabaseException dbe ) { throw new KettleStepException( "Unable to get queryfields for SQL: " + Const.CR + sNewSQL, dbe ); } if ( add != null ) { attachOrigin( add, origin ); row.addRowMeta( add ); } else { try { db.connect(); RowMetaInterface paramRowMeta = null; Object[] paramData = null; StreamInterface infoStream = getStepIOMeta().getInfoStreams().get( 0 ); if ( !Utils.isEmpty( infoStream.getStepname() ) ) { param = true; if ( info.length > 0 && info[ 0 ] != null ) { paramRowMeta = info[ 0 ]; paramData = RowDataUtil.allocateRowData( paramRowMeta.size() ); } } add = db.getQueryFields( sNewSQL, param, paramRowMeta, paramData ); if ( add == null ) { return; } attachOrigin( add, origin ); row.addRowMeta( add ); } catch ( KettleException ke ) { throw new KettleStepException( "Unable to get queryfields for SQL: " + Const.CR + sNewSQL, ke ); } finally { db.disconnect(); } } if ( isLazyConversionActive() ) { for ( int i = 0; i < row.size(); i++ ) { ValueMetaInterface v = row.getValueMeta( i ); try { if ( v.getType() == ValueMetaInterface.TYPE_STRING ) { ValueMetaInterface storageMeta = ValueMetaFactory.cloneValueMeta( v ); storageMeta.setStorageType( 
ValueMetaInterface.STORAGE_TYPE_NORMAL ); v.setStorageMetadata( storageMeta ); v.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING ); } } catch ( KettlePluginException e ) { throw new KettleStepException( "Unable to clone meta for lazy conversion: " + Const.CR + v, e ); } } } }
// Test: with lazy conversion active, getFields queries the DB once and rewrites String fields to binary-string storage.
@Test public void testGetFields() throws Exception { TableInputMetaHandler meta = new TableInputMetaHandler(); meta.setLazyConversionActive( true ); DatabaseMeta dbMeta = mock( DatabaseMeta.class ); meta.setDatabaseMeta( dbMeta ); Database mockDB = meta.getDatabase(); when( mockDB.getQueryFields( anyString(), anyBoolean() ) ).thenReturn( createMockFields() ); RowMetaInterface expectedRowMeta = new RowMeta(); ValueMetaInterface valueMeta = new ValueMetaString( "field1" ); valueMeta.setStorageMetadata( new ValueMetaString( "field1" ) ); valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING ); expectedRowMeta.addValueMeta( valueMeta ); VariableSpace space = mock( VariableSpace.class ); RowMetaInterface rowMetaInterface = new RowMeta(); meta.getFields( rowMetaInterface, "TABLE_INPUT_META", null, null, space, null, null ); verify( mockDB).getQueryFields( any(), anyBoolean() ) ; }
/**
 * Recommends the aggregate-metrics setting, but only for tables that have a realtime
 * component (REALTIME or HYBRID); offline-only tables are left untouched.
 */
@Override
public void run() throws InvalidInputException {
    final String tableType = _input.getTableType();
    final boolean hasRealtimeComponent =
        tableType.equalsIgnoreCase(REALTIME) || tableType.equalsIgnoreCase(HYBRID);
    if (hasRealtimeComponent) {
        _output.setAggregateMetrics(shouldAggregate(_input));
    }
}
// Test: a query summing a complex expression over metric columns still triggers the aggregate-metrics recommendation.
@Test public void testRunComplexExpressionInSumWithMetricColumns() throws Exception { Set<String> metrics = ImmutableSet.of("a", "b", "c"); InputManager input = createInput(metrics, "select sum(a), sum(b), sum(2 * a + 3 * b + c) from tableT"); ConfigManager output = new ConfigManager(); AggregateMetricsRule rule = new AggregateMetricsRule(input, output); rule.run(); assertTrue(output.isAggregateMetrics()); }
/**
 * Returns this property's value parsed as a long.
 * @throws IllegalStateException if the property type is neither INTEGER nor LONG
 * @throws NumberFormatException if the stored value is not a valid long literal
 */
public long asLong() {
    final boolean numeric = type == Type.INTEGER || type == Type.LONG;
    checkState(numeric, "Value is not a long or integer");
    return Long.parseLong(value);
}
// Test: a LONG-typed property with value "123" round-trips through asLong() as 123L.
@Test public void asLong() { ConfigProperty p = defineProperty("foo", LONG, "123", "Foo Prop"); validate(p, "foo", LONG, "123", "123"); assertEquals("incorrect value", 123L, p.asLong()); }
/**
 * Adapts a GlobalCombineFn to the CombineFnWithContext interface.
 * If the argument already is a CombineFnWithContext it is returned unchanged; otherwise the
 * plain CombineFn is wrapped in an anonymous adapter whose context-taking methods ignore the
 * Context and delegate to the context-free equivalents, forwarding accumulator/output coders,
 * the default value, and display data (after the superclass contribution).
 */
public static <InputT, AccumT, OutputT> CombineFnWithContext<InputT, AccumT, OutputT> toFnWithContext( GlobalCombineFn<InputT, AccumT, OutputT> globalCombineFn) { if (globalCombineFn instanceof CombineFnWithContext) { @SuppressWarnings("unchecked") CombineFnWithContext<InputT, AccumT, OutputT> combineFnWithContext = (CombineFnWithContext<InputT, AccumT, OutputT>) globalCombineFn; return combineFnWithContext; } else { @SuppressWarnings("unchecked") final CombineFn<InputT, AccumT, OutputT> combineFn = (CombineFn<InputT, AccumT, OutputT>) globalCombineFn; return new CombineFnWithContext<InputT, AccumT, OutputT>() { @Override public AccumT createAccumulator(Context c) { return combineFn.createAccumulator(); } @Override public AccumT addInput(AccumT accumulator, InputT input, Context c) { return combineFn.addInput(accumulator, input); } @Override public AccumT mergeAccumulators(Iterable<AccumT> accumulators, Context c) { return combineFn.mergeAccumulators(accumulators); } @Override public OutputT extractOutput(AccumT accumulator, Context c) { return combineFn.extractOutput(accumulator); } @Override public AccumT compact(AccumT accumulator, Context c) { return combineFn.compact(accumulator); } @Override public OutputT defaultValue() { return combineFn.defaultValue(); } @Override public Coder<AccumT> getAccumulatorCoder(CoderRegistry registry, Coder<InputT> inputCoder) throws CannotProvideCoderException { return combineFn.getAccumulatorCoder(registry, inputCoder); } @Override public Coder<OutputT> getDefaultOutputCoder( CoderRegistry registry, Coder<InputT> inputCoder) throws CannotProvideCoderException { return combineFn.getDefaultOutputCoder(registry, inputCoder); } @Override public void populateDisplayData(DisplayData.Builder builder) { super.populateDisplayData(builder); combineFn.populateDisplayData(builder); } }; } }
// Test: the adapted Sum.ofIntegers() combine fn accumulates 1..4 to 10 through the context-taking API.
@Test public void testToFnWithContext() throws Exception { CombineFnWithContext<Integer, int[], Integer> fnWithContext = CombineFnUtil.toFnWithContext(Sum.ofIntegers()); List<Integer> inputs = ImmutableList.of(1, 2, 3, 4); Context nullContext = CombineContextFactory.nullContext(); int[] accum = fnWithContext.createAccumulator(nullContext); for (Integer i : inputs) { accum = fnWithContext.addInput(accum, i, nullContext); } assertEquals(10, fnWithContext.extractOutput(accum, nullContext).intValue()); }
/**
 * Returns the value metadata at {@code index}, or {@code null} when the index is out of
 * range. Reads under the row-meta read lock so concurrent writers cannot corrupt the lookup.
 */
@Override
public ValueMetaInterface getValueMeta( int index ) {
  lock.readLock().lock();
  try {
    final boolean inRange = index >= 0 && index < valueMetaList.size();
    return inRange ? valueMetaList.get( index ) : null;
  } finally {
    lock.readLock().unlock();
  }
}
// Test: getValueMeta(1) returns the second inserted value meta (insertion order set up in @Before).
@Test public void testGetValueMeta() { // see before method insertion order. assertEquals( rowMeta.getValueMeta( 1 ), integer ); }
/**
 * Initializes this FileIO from catalog properties: snapshots the properties, derives S3
 * settings, and optionally records the creation stack trace (property
 * "init-creation-stacktrace", default true) to help diagnose unclosed instances.
 * A client factory is consulted only when no S3 client was injected. NOTE(review): when the
 * factory implements both S3FileIOAwsClientFactory and AwsClientFactory, the second
 * assignment (AwsClientFactory) wins — presumably intentional precedence, TODO confirm.
 * Optionally preloads the client, then always initializes metrics.
 */
@Override public void initialize(Map<String, String> props) { this.properties = SerializableMap.copyOf(props); this.s3FileIOProperties = new S3FileIOProperties(properties); this.createStack = PropertyUtil.propertyAsBoolean(props, "init-creation-stacktrace", true) ? Thread.currentThread().getStackTrace() : null; // Do not override s3 client if it was provided if (s3 == null) { Object clientFactory = S3FileIOAwsClientFactories.initialize(props); if (clientFactory instanceof S3FileIOAwsClientFactory) { this.s3 = ((S3FileIOAwsClientFactory) clientFactory)::s3; } if (clientFactory instanceof AwsClientFactory) { this.s3 = ((AwsClientFactory) clientFactory)::s3; } if (clientFactory instanceof CredentialSupplier) { this.credential = ((CredentialSupplier) clientFactory).getCredential(); } if (s3FileIOProperties.isPreloadClientEnabled()) { client(); } } initMetrics(properties); }
// Test: ResolvingFileIO resolves an s3:// location to an S3FileIO implementation.
@Test public void testResolvingFileIOLoad() { ResolvingFileIO resolvingFileIO = new ResolvingFileIO(); resolvingFileIO.setConf(new Configuration()); resolvingFileIO.initialize(ImmutableMap.of()); FileIO result = DynMethods.builder("io") .hiddenImpl(ResolvingFileIO.class, String.class) .build(resolvingFileIO) .invoke("s3://foo/bar"); assertThat(result).isInstanceOf(S3FileIO.class); }
/**
 * Resolves ${provider:path:key}-style variables in the given configs.
 * Pass 1 collects referenced keys grouped by provider name and path (null config values are
 * skipped); pass 2 fetches the values from the registered ConfigProviders, recording
 * non-negative TTLs per path; pass 3 substitutes the looked-up values into every config value.
 * Values whose provider is unknown are left for {@code replace} to handle; null values pass
 * through unchanged.
 */
public ConfigTransformerResult transform(Map<String, String> configs) { Map<String, Map<String, Set<String>>> keysByProvider = new HashMap<>(); Map<String, Map<String, Map<String, String>>> lookupsByProvider = new HashMap<>(); // Collect the variables from the given configs that need transformation for (Map.Entry<String, String> config : configs.entrySet()) { if (config.getValue() != null) { List<ConfigVariable> configVars = getVars(config.getValue(), DEFAULT_PATTERN); for (ConfigVariable configVar : configVars) { Map<String, Set<String>> keysByPath = keysByProvider.computeIfAbsent(configVar.providerName, k -> new HashMap<>()); Set<String> keys = keysByPath.computeIfAbsent(configVar.path, k -> new HashSet<>()); keys.add(configVar.variable); } } } // Retrieve requested variables from the ConfigProviders Map<String, Long> ttls = new HashMap<>(); for (Map.Entry<String, Map<String, Set<String>>> entry : keysByProvider.entrySet()) { String providerName = entry.getKey(); ConfigProvider provider = configProviders.get(providerName); Map<String, Set<String>> keysByPath = entry.getValue(); if (provider != null && keysByPath != null) { for (Map.Entry<String, Set<String>> pathWithKeys : keysByPath.entrySet()) { String path = pathWithKeys.getKey(); Set<String> keys = new HashSet<>(pathWithKeys.getValue()); ConfigData configData = provider.get(path, keys); Map<String, String> data = configData.data(); Long ttl = configData.ttl(); if (ttl != null && ttl >= 0) { ttls.put(path, ttl); } Map<String, Map<String, String>> keyValuesByPath = lookupsByProvider.computeIfAbsent(providerName, k -> new HashMap<>()); keyValuesByPath.put(path, data); } } } // Perform the transformations by performing variable replacements Map<String, String> data = new HashMap<>(configs); for (Map.Entry<String, String> config : configs.entrySet()) { data.put(config.getKey(), replace(lookupsByProvider, config.getValue(), DEFAULT_PATTERN)); } return new ConfigTransformerResult(data, ttls); }
// Test: a null config value passes through transform() unchanged and produces no TTLs.
@Test public void testNullConfigValue() { ConfigTransformerResult result = configTransformer.transform(Collections.singletonMap(MY_KEY, null)); Map<String, String> data = result.data(); Map<String, Long> ttls = result.ttls(); assertNull(data.get(MY_KEY)); assertTrue(ttls.isEmpty()); }
/**
 * Returns {@code argument} unchanged when it is non-null.
 * @throws NullPointerException naming {@code name} when the argument is null
 */
public static <T> T checkNotNull(T argument, String name) {
    if (argument != null) {
        return argument;
    }
    throw new NullPointerException(name + " can't be null");
}
// Test: a null argument must raise NullPointerException.
@Test(expected = NullPointerException.class) public void test_checkNotNull2_whenNull() { checkNotNull(null, "foo"); }
/**
 * Imports service definitions from {@code repositoryUrl}. http(s) URLs are downloaded first
 * (capturing response headers so relative references can later be resolved); anything else is
 * treated as a local file path. Builds the artifact name and a ReferenceResolver rooted at the
 * repository URL, then delegates to the file-based overload.
 * @throws MockRepositoryImportException when the remote file cannot be downloaded
 */
public List<Service> importServiceDefinition(String repositoryUrl, Secret repositorySecret, boolean disableSSLValidation, boolean mainArtifact) throws MockRepositoryImportException { log.info("Importing service definitions from {}", repositoryUrl); File localFile = null; Map<String, List<String>> fileProperties = null; if (repositoryUrl.startsWith("http")) { try { HTTPDownloader.FileAndHeaders fileAndHeaders = HTTPDownloader .handleHTTPDownloadToFileAndHeaders(repositoryUrl, repositorySecret, disableSSLValidation); localFile = fileAndHeaders.getLocalFile(); fileProperties = fileAndHeaders.getResponseHeaders(); } catch (IOException ioe) { throw new MockRepositoryImportException(repositoryUrl + " cannot be downloaded", ioe); } } else { // Simply build localFile from repository url. localFile = new File(repositoryUrl); } RelativeReferenceURLBuilder referenceURLBuilder = RelativeReferenceURLBuilderFactory .getRelativeReferenceURLBuilder(fileProperties); String artifactName = referenceURLBuilder.getFileName(repositoryUrl, fileProperties); // Initialize a reference resolver to the folder of this repositoryUrl. ReferenceResolver referenceResolver = new ReferenceResolver(repositoryUrl, repositorySecret, disableSSLValidation, referenceURLBuilder); return importServiceDefinition(localFile, referenceResolver, new ArtifactInfo(artifactName, mainArtifact)); }
@Test void testImportServiceDefinitionMainGrpcAndSecondaryExamples() { List<Service> services = null; try { File artifactFile = new File("target/test-classes/io/github/microcks/util/grpc/hello-v1.proto"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("hello-v1.proto", true)); } catch (MockRepositoryImportException mrie) { fail("No MockRepositoryImportException should have be thrown"); } assertNotNull(services); assertEquals(1, services.size()); // Inspect Service own attributes. Service importedSvc = services.get(0); assertEquals("io.github.microcks.grpc.hello.v1.HelloService", importedSvc.getName()); assertEquals("v1", importedSvc.getVersion()); assertEquals("hello-v1.proto", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(1, importedSvc.getOperations().size()); // As operation as only scalar type, it should be QUERY_ARGS dispatcher. assertEquals(DispatchStyles.QUERY_ARGS, importedSvc.getOperations().get(0).getDispatcher()); assertEquals("firstname && lastname", importedSvc.getOperations().get(0).getDispatcherRules()); // Inspect and check requests. List<Request> requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(0, requests.size()); // Inspect and check responses. List<Response> responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(0, responses.size()); try { File artifactFile = new File("target/test-classes/io/github/microcks/util/grpc/hello-v1-examples.yml"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("hello-v1-examples.yml", false)); } catch (MockRepositoryImportException mrie) { fail("No MockRepositoryImportException should have be thrown"); } // Inspect Service own attributes. 
importedSvc = services.get(0); assertEquals("io.github.microcks.grpc.hello.v1.HelloService", importedSvc.getName()); assertEquals("v1", importedSvc.getVersion()); assertEquals("hello-v1.proto", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(1, importedSvc.getOperations().size()); // Inspect and check requests. requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(2, requests.size()); for (Request request : requests) { assertEquals("hello-v1-examples.yml", request.getSourceArtifact()); } // Inspect and check responses. responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(2, requests.size()); for (Response response : responses) { assertEquals("hello-v1-examples.yml", response.getSourceArtifact()); if ("Laurent".equals(response.getName())) { assertEquals("?firstname=Laurent?lastname=Broudoux", response.getDispatchCriteria()); } else if ("John".equals(response.getName())) { assertEquals("?firstname=John?lastname=Doe", response.getDispatchCriteria()); } else { fail("Unexpected response name: " + response.getName()); } } }
/** Builds a fresh ServerId from the computed database id plus a newly generated server token. */
@Override
public ServerId create() {
  final String databaseId = computeDatabaseId();
  return ServerId.of(databaseId, serverIdGenerator.generate());
}
// Test: creating from an existing ServerId without the JDBC URL property must fail with IllegalStateException.
@Test public void create_from_ServerId_fails_with_ISE_if_JDBC_property_not_set() { expectMissingJdbcUrlISE(() -> underTest.create(A_SERVERID)); }
/**
 * Pre-invocation hook for MySQL write prohibition: extracts the target database name and the
 * SQL text (either a raw String argument or a ClientPrepareResult at PARAM_INDEX), then lets
 * the prohibition manager veto write statements against prohibited databases by setting a
 * throwable on the context.
 */
@Override protected ExecuteContext doBefore(ExecuteContext context) { String database = getDataBaseInfo(context).getDatabaseName(); Object argument = context.getArguments()[PARAM_INDEX]; String sql; if (argument instanceof ClientPrepareResult) { sql = ((ClientPrepareResult) argument).getSql(); } else { sql = (String) argument; } handleWriteOperationIfWriteDisabled(sql, database, DatabaseWriteProhibitionManager.getMySqlProhibitionDatabases(), context); return context; }
@Test public void testDoBefore() throws Exception { // the database write prohibition switch is disabled globalConfig.setEnableMySqlWriteProhibition(false); context = ExecuteContext.forMemberMethod(protocolMock, methodMock, argument, null, null); interceptor.before(context); Assert.assertNull(context.getThrowableOut()); // The database write prohibition function is disabled. // The write prohibition database set contains the database that is blocked Set<String> databases = new HashSet<>(); databases.add("database-test"); globalConfig.setMySqlDatabases(databases); interceptor.before(context); Assert.assertNull(context.getThrowableOut()); // The database write prohibition switch is enabled, and the database set contains the database that is // blocked, method input is String globalConfig.setEnableMySqlWriteProhibition(true); context = ExecuteContext.forMemberMethod(protocolMock, methodMock, argument, null, null); interceptor.before(context); Assert.assertEquals("Database prohibit to write, database: database-test", context.getThrowableOut().getMessage()); //// The database write prohibition switch is enabled, and the database set contains the database that is // blocked, method input is ClientPrepareResult argument[PARAM_INDEX] = resultMock; context = ExecuteContext.forMemberMethod(protocolMock, methodMock, argument, null, null); interceptor.before(context); Assert.assertEquals("Database prohibit to write, database: database-test", context.getThrowableOut().getMessage()); //The database write prohibition switch is turned on, the sql does not write, // and the database set contains the blocked database sql = "SELECT * FROM table"; argument[PARAM_INDEX] = sql; context = ExecuteContext.forMemberMethod(protocolMock, methodMock, argument, null, null); interceptor.before(context); Assert.assertNull(context.getThrowableOut()); //The database write prohibition switch is enabled. 
The database set does not contain the database that is // blocked, method input is String argument[PARAM_INDEX] = "INSERT INTO table (name) VALUES ('test')"; globalConfig.setMySqlDatabases(new HashSet<>()); context = ExecuteContext.forMemberMethod(protocolMock, methodMock, argument, null, null); interceptor.before(context); Assert.assertNull(context.getThrowableOut()); //The database write prohibition switch is enabled. The database set does not contain the database that is // blocked, method input is ClientPrepareResult argument[PARAM_INDEX] = resultMock; context = ExecuteContext.forMemberMethod(protocolMock, methodMock, argument, null, null); interceptor.before(context); Assert.assertNull(context.getThrowableOut()); }
/**
 * JSON creator for ModelId.
 * @throws IllegalArgumentException when {@code id} is null, empty, or whitespace-only
 */
@JsonCreator
public static ModelId of(String id) {
    final boolean hasText = StringUtils.isNotBlank(id);
    Preconditions.checkArgument(hasText, "ID must not be blank");
    return new AutoValue_ModelId(id);
}
// Test: a JSON string deserializes to the equivalent ModelId (via the @JsonCreator factory).
@Test public void serialize() throws IOException { final ModelId modelId = objectMapper.readValue("\"foobar\"", ModelId.class); assertThat(modelId).isEqualTo(ModelId.of("foobar")); }
/**
 * Persists creation/modification timestamps for an S3 object as user metadata headers
 * (lower-cased mtime/btime), preserving all other existing metadata, and rewrites the object
 * metadata through S3MetadataFeature. Volumes (buckets) are skipped with a warning because
 * bucket timestamps cannot be set.
 */
@Override public void setTimestamp(final Path file, final TransferStatus status) throws BackgroundException { if(file.isVolume()) { log.warn(String.format("Skip setting timestamp for %s", file)); return; } final S3MetadataFeature feature = new S3MetadataFeature(session, new S3AccessControlListFeature(session)); // Copy existing metadata in addition to timestamp final PathAttributes attr = new S3AttributesFinderFeature(session, new S3AccessControlListFeature(session)).find(file); final Map<String, String> metadata = attr.getMetadata(); if(status.getModified() != null) { final Header header = S3TimestampFeature.toHeader(S3TimestampFeature.METADATA_MODIFICATION_DATE, status.getModified()); metadata.put(StringUtils.lowerCase(header.getName()), header.getValue()); } if(status.getCreated() != null) { final Header header = S3TimestampFeature.toHeader(S3TimestampFeature.METADATA_CREATION_DATE, status.getCreated()); metadata.put(StringUtils.lowerCase(header.getName()), header.getValue()); } feature.setMetadata(file, status.withMetadata(metadata)); }
// Integration test against live S3: verifies mtime/btime metadata round-trip through touch,
// setTimestamp (second-precision truncation), listing, and move; cleans up by deleting the object.
@Test public void testFindTimestamp() throws Exception { final Path bucket = new Path("versioning-test-eu-central-1-cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory)); final TransferStatus status = new TransferStatus(); final S3AccessControlListFeature acl = new S3AccessControlListFeature(session); final Path test = new S3TouchFeature(session, acl).touch(new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), status.withCreated(1695159781972L).withModified(1530305150672L)); final String versionId = test.attributes().getVersionId(); assertEquals(1530305150000L, status.getResponse().getModificationDate()); assertEquals(1695159781000L, status.getResponse().getCreationDate()); final PathAttributes attributes = new S3AttributesFinderFeature(session, acl).find(test); assertEquals(1530305150000L, attributes.getModificationDate()); assertEquals(1695159781000L, attributes.getCreationDate()); final Map<String, String> metadata = attributes.getMetadata(); assertEquals(3, metadata.size()); assertTrue(metadata.containsKey("mtime")); assertTrue(metadata.containsKey("btime")); assertTrue(metadata.containsKey("Content-Type")); final S3TimestampFeature feature = new S3TimestampFeature(session); feature.setTimestamp(test, status.withModified(1630305150672L).withCreated(1530305160672L)); assertNotEquals(versionId, status.getResponse().getVersionId()); assertEquals("1630305150", status.getResponse().getMetadata().get("mtime")); assertEquals("1530305160", status.getResponse().getMetadata().get("btime")); final PathAttributes attributesAfterSettingNewTimestamps = new S3AttributesFinderFeature(session, acl).find(test.withAttributes(status.getResponse())); assertNotEquals(metadata, attributesAfterSettingNewTimestamps.getMetadata()); assertEquals("1630305150", attributesAfterSettingNewTimestamps.getMetadata().get("mtime")); assertEquals("1530305160", attributesAfterSettingNewTimestamps.getMetadata().get("btime")); 
assertEquals(metadata.size(), attributesAfterSettingNewTimestamps.getMetadata().size()); assertNotEquals(versionId, attributesAfterSettingNewTimestamps.getVersionId()); assertEquals(status.getResponse().getVersionId(), attributesAfterSettingNewTimestamps.getVersionId()); assertEquals(1630305150000L, attributesAfterSettingNewTimestamps.getModificationDate()); assertEquals(1530305160000L, attributesAfterSettingNewTimestamps.getCreationDate()); test.attributes().setModificationDate(1630305150000L); final Path found = new S3ObjectListService(session, acl, true).list(bucket, new DisabledListProgressListener()).find(new DefaultPathPredicate(test)); assertEquals(1630305150000L, found.attributes().getModificationDate()); final Path moved = new S3MoveFeature(session, acl).move(test, new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), status, new Delete.DisabledCallback(), new DisabledConnectionCallback()); assertEquals(1630305150000L, moved.attributes().getModificationDate()); assertEquals(1630305150000L, new S3AttributesFinderFeature(session, acl).find(moved).getModificationDate()); new S3DefaultDeleteFeature(session).delete(Collections.singletonList(moved), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/** Combines the owning job's hash with this task's id using the prime multiplier 524287. */
@Override public int hashCode() { return jobId.hashCode() * 524287 + id; }
// Test: equal TaskIDs (same job, type, and id) must produce equal hash codes for every task type.
@Test public void testHashCode() { TaskType[] types = TaskType.values(); for (int i = 0; i < types.length; i++) { JobID jobId = new JobID("1234" + i, i); TaskID taskId1 = new TaskID(jobId, types[i], i); TaskID taskId2 = new TaskID(jobId, types[i], i); assertTrue("The hashcode() method gave unequal hash codes for two equal " + "task IDs", taskId1.hashCode() == taskId2.hashCode()); } }
/**
 * Recovers a committer for a previously persisted commit state.
 * @throws NullPointerException if {@code resumable} is null
 */
@Override
public RecoverableFsDataOutputStream.Committer recoverForCommit(CommitRecoverable resumable) {
    LOGGER.trace("Recovering output stream for commit: {}", resumable);
    Preconditions.checkNotNull(resumable);
    final GSCommitRecoverable commitState = (GSCommitRecoverable) resumable;
    return new GSRecoverableWriterCommitter(storage, options, commitState);
}
// Test: the recovered committer carries the writer's options and the supplied recoverable.
@Test public void testRecoverForCommit() { GSRecoverableWriterCommitter committer = (GSRecoverableWriterCommitter) writer.recoverForCommit(commitRecoverable); assertEquals(options, committer.options); assertEquals(commitRecoverable, committer.recoverable); }
/**
 * Equality is by exact class plus null-safe comparison of topic, expression type and expression.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    final SimpleSubscriptionData other = (SimpleSubscriptionData) o;
    return Objects.equals(topic, other.topic)
        && Objects.equals(expressionType, other.expressionType)
        && Objects.equals(expression, other.expression);
}
// Test: two subscriptions differing only in expression must not be equal.
@Test public void testNotEqual() { String topic = "test-topic"; String expressionType = "TAG"; String expression1 = "test-expression-1"; String expression2 = "test-expression-2"; SimpleSubscriptionData simpleSubscriptionData1 = new SimpleSubscriptionData(topic, expressionType, expression1, 1); SimpleSubscriptionData simpleSubscriptionData2 = new SimpleSubscriptionData(topic, expressionType, expression2, 1); assertThat(simpleSubscriptionData1.equals(simpleSubscriptionData2)).isFalse(); }
/**
 * Creates a WriteRecords transform with defaults: the standard producer properties,
 * exactly-once semantics off, no sharding, the shared consumer factory, a throwing
 * bad-record router and the default bad-record error handler.
 */
public static <K, V> WriteRecords<K, V> writeRecords() { return new AutoValue_KafkaIO_WriteRecords.Builder<K, V>() .setProducerConfig(WriteRecords.DEFAULT_PRODUCER_PROPERTIES) .setEOS(false) .setNumShards(0) .setConsumerFactoryFn(KafkaIOUtils.KAFKA_CONSUMER_FACTORY_FN) .setBadRecordRouter(BadRecordRouter.THROWING_ROUTER) .setBadRecordErrorHandler(new DefaultErrorHandler<>()) .build(); }
// Test: reading from the Kafka source and writing ProducerRecords through writeRecords() publishes
// all elements (with input timestamps) to the mock producer.
@Test public void testRecordsSink() throws Exception { // Simply read from kafka source and write to kafka sink using ProducerRecord transform. Then // verify the records are correctly published to mock kafka producer. int numElements = 1000; try (MockProducerWrapper producerWrapper = new MockProducerWrapper(new LongSerializer())) { ProducerSendCompletionThread completionThread = new ProducerSendCompletionThread(producerWrapper.mockProducer).start(); String topic = "test"; p.apply(mkKafkaReadTransform(numElements, new ValueAsTimestampFn()).withoutMetadata()) .apply(ParDo.of(new KV2ProducerRecord(topic))) .setCoder(ProducerRecordCoder.of(VarIntCoder.of(), VarLongCoder.of())) .apply( KafkaIO.<Integer, Long>writeRecords() .withBootstrapServers("none") .withTopic(topic) .withKeySerializer(IntegerSerializer.class) .withValueSerializer(LongSerializer.class) .withInputTimestamp() .withProducerFactoryFn(new ProducerFactoryFn(producerWrapper.producerKey))); p.run(); completionThread.shutdown(); verifyProducerRecords(producerWrapper.mockProducer, topic, numElements, false, true); } }
/** Returns the strategy used to map each TopicPartition to a broker request scope. */
@Override public AdminApiLookupStrategy<TopicPartition> lookupStrategy() { return lookupStrategy; }
// Test: when DescribeProducersOptions pins a brokerId, every partition's lookup scope targets that broker.
@Test public void testBrokerIdSetInOptions() { int brokerId = 3; Set<TopicPartition> topicPartitions = mkSet( new TopicPartition("foo", 5), new TopicPartition("bar", 3), new TopicPartition("foo", 4) ); DescribeProducersHandler handler = newHandler( new DescribeProducersOptions().brokerId(brokerId) ); topicPartitions.forEach(topicPartition -> { ApiRequestScope scope = handler.lookupStrategy().lookupScope(topicPartition); assertEquals(OptionalInt.of(brokerId), scope.destinationBrokerId(), "Unexpected brokerId for " + topicPartition); }); }
public void add(Sequence value) { // TODO we can probably optimize this a bit for(long i=value.first; i<value.last+1; i++) { add(i); } }
// Test: a new SequenceSet is empty and stops being empty after one add.
@Test public void testIsEmpty() { SequenceSet set = new SequenceSet(); assertTrue(set.isEmpty()); set.add(1); assertFalse(set.isEmpty()); }
@Udf(schema = "ARRAY<STRUCT<K STRING, V STRING>>") public List<Struct> entriesString( @UdfParameter(description = "The map to create entries from") final Map<String, String> map, @UdfParameter(description = "If true then the resulting entries are sorted by key") final boolean sorted ) { return entries(map, STRING_STRUCT_SCHEMA, sorted); }
// Test: entriesString with sorted=true yields entries ordered by key.
@Test public void shouldComputeStringEntriesSorted() { final Map<String, String> map = createMap(String::valueOf); shouldComputeEntriesSorted(map, () -> entriesUdf.entriesString(map, true)); }
/**
 * Parses the token list into a node tree, or returns {@code null} when there is nothing
 * to parse (no token list or an empty one).
 */
public Node parse() throws ScanException {
    final boolean noTokens = tokenList == null || tokenList.isEmpty();
    return noTokens ? null : E();
}
// Test: a ":-" default separator outside a ${...} variable is parsed as plain literals.
@Test public void defaultSeparatorOutsideOfAVariable() throws ScanException { Tokenizer tokenizer = new Tokenizer("{a:-b}"); Parser parser = new Parser(tokenizer.tokenize()); Node node = parser.parse(); dump(node); Node witness = new Node(Node.Type.LITERAL, "{"); Node t = witness.next = new Node(Node.Type.LITERAL, "a"); t.next = new Node(Node.Type.LITERAL, ":-"); t = t.next; t.next = new Node(Node.Type.LITERAL, "b"); t = t.next; t.next = new Node(Node.Type.LITERAL, "}"); assertEquals(witness, node); }
public static YamlAdvisorsConfiguration load(final InputStream inputStream) {
    // Fall back to an empty configuration when the stream unmarshals to null.
    YamlAdvisorsConfiguration loaded = AgentYamlEngine.unmarshalYamlAdvisorsConfiguration(inputStream);
    if (loaded != null) {
        return loaded;
    }
    return new YamlAdvisorsConfiguration();
}
@Test
void assertLoadEmptyFile() {
    // Loading an empty YAML file should yield a configuration with no advisors.
    YamlAdvisorsConfiguration actual = YamlAdvisorsConfigurationLoader.load(getClass().getResourceAsStream("/META-INF/conf/empty-advisors.yaml"));
    assertTrue(actual.getAdvisors().isEmpty());
}
public static int decrementSequence(int sequence, int decrement) {
    // Decrement with wrap-around: sequences live in [0, Integer.MAX_VALUE],
    // so going below 0 wraps back to the top of the range.
    long result = (long) sequence - decrement;
    if (result < 0) {
        result += (long) Integer.MAX_VALUE + 1;
    }
    return (int) result;
}
@Test
public void testDecrementSequence() {
    // Exact decrement hits zero; decrementing past zero wraps to MAX_VALUE.
    assertEquals(0, DefaultRecordBatch.decrementSequence(5, 5));
    assertEquals(Integer.MAX_VALUE, DefaultRecordBatch.decrementSequence(0, 1));
}
@Override
public void judgeContinueToExecute(final SQLStatement statement) throws SQLException {
    // In an aborted transaction only COMMIT/ROLLBACK may proceed; anything
    // else is rejected until the transaction block ends.
    ShardingSpherePreconditions.checkState(statement instanceof CommitStatement || statement instanceof RollbackStatement,
        () -> new SQLFeatureNotSupportedException("Current transaction is aborted, commands ignored until end of transaction block."));
}
@Test
void assertJudgeContinueToExecuteWithCommitStatement() {
    // COMMIT is allowed even when the transaction is aborted.
    assertDoesNotThrow(() -> allowedSQLStatementHandler.judgeContinueToExecute(mock(CommitStatement.class)));
}
@Nonnull
public static <T> AggregateOperation1<T, LongAccumulator, Long> counting() {
    // Counts accumulated items: each item adds 1, combine sums accumulators,
    // and deduct subtracts (overflow-tolerant) to support sliding windows.
    return AggregateOperation
        .withCreate(LongAccumulator::new)
        .andAccumulate((LongAccumulator a, T item) -> a.add(1))
        .andCombine(LongAccumulator::add)
        .andDeduct(LongAccumulator::subtractAllowingOverflow)
        .andExportFinish(LongAccumulator::get);
}
@Test
public void when_counting() {
    // Two accumulated items must produce a count of 2.
    validateOp(counting(), LongAccumulator::get, null, null, 1L, 2L, 2L);
}
protected static boolean isEligible(String message) {
    // Returns true only the first time (within the sliding window of the
    // last 100 messages) a given message is seen; used for one-time logging.
    // Acquire locks BEFORE the try block so a failed lock() does not trigger
    // an unlock() of a lock we never held.
    lock.readLock().lock();
    try {
        if (hashSet.contains(message)) {
            return false;
        }
    } finally {
        lock.readLock().unlock();
    }
    lock.writeLock().lock();
    try {
        // Re-check under the write lock: another thread may have added the
        // same message between releasing the read lock and acquiring the
        // write lock. Without this re-check the buffer could hold duplicate
        // entries, leaving buffer and hashSet inconsistent after eviction.
        if (hashSet.contains(message)) {
            return false;
        }
        if (buffer.size() >= 100) {
            // Evict the oldest message to cap the window at 100 entries.
            String rem = buffer.remove();
            hashSet.remove(rem);
        }
        buffer.add(message);
        hashSet.add(message);
        return true;
    } finally {
        lock.writeLock().unlock();
    }
}
@Test
public void testBuffer1() throws Exception {
    // First occurrence is eligible, repeats are not, a different message is.
    assertTrue(OneTimeLogger.isEligible("Message here"));
    assertFalse(OneTimeLogger.isEligible("Message here"));
    assertTrue(OneTimeLogger.isEligible("Message here 23"));
}
@SuppressWarnings("unchecked")
public static <S, F> S visit(final Schema schema, final Visitor<S, F> visitor) {
    // Dispatch to the handler registered for the schema's type.
    final BiFunction<Visitor<?, ?>, Schema, Object> handler = HANDLER.get(schema.type());
    if (handler == null) {
        throw new UnsupportedOperationException("Unsupported schema type: " + schema.type());
    }
    return (S) handler.apply(visitor, schema);
}
@Test
public void shouldVisitAll() {
    // A visitor overriding only visitSchema must handle every schema type.
    // Given:
    visitor = new Visitor<String, Integer>() {
        @Override
        public String visitSchema(final Schema schema) {
            return "Expected";
        }
    };
    allSchemas().forEach(schema -> {
        // When:
        final String result = SchemaWalker.visit(schema, visitor);
        // Then:
        assertThat(result, is("Expected"));
    });
}
public static Read<JmsRecord> read() {
    // Build a Read with defaults: unbounded record count, JmsRecord coder,
    // default close timeout, no deduping, and a mapper converting a JMS
    // TextMessage (headers plus properties) into a JmsRecord.
    return new AutoValue_JmsIO_Read.Builder<JmsRecord>()
        .setMaxNumRecords(Long.MAX_VALUE)
        .setCoder(SerializableCoder.of(JmsRecord.class))
        .setCloseTimeout(DEFAULT_CLOSE_TIMEOUT)
        .setRequiresDeduping(false)
        .setMessageMapper(
            new MessageMapper<JmsRecord>() {
                @Override
                public JmsRecord mapMessage(Message message) throws Exception {
                    // NOTE(review): assumes every incoming message is a
                    // TextMessage; other JMS message types would fail the
                    // cast with ClassCastException — confirm intended.
                    TextMessage textMessage = (TextMessage) message;
                    // Copy all message properties into a plain map.
                    Map<String, Object> properties = new HashMap<>();
                    @SuppressWarnings("rawtypes")
                    Enumeration propertyNames = textMessage.getPropertyNames();
                    while (propertyNames.hasMoreElements()) {
                        String propertyName = (String) propertyNames.nextElement();
                        properties.put(propertyName, textMessage.getObjectProperty(propertyName));
                    }
                    return new JmsRecord(
                        textMessage.getJMSMessageID(),
                        textMessage.getJMSTimestamp(),
                        textMessage.getJMSCorrelationID(),
                        textMessage.getJMSReplyTo(),
                        textMessage.getJMSDestination(),
                        textMessage.getJMSDeliveryMode(),
                        textMessage.getJMSRedelivered(),
                        textMessage.getJMSType(),
                        textMessage.getJMSExpiration(),
                        textMessage.getJMSPriority(),
                        properties,
                        textMessage.getText());
                }
            })
        .build();
}
@Test
public void testCustomAutoscaler() throws IOException {
    // The reader must delegate backlog reporting and lifecycle (start/stop)
    // to the injected AutoScaler.
    long excpectedTotalBacklogBytes = 1111L;
    AutoScaler autoScaler = mock(DefaultAutoscaler.class);
    when(autoScaler.getTotalBacklogBytes()).thenReturn(excpectedTotalBacklogBytes);
    JmsIO.Read spec = JmsIO.read()
        .withConnectionFactory(connectionFactory)
        .withUsername(USERNAME)
        .withPassword(PASSWORD)
        .withQueue(QUEUE)
        .withAutoScaler(autoScaler);
    JmsIO.UnboundedJmsSource source = new JmsIO.UnboundedJmsSource(spec);
    JmsIO.UnboundedJmsReader reader = source.createReader(PipelineOptionsFactory.create(), null);
    // start the reader and check getSplitBacklogBytes and getTotalBacklogBytes values
    reader.start();
    verify(autoScaler, times(1)).start();
    assertEquals(excpectedTotalBacklogBytes, reader.getTotalBacklogBytes());
    verify(autoScaler, times(1)).getTotalBacklogBytes();
    reader.close();
    verify(autoScaler, times(1)).stop();
}
public NumericIndicator abs() {
    // Wrap this indicator in an absolute-value unary operation.
    return NumericIndicator.of(UnaryOperation.abs(this));
}
@Test
public void abs() {
    // abs() must yield the magnitude at both ends of the series.
    final NumericIndicator numericIndicator = NumericIndicator.of(cp1);
    final NumericIndicator dynamicOp = numericIndicator.abs();
    assertNumEquals(1, dynamicOp.getValue(0));
    assertNumEquals(2, dynamicOp.getValue(series.getBarCount() - 1));
}
public static boolean requireGlobalLock() {
    // The global-lock flag is present in the context only while bound.
    return CONTEXT_HOLDER.get(KEY_GLOBAL_LOCK_FLAG) != null;
}
@Test
public void testRequireGlobalLock() {
    // Binding the flag turns the requirement on; unbinding turns it off.
    RootContext.bindGlobalLockFlag();
    assertThat(RootContext.requireGlobalLock()).isEqualTo(true);
    RootContext.unbindGlobalLockFlag();
    assertThat(RootContext.requireGlobalLock()).isEqualTo(false);
}
static SerializableFunction<Double, Double> getNumericPredictorEntry(final NumericPredictor numericPredictor) {
    // Choose the cheaper no-exponent evaluation when the exponent equals 1.
    // NOTE(review): Objects.equals(1, exponent) compares an Integer against
    // whatever Number subtype getExponent() returns — if it is not an
    // Integer, the exponent path is taken even for value 1 (same result,
    // just slower); confirm the exponent's declared type.
    boolean withExponent = !Objects.equals(1, numericPredictor.getExponent());
    if (withExponent) {
        return input -> KiePMMLRegressionTable.evaluateNumericWithExponent(input,
            numericPredictor.getCoefficient().doubleValue(),
            numericPredictor.getExponent().doubleValue());
    } else {
        return input -> KiePMMLRegressionTable.evaluateNumericWithoutExponent(input,
            numericPredictor.getCoefficient().doubleValue());
    }
}
@Test
void getNumericPredictorEntryWithExponent() {
    // An exponent != 1 should still yield a non-null evaluation function.
    String predictorName = "predictorName";
    int exponent = 2;
    double coefficient = 1.23;
    NumericPredictor numericPredictor = PMMLModelTestUtils.getNumericPredictor(predictorName, exponent, coefficient);
    SerializableFunction<Double, Double> retrieved = KiePMMLRegressionTableFactory.getNumericPredictorEntry(numericPredictor);
    assertThat(retrieved).isNotNull();
}
ByteBuffer serialize(final int endPadding) {
    final int sizeOfValueLength = Integer.BYTES;
    final int sizeOfPriorValue = priorValue == null ? 0 : priorValue.length;
    // Use the SAME duplicate test here as in the write path below.
    // Previously this used reference equality (priorValue == oldValue) while
    // the write path used Arrays.equals, so a content-equal-but-distinct
    // oldValue allocated bytes that were never written, leaving unwritten
    // trailing bytes in the returned buffer's backing array.
    final int sizeOfOldValue = oldValue == null || Arrays.equals(priorValue, oldValue) ? 0 : oldValue.length;
    final int sizeOfNewValue = newValue == null ? 0 : newValue.length;
    final byte[] serializedContext = recordContext.serialize();
    final ByteBuffer buffer = ByteBuffer.allocate(
        serializedContext.length
            + sizeOfValueLength + sizeOfPriorValue
            + sizeOfValueLength + sizeOfOldValue
            + sizeOfValueLength + sizeOfNewValue
            + endPadding
    );
    buffer.put(serializedContext);
    addValue(buffer, priorValue);
    if (oldValue == null) {
        buffer.putInt(NULL_VALUE_SENTINEL);
    } else if (Arrays.equals(priorValue, oldValue)) {
        // Compact: record that oldValue duplicates priorValue rather than
        // writing the same bytes twice.
        buffer.putInt(OLD_PREV_DUPLICATE_VALUE_SENTINEL);
    } else {
        buffer.putInt(sizeOfOldValue);
        buffer.put(oldValue);
    }
    addValue(buffer, newValue);
    return buffer;
}
@Test
public void shouldCompactDuplicates() {
    // When oldValue is the same array as priorValue, serialization must
    // write the duplicate sentinel (-2) instead of repeating the bytes.
    final ProcessorRecordContext context = new ProcessorRecordContext(0L, 0L, 0, "topic", new RecordHeaders());
    final byte[] serializedContext = context.serialize();
    final byte[] duplicate = {(byte) 5};
    final byte[] bytes = new BufferValue(duplicate, duplicate, null, context).serialize(0).array();
    final byte[] withoutContext = Arrays.copyOfRange(bytes, serializedContext.length, bytes.length);
    assertThat(withoutContext, is(ByteBuffer.allocate(Integer.BYTES * 3 + 1).putInt(1).put(duplicate).putInt(-2).putInt(-1).array()));
}
@Override
@CacheEvict(value = RedisKeyConstants.PERMISSION_MENU_ID_LIST, key = "#createReqVO.permission",
    condition = "#createReqVO.permission != null")
public Long createMenu(MenuSaveVO createReqVO) {
    // Validate that the parent menu exists
    validateParentMenu(createReqVO.getParentId(), null);
    // Validate the menu itself (name under the same parent)
    validateMenu(createReqVO.getParentId(), createReqVO.getName(), null);
    // Insert into the database
    MenuDO menu = BeanUtils.toBean(createReqVO, MenuDO.class);
    initMenuProperty(menu);
    menuMapper.insert(menu);
    // Return the generated id
    return menu.getId();
}
@Test
public void testCreateMenu_success() {
    // Mock data (build the parent menu)
    MenuDO menuDO = buildMenuDO(MenuTypeEnum.MENU, "parent", 0L);
    menuMapper.insert(menuDO);
    Long parentId = menuDO.getId();
    // Prepare the request
    MenuSaveVO reqVO = randomPojo(MenuSaveVO.class, o -> {
        o.setParentId(parentId);
        o.setName("testSonName");
        o.setType(MenuTypeEnum.MENU.getType());
    }).setId(null); // prevent id from being pre-assigned
    Long menuId = menuService.createMenu(reqVO);
    // Verify the persisted record matches the request
    MenuDO dbMenu = menuMapper.selectById(menuId);
    assertPojoEquals(reqVO, dbMenu, "id");
}
@Override
public InternalSerializationService build() {
    initVersions();
    // Merge factories and class definitions from the declarative config.
    if (config != null) {
        addConfigDataSerializableFactories(dataSerializableFactories, config, classLoader);
        addConfigPortableFactories(portableFactories, config, classLoader);
        classDefinitions.addAll(config.getClassDefinitions());
    }
    InputOutputFactory inputOutputFactory = createInputOutputFactory();
    InternalSerializationService ss = createSerializationService(inputOutputFactory, notActiveExceptionSupplier);
    registerSerializerHooks(ss);
    if (config != null) {
        if (config.getGlobalSerializerConfig() != null) {
            GlobalSerializerConfig globalSerializerConfig = config.getGlobalSerializerConfig();
            Serializer serializer = globalSerializerConfig.getImplementation();
            // Instantiate by class name when no instance was provided.
            if (serializer == null) {
                try {
                    serializer = ClassLoaderUtil.newInstance(classLoader, globalSerializerConfig.getClassName());
                } catch (Exception e) {
                    throw new HazelcastSerializationException(e);
                }
            }
            // Give the serializer access to the Hazelcast instance if it asks.
            if (serializer instanceof HazelcastInstanceAware aware) {
                aware.setHazelcastInstance(hazelcastInstance);
            }
            ((AbstractSerializationService) ss)
                    .registerGlobal(serializer, globalSerializerConfig.isOverrideJavaSerialization());
        }
    }
    return ss;
}
@Test
public void test_byteOrderIsOverridden_whenBigEndian() {
    // The byte-order system property must override the service's default.
    System.setProperty(BYTE_ORDER_OVERRRIDE_PROPERTY, "BIG_ENDIAN");
    try {
        InternalSerializationService serializationService = getSerializationServiceBuilder().build();
        assertEquals(ByteOrder.BIG_ENDIAN, serializationService.getByteOrder());
    } finally {
        System.clearProperty(BYTE_ORDER_OVERRRIDE_PROPERTY);
    }
}
Flux<String> getNotifiersBySubscriber(Subscriber subscriber, Reason reason) {
    // Resolve the notifier names the subscriber configured for this
    // reason's type from the user's notification preference.
    var reasonType = reason.getSpec().getReasonType();
    return userNotificationPreferenceService.getByUser(subscriber.name())
        .map(UserNotificationPreference::getReasonTypeNotifier)
        .map(reasonTypeNotification -> reasonTypeNotification.getNotifiers(reasonType))
        .flatMapMany(Flux::fromIterable);
}
@Test
public void testGetNotifiersBySubscriber() {
    // A default preference should resolve to the default email notifier
    // for the subscriber's reason type.
    UserNotificationPreference preference = new UserNotificationPreference();
    when(userNotificationPreferenceService.getByUser(any()))
        .thenReturn(Mono.just(preference));
    var reason = new Reason();
    reason.setMetadata(new Metadata());
    reason.getMetadata().setName("reason-a");
    reason.setSpec(new Reason.Spec());
    reason.getSpec().setReasonType("new-reply-on-comment");
    var subscriber = new Subscriber(UserIdentity.anonymousWithEmail("A"), "fake-name");
    notificationCenter.getNotifiersBySubscriber(subscriber, reason)
        .collectList()
        .as(StepVerifier::create)
        .consumeNextWith(notifiers -> {
            assertThat(notifiers).hasSize(1);
            assertThat(notifiers.get(0)).isEqualTo("default-email-notifier");
        })
        .verifyComplete();
    verify(userNotificationPreferenceService).getByUser(eq(subscriber.name()));
}
public void initialize(String[] args) throws Exception {
    // Bootstraps the frontend: node info, directories, plugins, cluster
    // id/role, image/journal replay, and background cleaner threads.
    // set meta dir first.
    // we already set these variables in constructor. but GlobalStateMgr is a singleton class.
    // so they may be set before Config is initialized.
    // set them here again to make sure these variables use values in fe.conf.
    setMetaDir();
    // must judge whether it is first time start here before initializing GlobalStateMgr.
    // Possibly remove clusterId and role to ensure that the system is not left in a half-initialized state.
    boolean isFirstTimeStart = nodeMgr.isVersionAndRoleFilesNotExist();
    try {
        // 0. get local node and helper node info
        nodeMgr.initialize(args);
        // 1. create dirs and files
        if (Config.edit_log_type.equalsIgnoreCase("bdb")) {
            File imageDir = new File(this.imageDir);
            if (!imageDir.exists()) {
                imageDir.mkdirs();
            }
            File imageV2Dir = new File(this.imageDir + "/v2");
            if (!imageV2Dir.exists()) {
                imageV2Dir.mkdirs();
            }
        } else {
            LOG.error("Invalid edit log type: {}", Config.edit_log_type);
            System.exit(-1);
        }
        // init plugin manager
        pluginMgr.init();
        auditEventProcessor.start();
        // 2. get cluster id and role (Observer or Follower)
        nodeMgr.getClusterIdAndRoleOnStartup();
        // 3. Load image first and replay edits
        initJournal();
        loadImage(this.imageDir); // load image file
        // 4. create load and export job label cleaner thread
        createLabelCleaner();
        // 5. create txn timeout checker thread
        createTxnTimeoutChecker();
        // 6. start task cleaner thread
        createTaskCleaner();
        createTableKeeper();
        // 7. init starosAgent
        if (RunMode.isSharedDataMode() && !starOSAgent.init(null)) {
            LOG.error("init starOSAgent failed");
            System.exit(-1);
        }
    } catch (Exception e) {
        try {
            if (isFirstTimeStart) {
                // If it is the first time we start, we remove the cluster ID and role
                // to prevent leaving the system in an inconsistent state.
                nodeMgr.removeClusterIdAndRole();
            }
        } catch (Throwable t) {
            // Keep the cleanup failure attached to the original startup error.
            e.addSuppressed(t);
        }
        throw e;
    }
}
@Test
public void testErrorOccursWhileRemovingClusterIdAndRoleWhenStartAtFirstTime() {
    // When first-time-start cleanup itself fails, the cleanup error must be
    // attached as a suppressed exception of the original startup error.
    final String removeFileErrorMessage = "Failed to delete role and version files.";
    NodeMgr nodeMgr = Mockito.spy(new NodeMgr());
    Mockito.doThrow(new RuntimeException(removeFileErrorMessage)).when(nodeMgr).removeClusterIdAndRole();
    GlobalStateMgr globalStateMgr = new MyGlobalStateMgr(true, nodeMgr);
    Assert.assertTrue(nodeMgr.isVersionAndRoleFilesNotExist());
    try {
        globalStateMgr.initialize(new String[0]);
    } catch (Exception e) {
        Assert.assertTrue(e instanceof UnsupportedOperationException);
        Assert.assertEquals(MyGlobalStateMgr.ERROR_MESSAGE, e.getMessage());
        Throwable[] suppressedExceptions = e.getSuppressed();
        Assert.assertEquals(1, suppressedExceptions.length);
        Assert.assertTrue(suppressedExceptions[0] instanceof RuntimeException);
        Assert.assertEquals(removeFileErrorMessage, suppressedExceptions[0].getMessage());
    }
}
public void setActionPermissionResolver(ActionPermissionResolver actionPermissionResolver) {
    // Inject the resolver used to map actions to required permissions.
    this.actionPermissionResolver = actionPermissionResolver;
}
@Test
public void testSetActionPermissionResolver() {
    // The setter must store the exact resolver instance.
    ActionPermissionResolver resolver = new DestinationActionPermissionResolver();
    filter.setActionPermissionResolver(resolver);
    assertSame(resolver, filter.getActionPermissionResolver());
}
public static Ip6Prefix valueOf(byte[] address, int prefixLength) {
    // Wrap the raw address bytes and prefix length into an Ip6Prefix.
    return new Ip6Prefix(Ip6Address.valueOf(address), prefixLength);
}
@Test(expected = IllegalArgumentException.class)
public void testInvalidValueOfStringTooLongPrefixLengthIPv6() {
    // A prefix length of 129 exceeds the IPv6 maximum of 128.
    Ip6Prefix ipPrefix;
    ipPrefix = Ip6Prefix.valueOf("1111:2222:3333:4444:5555:6666:7777:8888/129");
}
public ProcResult fetchResultByFilter(HashMap<String, Expr> filter, ArrayList<OrderByPair> orderByPairs, LimitElement limitElement) throws AnalysisException { Preconditions.checkNotNull(db); Preconditions.checkNotNull(schemaChangeHandler); List<List<Comparable>> schemaChangeJobInfos = getOptimizeJobInfos(); //where List<List<Comparable>> jobInfos; if (filter == null || filter.size() == 0) { jobInfos = schemaChangeJobInfos; } else { jobInfos = Lists.newArrayList(); for (List<Comparable> infoStr : schemaChangeJobInfos) { if (infoStr.size() != TITLE_NAMES.size()) { LOG.warn("SchemaChangeJobInfos.size() " + schemaChangeJobInfos.size() + " not equal TITLE_NAMES.size() " + TITLE_NAMES.size()); continue; } boolean isNeed = true; for (int i = 0; i < infoStr.size(); i++) { isNeed = filterResult(TITLE_NAMES.get(i), infoStr.get(i), filter); if (!isNeed) { break; } } if (isNeed) { jobInfos.add(infoStr); } } } // order by if (orderByPairs != null) { ListComparator<List<Comparable>> comparator = null; OrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()]; comparator = new ListComparator<List<Comparable>>(orderByPairs.toArray(orderByPairArr)); Collections.sort(jobInfos, comparator); } //limit if (limitElement != null && limitElement.hasLimit()) { int beginIndex = (int) limitElement.getOffset(); int endIndex = (int) (beginIndex + limitElement.getLimit()); if (endIndex > jobInfos.size()) { endIndex = jobInfos.size(); } jobInfos = jobInfos.subList(beginIndex, endIndex); } BaseProcResult result = new BaseProcResult(); result.setNames(TITLE_NAMES); for (List<Comparable> jobInfo : jobInfos) { List<String> oneResult = new ArrayList<String>(jobInfos.size()); for (Comparable column : jobInfo) { oneResult.add(column.toString()); } result.addRow(oneResult); } return result; }
@Test
public void testFetchResultByFilter() throws AnalysisException {
    // Filter by jobId=1, order by first column, limit 1; verify each column.
    HashMap<String, Expr> filter = Maps.newHashMap();
    filter.put("jobId", new BinaryPredicate(BinaryType.EQ, new StringLiteral(), new StringLiteral("1")));
    ArrayList<OrderByPair> orderByPairs = Lists.newArrayList();
    orderByPairs.add(new OrderByPair(0));
    LimitElement limitElement = new LimitElement(1);
    BaseProcResult result = (BaseProcResult) optimizeProcDir.fetchResultByFilter(
        filter, orderByPairs, limitElement);
    List<List<String>> rows = result.getRows();
    List<String> list1 = rows.get(0);
    Assert.assertEquals(list1.size(), OptimizeProcDir.TITLE_NAMES.size());
    // JobId
    Assert.assertEquals("1", list1.get(0));
    // TableName
    Assert.assertEquals("tb1", list1.get(1));
    // CreateTime
    Assert.assertEquals("2020-01-01", list1.get(2));
    // FinishTime
    Assert.assertEquals("2020-01-01", list1.get(3));
    // Operation
    Assert.assertEquals("ALTER", list1.get(4));
    // TransactionId
    Assert.assertEquals("0", list1.get(5));
    // State
    Assert.assertEquals("FINISHED", list1.get(6));
    // Msg
    Assert.assertEquals("", list1.get(7));
    // Progress
    Assert.assertEquals("100", list1.get(8));
    // Timeout
    Assert.assertEquals("10000", list1.get(9));
}
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    // List using the default path delimiter.
    return this.list(directory, listener, String.valueOf(Path.DELIMITER));
}
@Test
public void testListEncodedCharacterFolderVersioned() throws Exception {
    // A folder name containing a space and "+" must round-trip through list().
    final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("us-east-1");
    final Path placeholder = new GoogleStorageDirectoryFeature(session).mkdir(
        new Path(container, String.format("%s +", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.directory)),
        new TransferStatus());
    assertTrue(new GoogleStorageObjectListService(session).list(container, new DisabledListProgressListener()).contains(placeholder));
    new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(placeholder), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
public List<String> toBatchTaskArgumentString() {
    // Serialize these parameters back into CLI arguments for a batch task.
    List<String> res = new ArrayList<>(Arrays.asList(
        CLUSTER_LIMIT_FLAG, String.valueOf(mClusterLimit),
        CLUSTER_START_DELAY_FLAG, mClusterStartDelay,
        BENCH_TIMEOUT, mBenchTimeout,
        START_MS_FLAG, String.valueOf(mStartMs)));
    // Optional value flags are emitted only when set to non-defaults.
    if (!mProfileAgent.isEmpty()) {
        res.add(PROFILE_AGENT);
        res.add(mProfileAgent);
    }
    if (!mId.equals(DEFAULT_TASK_ID)) {
        res.add(ID_FLAG);
        res.add(mId);
    }
    if (!mIndex.equals(DEFAULT_TASK_ID)) {
        res.add(INDEX_FLAG);
        res.add(mIndex);
    }
    // Repeated flag: one --java-opt per option string.
    if (!mJavaOpts.isEmpty()) {
        for (String s : mJavaOpts) {
            res.add(JAVA_OPT_FLAG);
            res.add(s);
        }
    }
    // Boolean flags carry no value.
    if (mCluster) {
        res.add(CLUSTER_FLAG);
    }
    if (mDistributed) {
        res.add(DISTRIBUTED_FLAG);
    }
    if (mInProcess) {
        res.add(IN_PROCESS_FLAG);
    }
    if (mHelp) {
        res.add(HELP_FLAG);
    }
    return res;
}
@Test
public void parseParameterToArgumentWithJavaOPT() {
    // Round-trip: parsed --java-opt values must all reappear in the output.
    ImmutableList<String> options = ImmutableList.of(" TestOption1", " TestOption2",
        " TestOption3", " TestOption4", " TestOption5");
    List<String> inputArgs = new ArrayList<>(Arrays.asList(
        // keys with values
        "--cluster-limit", "4",
        "--cluster-start-delay", "5s",
        "--id", "TestID",
        // keys with no values
        "--cluster"));
    for (String s : options) {
        inputArgs.add("--java-opt");
        inputArgs.add(s);
    }
    JCommander jc = new JCommander(this);
    jc.parse(inputArgs.toArray(new String[0]));
    List<String> outputArgs = mBaseParameter.toBatchTaskArgumentString();
    // validate the --java-opt
    List<String> optionList = new ArrayList<>();
    for (int i = 0; i < outputArgs.size(); i++) {
        if (outputArgs.get(i).equals(JAVA_OPT_FLAG)) {
            optionList.add(outputArgs.get(i + 1));
        }
    }
    assertEquals(optionList.size(), 5);
    for (String option : optionList) {
        assertTrue(options.contains(option));
    }
}
public static Map<String, SearchQueryField> createFromEntityAttributes(final List<EntityAttribute> attributes) {
    // Build db-field mappings for searchable attributes, keyed by attribute
    // id and additionally by lower-cased title when the title has no spaces.
    Map<String, SearchQueryField> dbFieldMapping = new HashMap<>();
    attributes.stream()
        .filter(attr -> Objects.nonNull(attr.searchable()))
        .filter(EntityAttribute::searchable)
        .forEach(attr -> {
            final SearchQueryField searchQueryField = SearchQueryField.create(
                attr.id(),
                attr.type()
            );
            dbFieldMapping.put(attr.id(), searchQueryField);
            // Single-word titles double as a user-friendly search alias.
            if (!attr.title().contains(" ")) {
                dbFieldMapping.put(attr.title().toLowerCase(Locale.ROOT), searchQueryField);
            }
        });
    return dbFieldMapping;
}
@Test
void ignoresUnsearchableFields() {
    // Attributes with searchable unset or false must produce no mappings.
    final Map<String, SearchQueryField> result = DbFieldMappingCreator.createFromEntityAttributes(
        List.of(
            EntityAttribute.builder().id("f1").title("Searchable flag not set").build(),
            EntityAttribute.builder().id("f2").title("Searchable flag set to false").searchable(false).build()
        )
    );
    assertThat(result).isEmpty();
}
public FEELFnResult<List> invoke(@ParameterName("list") List list, @ParameterName("position") BigDecimal position,
        @ParameterName("newItem") Object newItem) {
    // FEEL list replace(list, position, newItem): position is 1-based;
    // negative positions count from the end (-1 is the last element).
    if (list == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", CANNOT_BE_NULL));
    }
    if (position == null) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", CANNOT_BE_NULL));
    }
    int intPosition = position.intValue();
    if (intPosition == 0 || Math.abs(intPosition) > list.size()) {
        String paramProblem = String.format("%s outside valid boundaries (1-%s)", intPosition, list.size());
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", paramProblem));
    }
    // Coerce numbers to FEEL's canonical numeric type before insertion.
    Object e = NumberEvalHelper.coerceNumber(newItem);
    // Work on a copy so the input list is never mutated.
    List toReturn = new ArrayList(list);
    int replacementPosition = intPosition > 0 ? intPosition -1 : list.size() - Math.abs(intPosition);
    toReturn.set(replacementPosition, e);
    return FEELFnResult.ofResult(toReturn);
}
@Test
void invokeReplaceByPositionWithNull() {
    // Replacing position 2 with null must null out the second element.
    List list = getList();
    List expected = new ArrayList<>(list);
    expected.set(1, null);
    FunctionTestUtil.assertResult(listReplaceFunction.invoke(list, BigDecimal.valueOf(2), null), expected);
}
public static ReplaceAll replaceAll(String regex, String replacement) {
    // Convenience overload that compiles the regex for the caller.
    return replaceAll(Pattern.compile(regex), replacement);
}
@Test
@Category(NeedsRunner.class)
public void testReplaceAll() {
    // Every [xyz] match should be replaced in every element.
    PCollection<String> output = p.apply(Create.of("xj", "yj", "zj")).apply(Regex.replaceAll("[xyz]", "new"));
    PAssert.that(output).containsInAnyOrder("newj", "newj", "newj");
    p.run();
}
public boolean shouldIgnoreFailures() {
    // Whether the copy should continue past individual failures.
    return ignoreFailures;
}
@Test
public void testSetIgnoreFailure() {
    // Defaults to false; the builder flag flips it to true.
    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
        new Path("hdfs://localhost:8020/target/"));
    Assert.assertFalse(builder.build().shouldIgnoreFailures());
    builder.withIgnoreFailures(true);
    Assert.assertTrue(builder.build().shouldIgnoreFailures());
}
public void makeReadOnly() {
    // Freeze the key, and the params when present, against mutation.
    key.data().makeReadOnly();
    if (params != null) {
        params.data().makeReadOnly();
    }
}
@Test
public void testMakeReadOnly() {
    // After makeReadOnly, mutating either key or params data must fail.
    DataMap keyDataMap = new DataMap();
    keyDataMap.put("key", "key-value");
    EmptyRecord key = new EmptyRecord(keyDataMap);
    DataMap paramsDataMap = new DataMap();
    paramsDataMap.put("params", "params-value");
    EmptyRecord params = new EmptyRecord(paramsDataMap);
    ComplexResourceKey<EmptyRecord, EmptyRecord> complexResourceKey = new ComplexResourceKey<>(key, params);
    complexResourceKey.makeReadOnly();
    try {
        key.data().put("key", "new key value");
        Assert.fail("Should not be able to update the key after the ComplexResourceKey has been made read only!");
    } catch (UnsupportedOperationException e) {
        // expected
    }
    try {
        params.data().put("params", "new params value");
        Assert.fail("Should not be able to update the params after the ComplexResourceKey has been made read only!");
    } catch (UnsupportedOperationException e) {
        // expected
    }
}
// Sets environment variable `name` to `value` through the native C library.
// NOTE(review): return-value semantics (presumably 0 on success, as for
// POSIX setenv) depend on the JNI binding — confirm against the native code.
public static native int setenv(String name, String value);
@Test
void testSetenv() {
    // Smoke test: the native call must not throw.
    CLibrary.setenv("MY_NAME", "myValue");
}
@Override
public void onProjectsDeleted(Set<DeletedProject> projects) {
    checkNotNull(projects, "projects can't be null");
    // Short-circuit: nothing to broadcast for an empty set.
    if (projects.isEmpty()) {
        return;
    }
    // Fan out to every listener, isolating individual listener failures.
    Arrays.stream(listeners)
        .forEach(safelyCallListener(listener -> listener.onProjectsDeleted(projects)));
}
@Test
public void onProjectsDeleted_has_no_effect_if_set_is_empty() {
    // An empty set must short-circuit before any listener is invoked.
    underTestNoListeners.onProjectsDeleted(Collections.emptySet());
    underTestWithListeners.onProjectsDeleted(Collections.emptySet());
    verifyNoInteractions(listener1, listener2, listener3);
}
static JsonNode renderApplicationConfigs(ApplicationMetadataConfig metaConfig, ApplicationUserdataConfig userConfig) {
    // Render application status as { vespa: {...}, meta: {...}, user: {...} }.
    ObjectNode vespa = jsonMapper.createObjectNode();
    vespa.put("version", Vtag.currentVersion.toString());
    ObjectNode meta = jsonMapper.createObjectNode();
    meta.put("name", metaConfig.name());
    meta.put("user", metaConfig.user());
    meta.put("path", metaConfig.path());
    meta.put("generation", metaConfig.generation());
    meta.put("timestamp", metaConfig.timestamp());
    // Human-readable form of the timestamp.
    meta.put("date", new Date(metaConfig.timestamp()).toString());
    meta.put("checksum", metaConfig.checksum());
    ObjectNode user = jsonMapper.createObjectNode();
    user.put("version", userConfig.version());
    ObjectNode application = jsonMapper.createObjectNode();
    application.set("vespa", vespa);
    application.set("meta", meta);
    application.set("user", user);
    return application;
}
@Test
void application_configs_are_rendered() {
    // All meta/user config values must appear in the rendered JSON.
    ApplicationMetadataConfig metaConfig = new ApplicationMetadataConfig(
        new ApplicationMetadataConfig.Builder()
            .checksum("abc")
            .name("app")
            .path("/a/b/c")
            .timestamp(3000)
            .user("donald"));
    ApplicationUserdataConfig userConfig = new ApplicationUserdataConfig(
        new ApplicationUserdataConfig.Builder()
            .version("v1"));
    String json = ApplicationStatusHandler.renderApplicationConfigs(metaConfig, userConfig).toString();
    assertTrue(json.contains("version"));
    assertTrue(json.contains("meta"));
    assertTrue(json.contains("abc"));
    assertTrue(json.contains("app"));
    assertTrue(json.contains("/a/b/c"));
    assertTrue(json.contains("3000"));
    assertTrue(json.contains("donald"));
    assertTrue(json.contains("v1"));
}
@Override
public Select select() {
    // Expose the parsed SELECT model.
    return select;
}
@Test
void test() {
    QueryAnalyzerImpl analyzer = new QueryAnalyzerImpl(database, "select name n from s_test t");
    // assertNotNull(actual, "t") only checked non-null and used "t"/"s_test"
    // as the failure MESSAGE — it never compared values. Assert the expected
    // alias and table name explicitly instead.
    assertEquals("t", analyzer.select().table.alias);
    assertEquals("s_test", analyzer.select().table.metadata.getName());
    // The projected column alias "n" must be resolvable.
    assertNotNull(analyzer.select().getColumns().get("n"));
}
static JavaType constructType(Type type) {
    // Wrap any construction failure in a domain-specific exception that
    // carries the offending type.
    try {
        return constructTypeInner(type);
    } catch (Exception e) {
        throw new InvalidDataTableTypeException(type, e);
    }
}
@Test
void unknown_types_are_other_type() {
    // Unrecognized types fall back to OtherType, preserving the original.
    JavaType javaType = TypeFactory.constructType(UNKNOWN_TYPE);
    assertThat(javaType.getClass(), equalTo(TypeFactory.OtherType.class));
    assertThat(javaType.getOriginal(), is(UNKNOWN_TYPE));
}
public static List<ExportPackages.Export> parseExports(String exportAttribute) {
    // Parse an OSGi Export-Package style attribute into Export entries.
    ParsingContext p = new ParsingContext(exportAttribute.trim());
    List<ExportPackages.Export> exports = parseExportPackage(p);
    if (exports.isEmpty()) {
        p.fail("Expected a list of exports");
    } else if (!p.atEnd()) {
        // Trailing unparsed input means the attribute was malformed.
        p.fail("Exports not fully processed");
    }
    return exports;
}
@Test
void require_that_version_is_parsed_correctly() {
    // The version directive must be captured as a parameter of the export.
    List<Export> exports = ExportPackageParser.parseExports("com.yahoo.sample.exported.package;version=\"1.2.3.sample\"");
    assertEquals(1, exports.size());
    Export export = exports.get(0);
    assertTrue(export.getPackageNames().contains("com.yahoo.sample.exported.package"));
    assertTrue(export.getParameters().contains(versionParameter));
}
public static boolean isNotEmpty(Collection collection) {
    // Negation of isEmpty; null-safety follows isEmpty's contract.
    return !isEmpty(collection);
}
@Test
public void testIsNotEmpty() {
    // Empty list -> false; non-empty list -> true.
    assertFalse(isNotEmpty(Collections.emptyList()));
    assertTrue(isNotEmpty(singletonList(23)));
}
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    // Two counter models are equal when all identifying fields match.
    final P4CounterModel other = (P4CounterModel) obj;
    return Objects.equals(this.id, other.id)
        && Objects.equals(this.counterType, other.counterType)
        && Objects.equals(this.unit, other.unit)
        && Objects.equals(this.table, other.table)
        && Objects.equals(this.size, other.size);
}
@Test
public void testEquals() {
    // Equality groups: one equal pair plus five distinct models.
    new EqualsTester()
        .addEqualityGroup(counterModel, sameAsCounterModel)
        .addEqualityGroup(counterModel2)
        .addEqualityGroup(counterModel3)
        .addEqualityGroup(counterModel4)
        .addEqualityGroup(counterModel5)
        .testEquals();
}
public static String getInputParameters(Properties properties) {
    // Render client init properties for logging: full dump when
    // logAllProperties is set (debugging only), otherwise just key ones.
    boolean logAllParameters = ConvertUtils.toBoolean(properties.getProperty(PropertyKeyConst.LOG_ALL_PROPERTIES), false);
    StringBuilder result = new StringBuilder();
    if (logAllParameters) {
        result.append(
                "Log nacos client init properties with Full mode, This mode is only used for debugging and troubleshooting. ");
        result.append(
                "Please close this mode by removing properties `logAllProperties` after finishing debug or troubleshoot.\n");
        result.append("Nacos client all init properties: \n");
        properties.forEach(
                (key, value) -> result.append("\t").append(key.toString()).append("=").append(value.toString())
                        .append("\n"));
    } else {
        result.append("Nacos client key init properties: \n");
        // Only the connection/credential-related keys are logged by default.
        appendKeyParameters(result, properties, PropertyKeyConst.SERVER_ADDR);
        appendKeyParameters(result, properties, PropertyKeyConst.NAMESPACE);
        appendKeyParameters(result, properties, PropertyKeyConst.ENDPOINT);
        appendKeyParameters(result, properties, PropertyKeyConst.ENDPOINT_PORT);
        appendKeyParameters(result, properties, PropertyKeyConst.USERNAME);
        appendKeyParameters(result, properties, PropertyKeyConst.PASSWORD);
        appendKeyParameters(result, properties, PropertyKeyConst.ACCESS_KEY);
        appendKeyParameters(result, properties, PropertyKeyConst.SECRET_KEY);
        appendKeyParameters(result, properties, PropertyKeyConst.RAM_ROLE_NAME);
        appendKeyParameters(result, properties, PropertyKeyConst.SIGNATURE_REGION_ID);
    }
    return result.toString();
}
@Test
void testGetInputParametersWithFullMode() {
    // Full mode must dump every property, including env-derived ones.
    Properties properties = new Properties();
    properties.setProperty("testKey", "testValue");
    properties.setProperty(PropertyKeyConst.LOG_ALL_PROPERTIES, "true");
    NacosClientProperties clientProperties = NacosClientProperties.PROTOTYPE.derive(properties);
    String actual = ParamUtil.getInputParameters(clientProperties.asProperties());
    assertTrue(actual.startsWith("Log nacos client init properties with Full mode, This mode is only used for debugging and troubleshooting."));
    assertTrue(actual.contains("\ttestKey=testValue\n"));
    Properties envProperties = clientProperties.getProperties(SourceType.ENV);
    String envCaseKey = envProperties.stringPropertyNames().iterator().next();
    String envCaseValue = envProperties.getProperty(envCaseKey);
    assertTrue(actual.contains(String.format("\t%s=%s\n", envCaseKey, envCaseValue)));
}
/**
 * Generates one XML "values" resource file per target file name, grouping
 * resource entries by {@code getFileName(..)} and wrapping each group in a
 * {@code <resources>} element.
 *
 * @param args jadx arguments used to configure the code writers
 * @return sorted list of generated text resource containers
 */
public List<ResContainer> makeResourcesXml(JadxArgs args) {
    Map<String, ICodeWriter> contMap = new HashMap<>();
    for (ResourceEntry ri : resStorage.getResources()) {
        // Some resource types are intentionally not emitted as XML.
        if (SKIP_RES_TYPES.contains(ri.getTypeName())) {
            continue;
        }
        String fn = getFileName(ri);
        ICodeWriter cw = contMap.get(fn);
        if (cw == null) {
            // Lazily open a writer per output file and emit the XML prolog.
            cw = new SimpleCodeWriter(args);
            cw.add("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
            cw.startLine("<resources>");
            cw.incIndent();
            contMap.put(fn, cw);
        }
        addValue(cw, ri);
    }
    List<ResContainer> files = new ArrayList<>(contMap.size());
    for (Map.Entry<String, ICodeWriter> entry : contMap.entrySet()) {
        String fileName = entry.getKey();
        ICodeWriter content = entry.getValue();
        // Close the root element opened above before finalizing the writer.
        content.decIndent();
        content.startLine("</resources>");
        ICodeInfo codeInfo = content.finish();
        files.add(ResContainer.textResource(fileName, codeInfo));
    }
    Collections.sort(files);
    return files;
}
@Test
void testSimpleAttr() {
    // A single attr resource with a dimension format flag should render as
    // res/values/attrs.xml containing one <attr> element.
    ResourceStorage resStorage = new ResourceStorage();
    ResourceEntry re = new ResourceEntry(2130903103, "jadx.gui.app", "attr", "size", "");
    re.setNamedValues(Lists.list(new RawNamedValue(16777216, new RawValue(16, 64))));
    resStorage.add(re);

    ValuesParser vp = new ValuesParser(null, resStorage.getResourcesNames());
    ResXmlGen resXmlGen = new ResXmlGen(resStorage, vp);

    List<ResContainer> files = resXmlGen.makeResourcesXml(args);
    assertThat(files).hasSize(1);
    assertThat(files.get(0).getName()).isEqualTo("res/values/attrs.xml");

    String input = files.get(0).getText().toString();
    assertThat(input).isEqualTo("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"
            + "<resources>\n"
            + "    <attr name=\"size\" format=\"dimension\">\n"
            + "    </attr>\n"
            + "</resources>");
}
/**
 * Static factory returning a fresh {@link Builder} with default settings.
 *
 * @return a new builder instance
 */
public static Builder custom() {
    return new Builder();
}
@Test
public void builderTimeoutIsNull() {
    // JUnit4 ExpectedException rule: passing a null timeout must fail fast
    // with an NPE carrying the documented message.
    exception.expect(NullPointerException.class);
    exception.expectMessage(TIMEOUT_DURATION_MUST_NOT_BE_NULL);
    TimeLimiterConfig.custom()
            .timeoutDuration(null);
}
static MapKeyLoader.Role assignRole(boolean isPartitionOwner, boolean isMapNamePartition, boolean isMapNamePartitionFirstReplica) { if (isMapNamePartition) { if (isPartitionOwner) { // map-name partition owner is the SENDER return MapKeyLoader.Role.SENDER; } else { if (isMapNamePartitionFirstReplica) { // first replica of the map-name partition is the SENDER_BACKUP return MapKeyLoader.Role.SENDER_BACKUP; } else { // other replicas of the map-name partition do not have a role return MapKeyLoader.Role.NONE; } } } else { // ordinary partition owners are RECEIVERs, otherwise no role return isPartitionOwner ? MapKeyLoader.Role.RECEIVER : MapKeyLoader.Role.NONE; } }
@Test
public void assignRole_SENDER_BACKUP() {
    // The first replica of the map-name partition (not the owner) must be
    // assigned the SENDER_BACKUP role.
    boolean isPartitionOwner = false;
    boolean isMapNamePartition = true;
    boolean isMapNamePartitionFirstReplica = true;

    Role role = MapKeyLoaderUtil.assignRole(isPartitionOwner, isMapNamePartition, isMapNamePartitionFirstReplica);

    assertEquals(SENDER_BACKUP, role);
}
/**
 * Allocates the next free meter cell id for the given device and scope.
 * <p>
 * Reuses a previously freed id when available; otherwise draws a fresh id
 * from a monotonically increasing per-table counter, bounded by the meter
 * features (or, for OpenFlow devices without features, by queryMaxMeters).
 * Returns {@code null} when allocation is impossible (user-defined index
 * mode, no capacity information, or the id space is exhausted).
 */
@Override
public MeterCellId allocateMeterId(DeviceId deviceId, MeterScope meterScope) {
    // In user-defined index mode callers choose their own indices; dynamic
    // allocation is disabled.
    if (userDefinedIndexMode) {
        log.warn("Unable to allocate meter id when user defined index mode is enabled");
        return null;
    }
    MeterTableKey meterTableKey = MeterTableKey.key(deviceId, meterScope);
    MeterCellId meterCellId;
    long id;
    // First, search for reusable key
    meterCellId = firstReusableMeterId(meterTableKey);
    if (meterCellId != null) {
        return meterCellId;
    }
    // If there was no reusable meter id we have to generate a new value
    // using start and end index as lower and upper bound respectively.
    long startIndex = getStartIndex(meterTableKey);
    long endIndex = getEndIndex(meterTableKey);
    // If the device does not give us MeterFeatures fallback to queryMeters
    if (startIndex == -1L || endIndex == -1L) {
        // Only meaningful for OpenFlow today
        long maxMeters = queryMaxMeters(deviceId);
        if (maxMeters == 0L) {
            return null;
        } else {
            // OpenFlow meter index starts from 1, ends with max
            startIndex = 1L;
            endIndex = maxMeters;
        }
    }
    // Atomically advance the counter until it reaches the valid range;
    // exceeding endIndex means the table is exhausted.
    do {
        id = meterIdGenerators.getAndIncrement(meterTableKey);
    } while (id < startIndex);
    if (id > endIndex) {
        return null;
    }
    // For backward compatibility if we are using global scope,
    // return a MeterId, otherwise we create a PiMeterCellId
    if (meterScope.isGlobal()) {
        return MeterId.meterId(id);
    } else {
        return PiMeterCellId.ofIndirect(PiMeterId.of(meterScope.id()), id);
    }
}
@Test
public void testMaxMeterError() {
    // With a store sized for three meters, the fourth allocation must fail
    // (null) because the id space is exhausted.
    initMeterStore(false);
    assertThat(mid1, is(meterStore.allocateMeterId(did1, MeterScope.globalScope())));
    assertThat(mid2, is(meterStore.allocateMeterId(did1, MeterScope.globalScope())));
    assertThat(mid3, is(meterStore.allocateMeterId(did1, MeterScope.globalScope())));
    assertNull(meterStore.allocateMeterId(did1, MeterScope.globalScope()));
}
/**
 * Shuts the executor down immediately, notifying every registered
 * {@link ShutdownAwarePlugin} before and after the underlying shutdown.
 *
 * @return the tasks that never commenced execution
 */
@Override
public List<Runnable> shutdownNow() {
    Collection<ShutdownAwarePlugin> plugins = threadPoolPluginManager.getShutdownAwarePluginList();
    // Pre-shutdown callbacks run before any queued task is drained.
    for (ShutdownAwarePlugin plugin : plugins) {
        plugin.beforeShutdown(this);
    }
    List<Runnable> pendingTasks = super.shutdownNow();
    // Post-shutdown callbacks receive the drained, never-started tasks.
    for (ShutdownAwarePlugin plugin : plugins) {
        plugin.afterShutdown(this, pendingTasks);
    }
    return pendingTasks;
}
@Test
public void testInvokeTestShutdownAwarePluginWhenShutdownNow() throws InterruptedException {
    // The plugin counts its lifecycle callbacks; after shutdownNow() and full
    // termination it must have been invoked three times in total.
    TestShutdownAwarePlugin plugin = new TestShutdownAwarePlugin();
    executor.register(plugin);
    executor.shutdownNow();
    // Only assert if termination completed within the timeout to avoid flakiness.
    if (executor.awaitTermination(500L, TimeUnit.MILLISECONDS)) {
        Assert.assertEquals(3, plugin.getInvokeCount().get());
    }
}
/**
 * Scans the configured packages (honoring exclusions) and builds the DB
 * entity catalog, logging the entity count and the scan duration.
 *
 * @return the populated catalog
 */
@Override
public DbEntitiesCatalog get() {
    final Stopwatch stopwatch = Stopwatch.createStarted();
    final DbEntitiesCatalog catalog = scan(packagesToScan, packagesToExclude, chainingClassLoader);
    stopwatch.stop();
    LOG.info("{} entities have been scanned and added to DB Entity Catalog, it took {}", catalog.size(), stopwatch);
    return catalog;
}
@Test
void testScansEntitiesWithDefaultTitleFieldProperly() {
    // Scanning the index-set package must register IndexSetConfig with the
    // default "title" field in the catalog entry.
    DbEntitiesScanner scanner = new DbEntitiesScanner(new String[]{"org.graylog2.indexer.indexset"}, new String[]{});
    final DbEntitiesCatalog dbEntitiesCatalog = scanner.get();
    final DbEntityCatalogEntry entryByCollectionName = dbEntitiesCatalog.getByCollectionName("index_sets").get();
    assertEquals(new DbEntityCatalogEntry("index_sets", "title", IndexSetConfig.class, INDEXSETS_READ),
            entryByCollectionName);
}
/**
 * Walks the pipeline topologically and records, for every PTransform node,
 * its input and output PValue ids joined with {@code TRANSFORM_IO_MAP_DELIMITER}.
 *
 * @param pipeline the Beam pipeline to inspect
 * @param ctx      config context; its current-transform pointer is set and
 *                 cleared around each node visit
 * @return map of transform full name to (inputs, outputs) string pair
 */
@VisibleForTesting
static Map<String, Map.Entry<String, String>> buildTransformIOMap(
    Pipeline pipeline, ConfigContext ctx) {
  final Map<String, Map.Entry<String, String>> pTransformToInputOutputMap = new HashMap<>();
  final SamzaPipelineTranslator.TransformVisitorFn configFn =
      new SamzaPipelineTranslator.TransformVisitorFn() {
        @Override
        public <T extends PTransform<?, ?>> void apply(
            T transform,
            TransformHierarchy.Node node,
            Pipeline pipeline,
            TransformTranslator<T> translator) {
          // The context must point at the node while its IO is resolved.
          ctx.setCurrentTransform(node.toAppliedPTransform(pipeline));

          List<String> inputs = getIOPValueList(node.getInputs()).get();
          List<String> outputs = getIOPValueList(node.getOutputs()).get();

          pTransformToInputOutputMap.put(
              node.getFullName(),
              new AbstractMap.SimpleEntry<>(
                  String.join(TRANSFORM_IO_MAP_DELIMITER, inputs),
                  String.join(TRANSFORM_IO_MAP_DELIMITER, outputs)));
          ctx.clearCurrentTransform();
        }
      };
  final SamzaPipelineTranslator.SamzaPipelineVisitor visitor =
      new SamzaPipelineTranslator.SamzaPipelineVisitor(configFn);
  pipeline.traverseTopologically(visitor);
  return pTransformToInputOutputMap;
}
@Test
public void testBeamTransformIOConfigGen() {
  // Builds a two-transform pipeline (Impulse -> Filter) and checks the
  // generated transform-to-IO map wires outputs to downstream inputs.
  SamzaPipelineOptions options = PipelineOptionsFactory.create().as(SamzaPipelineOptions.class);
  options.setJobName("TestEnvConfig");
  options.setRunner(SamzaRunner.class);
  options.setSamzaExecutionEnvironment(SamzaExecutionEnvironment.LOCAL);

  Pipeline pipeline = Pipeline.create(options);
  pipeline.apply(Impulse.create()).apply(Filter.by(Objects::nonNull));

  pipeline.replaceAll(SamzaTransformOverrides.getDefaultOverrides());

  final Map<PValue, String> idMap = PViewToIdMapper.buildIdMap(pipeline);
  final Set<String> nonUniqueStateIds = StateIdParser.scan(pipeline);
  final ConfigContext configCtx = new ConfigContext(idMap, nonUniqueStateIds, options);
  final ConfigBuilder configBuilder = new ConfigBuilder(options);
  SamzaPipelineTranslator.createConfig(pipeline, configCtx, configBuilder);

  final Map<String, Map.Entry<String, String>> transformInputOutput =
      PipelineJsonRenderer.buildTransformIOMap(pipeline, configCtx);
  assertEquals(2, transformInputOutput.size());
  assertEquals("", transformInputOutput.get("Impulse").getKey()); // no input to impulse
  assertEquals(
      "Impulse.out", transformInputOutput.get("Impulse").getValue()); // PValue for to Impulse.output
  // Input to Filter is PValue Output from Impulse
  assertEquals(
      "Impulse.out",
      transformInputOutput.get("Filter/ParDo(Anonymous)/ParMultiDo(Anonymous)").getKey());
  // output PValue of filter
  assertEquals(
      "Filter/ParDo(Anonymous)/ParMultiDo(Anonymous).output",
      transformInputOutput.get("Filter/ParDo(Anonymous)/ParMultiDo(Anonymous)").getValue());
}
/**
 * Validates a prospective function name: it must be non-blank, must not be a
 * Java or KSQL reserved word (case-insensitively), and must be a valid Java
 * identifier.
 *
 * @param functionName the candidate name; may be {@code null}
 * @return {@code true} when the name is acceptable
 */
@Override
public boolean test(final String functionName) {
    if (functionName == null || functionName.trim().isEmpty()) {
        return false;
    }
    // Locale.ROOT makes the case-folding locale-independent: with the default
    // locale (e.g. Turkish, where 'I' lowercases to dotless 'ı') a reserved
    // word such as "IF" could slip past the contains() checks. Also hoists
    // the previously duplicated toLowerCase() call.
    final String lowerCased = functionName.toLowerCase(java.util.Locale.ROOT);
    if (JAVA_RESERVED_WORDS.contains(lowerCased) || KSQL_RESERVED_WORDS.contains(lowerCased)) {
        return false;
    }
    return isValidJavaIdentifier(functionName);
}
@Test
public void shouldNotAllowInvalidJavaIdentifiers() {
    // Names with an illegal first character or containing non-identifier
    // characters must be rejected.
    assertFalse(validator.test("@foo"));
    assertFalse(validator.test("1foo"));
    assertFalse(validator.test("^foo"));
    assertFalse(validator.test("&foo"));
    assertFalse(validator.test("%foo"));
    assertFalse(validator.test("+foo"));
    assertFalse(validator.test("-foo"));
    assertFalse(validator.test("#foo"));
    assertFalse(validator.test("f1@%$"));
}
/**
 * Loads every stored grok pattern.
 * <p>
 * The cursor is closed by try-with-resources; the patterns are copied into
 * an immutable set before the cursor goes out of scope.
 *
 * @return all grok patterns in the collection
 */
@Override
public Set<GrokPattern> loadAll() {
    try (DBCursor<GrokPattern> grokPatterns = dbCollection.find()) {
        return ImmutableSet.copyOf((Iterator<GrokPattern>) grokPatterns);
    }
}
@Test
@MongoDBFixtures("MongoDbGrokPatternServiceTest.json")
public void loadAll() {
    // The fixture seeds three patterns; loadAll() must return exactly those.
    final Set<GrokPattern> grokPatterns = service.loadAll();

    assertThat(grokPatterns)
            .hasSize(3)
            .contains(
                    GrokPattern.create("56250da2d400000000000001", "Test1", "[a-z]+", null),
                    GrokPattern.create("56250da2d400000000000002", "Test2", "[a-z]+", null),
                    GrokPattern.create("56250da2d400000000000003", "Test3", "%{Test1}-%{Test2}", "56250da2deadbeefcafebabe"));
}
/**
 * Builds a JRT client config request (protocol V3) from a subscription,
 * using the factory-wide compression type and Vespa version.
 *
 * @param sub the config subscription to build the request for
 * @return a new client config request
 */
public static <T extends ConfigInstance> JRTClientConfigRequest createFromSub(JRTConfigSubscription<T> sub) {
    // TODO: Get trace from caller
    return JRTClientConfigRequestV3.createFromSub(sub, Trace.createNew(), compressionType, getVespaVersion());
}
@Test
public void testCreateFromSub() {
    // A request created from a subscription must carry the default Vespa version.
    Class<FunctionTestConfig> clazz = FunctionTestConfig.class;
    final String configId = "foo";
    TimingValues timingValues = new TimingValues();
    JRTConfigSubscription<FunctionTestConfig> sub = new JRTConfigSubscription<>(new ConfigKey<>(clazz, configId),
            new JRTConfigRequester(new JRTConnectionPool(new ConfigSourceSet("tcp/localhost:12345")), timingValues),
            timingValues);
    JRTClientConfigRequest request = JRTConfigRequestFactory.createFromSub(sub);
    assertThat(request.getVespaVersion().get(), is(defaultVespaVersion));
}
/**
 * Builds the statistics node path for a database: the data-node root path
 * followed by "/" and the database name.
 *
 * @param databaseName database name
 * @return database name path
 */
public static String getDatabaseNamePath(final String databaseName) {
    return getShardingSphereDataNodePath() + "/" + databaseName;
}
@Test
void assertGetDatabaseNamePath() {
    // The database path is the statistics root plus the database name.
    assertThat(ShardingSphereDataNode.getDatabaseNamePath("db_path"), is("/statistics/databases/db_path"));
}
/**
 * Always throws: this headers implementation is immutable.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public T set(K name, V value) {
    throw new UnsupportedOperationException("read only");
}
@Test
public void testSet() {
    // Mutating a read-only headers instance must throw.
    assertThrows(UnsupportedOperationException.class, new Executable() {
        @Override
        public void execute() {
            HEADERS.set(new TestEmptyHeaders());
        }
    });
}
/**
 * Creates a {@code MapElements} transform from an {@link InferableFunction},
 * deriving input and output type descriptors from the function itself.
 *
 * @param fn the mapping function
 * @return the configured transform
 */
public static <InputT, OutputT> MapElements<InputT, OutputT> via(
    final InferableFunction<InputT, OutputT> fn) {
  return new MapElements<>(fn, fn.getInputTypeDescriptor(), fn.getOutputTypeDescriptor());
}
@Test
@Category(NeedsRunner.class)
public void testMapInferableFunction() throws Exception {
  // Mapping via an InferableFunction should negate each element.
  PCollection<Integer> output =
      pipeline
          .apply(Create.of(1, 2, 3))
          .apply(
              MapElements.via(
                  new InferableFunction<Integer, Integer>() {
                    @Override
                    public Integer apply(Integer input) throws Exception {
                      return -input;
                    }
                  }));

  PAssert.that(output).containsInAnyOrder(-2, -1, -3);
  pipeline.run();
}
/**
 * Creates a mode with no permission bits set for owner, group, or other
 * (octal 0000).
 *
 * @return a no-access mode
 */
public static Mode createNoAccess() {
    return new Mode();
}
@Test
public void createNoAccess() {
    // All three permission classes must be NONE, i.e. octal 0000.
    Mode mode = Mode.createNoAccess();
    assertEquals(Mode.Bits.NONE, mode.getOwnerBits());
    assertEquals(Mode.Bits.NONE, mode.getGroupBits());
    assertEquals(Mode.Bits.NONE, mode.getOtherBits());
    assertEquals(0000, mode.toShort());
}
/**
 * Instantiates the plugin described by the given option, wrapping any
 * I/O or URI failure in a {@link CucumberException}.
 *
 * @param plugin plugin option (class, spec string, optional argument)
 * @return the instantiated plugin
 */
Plugin create(Options.Plugin plugin) {
    try {
        return instantiate(plugin.pluginString(), plugin.pluginClass(), plugin.argument());
    } catch (IOException | URISyntaxException e) {
        throw new CucumberException(e);
    }
}
@Test
void fails_to_instantiate_plugin_that_wants_too_much() {
    // A plugin whose only constructor takes an unsupported parameter type
    // must fail with a descriptive CucumberException.
    PluginOption option = parse(WantsTooMuch.class.getName());

    Executable testMethod = () -> fc.create(option);
    CucumberException exception = assertThrows(CucumberException.class, testMethod);
    assertThat(exception.getMessage(), is(equalTo(
        "class io.cucumber.core.plugin.PluginFactoryTest$WantsTooMuch must have at least one empty constructor or a constructor that declares a single parameter of one of: [class java.lang.String, class java.io.File, class java.net.URI, class java.net.URL, class java.io.OutputStream, interface java.lang.Appendable]")));
}
/**
 * Returns the next expression in the memo that matches the pattern, or
 * {@code null} when no further match exists.
 * <p>
 * Matching is driven by {@code groupExpressionIndex}, a per-group-node
 * cursor list: each attempt bumps the cursor of the deepest node and
 * re-runs {@code match}; the loop ends when a match is found or the cursor
 * list has collapsed back to a single entry (search space exhausted).
 */
public OptExpression next() {
    // For logic scan to physical scan, we only need to match once
    if (isPatternWithoutChildren && groupExpressionIndex.get(0) > 0) {
        return null;
    }

    OptExpression expression;
    do {
        // Reset the per-attempt trace position before each match.
        this.groupTraceKey = 0;

        // Match with the next groupExpression of the last group node
        int lastNode = this.groupExpressionIndex.size() - 1;
        int lastNodeIndex = this.groupExpressionIndex.get(lastNode);
        this.groupExpressionIndex.set(lastNode, lastNodeIndex + 1);

        expression = match(pattern, groupExpression);
        // NOTE(review): match() presumably trims groupExpressionIndex as it
        // backtracks — confirm against Binder.match before modifying.
    } while (expression == null && this.groupExpressionIndex.size() != 1);
    nextIdx++;
    return expression;
}
@Test
public void testBinderMultiDepth2Repeat1() {
    // A MULTI_LEAF pattern under a join should bind both scan children once,
    // then yield no further matches.
    OptExpression expr1 = OptExpression.create(new MockOperator(OperatorType.LOGICAL_JOIN, 0),
            OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 1)),
            OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 2)));
    OptExpression expr2 = OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 3));
    OptExpression expr3 = OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 4));

    Memo memo = new Memo();
    GroupExpression ge = memo.init(expr1);
    memo.copyIn(ge.inputAt(0), expr2);
    memo.copyIn(ge.inputAt(1), expr3);

    Pattern pattern = Pattern.create(OperatorType.LOGICAL_JOIN)
            .addChildren(Pattern.create(OperatorType.PATTERN_MULTI_LEAF));

    Binder binder = new Binder(pattern, ge);
    OptExpression result;

    result = binder.next();
    assertEquals(OperatorType.LOGICAL_JOIN, result.getOp().getOpType());
    assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(0).getOp().getOpType());
    assertEquals(1, ((MockOperator) result.inputAt(0).getOp()).getValue());
    assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(1).getOp().getOpType());
    assertEquals(2, ((MockOperator) result.inputAt(1).getOp()).getValue());

    assertNull(binder.next());
}
/**
 * Creates an L3 modification instruction that rewrites the ARP operation code.
 *
 * @param op the ARP opcode to set
 * @return the instruction
 */
public static L3ModificationInstruction modL3ArpOp(short op) {
    return new ModArpOpInstruction(L3SubType.ARP_OP, op);
}
@Test
public void testModArpModL3ArpOpMethod() {
    // The produced instruction must carry the ARP_OP subtype and the opcode.
    final Instruction instruction = Instructions.modL3ArpOp((short) 1);
    final L3ModificationInstruction.ModArpOpInstruction modArpEthInstruction =
            checkAndConvert(instruction,
                            Instruction.Type.L3MODIFICATION,
                            L3ModificationInstruction.ModArpOpInstruction.class);
    assertThat(modArpEthInstruction.subtype(),
               is(L3ModificationInstruction.L3SubType.ARP_OP));
    assertThat(modArpEthInstruction.op(), is(1L));
}
/**
 * Creates a native reader from a cloud spec, validating that the coder and
 * execution context are present before delegating to {@code createImpl}.
 *
 * @throws Exception if the underlying factory fails
 */
@Override
public NativeReader<?> create(
    CloudObject spec,
    @Nullable Coder<?> coder,
    @Nullable PipelineOptions options,
    @Nullable DataflowExecutionContext executionContext,
    DataflowOperationContext operationContext)
    throws Exception {
  // Despite the @Nullable annotations, these two are required at runtime.
  coder = checkArgumentNotNull(coder);
  executionContext = checkArgumentNotNull(executionContext);
  return createImpl(spec, coder, options, executionContext, operationContext);
}
@Test
public void testFactoryReturnsCachedInstance() throws Exception {
  // Creating a reader twice for the same file must return the same cached
  // instance; a different file must yield a different instance.
  Coder<?> coder =
      WindowedValue.getFullCoder(
          IsmRecordCoder.of(
              1, 0, ImmutableList.<Coder<?>>of(StringUtf8Coder.of()), VarLongCoder.of()),
          GlobalWindow.Coder.INSTANCE);

  String tmpFile = tmpFolder.newFile().getPath();
  String anotherTmpFile = tmpFolder.newFile().getPath();

  @SuppressWarnings("rawtypes")
  IsmReader<?> ismReader =
      (IsmReader)
          new IsmReaderFactory()
              .create(
                  createSpecForFilename(tmpFile), coder, options, executionContext, operationContext);
  assertSame(
      ismReader,
      new IsmReaderFactory()
          .create(
              createSpecForFilename(tmpFile), coder, options, executionContext, operationContext));
  assertNotSame(
      ismReader,
      new IsmReaderFactory()
          .create(
              createSpecForFilename(anotherTmpFile),
              coder,
              options,
              executionContext,
              operationContext));
}
/**
 * Returns an iterator over the attached appenders.
 * <p>
 * Note: this is a live iterator over the internal list, not a snapshot;
 * concurrent attach/detach may affect iteration.
 *
 * @return iterator over the attached appenders
 */
public Iterator<Appender<E>> iteratorForAppenders() {
    return appenderList.iterator();
}
@Test
public void testIteratorForAppenders() throws Exception {
    // After attaching two appenders, the iterator must yield exactly those two.
    NOPAppender<TestEvent> ta = new NOPAppender<TestEvent>();
    ta.start();
    aai.addAppender(ta);
    NOPAppender<TestEvent> tab = new NOPAppender<TestEvent>();
    tab.setName("test");
    tab.start();
    aai.addAppender(tab);

    Iterator<Appender<TestEvent>> iter = aai.iteratorForAppenders();
    int size = 0;
    while (iter.hasNext()) {
        ++size;
        Appender<TestEvent> app = iter.next();
        assertTrue("Bad Appender", app == ta || app == tab);
    }
    assertTrue("Incorrect number of appenders", size == 2);
}
/**
 * Fails if the subject matches the given regex.
 * <p>
 * A {@code null} subject fails with an "expected a string that does not
 * match" message rather than throwing, matching Truth's failure style.
 *
 * @param regex the regex the subject must not match; must not be null
 */
public void doesNotMatch(@Nullable String regex) {
    checkNotNull(regex);
    if (actual == null) {
        failWithActual("expected a string that does not match", regex);
    } else if (actual.matches(regex)) {
        failWithActual("expected not to match", regex);
    }
}
@Test
@GwtIncompatible("Pattern")
public void stringDoesNotMatchPatternFailNull() {
    // A null subject must fail (not NPE) with the "does not match" message.
    expectFailureWhenTestingThat(null).doesNotMatch(Pattern.compile(".*aaa.*"));
    assertFailureValue("expected a string that does not match", ".*aaa.*");
}
/**
 * Creates an OpenStack port after validating that the port, its id, and its
 * network id are present, then persists it and logs the creation.
 *
 * @param osPort the port to create; must be non-null with non-empty ids
 */
@Override
public void createPort(Port osPort) {
    checkNotNull(osPort, ERR_NULL_PORT);
    checkArgument(!Strings.isNullOrEmpty(osPort.getId()), ERR_NULL_PORT_ID);
    checkArgument(!Strings.isNullOrEmpty(osPort.getNetworkId()), ERR_NULL_PORT_NET_ID);

    osNetworkStore.createPort(osPort);
    log.info(String.format(MSG_PORT, osPort.getId(), MSG_CREATED));
}
// A null port must be rejected with an NPE before any store interaction.
@Test(expected = NullPointerException.class)
public void testCreateNullPort() {
    target.createPort(null);
}
/**
 * Returns the metadata report for the given URL, creating it on first use.
 * <p>
 * Reports are cached per service key (URL without export/refer params,
 * keyed with namespace). Creation is guarded by a lock with a second
 * cache check inside, so only one instance is ever created per key.
 * With {@code check=false} (or port 0) creation failures are tolerated and
 * logged; otherwise they propagate.
 */
@Override
public MetadataReport getMetadataReport(URL url) {
    // Normalize the URL so functionally-identical URLs share one report.
    url = url.setPath(MetadataReport.class.getName()).removeParameters(EXPORT_KEY, REFER_KEY);
    String key = url.toServiceString(NAMESPACE_KEY);

    MetadataReport metadataReport = serviceStoreMap.get(key);
    if (metadataReport != null) {
        return metadataReport;
    }

    // Lock the metadata access process to ensure a single instance of the metadata instance
    lock.lock();
    try {
        // Re-check under the lock (double-checked creation).
        metadataReport = serviceStoreMap.get(key);
        if (metadataReport != null) {
            return metadataReport;
        }
        boolean check = url.getParameter(CHECK_KEY, true) && url.getPort() != 0;
        try {
            metadataReport = createMetadataReport(url);
        } catch (Exception e) {
            if (!check) {
                // Best-effort mode: log and continue with a null report.
                logger.warn(PROXY_FAILED_EXPORT_SERVICE, "", "", "The metadata reporter failed to initialize", e);
            } else {
                throw e;
            }
        }

        if (check && metadataReport == null) {
            throw new IllegalStateException("Can not create metadata Report " + url);
        }
        if (metadataReport != null) {
            serviceStoreMap.put(key, metadataReport);
        }
        return metadataReport;
    } finally {
        // Release the lock
        lock.unlock();
    }
}
@Test
void testGetForSameNamespace() {
    // Two different service URLs in the same namespace must share one
    // cached MetadataReport instance.
    URL url1 = URL.valueOf("zookeeper://" + NetUtils.getLocalAddress().getHostName()
            + ":4444/org.apache.dubbo.TestService1?version=1.0.0&application=vic&namespace=test");
    URL url2 = URL.valueOf("zookeeper://" + NetUtils.getLocalAddress().getHostName()
            + ":4444/org.apache.dubbo.TestService2?version=1.0.0&application=vic&namespace=test");
    MetadataReport metadataReport1 = metadataReportFactory.getMetadataReport(url1);
    MetadataReport metadataReport2 = metadataReportFactory.getMetadataReport(url2);
    Assertions.assertEquals(metadataReport1, metadataReport2);
}
/**
 * Swaps repository tuples back into a rule configuration for the named rule
 * type, by locating the YAML swapper whose configuration class is annotated
 * with a matching {@code @RepositoryTupleEntity} value.
 *
 * @param ruleTypeName     the rule type name to match against the annotation
 * @param repositoryTuples tuples loaded from the repository
 * @return the swapped rule configuration, or empty if tuples are empty or
 *         no swapper matches
 */
@SuppressWarnings("rawtypes")
public Optional<RuleConfiguration> swapToRuleConfiguration(final String ruleTypeName, final Collection<RepositoryTuple> repositoryTuples) {
    if (repositoryTuples.isEmpty()) {
        return Optional.empty();
    }
    YamlRuleConfigurationSwapperEngine yamlSwapperEngine = new YamlRuleConfigurationSwapperEngine();
    for (YamlRuleConfigurationSwapper each : ShardingSphereServiceLoader.getServiceInstances(YamlRuleConfigurationSwapper.class)) {
        Class<? extends YamlRuleConfiguration> yamlRuleConfigClass = getYamlRuleConfigurationClass(each);
        // The annotation carries the rule type name this swapper handles.
        if (ruleTypeName.equals(Objects.requireNonNull(yamlRuleConfigClass.getAnnotation(RepositoryTupleEntity.class)).value())) {
            Optional<YamlRuleConfiguration> yamlRuleConfig = swapToYamlRuleConfiguration(repositoryTuples, yamlRuleConfigClass);
            return yamlRuleConfig.map(yamlSwapperEngine::swapToRuleConfiguration);
        }
    }
    return Optional.empty();
}
@Test
void assertSwapToEmptyRuleConfiguration() {
    // Empty tuple input must short-circuit to Optional.empty().
    assertFalse(new RepositoryTupleSwapperEngine().swapToRuleConfiguration("leaf", Collections.emptyList()).isPresent());
}
@Override public String retrieveIPfilePath(String id, String dstDir, Map<Path, List<String>> localizedResources) { // Assume .aocx IP file is distributed by DS to local dir String ipFilePath = null; LOG.info("Got environment: " + id + ", search IP file in localized resources"); if (null == id || id.isEmpty()) { LOG.warn("IP_ID environment is empty, skip downloading"); return null; } if (localizedResources != null) { Optional<Path> aocxPath = localizedResources .keySet() .stream() .filter(path -> matchesIpid(path, id)) .findFirst(); if (aocxPath.isPresent()) { ipFilePath = aocxPath.get().toString(); LOG.info("Found: {}", ipFilePath); } else { LOG.warn("Requested IP file not found"); } } else { LOG.warn("Localized resource is null!"); } return ipFilePath; }
@Test
public void testLocalizedIPfileFound() {
    // With a matching .aocx among localized resources, its path is returned.
    Map<Path, List<String>> resources = createResources();
    String path = plugin.retrieveIPfilePath("fpga", "workDir", resources);
    assertEquals("Retrieved IP file path", "/test/fpga.aocx", path);
}
/**
 * Returns the next generated value: a single boolean-as-integer when one
 * value per entry is configured, otherwise a multi-value collection of them.
 */
@Override
public Object next() {
  if (_numberOfValuesPerEntry == 1) {
    return getNextBooleanAsInteger();
  }
  // Delegate multi-value generation to the shared helper.
  return MultiValueGeneratorHelper
      .generateMultiValueEntries(_numberOfValuesPerEntry, _random, this::getNextBooleanAsInteger);
}
@Test
public void testNext() {
  // A stubbed Random drives the boolean sequence; each boolean maps to 0/1.
  Random random = mock(Random.class);
  when(random.nextBoolean()).thenReturn(false, true, false, false, true, true, false, true, false, true);

  // long generator
  BooleanGenerator generator = new BooleanGenerator(1.0, random);
  int[] expectedValues = {
      //
      0, 1, 0, 0, 1, //
      1, 0, 1, 0, 1
  };
  for (int expected : expectedValues) {
    assertEquals(generator.next(), expected);
  }
}
/**
 * Rewrites a Parquet file, encrypting the named columns with the given
 * encryption properties.
 *
 * @param inputFile                source Parquet file path
 * @param outputFile               destination Parquet file path
 * @param paths                    dotted column paths to encrypt
 * @param fileEncryptionProperties encryption configuration
 * @throws IOException if the rewrite fails
 */
public void encryptColumns(
    String inputFile,
    String outputFile,
    List<String> paths,
    FileEncryptionProperties fileEncryptionProperties)
    throws IOException {
  Path inPath = new Path(inputFile);
  Path outPath = new Path(outputFile);

  RewriteOptions options = new RewriteOptions.Builder(conf, inPath, outPath)
      .encrypt(paths)
      .encryptionProperties(fileEncryptionProperties)
      .build();
  // The rewriter streams all row groups through, applying encryption.
  ParquetRewriter rewriter = new ParquetRewriter(options);
  rewriter.processBlocks();
  rewriter.close();
}
@Test
public void testFlatColumn() throws IOException {
  // Encrypting a single flat column must still decrypt correctly with a
  // valid key.
  String[] encryptColumns = {"DocId"};
  testSetup("GZIP");
  columnEncryptor.encryptColumns(
      inputFile.getFileName(),
      outputFile,
      Arrays.asList(encryptColumns),
      EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, false));

  verifyResultDecryptionWithValidKey();
}
/**
 * Builds a configured FEEL 1.1 parser for the given source.
 * <p>
 * Wires up the lexer/token stream, installs the event-aware error handling,
 * registers additional functions in the built-in scope, pre-defines the
 * input variables, and optionally attaches a type registry.
 *
 * @return a parser ready to parse {@code source}
 */
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) {
    CharStream input = CharStreams.fromString(source);
    FEEL_1_1Lexer lexer = new FEEL_1_1Lexer( input );
    CommonTokenStream tokens = new CommonTokenStream( lexer );
    FEEL_1_1Parser parser = new FEEL_1_1Parser( tokens );

    ParserHelper parserHelper = new ParserHelper(eventsManager);
    // Extra functions become symbols in the built-in scope.
    additionalFunctions.forEach(f -> parserHelper.getSymbolTable().getBuiltInScope().define(f.getSymbol()));
    parser.setHelper(parserHelper);
    parser.setErrorHandler( new FEELErrorHandler() );
    parser.removeErrorListeners(); // removes the error listener that prints to the console
    parser.addErrorListener( new FEELParserErrorListener( eventsManager ) );

    // pre-loads the parser with symbols
    defineVariables( inputVariableTypes, inputVariables, parser );
    if (typeRegistry != null) {
        parserHelper.setTypeRegistry(typeRegistry);
    }
    return parser;
}
@Test
void booleanTrueLiteral() {
    // "true" must parse to a BooleanNode typed BOOLEAN with correct location.
    String inputExpression = "true";
    BaseNode bool = parse( inputExpression );

    assertThat( bool).isInstanceOf(BooleanNode.class);
    assertThat( bool.getResultType()).isEqualTo(BuiltInType.BOOLEAN);
    assertLocation( inputExpression, bool );
}
/**
 * Converts a {@link TypeInformation} input into a consuming schema result,
 * optionally reconciled against a declared schema.
 *
 * @param dataTypeFactory factory used for the type conversion
 * @param inputTypeInfo   the physical input type information
 * @param declaredSchema  user-declared schema to merge with, or {@code null}
 * @return the consuming result (final overload is called with mergePhysicalSchema=true)
 */
public static ConsumingResult createConsumingResult(
        DataTypeFactory dataTypeFactory,
        TypeInformation<?> inputTypeInfo,
        @Nullable Schema declaredSchema) {
    final DataType inputDataType =
            TypeInfoDataTypeConverter.toDataType(dataTypeFactory, inputTypeInfo);
    return createConsumingResult(dataTypeFactory, inputDataType, declaredSchema, true);
}
@Test
void testInvalidDeclaredSchemaColumn() {
    // Declaring a column that does not exist in the physical type must fail
    // with a descriptive ValidationException.
    final TypeInformation<?> inputTypeInfo = Types.ROW(Types.INT, Types.LONG);

    assertThatThrownBy(
                    () ->
                            SchemaTranslator.createConsumingResult(
                                    dataTypeFactory(),
                                    inputTypeInfo,
                                    Schema.newBuilder().column("INVALID", BIGINT()).build()))
            .satisfies(
                    anyCauseMatches(
                            ValidationException.class,
                            "Unable to find a field named 'INVALID' in the physical data type"));
}
public static <T> T[] checkNonEmpty(T[] array, String name) { //No String concatenation for check if (checkNotNull(array, name).length == 0) { throw new IllegalArgumentException("Param '" + name + "' must not be empty"); } return array; }
@Test
public void testCheckNonEmptyByteArrayString() {
    // Null array -> NPE.
    Exception actualEx = null;
    try {
        ObjectUtil.checkNonEmpty((byte[]) NULL_OBJECT, NULL_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
    assertTrue(actualEx instanceof NullPointerException, TEST_RESULT_EXTYPE_NOK);

    // Non-empty array -> no exception.
    actualEx = null;
    try {
        ObjectUtil.checkNonEmpty((byte[]) NON_NULL_FILLED_BYTE_ARRAY, NON_NULL_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNull(actualEx, TEST_RESULT_NULLEX_NOK);

    // Empty array -> IllegalArgumentException.
    actualEx = null;
    try {
        ObjectUtil.checkNonEmpty((byte[]) NON_NULL_EMPTY_BYTE_ARRAY, NON_NULL_EMPTY_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
    assertTrue(actualEx instanceof IllegalArgumentException, TEST_RESULT_EXTYPE_NOK);
}
/**
 * Verifies (and if necessary creates) the Kafka Streams state directory,
 * failing with an actionable {@link KsqlServerException} when it cannot be
 * created, is not a directory, or is not writable/executable.
 * <p>
 * Note: the exact message text is asserted by tests — do not reword casually.
 *
 * @param streamsStateDir the configured state directory
 */
@VisibleForTesting
static void enforceStreamStateDirAvailability(final File streamsStateDir) {
  // Create the directory (and parents) on first start.
  if (!streamsStateDir.exists()) {
    final boolean mkDirSuccess = streamsStateDir.mkdirs();
    if (!mkDirSuccess) {
      throw new KsqlServerException("Could not create the kafka streams state directory: "
          + streamsStateDir.getPath()
          + "\n Make sure the directory exists and is writable for KSQL server "
          + "\n or its parent directory is writable by KSQL server"
          + "\n or change it to a writable directory by setting '"
          + KsqlConfig.KSQL_STREAMS_PREFIX + StreamsConfig.STATE_DIR_CONFIG
          + "' config in the properties file."
      );
    }
  }
  // An existing path could still be a regular file.
  if (!streamsStateDir.isDirectory()) {
    throw new KsqlServerException(streamsStateDir.getPath()
        + " is not a directory."
        + "\n Make sure the directory exists and is writable for KSQL server "
        + "\n or its parent directory is writable by KSQL server"
        + "\n or change it to a writable directory by setting '"
        + KsqlConfig.KSQL_STREAMS_PREFIX + StreamsConfig.STATE_DIR_CONFIG
        + "' config in the properties file."
    );
  }
  // Streams needs both write (create state stores) and execute (traverse).
  if (!streamsStateDir.canWrite() || !streamsStateDir.canExecute()) {
    throw new KsqlServerException("The kafka streams state directory is not writable "
        + "for KSQL server: "
        + streamsStateDir.getPath()
        + "\n Make sure the directory exists and is writable for KSQL server "
        + "\n or change it to a writable directory by setting '"
        + KsqlConfig.KSQL_STREAMS_PREFIX + StreamsConfig.STATE_DIR_CONFIG
        + "' config in the properties file."
    );
  }
}
@Test
public void shouldFailIfStreamsStateDirectoryCannotBeCreated() {
  // Given: a non-existent state dir whose creation fails.
  when(mockStreamsStateDir.exists()).thenReturn(false);
  when(mockStreamsStateDir.mkdirs()).thenReturn(false);

  // When:
  final Exception e = assertThrows(
      KsqlServerException.class,
      () -> KsqlServerMain.enforceStreamStateDirAvailability(mockStreamsStateDir)
  );

  // Then: the message pins the exact remediation text.
  assertThat(e.getMessage(), containsString(
      "Could not create the kafka streams state directory: /var/lib/kafka-streams\n"
          + " Make sure the directory exists and is writable for KSQL server \n"
          + " or its parent directory is writable by KSQL server\n"
          + " or change it to a writable directory by setting 'ksql.streams.state.dir' config in"
          + " the properties file."));
}
/**
 * Creates a metadata store for the given URL/config (non-session variant).
 *
 * @throws MetadataStoreException if the store cannot be instantiated
 */
public static MetadataStore create(String metadataURL, MetadataStoreConfig metadataStoreConfig)
        throws MetadataStoreException {
    // The trailing flag selects the non-ZooKeeper-session flavor.
    return newInstance(metadataURL, metadataStoreConfig, false);
}
@Test
public void testCreate() throws Exception{
    // A "custom://" URL must resolve to the registered custom provider.
    @Cleanup
    MetadataStore instance = MetadataStoreFactoryImpl.create(
            "custom://localhost", MetadataStoreConfig.builder().build());
    assertTrue(instance instanceof MyMetadataStore);
}
/**
 * Runs a single iteration of the sender loop: services transactional state
 * (if any), then drains accumulated producer data and polls the network.
 * <p>
 * The transactional pre-phase may short-circuit the iteration (fatal error,
 * abortable auth error, or an in-flight transactional request).
 */
void runOnce() {
    if (transactionManager != null) {
        try {
            transactionManager.maybeResolveSequences();

            RuntimeException lastError = transactionManager.lastError();

            // do not continue sending if the transaction manager is in a failed state
            if (transactionManager.hasFatalError()) {
                if (lastError != null)
                    maybeAbortBatches(lastError);
                // Still poll so connection state/pending responses progress.
                client.poll(retryBackoffMs, time.milliseconds());
                return;
            }

            if (transactionManager.hasAbortableError() && shouldHandleAuthorizationError(lastError)) {
                return;
            }

            // Check whether we need a new producerId. If so, we will enqueue an InitProducerId
            // request which will be sent below
            transactionManager.bumpIdempotentEpochAndResetIdIfNeeded();

            // A transactional request in flight takes priority over produce data.
            if (maybeSendAndPollTransactionalRequest()) {
                return;
            }
        } catch (AuthenticationException e) {
            // This is already logged as error, but propagated here to perform any clean ups.
            log.trace("Authentication exception while processing transactional request", e);
            transactionManager.authenticationFailed(e);
        }
    }

    long currentTimeMs = time.milliseconds();
    long pollTimeout = sendProducerData(currentTimeMs);
    client.poll(pollTimeout, currentTimeMs);
}
@Test
public void testSequenceNumberIncrement() throws InterruptedException {
    // A successful idempotent produce must carry base sequence 0 and bump
    // the transaction manager's next sequence to 1.
    final long producerId = 343434L;
    TransactionManager transactionManager = createTransactionManager();
    setupWithTransactionState(transactionManager);
    prepareAndReceiveInitProducerId(producerId, Errors.NONE);
    assertTrue(transactionManager.hasProducerId());

    int maxRetries = 10;
    Metrics m = new Metrics();
    SenderMetricsRegistry senderMetrics = new SenderMetricsRegistry(m);

    Sender sender = new Sender(logContext, client, metadata, this.accumulator, true, MAX_REQUEST_SIZE, ACKS_ALL, maxRetries,
            senderMetrics, time, REQUEST_TIMEOUT, RETRY_BACKOFF_MS, transactionManager, apiVersions);

    Future<RecordMetadata> responseFuture = appendToAccumulator(tp0);
    // Intercept the produce request and assert the batch-level pid/epoch/sequence.
    client.prepareResponse(body -> {
        if (body instanceof ProduceRequest) {
            ProduceRequest request = (ProduceRequest) body;
            MemoryRecords records = partitionRecords(request).get(tp0);
            Iterator<MutableRecordBatch> batchIterator = records.batches().iterator();
            assertTrue(batchIterator.hasNext());
            RecordBatch batch = batchIterator.next();
            assertFalse(batchIterator.hasNext());
            assertEquals(0, batch.baseSequence());
            assertEquals(producerId, batch.producerId());
            assertEquals(0, batch.producerEpoch());
            return true;
        }
        return false;
    }, produceResponse(tp0, 0, Errors.NONE, 0));

    sender.runOnce(); // connect.
    sender.runOnce(); // send.

    sender.runOnce(); // receive response
    assertTrue(responseFuture.isDone());
    assertEquals(OptionalInt.of(0), transactionManager.lastAckedSequence(tp0));
    assertEquals(1L, transactionManager.sequenceNumber(tp0));
}
@Override public ImportResult importItem( UUID jobId, IdempotentImportExecutor idempotentImportExecutor, TokensAndUrlAuthData authData, PhotosContainerResource data) throws Exception { if (data == null) { return ImportResult.OK; } AppleMediaInterface mediaInterface = factory .getOrCreateMediaInterface(jobId, authData, appCredentials, exportingService, monitor); // Uploads album metadata final int albumCount = mediaInterface.importAlbums( jobId, idempotentImportExecutor, data.getAlbums().stream() .map(MediaAlbum::photoToMediaAlbum) .collect(Collectors.toList()), DataVertical.PHOTOS.getDataType()); final Map<String, Long> importPhotosResult = mediaInterface.importAllMedia( jobId, idempotentImportExecutor, data.getPhotos(), DataVertical.PHOTOS.getDataType()); // generate import result final ImportResult result = ImportResult.OK; final Map<String, Integer> counts = new ImmutableMap.Builder<String, Integer>() .put(PhotosContainerResource.ALBUMS_COUNT_DATA_NAME, albumCount) .put( PhotosContainerResource.PHOTOS_COUNT_DATA_NAME, importPhotosResult.get(ApplePhotosConstants.COUNT_KEY).intValue()) .build(); return result .copyWithBytes(importPhotosResult.get(ApplePhotosConstants.BYTES_KEY)) .copyWithCounts(counts); }
@Test public void importPhotosWithFailure() throws Exception { // set up final int photoCount = ApplePhotosConstants.maxNewMediaRequests + 1; final List<PhotoModel> photos = createTestPhotos(photoCount); // different errors in different steps final int errorCountGetUploadURL = 10; final int errorCountUploadContent = 10; final int errorCountCreateMedia = 10; final int successCount = photoCount - errorCountGetUploadURL - errorCountUploadContent - errorCountCreateMedia; final List<String> dataIds = photos.stream().map(PhotoModel::getDataId).collect(Collectors.toList()); final Map<String, Integer> datatIdToGetUploadURLStatus = setUpErrors(dataIds, 0, errorCountGetUploadURL); final Map<String, Integer> datatIdToUploadContentStatus = setUpErrors(dataIds, errorCountGetUploadURL, errorCountUploadContent); final Map<String, Integer> datatIdToCreateMediaStatus = setUpErrors( dataIds, errorCountGetUploadURL + errorCountUploadContent, errorCountCreateMedia); setUpGetUploadUrlResponse(datatIdToGetUploadURLStatus); setUpUploadContentResponse(datatIdToUploadContentStatus); setUpCreateMediaResponse(datatIdToCreateMediaStatus); // run test PhotosContainerResource data = new PhotosContainerResource(null, photos); ImportResult importResult = applePhotosImporter.importItem(uuid, executor, authData, data); // verify correct methods were called verify(mediaInterface, times(2)).getUploadUrl(anyString(), anyString(), anyList()); verify(mediaInterface) .getUploadUrl( uuid.toString(), DataVertical.PHOTOS.getDataType(), photos.subList(0, ApplePhotosConstants.maxNewMediaRequests).stream() .map(PhotoModel::getDataId) .collect(Collectors.toList())); verify(mediaInterface) .getUploadUrl( uuid.toString(), DataVertical.PHOTOS.getDataType(), photos.subList(ApplePhotosConstants.maxNewMediaRequests, photoCount).stream() .map(PhotoModel::getDataId) .collect(Collectors.toList())); verify(mediaInterface, times(2)).uploadContent(anyMap(), anyList()); verify(mediaInterface, 
times(2)).createMedia(anyString(), anyString(), anyList()); // check the result assertThat(importResult.getCounts().isPresent()).isTrue(); assertThat(importResult.getCounts().get().get(ALBUMS_COUNT_DATA_NAME) == 0).isTrue(); assertThat(importResult.getCounts().get().get(PHOTOS_COUNT_DATA_NAME) == successCount).isTrue(); assertThat(importResult.getBytes().get() == successCount * PHOTOS_FILE_SIZE).isTrue(); final Map<String, Serializable> expectedKnownValue = photos.stream() .filter( photoModel -> datatIdToGetUploadURLStatus.get(photoModel.getDataId()) == SC_OK) .filter( photoModel -> datatIdToUploadContentStatus.get(photoModel.getDataId()) == SC_OK) .filter( photoModel -> datatIdToCreateMediaStatus.get(photoModel.getDataId()) == SC_OK) .collect( Collectors.toMap( photoModel -> photoModel.getAlbumId() + "-" + photoModel.getDataId(), photoModel -> MEDIA_RECORDID_BASE + photoModel.getDataId())); checkKnownValues(expectedKnownValue); // check errors List<ErrorDetail> expectedErrors = new ArrayList<>(); for (int i = 0; i < errorCountGetUploadURL + errorCountUploadContent + errorCountCreateMedia; i++) { final PhotoModel photoModel = photos.get(i); final ErrorDetail.Builder errorDetailBuilder = ErrorDetail.builder() .setId(photoModel.getIdempotentId()) .setTitle(photoModel.getTitle()) .setException( String.format( "java.io.IOException: %s Fail to get upload url, errorCode:%d", ApplePhotosConstants.APPLE_PHOTOS_IMPORT_ERROR_PREFIX, SC_INTERNAL_SERVER_ERROR)); if (i < errorCountGetUploadURL) { errorDetailBuilder.setException( String.format( "java.io.IOException: %s Fail to get upload url, errorCode:%d", ApplePhotosConstants.APPLE_PHOTOS_IMPORT_ERROR_PREFIX, SC_INTERNAL_SERVER_ERROR)); } else if (i < errorCountGetUploadURL + errorCountGetUploadURL) { errorDetailBuilder.setException(String.format("java.io.IOException: %s Fail to upload content", ApplePhotosConstants.APPLE_PHOTOS_IMPORT_ERROR_PREFIX)); } else { errorDetailBuilder.setException( String.format( 
"java.io.IOException: %s Fail to create media, errorCode:%d", ApplePhotosConstants.APPLE_PHOTOS_IMPORT_ERROR_PREFIX, SC_INTERNAL_SERVER_ERROR)); } expectedErrors.add(errorDetailBuilder.build()); } checkErrors(expectedErrors); checkRecentErrors(expectedErrors); }
/**
 * Static factory for a {@link TimestampRange} spanning {@code from} to {@code to}.
 *
 * <p>Argument validation (such as rejecting a start that is after the end) is
 * delegated to the {@code TimestampRange} constructor.
 *
 * @param from the start timestamp of the range
 * @param to the end timestamp of the range
 * @return a new {@code TimestampRange} over the given timestamps
 */
public static TimestampRange of(Timestamp from, Timestamp to) {
  return new TimestampRange(from, to);
}
// Building a range whose start (11µs) is after its end (10µs) must be rejected
// with an IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testTimestampRangeWhenFromIsGreaterThanTo() {
  TimestampRange.of(Timestamp.ofTimeMicroseconds(11L), Timestamp.ofTimeMicroseconds(10L));
}
public static Mode parse(String value) { if (StringUtils.isBlank(value)) { throw new IllegalArgumentException(ExceptionMessage.INVALID_MODE.getMessage(value)); } try { return parseNumeric(value); } catch (NumberFormatException e) { // Treat as symbolic return parseSymbolic(value); } }
// A symbolic segment with unsupported permission characters ("Xst") must be
// rejected, and the error message must identify both the full segment and the
// offending permission characters.
@Test
public void symbolicsBadPermissions() {
  mThrown.expect(IllegalArgumentException.class);
  mThrown.expectMessage(ExceptionMessage.INVALID_MODE_SEGMENT
      .getMessage("u=Xst", "u=Xst", "Xst"));
  ModeParser.parse("u=Xst");
}
/**
 * Checks that the subject contains exactly the given varargs elements (in any
 * order unless the returned {@code Ordered} is consulted), delegating to
 * {@code containsExactlyElementsIn}.
 */
@CanIgnoreReturnValue
public final Ordered containsExactly(@Nullable Object @Nullable ... varargs) {
  // containsExactly((Object[]) null) is interpreted as expecting one null element.
  if (varargs == null) {
    List<@Nullable Object> expected = newArrayList((@Nullable Object) null);
    return containsExactlyElementsIn(expected, false);
  }
  // NOTE(review): the flag appears to mark the "sole argument is itself an
  // Iterable" case (a likely caller mistake) — confirm how
  // containsExactlyElementsIn consumes it.
  boolean soleElementIsIterable = varargs.length == 1 && varargs[0] instanceof Iterable;
  return containsExactlyElementsIn(asList(varargs), soleElementIsIterable);
}
// Integers 1/2 and Longs 1L/2L render to the same strings ("1", "2"), so the
// failure message must disambiguate by appending element types and copy counts.
@Test
public void iterableContainsExactlyFailsWithSameToStringAndHomogeneousListWithDuplicates() {
  expectFailureWhenTestingThat(asList(1L, 2L)).containsExactly(1, 2, 2);
  assertFailureValue("missing (3)", "1, 2 [2 copies] (java.lang.Integer)");
  assertFailureValue("unexpected (2)", "1, 2 (java.lang.Long)");
}
/**
 * Validates an OAuth2 client against the cached record and returns it.
 *
 * <p>Each optional argument ({@code clientSecret}, {@code authorizedGrantType},
 * {@code scopes}, {@code redirectUri}) is only checked when the caller supplies
 * a non-empty value; passing empty/null skips that check.
 */
@Override
public OAuth2ClientDO validOAuthClientFromCache(String clientId, String clientSecret,
                                                String authorizedGrantType, Collection<String> scopes, String redirectUri) {
    // The client must exist and be enabled.
    OAuth2ClientDO client = getSelf().getOAuth2ClientFromCache(clientId);
    if (client == null) {
        throw exception(OAUTH2_CLIENT_NOT_EXISTS);
    }
    if (CommonStatusEnum.isDisable(client.getStatus())) {
        throw exception(OAUTH2_CLIENT_DISABLE);
    }
    // Validate the client secret.
    if (StrUtil.isNotEmpty(clientSecret) && ObjectUtil.notEqual(client.getSecret(), clientSecret)) {
        throw exception(OAUTH2_CLIENT_CLIENT_SECRET_ERROR);
    }
    // Validate the grant type is one the client is authorized for.
    if (StrUtil.isNotEmpty(authorizedGrantType) && !CollUtil.contains(client.getAuthorizedGrantTypes(), authorizedGrantType)) {
        throw exception(OAUTH2_CLIENT_AUTHORIZED_GRANT_TYPE_NOT_EXISTS);
    }
    // Validate every requested scope is granted to the client.
    if (CollUtil.isNotEmpty(scopes) && !CollUtil.containsAll(client.getScopes(), scopes)) {
        throw exception(OAUTH2_CLIENT_SCOPE_OVER);
    }
    // Validate the redirect URI against the registered URI prefixes.
    if (StrUtil.isNotEmpty(redirectUri) && !StrUtils.startWithAny(redirectUri, client.getRedirectUris())) {
        throw exception(OAUTH2_CLIENT_REDIRECT_URI_NOT_MATCH, redirectUri);
    }
    return client;
}
// Exercises every branch of validOAuthClientFromCache: missing client, disabled
// client, wrong secret, unauthorized grant type, excess scope, mismatched
// redirect URI, and finally both success paths.
@Test
public void testValidOAuthClientFromCache() {
    try (MockedStatic<SpringUtil> springUtilMockedStatic = mockStatic(SpringUtil.class)) {
        // getSelf() resolves the service bean through Spring; route it back to
        // this test's service instance.
        springUtilMockedStatic.when(() -> SpringUtil.getBean(eq(OAuth2ClientServiceImpl.class)))
                .thenReturn(oauth2ClientService);
        // mock data: one enabled client and one disabled client
        OAuth2ClientDO client = randomPojo(OAuth2ClientDO.class).setClientId("default")
                .setStatus(CommonStatusEnum.ENABLE.getStatus());
        oauth2ClientMapper.insert(client);
        OAuth2ClientDO client02 = randomPojo(OAuth2ClientDO.class).setClientId("disable")
                .setStatus(CommonStatusEnum.DISABLE.getStatus());
        oauth2ClientMapper.insert(client02);
        // call and assert each failure path
        assertServiceException(() -> oauth2ClientService.validOAuthClientFromCache(randomString(),
                null, null, null, null), OAUTH2_CLIENT_NOT_EXISTS);
        assertServiceException(() -> oauth2ClientService.validOAuthClientFromCache("disable",
                null, null, null, null), OAUTH2_CLIENT_DISABLE);
        assertServiceException(() -> oauth2ClientService.validOAuthClientFromCache("default",
                randomString(), null, null, null), OAUTH2_CLIENT_CLIENT_SECRET_ERROR);
        assertServiceException(() -> oauth2ClientService.validOAuthClientFromCache("default",
                null, randomString(), null, null), OAUTH2_CLIENT_AUTHORIZED_GRANT_TYPE_NOT_EXISTS);
        assertServiceException(() -> oauth2ClientService.validOAuthClientFromCache("default",
                null, null, Collections.singleton(randomString()), null), OAUTH2_CLIENT_SCOPE_OVER);
        assertServiceException(() -> oauth2ClientService.validOAuthClientFromCache("default",
                null, null, null, "test"), OAUTH2_CLIENT_REDIRECT_URI_NOT_MATCH, "test");
        // successful call (1: all parameters provided)
        OAuth2ClientDO result = oauth2ClientService.validOAuthClientFromCache(client.getClientId(),
                client.getSecret(), client.getAuthorizedGrantTypes().get(0), client.getScopes(),
                client.getRedirectUris().get(0));
        assertPojoEquals(client, result);
        // successful call (2: clientId parameter only)
        result = oauth2ClientService.validOAuthClientFromCache(client.getClientId());
        assertPojoEquals(client, result);
    }
}
/**
 * Sets whether the decoder should signal an error when the connection is closed
 * without a response having been received. NOTE(review): presumably maps to
 * Netty's {@code HttpClientCodec} {@code failOnMissingResponse} option —
 * confirm where this field is consumed.
 *
 * @param failOnMissingResponse the new flag value
 * @return this spec, for call chaining
 */
public HttpResponseDecoderSpec failOnMissingResponse(boolean failOnMissingResponse) {
    this.failOnMissingResponse = failOnMissingResponse;
    return this;
}
// Flipping failOnMissingResponse must set only that flag and leave every
// sibling decoder option at its default.
@Test
void failOnMissingResponse() {
    // Option starts at its default value.
    checkDefaultFailOnMissingResponse(conf);

    conf.failOnMissingResponse(true);

    assertThat(conf.failOnMissingResponse).as("fail on missing response").isTrue();

    // All other settings must keep their defaults.
    checkDefaultMaxInitialLineLength(conf);
    checkDefaultMaxHeaderSize(conf);
    checkDefaultMaxChunkSize(conf);
    checkDefaultValidateHeaders(conf);
    checkDefaultInitialBufferSize(conf);
    checkDefaultAllowDuplicateContentLengths(conf);
    checkDefaultParseHttpAfterConnectRequest(conf);
    checkDefaultH2cMaxContentLength(conf);
}
/**
 * Returns the version of this component, delegating to
 * {@link AppInfoParser#getVersion()} rather than hard-coding a version string.
 */
@Override
public String version() {
    return AppInfoParser.getVersion();
}
// The converter's reported version must be exactly what AppInfoParser derives,
// i.e. it never drifts from the build metadata.
@Test
public void testVersionRetrievedFromAppInfoParser() {
    assertEquals(AppInfoParser.getVersion(), converter.version());
}
/** Creates a fresh {@link Searcher} for querying this index. */
public Searcher searcher() {
    return new Searcher();
}
// Conjunctions consisting solely of negated features have no positive feature
// requirement (k = 0). They must match an empty query, and must stop matching
// once the query supplies a feature that violates one of their negations.
@Test
void require_that_not_works_when_k_is_0() {
    ConjunctionIndexBuilder builder = new ConjunctionIndexBuilder();
    // c1..c3: purely negative conjunctions of increasing arity.
    IndexableFeatureConjunction c1 = indexableConj(
            conj(
                    not(feature("a").inSet("1")),
                    not(feature("b").inSet("1"))));
    IndexableFeatureConjunction c2 = indexableConj(
            conj(
                    not(feature("a").inSet("1")),
                    not(feature("b").inSet("1")),
                    not(feature("c").inSet("1"))));
    IndexableFeatureConjunction c3 = indexableConj(
            conj(
                    not(feature("a").inSet("1")),
                    not(feature("b").inSet("1")),
                    not(feature("c").inSet("1")),
                    not(feature("d").inSet("1"))));
    // c4: a purely positive conjunction, as a control.
    IndexableFeatureConjunction c4 = indexableConj(
            conj(
                    feature("a").inSet("1"),
                    feature("b").inSet("1")));
    builder.indexConjunction(c1);
    builder.indexConjunction(c2);
    builder.indexConjunction(c3);
    builder.indexConjunction(c4);
    ConjunctionIndex index = builder.build();
    ConjunctionIndex.Searcher searcher = index.searcher();
    PredicateQuery query = new PredicateQuery();
    // Empty query: no negation is violated, so all purely-negative
    // conjunctions match; c4 requires positive features and does not.
    assertHitsEquals(searcher.search(query), c1, c2, c3);
    query.addFeature("a", "1");
    query.addFeature("b", "1");
    // a=1 and b=1 violate the negations in c1..c3 but satisfy c4.
    assertHitsEquals(searcher.search(query), c4);
    query.addFeature("c", "1");
    assertHitsEquals(searcher.search(query), c4);
    query.addFeature("d", "1");
    assertHitsEquals(searcher.search(query), c4);
}