focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Bean("EsClient") public EsClient provide(Configuration config) { Settings.Builder esSettings = Settings.builder(); // mandatory property defined by bootstrap process esSettings.put("cluster.name", config.get(CLUSTER_NAME.getKey()).get()); boolean clusterEnabled = config.getBoolean(CLUSTER_ENABLED.getKey()).orElse(false); boolean searchNode = !clusterEnabled || SEARCH.equals(NodeType.parse(config.get(CLUSTER_NODE_TYPE.getKey()).orElse(null))); List<HttpHost> httpHosts; if (clusterEnabled && !searchNode) { httpHosts = getHttpHosts(config); LOGGER.info("Connected to remote Elasticsearch: [{}]", displayedAddresses(httpHosts)); } else { // defaults provided in: // * in org.sonar.process.ProcessProperties.Property.SEARCH_HOST // * in org.sonar.process.ProcessProperties.Property.SEARCH_PORT HostAndPort host = HostAndPort.fromParts(config.get(SEARCH_HOST.getKey()).get(), config.getInt(SEARCH_PORT.getKey()).get()); httpHosts = Collections.singletonList(toHttpHost(host, config)); LOGGER.info("Connected to local Elasticsearch: [{}]", displayedAddresses(httpHosts)); } return new EsClient(config.get(CLUSTER_SEARCH_PASSWORD.getKey()).orElse(null), config.get(CLUSTER_ES_HTTP_KEYSTORE.getKey()).orElse(null), config.get(CLUSTER_ES_HTTP_KEYSTORE_PASSWORD.getKey()).orElse(null), httpHosts.toArray(new HttpHost[0])); }
/** Hosts given without a port should get the default search port (9001) appended. */
@Test
public void es_client_provider_must_add_default_port_when_not_specified() {
  settings.setProperty(CLUSTER_ENABLED.getKey(), true);
  settings.setProperty(CLUSTER_NODE_TYPE.getKey(), "application");
  // First host has no explicit port; second pins 8081.
  settings.setProperty(CLUSTER_SEARCH_HOSTS.getKey(), format("%s,%s:8081", localhostHostname, localhostHostname));
  EsClient client = underTest.provide(settings.asConfig());
  RestHighLevelClient nativeClient = client.nativeClient();
  assertThat(nativeClient.getLowLevelClient().getNodes()).hasSize(2);
  // Port-less host must default to 9001.
  Node node = nativeClient.getLowLevelClient().getNodes().get(0);
  assertThat(node.getHost().getAddress().getHostName()).isEqualTo(localhostHostname);
  assertThat(node.getHost().getPort()).isEqualTo(9001);
  // Explicit port must be preserved.
  node = nativeClient.getLowLevelClient().getNodes().get(1);
  assertThat(node.getHost().getAddress().getHostName()).isEqualTo(localhostHostname);
  assertThat(node.getHost().getPort()).isEqualTo(8081);
  assertThat(logTester.logs(Level.INFO))
      .has(new Condition<>(s -> s.contains("Connected to remote Elasticsearch: [http://" + localhostHostname + ":9001, http://" + localhostHostname + ":8081]"), ""));
}
/**
 * Resets the model: empties every pending-ACL, available and selection
 * collection and disables all four assign/unassign action flags.
 * The selected user/role lists are intentionally left untouched.
 */
public void clear() {
  aclsToAdd.clear();
  aclsToRemove.clear();
  masterAvailableRoleList.clear();
  masterAvailableUserList.clear();
  availableRoleList.clear();
  availableUserList.clear();
  selectedAvailableRoles.clear();
  selectedAvailableUsers.clear();
  selectedAssignedRoles.clear();
  selectedAssignedUsers.clear();
  // Keep the UI action buttons in sync with the now-empty selections.
  setRoleAssignmentPossible( false );
  setRoleUnassignmentPossible( false );
  setUserAssignmentPossible( false );
  setUserUnassignmentPossible( false );
}
/** clear() must empty available/selection collections but leave selected lists alone. */
@Test
public void testClear() {
  // Seed one user ACE and one role ACE, then assign an extra role and user.
  repositoryObjectAcls.addAcl( new UIRepositoryObjectAcl( createUserAce( USER1 ) ) );
  repositoryObjectAcls.addAcl( new UIRepositoryObjectAcl( createRoleAce( ROLE1 ) ) );
  repositoryObjectAclModel.setAclsList( defaultUserNameList, defaultRoleNameList );
  repositoryObjectAclModel.assignRoles( Arrays.asList( new Object[] { ROLE2 } ) );
  repositoryObjectAclModel.assignUsers( Arrays.asList( new Object[] { USER2 } ) );
  repositoryObjectAclModel.clear();
  // All available/selection collections are emptied by clear().
  assertEquals( 0, repositoryObjectAclModel.getAvailableUserList().size() );
  assertEquals( 0, repositoryObjectAclModel.getAvailableRoleList().size() );
  assertEquals( 0, repositoryObjectAclModel.getSelectedAssignedUsers().size() );
  assertEquals( 0, repositoryObjectAclModel.getSelectedAssignedRoles().size() );
  assertEquals( 0, repositoryObjectAclModel.getSelectedAvailableUsers().size() );
  assertEquals( 0, repositoryObjectAclModel.getSelectedAvailableRoles().size() );
  // Selected List is unchanged.
  assertEquals( 1, repositoryObjectAclModel.getSelectedUserList().size() );
  assertEquals( 1, repositoryObjectAclModel.getSelectedRoleList().size() );
}
/** Returns {@code true} when an owner is set, i.e. the guard currently holds the lock. */
public boolean isLocked() {
  return lockOwner != null;
}
@Test
public void testIsLocked() throws Exception {
  // The shared NOT_LOCKED sentinel must report unlocked.
  LockGuard stateLock = LockGuard.NOT_LOCKED;
  assertFalse(stateLock.isLocked());
  // A guard constructed with an owner endpoint must report locked.
  Address endpoint = newAddress();
  stateLock = new LockGuard(endpoint, TXN, 1000);
  assertTrue(stateLock.isLocked());
}
/**
 * Entry point for reading from MongoDB: returns a {@code Read} transform
 * pre-populated with defaults (60s connection idle timeout, no splitting,
 * SSL disabled, plain find query). Callers refine it via the with* methods.
 */
public static Read read() {
  return new AutoValue_MongoDbIO_Read.Builder()
      .setMaxConnectionIdleTime(60000)
      .setNumSplits(0)
      .setBucketAuto(false)
      .setSslEnabled(false)
      .setIgnoreSSLCertificate(false)
      .setSslInvalidHostNameAllowed(false)
      .setQueryFn(FindQuery.create())
      .build();
}
@Test
public void testBuildAutoBuckets() {
  // Aggregation pipeline: match documents where country == "England".
  List<BsonDocument> aggregates = new ArrayList<BsonDocument>();
  aggregates.add(
      new BsonDocument(
          "$match",
          new BsonDocument("country", new BsonDocument("$eq", new BsonString("England")))));
  MongoDbIO.Read spec =
      MongoDbIO.read()
          .withUri("mongodb://localhost:" + port)
          .withDatabase(DATABASE_NAME)
          .withCollection(COLLECTION_NAME)
          .withQueryFn(AggregationQuery.create().withMongoDbPipeline(aggregates));
  // buildAutoBuckets is expected to yield 10 buckets for the fixture data set.
  List<Document> buckets = MongoDbIO.BoundedMongoDbSource.buildAutoBuckets(database, spec);
  assertEquals(10, buckets.size());
}
/**
 * Executes the buffered put/delete requests as a single WriteBatch on the
 * supplied executor.
 *
 * <p>A null value is translated into a delete; otherwise the serialized
 * key/value pair is put. State futures complete only after the whole batch has
 * been written; on any failure every request in the batch is failed and the
 * returned future completes exceptionally.
 */
@Override
public CompletableFuture<Void> process() {
  return CompletableFuture.runAsync(
      () -> {
        // Pre-size the batch buffer using a rough per-record byte estimate.
        try (WriteBatch writeBatch =
            new WriteBatch(batchRequest.size() * PER_RECORD_ESTIMATE_BYTES)) {
          for (ForStDBPutRequest<?, ?, ?> request : batchRequest) {
            if (request.valueIsNull()) {
              // put(key, null) == delete(key)
              writeBatch.delete(
                  request.getColumnFamilyHandle(), request.buildSerializedKey());
            } else {
              writeBatch.put(
                  request.getColumnFamilyHandle(),
                  request.buildSerializedKey(),
                  request.buildSerializedValue());
            }
          }
          db.write(writeOptions, writeBatch);
          // Only after a successful write do the individual state futures complete.
          for (ForStDBPutRequest<?, ?, ?> request : batchRequest) {
            request.completeStateFuture();
          }
        } catch (Exception e) {
          String msg = "Error while write batch data to ForStDB.";
          for (ForStDBPutRequest<?, ?, ?> request : batchRequest) {
            // fail every state request in this batch
            request.completeStateFutureExceptionally(msg, e);
          }
          // fail the whole batch operation
          throw new CompletionException(msg, e);
        }
      },
      executor);
}
/** End-to-end check that a batched write treats null values as deletes. */
@Test
public void testWriteBatchWithNullValue() throws Exception {
  ForStValueState<Integer, VoidNamespace, String> valueState =
      buildForStValueState("test-write-batch");
  List<ForStDBPutRequest<?, ?, ?>> batchPutRequest = new ArrayList<>();
  // 1. write some data without null value
  int keyNum = 100;
  for (int i = 0; i < keyNum; i++) {
    batchPutRequest.add(
        ForStDBPutRequest.of(
            buildContextKey(i), String.valueOf(i), valueState, new TestStateFuture<>()));
  }
  ExecutorService executor = Executors.newFixedThreadPool(2);
  ForStWriteBatchOperation writeBatchOperation =
      new ForStWriteBatchOperation(db, batchPutRequest, new WriteOptions(), executor);
  writeBatchOperation.process().get();
  // 2. update data with null value
  batchPutRequest.clear();
  for (int i = 0; i < keyNum; i++) {
    if (i % 8 == 0) {
      // every 8th key is overwritten with null, i.e. deleted
      batchPutRequest.add(
          ForStDBPutRequest.of(
              buildContextKey(i), null, valueState, new TestStateFuture<>()));
    } else {
      batchPutRequest.add(
          ForStDBPutRequest.of(
              buildContextKey(i),
              String.valueOf(i * 2),
              valueState,
              new TestStateFuture<>()));
    }
  }
  ForStWriteBatchOperation writeBatchOperation2 =
      new ForStWriteBatchOperation(db, batchPutRequest, new WriteOptions(), executor);
  writeBatchOperation2.process().get();
  // 3. check data correctness
  for (ForStDBPutRequest<?, ?, ?> request : batchPutRequest) {
    byte[] keyBytes = request.buildSerializedKey();
    byte[] valueBytes = db.get(request.getColumnFamilyHandle(), keyBytes);
    if (valueBytes == null) {
      // Missing from the DB => this request must have carried a null (delete).
      assertTrue(request.valueIsNull());
    } else {
      assertArrayEquals(valueBytes, request.buildSerializedValue());
    }
  }
}
/**
 * Configures whether this socket should use direct (off-heap) byte buffers.
 * May only be called before the socket is built.
 *
 * @return this builder, for chaining
 */
public NioAsyncSocketBuilder setDirectBuffers(boolean directBuffers) {
  verifyNotBuilt();
  this.directBuffers = directBuffers;
  return this;
}
@Test
public void test_setReceiveBufferIsDirect() {
  Reactor reactor = newReactor();
  NioAsyncSocketBuilder builder = (NioAsyncSocketBuilder) reactor.newAsyncSocketBuilder();
  // The flag must track the last value passed in, in both directions.
  builder.setDirectBuffers(false);
  assertFalse(builder.directBuffers);
  builder.setDirectBuffers(true);
  assertTrue(builder.directBuffers);
}
/**
 * Returns a fixed-size, lazily evaluated arithmetic sequence
 * {@code start, start + step, ...} with {@code (end - start) / step}
 * elements (truncating integer division).
 *
 * @param start first value of the sequence
 * @param end   bound used (with {@code step}) to compute the size
 * @param step  distance between consecutive elements; must not be zero
 * @return an unmodifiable random-access view; values are computed on demand
 * @throws IllegalArgumentException if {@code step} is zero or the computed
 *         size is negative
 */
public static List<Integer> sequence(final int start, int end, final int step) {
    if (step == 0) {
        // Previously this fell through to a bare ArithmeticException
        // (division by zero); fail fast with a meaningful message instead.
        throw new IllegalArgumentException("step must not be zero");
    }
    final int size = (end - start) / step;
    if (size < 0) {
        throw new IllegalArgumentException("List size is negative");
    }
    return new AbstractList<>() {
        @Override
        public Integer get(int index) {
            if (index < 0 || index >= size) {
                throw new IndexOutOfBoundsException("index: " + index + ", size: " + size);
            }
            return start + index * step;
        }

        @Override
        public int size() {
            return size;
        }
    };
}
@Test
public void sequence() {
  // Two-argument overload: expect the list [1, 2, 3] of size 3
  // (presumably delegates with a step of 1 — see the three-arg variant).
  List<Integer> lst = Iterators.sequence(1, 4);
  assertEquals(1, (int) lst.get(0));
  assertEquals(2, (int) lst.get(1));
  assertEquals(3, (int) lst.get(2));
  assertEquals(3, lst.size());
}
/**
 * Polls the command topic once and executes any new commands.
 *
 * <p>In order: records the poll time; on an empty poll flags
 * {@code commandTopicDeleted} when the topic no longer exists; filters
 * incompatible commands; short-circuits on a cluster terminate command;
 * otherwise executes each compatible command unless the runner was closed.
 */
void fetchAndRunCommands() {
  lastPollTime.set(clock.instant());
  final List<QueuedCommand> commands = commandStore.getNewCommands(NEW_CMDS_TIMEOUT);
  if (commands.isEmpty()) {
    // Empty poll + missing topic means the command topic was deleted.
    if (!commandTopicExists.get()) {
      commandTopicDeleted = true;
    }
    return;
  }

  final List<QueuedCommand> compatibleCommands = checkForIncompatibleCommands(commands);

  // A terminate command pre-empts all other queued work.
  final Optional<QueuedCommand> terminateCmd =
      findTerminateCommand(compatibleCommands, commandDeserializer);
  if (terminateCmd.isPresent()) {
    terminateCluster(terminateCmd.get().getAndDeserializeCommand(commandDeserializer));
    return;
  }

  LOG.debug("Found {} new writes to command topic", compatibleCommands.size());
  for (final QueuedCommand command : compatibleCommands) {
    // Stop promptly if the runner has been closed mid-batch.
    if (closed) {
      return;
    }
    executeStatement(command);
  }
}
@Test
public void shouldNotBlockIndefinitelyPollingForNewCommands() {
  // When:
  commandRunner.fetchAndRunCommands();

  // Then: the poll must use a bounded timeout, not Long.MAX_VALUE.
  verify(commandStore).getNewCommands(argThat(not(Duration.ofMillis(Long.MAX_VALUE))));
}
/**
 * Converts a Flink {@code CallExpression} into the equivalent Iceberg filter
 * {@code Expression}, when the call maps to a supported operation.
 *
 * <p>Returns {@code Optional.empty()} for non-call expressions, unsupported
 * functions, or operand shapes the per-operation helpers cannot handle.
 */
public static Optional<Expression> convert(
    org.apache.flink.table.expressions.Expression flinkExpression) {
  if (!(flinkExpression instanceof CallExpression)) {
    return Optional.empty();
  }

  CallExpression call = (CallExpression) flinkExpression;
  Operation op = FILTERS.get(call.getFunctionDefinition());
  if (op != null) {
    switch (op) {
      case IS_NULL:
        return onlyChildAs(call, FieldReferenceExpression.class)
            .map(FieldReferenceExpression::getName)
            .map(Expressions::isNull);
      case NOT_NULL:
        return onlyChildAs(call, FieldReferenceExpression.class)
            .map(FieldReferenceExpression::getName)
            .map(Expressions::notNull);
      case LT:
        // Second function presumably handles the literal-first operand order —
        // confirm in convertFieldAndLiteral.
        return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call);
      case LT_EQ:
        return convertFieldAndLiteral(
            Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call);
      case GT:
        return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call);
      case GT_EQ:
        return convertFieldAndLiteral(
            Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call);
      case EQ:
        // NaN never compares equal, so equality against NaN becomes isNaN.
        return convertFieldAndLiteral(
            (ref, lit) -> {
              if (NaNUtil.isNaN(lit)) {
                return Expressions.isNaN(ref);
              } else {
                return Expressions.equal(ref, lit);
              }
            },
            call);
      case NOT_EQ:
        return convertFieldAndLiteral(
            (ref, lit) -> {
              if (NaNUtil.isNaN(lit)) {
                return Expressions.notNaN(ref);
              } else {
                return Expressions.notEqual(ref, lit);
              }
            },
            call);
      case NOT:
        // Recursively convert the single child, then negate it.
        return onlyChildAs(call, CallExpression.class)
            .flatMap(FlinkFilters::convert)
            .map(Expressions::not);
      case AND:
        return convertLogicExpression(Expressions::and, call);
      case OR:
        return convertLogicExpression(Expressions::or, call);
      case STARTS_WITH:
        return convertLike(call);
    }
  }

  return Optional.empty();
}
@Test
public void testNot() {
  // Build NOT(field1 == 1) via the Flink API and convert it.
  Expression expr =
      resolve(
          ApiExpressionUtils.unresolvedCall(
              BuiltInFunctionDefinitions.NOT,
              Expressions.$("field1").isEqual(Expressions.lit(1))));
  Optional<org.apache.iceberg.expressions.Expression> actual = FlinkFilters.convert(expr);
  assertThat(actual).isPresent();
  Not not = (Not) actual.get();

  // Expected: the same predicate built directly with the Iceberg API.
  Not expected =
      (Not)
          org.apache.iceberg.expressions.Expressions.not(
              org.apache.iceberg.expressions.Expressions.equal("field1", 1));

  assertThat(not.op()).as("Predicate operation should match").isEqualTo(expected.op());
  assertPredicatesMatch(expected.child(), not.child());
}
/**
 * Sets the rendering density (DPI) and records it as user-configured.
 *
 * @param density dots per inch; must lie in the inclusive range 150-1200
 * @throws IllegalArgumentException if the value is outside the valid range
 */
public void setDensity(int density) {
    // Validate before mutating any state.
    boolean inRange = density >= 150 && density <= 1200;
    if (!inRange) {
        throw new IllegalArgumentException(
                "Invalid density value. Valid range of values is 150-1200.");
    }
    this.density = density;
    userConfigured.add("density");
}
@Test
public void testValidateDensity() {
  TesseractOCRConfig config = new TesseractOCRConfig();
  // In-range values must be accepted without throwing.
  config.setDensity(300);
  config.setDensity(400);
  assertTrue(true, "Couldn't set valid values");
  // An out-of-range value (below 150) must be rejected.
  assertThrows(IllegalArgumentException.class, () -> {
    config.setDensity(1);
  });
}
/**
 * Renders a method's generic signature using this format's message pattern.
 *
 * <p>METHOD_PATTERN is matched against {@code Method#toGenericString()};
 * groups 3-5 presumably capture the qualified class, method name and argument
 * list — TODO(review): confirm against the pattern's definition.
 *
 * @throws CucumberBackendException if the generic signature does not match
 */
String format(Method method) {
  String signature = method.toGenericString();
  Matcher matcher = METHOD_PATTERN.matcher(signature);
  if (matcher.find()) {
    String qc = matcher.group(3);
    String m = matcher.group(4);
    String qa = matcher.group(5);
    return format.format(new Object[] {
        qc,
        m,
        qa,
    });
  } else {
    // A non-matching signature indicates a bug in the pattern, not user error.
    throw new CucumberBackendException("Cucumber bug: Couldn't format " + signature);
  }
}
@Test
void shouldUseSimpleFormatWhenMethodHasNoException() {
  // FULL format renders the fully-qualified class, method name and argument types.
  assertThat(MethodFormat.FULL.format(methodWithArgsAndException),
      startsWith("io.cucumber.java.MethodFormatTest.methodWithArgsAndException(java.lang.String,java.util.Map)"));
}
/**
 * Resolves each parameter name declared on the template against the supplied
 * values, failing fast on the first missing one.
 *
 * @param template the SMS template whose declared parameter names drive the lookup
 * @param templateParams caller-supplied values, keyed by parameter name
 * @return one KeyValue per declared parameter, in declaration order
 */
@VisibleForTesting
List<KeyValue<String, Object>> buildTemplateParams(SmsTemplateDO template, Map<String, Object> templateParams) {
    return template.getParams().stream()
            .map(paramName -> {
                Object paramValue = templateParams.get(paramName);
                if (paramValue == null) {
                    // A declared parameter without a supplied value is a caller error.
                    throw exception(SMS_SEND_MOBILE_TEMPLATE_PARAM_MISS, paramName);
                }
                return new KeyValue<>(paramName, paramValue);
            })
            .collect(Collectors.toList());
}
@Test
public void testBuildTemplateParams_paramMiss() {
  // Prepare arguments: a template that declares the "code" parameter,
  // but an empty value map so the lookup must fail.
  SmsTemplateDO template = randomPojo(SmsTemplateDO.class, o -> o.setParams(Lists.newArrayList("code")));
  Map<String, Object> templateParams = new HashMap<>();
  // Mock methods (none needed here).
  // Invoke and assert the expected "template param missing" service exception.
  assertServiceException(() -> smsSendService.buildTemplateParams(template, templateParams),
      SMS_SEND_MOBILE_TEMPLATE_PARAM_MISS, "code");
}
@Override public String probeContentType(Path path) throws IOException { // Try to detect based on the file name only for efficiency String fileNameDetect = tika.detect(path.toString()); if (!fileNameDetect.equals(MimeTypes.OCTET_STREAM)) { return fileNameDetect; } // Then check the file content if necessary String fileContentDetect = tika.detect(path); if (!fileContentDetect.equals(MimeTypes.OCTET_STREAM)) { return fileContentDetect; } // Specification says to return null if we could not // conclusively determine the file type return null; }
@Test
public final void testFilesProbeContentTypePathUnrecognised() throws Exception {
  // Despite the "unrecognised" extension, detection is expected to fall back
  // to content inspection and identify the fixture as HTML.
  // NOTE(review): this depends on the fixture file's actual bytes — confirm
  // against the test resource.
  String contentType = Files.probeContentType(testDirectory.resolve(TEST_UNRECOGNISED_EXTENSION));
  assertNotNull(contentType);
  assertEquals("text/html", contentType);
}
/**
 * Subtracts {@code other} from this resource, producing a new instance.
 *
 * @param other the resource to subtract; must be non-null, of the same
 *              concrete class and name, and no larger than this resource
 * @return a new resource holding the difference
 * @throws NullPointerException     if {@code other} is null
 * @throws IllegalArgumentException if the type or name differs, or the
 *                                  subtrahend is larger than this resource
 */
public T subtract(T other) {
    // Explicit equivalents of the original precondition checks, in the same order.
    if (other == null) {
        throw new NullPointerException("Cannot subtract null resources");
    }
    if (getClass() != other.getClass()) {
        throw new IllegalArgumentException("Minus with different resource type");
    }
    if (!name.equals(other.getName())) {
        throw new IllegalArgumentException("Minus with different resource name");
    }
    if (value.compareTo(other.getValue()) < 0) {
        throw new IllegalArgumentException("Try to subtract a larger resource from this one.");
    }
    return create(value.subtract(other.getValue()));
}
@Test
void testSubtractErrorOnDifferentTypes() {
  // Subtracting across different concrete Resource subclasses must be rejected.
  final Resource v1 = new TestResource(0.1);
  final Resource v2 = new CPUResource(0.1);

  assertThatThrownBy(() -> v1.subtract(v2)).isInstanceOf(IllegalArgumentException.class);
}
/**
 * Validates the builder state and creates the output stream.
 *
 * <p>Mandatory/optional keys prefixed with {@code FS_S3A_CREATE_HEADER + "."}
 * are stripped into object headers; the remaining mandatory keys are validated
 * against the known create-file keys. Append is rejected, and at least one of
 * create/overwrite must have been requested.
 *
 * @throws UnsupportedOperationException if the append flag is set
 * @throws PathIOException if neither create nor overwrite was requested
 * @throws IOException on other validation or creation failures
 */
@Override
public FSDataOutputStream build() throws IOException {
  Path path = getPath();
  final Configuration options = getOptions();
  final Map<String, String> headers = new HashMap<>();
  final Set<String> mandatoryKeys = getMandatoryKeys();
  final Set<String> keysToValidate = new HashSet<>();
  // pick up all headers from the mandatory list and strip them before
  // validating the keys
  String headerPrefix = FS_S3A_CREATE_HEADER + ".";
  final int prefixLen = headerPrefix.length();
  mandatoryKeys.stream().forEach(key -> {
    if (key.startsWith(headerPrefix) && key.length() > prefixLen) {
      headers.put(key.substring(prefixLen), options.get(key));
    } else {
      keysToValidate.add(key);
    }
  });
  rejectUnknownMandatoryKeys(keysToValidate, CREATE_FILE_KEYS, "for " + path);
  // and add any optional headers
  getOptionalKeys().stream()
      .filter(key -> key.startsWith(headerPrefix) && key.length() > prefixLen)
      .forEach(key -> headers.put(key.substring(prefixLen), options.get(key)));
  EnumSet<CreateFlag> flags = getFlags();
  if (flags.contains(CreateFlag.APPEND)) {
    throw new UnsupportedOperationException("Append is not supported");
  }
  if (!flags.contains(CreateFlag.CREATE) && !flags.contains(CreateFlag.OVERWRITE)) {
    throw new PathIOException(path.toString(), "Must specify either create or overwrite");
  }
  final boolean performance = options.getBoolean(Constants.FS_S3A_CREATE_PERFORMANCE, false);
  return callbacks.createFileFromBuilder(
      path,
      getProgress(),
      new CreateFileOptions(flags, isRecursive(), performance, headers));
}
@Test
public void testHeaderOptions() throws Throwable {
  // Both must() and opt() keys under the header prefix should surface as object headers.
  final CreateFileBuilder builder = mkBuilder().create()
      .must(FS_S3A_CREATE_HEADER + ".retention", "permanent")
      .opt(FS_S3A_CREATE_HEADER + ".owner", "engineering");
  final Map<String, String> headers = build(builder).getHeaders();
  Assertions.assertThat(headers)
      .containsEntry("retention", "permanent")
      .containsEntry("owner", "engineering");
}
public static AggregationFunction getAggregationFunction(FunctionContext function, boolean nullHandlingEnabled) { try { String upperCaseFunctionName = AggregationFunctionType.getNormalizedAggregationFunctionName(function.getFunctionName()); List<ExpressionContext> arguments = function.getArguments(); int numArguments = arguments.size(); ExpressionContext firstArgument = arguments.get(0); if (upperCaseFunctionName.startsWith("PERCENTILE")) { String remainingFunctionName = upperCaseFunctionName.substring(10); if (remainingFunctionName.equals("SMARTTDIGEST")) { return new PercentileSmartTDigestAggregationFunction(arguments, nullHandlingEnabled); } if (remainingFunctionName.equals("KLL")) { return new PercentileKLLAggregationFunction(arguments, nullHandlingEnabled); } if (remainingFunctionName.equals("KLLMV")) { return new PercentileKLLMVAggregationFunction(arguments); } if (remainingFunctionName.equals("RAWKLL")) { return new PercentileRawKLLAggregationFunction(arguments, nullHandlingEnabled); } if (remainingFunctionName.equals("RAWKLLMV")) { return new PercentileRawKLLMVAggregationFunction(arguments); } if (numArguments == 1) { // Single argument percentile (e.g. Percentile99(foo), PercentileTDigest95(bar), etc.) // NOTE: This convention is deprecated. 
DO NOT add new functions here if (remainingFunctionName.matches("\\d+")) { // Percentile return new PercentileAggregationFunction(firstArgument, parsePercentileToInt(remainingFunctionName), nullHandlingEnabled); } else if (remainingFunctionName.matches("EST\\d+")) { // PercentileEst String percentileString = remainingFunctionName.substring(3); return new PercentileEstAggregationFunction(firstArgument, parsePercentileToInt(percentileString), nullHandlingEnabled); } else if (remainingFunctionName.matches("RAWEST\\d+")) { // PercentileRawEst String percentileString = remainingFunctionName.substring(6); return new PercentileRawEstAggregationFunction(firstArgument, parsePercentileToInt(percentileString), nullHandlingEnabled); } else if (remainingFunctionName.matches("TDIGEST\\d+")) { // PercentileTDigest String percentileString = remainingFunctionName.substring(7); return new PercentileTDigestAggregationFunction(firstArgument, parsePercentileToInt(percentileString), nullHandlingEnabled); } else if (remainingFunctionName.matches("RAWTDIGEST\\d+")) { // PercentileRawTDigest String percentileString = remainingFunctionName.substring(10); return new PercentileRawTDigestAggregationFunction(firstArgument, parsePercentileToInt(percentileString), nullHandlingEnabled); } else if (remainingFunctionName.matches("\\d+MV")) { // PercentileMV String percentileString = remainingFunctionName.substring(0, remainingFunctionName.length() - 2); return new PercentileMVAggregationFunction(firstArgument, parsePercentileToInt(percentileString)); } else if (remainingFunctionName.matches("EST\\d+MV")) { // PercentileEstMV String percentileString = remainingFunctionName.substring(3, remainingFunctionName.length() - 2); return new PercentileEstMVAggregationFunction(firstArgument, parsePercentileToInt(percentileString)); } else if (remainingFunctionName.matches("RAWEST\\d+MV")) { // PercentileRawEstMV String percentileString = remainingFunctionName.substring(6, remainingFunctionName.length() - 2); 
return new PercentileRawEstMVAggregationFunction(firstArgument, parsePercentileToInt(percentileString)); } else if (remainingFunctionName.matches("TDIGEST\\d+MV")) { // PercentileTDigestMV String percentileString = remainingFunctionName.substring(7, remainingFunctionName.length() - 2); return new PercentileTDigestMVAggregationFunction(firstArgument, parsePercentileToInt(percentileString)); } else if (remainingFunctionName.matches("RAWTDIGEST\\d+MV")) { // PercentileRawTDigestMV String percentileString = remainingFunctionName.substring(10, remainingFunctionName.length() - 2); return new PercentileRawTDigestMVAggregationFunction(firstArgument, parsePercentileToInt(percentileString)); } } else if (numArguments == 2) { // Double arguments percentile (e.g. percentile(foo, 99), percentileTDigest(bar, 95), etc.) where the // second argument is a decimal number from 0.0 to 100.0. double percentile = arguments.get(1).getLiteral().getDoubleValue(); Preconditions.checkArgument(percentile >= 0 && percentile <= 100, "Invalid percentile: %s", percentile); if (remainingFunctionName.isEmpty()) { // Percentile return new PercentileAggregationFunction(firstArgument, percentile, nullHandlingEnabled); } if (remainingFunctionName.equals("EST")) { // PercentileEst return new PercentileEstAggregationFunction(firstArgument, percentile, nullHandlingEnabled); } if (remainingFunctionName.equals("RAWEST")) { // PercentileRawEst return new PercentileRawEstAggregationFunction(firstArgument, percentile, nullHandlingEnabled); } if (remainingFunctionName.equals("TDIGEST")) { // PercentileTDigest return new PercentileTDigestAggregationFunction(firstArgument, percentile, nullHandlingEnabled); } if (remainingFunctionName.equals("RAWTDIGEST")) { // PercentileRawTDigest return new PercentileRawTDigestAggregationFunction(firstArgument, percentile, nullHandlingEnabled); } if (remainingFunctionName.equals("MV")) { // PercentileMV return new PercentileMVAggregationFunction(firstArgument, percentile); } if 
(remainingFunctionName.equals("ESTMV")) { // PercentileEstMV return new PercentileEstMVAggregationFunction(firstArgument, percentile); } if (remainingFunctionName.equals("RAWESTMV")) { // PercentileRawEstMV return new PercentileRawEstMVAggregationFunction(firstArgument, percentile); } if (remainingFunctionName.equals("TDIGESTMV")) { // PercentileTDigestMV return new PercentileTDigestMVAggregationFunction(firstArgument, percentile); } if (remainingFunctionName.equals("RAWTDIGESTMV")) { // PercentileRawTDigestMV return new PercentileRawTDigestMVAggregationFunction(firstArgument, percentile); } } else if (numArguments == 3) { // Triple arguments percentile (e.g. percentileTDigest(bar, 95, 1000), etc.) where the // second argument is a decimal number from 0.0 to 100.0 and third argument is a decimal number indicating // the compression_factor for the TDigest. This can only be used for TDigest type percentile functions to // pass in a custom compression_factor. If the two argument version is used the default compression_factor // of 100.0 is used. 
double percentile = arguments.get(1).getLiteral().getDoubleValue(); Preconditions.checkArgument(percentile >= 0 && percentile <= 100, "Invalid percentile: %s", percentile); int compressionFactor = arguments.get(2).getLiteral().getIntValue(); Preconditions.checkArgument(compressionFactor >= 0, "Invalid compressionFactor: %d", compressionFactor); if (remainingFunctionName.equals("TDIGEST")) { // PercentileTDigest return new PercentileTDigestAggregationFunction(firstArgument, percentile, compressionFactor, nullHandlingEnabled); } if (remainingFunctionName.equals("RAWTDIGEST")) { // PercentileRawTDigest return new PercentileRawTDigestAggregationFunction(firstArgument, percentile, compressionFactor, nullHandlingEnabled); } if (remainingFunctionName.equals("TDIGESTMV")) { // PercentileTDigestMV return new PercentileTDigestMVAggregationFunction(firstArgument, percentile, compressionFactor); } if (remainingFunctionName.equals("RAWTDIGESTMV")) { // PercentileRawTDigestMV return new PercentileRawTDigestMVAggregationFunction(firstArgument, percentile, compressionFactor); } } throw new IllegalArgumentException("Invalid percentile function: " + function); } else { AggregationFunctionType functionType = AggregationFunctionType.valueOf(upperCaseFunctionName); switch (functionType) { case COUNT: return new CountAggregationFunction(arguments, nullHandlingEnabled); case MIN: return new MinAggregationFunction(arguments, nullHandlingEnabled); case MAX: return new MaxAggregationFunction(arguments, nullHandlingEnabled); case SUM: case SUM0: return new SumAggregationFunction(arguments, nullHandlingEnabled); case SUMPRECISION: return new SumPrecisionAggregationFunction(arguments, nullHandlingEnabled); case AVG: return new AvgAggregationFunction(arguments, nullHandlingEnabled); case MODE: return new ModeAggregationFunction(arguments, nullHandlingEnabled); case FIRSTWITHTIME: { Preconditions.checkArgument(numArguments == 3, "FIRST_WITH_TIME expects 3 arguments, got: %s. 
The function can be used as " + "firstWithTime(dataColumn, timeColumn, 'dataType')", numArguments); ExpressionContext timeCol = arguments.get(1); ExpressionContext dataTypeExp = arguments.get(2); Preconditions.checkArgument(dataTypeExp.getType() == ExpressionContext.Type.LITERAL, "FIRST_WITH_TIME expects the 3rd argument to be literal, got: %s. The function can be used as " + "firstWithTime(dataColumn, timeColumn, 'dataType')", dataTypeExp.getType()); DataType dataType = DataType.valueOf(dataTypeExp.getLiteral().getStringValue().toUpperCase()); switch (dataType) { case BOOLEAN: return new FirstIntValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled, true); case INT: return new FirstIntValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled, false); case LONG: return new FirstLongValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled); case FLOAT: return new FirstFloatValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled); case DOUBLE: return new FirstDoubleValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled); case STRING: return new FirstStringValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled); default: throw new IllegalArgumentException("Unsupported data type for FIRST_WITH_TIME: " + dataType); } } case LISTAGG: { Preconditions.checkArgument(numArguments == 2 || numArguments == 3, "LISTAGG expects 2 arguments, got: %s. The function can be used as " + "listAgg([distinct] expression, 'separator')", numArguments); ExpressionContext separatorExpression = arguments.get(1); Preconditions.checkArgument(separatorExpression.getType() == ExpressionContext.Type.LITERAL, "LISTAGG expects the 2nd argument to be literal, got: %s. 
The function can be used as " + "listAgg([distinct] expression, 'separator')", separatorExpression.getType()); String separator = separatorExpression.getLiteral().getStringValue(); boolean isDistinct = false; if (numArguments == 3) { ExpressionContext isDistinctListAggExp = arguments.get(2); isDistinct = isDistinctListAggExp.getLiteral().getBooleanValue(); } if (isDistinct) { return new ListAggDistinctFunction(arguments.get(0), separator, nullHandlingEnabled); } return new ListAggFunction(arguments.get(0), separator, nullHandlingEnabled); } case SUMARRAYLONG: return new SumArrayLongAggregationFunction(arguments); case SUMARRAYDOUBLE: return new SumArrayDoubleAggregationFunction(arguments); case ARRAYAGG: { Preconditions.checkArgument(numArguments >= 2, "ARRAY_AGG expects 2 or 3 arguments, got: %s. The function can be used as " + "arrayAgg(dataColumn, 'dataType', ['isDistinct'])", numArguments); ExpressionContext dataTypeExp = arguments.get(1); Preconditions.checkArgument(dataTypeExp.getType() == ExpressionContext.Type.LITERAL, "ARRAY_AGG expects the 2nd argument to be literal, got: %s. The function can be used as " + "arrayAgg(dataColumn, 'dataType', ['isDistinct'])", dataTypeExp.getType()); DataType dataType = DataType.valueOf(dataTypeExp.getLiteral().getStringValue().toUpperCase()); boolean isDistinct = false; if (numArguments == 3) { ExpressionContext isDistinctExp = arguments.get(2); Preconditions.checkArgument(isDistinctExp.getType() == ExpressionContext.Type.LITERAL, "ARRAY_AGG expects the 3rd argument to be literal, got: %s. 
The function can be used as " + "arrayAgg(dataColumn, 'dataType', ['isDistinct'])", isDistinctExp.getType()); isDistinct = isDistinctExp.getLiteral().getBooleanValue(); } if (isDistinct) { switch (dataType) { case BOOLEAN: case INT: return new ArrayAggDistinctIntFunction(firstArgument, dataType, nullHandlingEnabled); case LONG: case TIMESTAMP: return new ArrayAggDistinctLongFunction(firstArgument, dataType, nullHandlingEnabled); case FLOAT: return new ArrayAggDistinctFloatFunction(firstArgument, nullHandlingEnabled); case DOUBLE: return new ArrayAggDistinctDoubleFunction(firstArgument, nullHandlingEnabled); case STRING: return new ArrayAggDistinctStringFunction(firstArgument, nullHandlingEnabled); default: throw new IllegalArgumentException("Unsupported data type for ARRAY_AGG: " + dataType); } } switch (dataType) { case BOOLEAN: case INT: return new ArrayAggIntFunction(firstArgument, dataType, nullHandlingEnabled); case LONG: case TIMESTAMP: return new ArrayAggLongFunction(firstArgument, dataType, nullHandlingEnabled); case FLOAT: return new ArrayAggFloatFunction(firstArgument, nullHandlingEnabled); case DOUBLE: return new ArrayAggDoubleFunction(firstArgument, nullHandlingEnabled); case STRING: return new ArrayAggStringFunction(firstArgument, nullHandlingEnabled); default: throw new IllegalArgumentException("Unsupported data type for ARRAY_AGG: " + dataType); } } case LASTWITHTIME: { Preconditions.checkArgument(numArguments == 3, "LAST_WITH_TIME expects 3 arguments, got: %s. The function can be used as " + "lastWithTime(dataColumn, timeColumn, 'dataType')", numArguments); ExpressionContext timeCol = arguments.get(1); ExpressionContext dataTypeExp = arguments.get(2); Preconditions.checkArgument(dataTypeExp.getType() == ExpressionContext.Type.LITERAL, "LAST_WITH_TIME expects the 3rd argument to be literal, got: %s. 
The function can be used as " + "lastWithTime(dataColumn, timeColumn, 'dataType')", dataTypeExp.getType()); DataType dataType = DataType.valueOf(dataTypeExp.getLiteral().getStringValue().toUpperCase()); switch (dataType) { case BOOLEAN: return new LastIntValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled, true); case INT: return new LastIntValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled, false); case LONG: return new LastLongValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled); case FLOAT: return new LastFloatValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled); case DOUBLE: return new LastDoubleValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled); case STRING: return new LastStringValueWithTimeAggregationFunction(firstArgument, timeCol, nullHandlingEnabled); default: throw new IllegalArgumentException("Unsupported data type for LAST_WITH_TIME: " + dataType); } } case MINMAXRANGE: return new MinMaxRangeAggregationFunction(arguments, nullHandlingEnabled); case DISTINCTCOUNT: return new DistinctCountAggregationFunction(arguments, nullHandlingEnabled); case DISTINCTCOUNTBITMAP: return new DistinctCountBitmapAggregationFunction(arguments); case SEGMENTPARTITIONEDDISTINCTCOUNT: return new SegmentPartitionedDistinctCountAggregationFunction(arguments); case DISTINCTCOUNTHLL: return new DistinctCountHLLAggregationFunction(arguments); case DISTINCTCOUNTRAWHLL: return new DistinctCountRawHLLAggregationFunction(arguments); case DISTINCTCOUNTSMARTHLL: return new DistinctCountSmartHLLAggregationFunction(arguments); case FASTHLL: return new FastHLLAggregationFunction(arguments); case DISTINCTCOUNTTHETASKETCH: return new DistinctCountThetaSketchAggregationFunction(arguments); case DISTINCTCOUNTRAWTHETASKETCH: return new DistinctCountRawThetaSketchAggregationFunction(arguments); case DISTINCTSUM: return new DistinctSumAggregationFunction(arguments, 
nullHandlingEnabled); case DISTINCTAVG: return new DistinctAvgAggregationFunction(arguments, nullHandlingEnabled); case IDSET: return new IdSetAggregationFunction(arguments); case COUNTMV: return new CountMVAggregationFunction(arguments); case MINMV: return new MinMVAggregationFunction(arguments); case MAXMV: return new MaxMVAggregationFunction(arguments); case SUMMV: return new SumMVAggregationFunction(arguments); case AVGMV: return new AvgMVAggregationFunction(arguments); case MINMAXRANGEMV: return new MinMaxRangeMVAggregationFunction(arguments); case DISTINCTCOUNTMV: return new DistinctCountMVAggregationFunction(arguments); case DISTINCTCOUNTBITMAPMV: return new DistinctCountBitmapMVAggregationFunction(arguments); case DISTINCTCOUNTHLLMV: return new DistinctCountHLLMVAggregationFunction(arguments); case DISTINCTCOUNTRAWHLLMV: return new DistinctCountRawHLLMVAggregationFunction(arguments); case DISTINCTCOUNTHLLPLUS: return new DistinctCountHLLPlusAggregationFunction(arguments); case DISTINCTCOUNTRAWHLLPLUS: return new DistinctCountRawHLLPlusAggregationFunction(arguments); case DISTINCTCOUNTHLLPLUSMV: return new DistinctCountHLLPlusMVAggregationFunction(arguments); case DISTINCTCOUNTRAWHLLPLUSMV: return new DistinctCountRawHLLPlusMVAggregationFunction(arguments); case DISTINCTSUMMV: return new DistinctSumMVAggregationFunction(arguments); case DISTINCTAVGMV: return new DistinctAvgMVAggregationFunction(arguments); case STUNION: return new StUnionAggregationFunction(arguments); case HISTOGRAM: return new HistogramAggregationFunction(arguments); case COVARPOP: return new CovarianceAggregationFunction(arguments, false); case COVARSAMP: return new CovarianceAggregationFunction(arguments, true); case BOOLAND: return new BooleanAndAggregationFunction(arguments, nullHandlingEnabled); case BOOLOR: return new BooleanOrAggregationFunction(arguments, nullHandlingEnabled); case VARPOP: return new VarianceAggregationFunction(arguments, false, false, nullHandlingEnabled); case 
VARSAMP: return new VarianceAggregationFunction(arguments, true, false, nullHandlingEnabled); case STDDEVPOP: return new VarianceAggregationFunction(arguments, false, true, nullHandlingEnabled); case STDDEVSAMP: return new VarianceAggregationFunction(arguments, true, true, nullHandlingEnabled); case SKEWNESS: return new FourthMomentAggregationFunction(arguments, FourthMomentAggregationFunction.Type.SKEWNESS); case KURTOSIS: return new FourthMomentAggregationFunction(arguments, FourthMomentAggregationFunction.Type.KURTOSIS); case FOURTHMOMENT: return new FourthMomentAggregationFunction(arguments, FourthMomentAggregationFunction.Type.MOMENT); case DISTINCTCOUNTTUPLESKETCH: // mode actually doesn't matter here because we only care about keys, not values return new DistinctCountIntegerTupleSketchAggregationFunction(arguments, IntegerSummary.Mode.Sum); case DISTINCTCOUNTRAWINTEGERSUMTUPLESKETCH: return new IntegerTupleSketchAggregationFunction(arguments, IntegerSummary.Mode.Sum); case SUMVALUESINTEGERSUMTUPLESKETCH: return new SumValuesIntegerTupleSketchAggregationFunction(arguments, IntegerSummary.Mode.Sum); case AVGVALUEINTEGERSUMTUPLESKETCH: return new AvgValueIntegerTupleSketchAggregationFunction(arguments, IntegerSummary.Mode.Sum); case PINOTPARENTAGGEXPRMAX: return new ParentExprMinMaxAggregationFunction(arguments, true); case PINOTPARENTAGGEXPRMIN: return new ParentExprMinMaxAggregationFunction(arguments, false); case PINOTCHILDAGGEXPRMAX: return new ChildExprMinMaxAggregationFunction(arguments, true); case PINOTCHILDAGGEXPRMIN: return new ChildExprMinMaxAggregationFunction(arguments, false); case EXPRMAX: case EXPRMIN: throw new IllegalArgumentException( "Aggregation function: " + functionType + " is only supported in selection without alias."); case FUNNELCOUNT: return new FunnelCountAggregationFunctionFactory(arguments).get(); case FUNNELMAXSTEP: return new FunnelMaxStepAggregationFunction(arguments); case FUNNELMATCHSTEP: return new 
FunnelMatchStepAggregationFunction(arguments); case FUNNELCOMPLETECOUNT: return new FunnelCompleteCountAggregationFunction(arguments); case FREQUENTSTRINGSSKETCH: return new FrequentStringsSketchAggregationFunction(arguments); case FREQUENTLONGSSKETCH: return new FrequentLongsSketchAggregationFunction(arguments); case DISTINCTCOUNTCPCSKETCH: return new DistinctCountCPCSketchAggregationFunction(arguments); case DISTINCTCOUNTRAWCPCSKETCH: return new DistinctCountRawCPCSketchAggregationFunction(arguments); case DISTINCTCOUNTULL: return new DistinctCountULLAggregationFunction(arguments); case DISTINCTCOUNTRAWULL: return new DistinctCountRawULLAggregationFunction(arguments); default: throw new IllegalArgumentException("Unsupported aggregation function type: " + functionType); } } } catch (Exception e) { throw new BadQueryRequestException("Invalid aggregation function: " + function + "; Reason: " + e.getMessage()); } }
@Test
public void testGetAggregationFunction() {
  // Each original assertion block repeated the same 4 statements (resolve, instanceof, type,
  // result column name) ~70 times; they are factored into assertAggregationFunction() below.
  // The set of covered cases is unchanged.

  // Simple single-value aggregations; mixed-case names verify case-insensitive resolution.
  assertAggregationFunction(getFunction("CoUnT", ARGUMENT_STAR), CountAggregationFunction.class,
      AggregationFunctionType.COUNT);
  assertAggregationFunction(getFunction("MiN"), MinAggregationFunction.class, AggregationFunctionType.MIN);
  assertAggregationFunction(getFunction("MaX"), MaxAggregationFunction.class, AggregationFunctionType.MAX);
  assertAggregationFunction(getFunction("SuM"), SumAggregationFunction.class, AggregationFunctionType.SUM);
  assertAggregationFunction(getFunction("SuMPreCIsiON"), SumPrecisionAggregationFunction.class,
      AggregationFunctionType.SUMPRECISION);
  assertAggregationFunction(getFunction("AvG"), AvgAggregationFunction.class, AggregationFunctionType.AVG);
  assertAggregationFunction(getFunction("MoDe"), ModeAggregationFunction.class, AggregationFunctionType.MODE);

  // FIRST_WITH_TIME: the data-type literal selects the concrete implementation
  // (BOOLEAN is backed by the int implementation).
  assertAggregationFunction(getFunction("FiRsTwItHtImE", "(column,timeColumn,'BOOLEAN')"),
      FirstIntValueWithTimeAggregationFunction.class, AggregationFunctionType.FIRSTWITHTIME);
  assertAggregationFunction(getFunction("FiRsTwItHtImE", "(column,timeColumn,'INT')"),
      FirstIntValueWithTimeAggregationFunction.class, AggregationFunctionType.FIRSTWITHTIME);
  assertAggregationFunction(getFunction("FiRsTwItHtImE", "(column,timeColumn,'LONG')"),
      FirstLongValueWithTimeAggregationFunction.class, AggregationFunctionType.FIRSTWITHTIME);
  assertAggregationFunction(getFunction("FiRsTwItHtImE", "(column,timeColumn,'FLOAT')"),
      FirstFloatValueWithTimeAggregationFunction.class, AggregationFunctionType.FIRSTWITHTIME);
  assertAggregationFunction(getFunction("FiRsTwItHtImE", "(column,timeColumn,'DOUBLE')"),
      FirstDoubleValueWithTimeAggregationFunction.class, AggregationFunctionType.FIRSTWITHTIME);
  assertAggregationFunction(getFunction("FiRsTwItHtImE", "(column,timeColumn,'STRING')"),
      FirstStringValueWithTimeAggregationFunction.class, AggregationFunctionType.FIRSTWITHTIME);

  // LAST_WITH_TIME: same per-data-type dispatch as FIRST_WITH_TIME.
  assertAggregationFunction(getFunction("LaStWiThTiMe", "(column,timeColumn,'BOOLEAN')"),
      LastIntValueWithTimeAggregationFunction.class, AggregationFunctionType.LASTWITHTIME);
  assertAggregationFunction(getFunction("LaStWiThTiMe", "(column,timeColumn,'INT')"),
      LastIntValueWithTimeAggregationFunction.class, AggregationFunctionType.LASTWITHTIME);
  assertAggregationFunction(getFunction("LaStWiThTiMe", "(column,timeColumn,'LONG')"),
      LastLongValueWithTimeAggregationFunction.class, AggregationFunctionType.LASTWITHTIME);
  assertAggregationFunction(getFunction("LaStWiThTiMe", "(column,timeColumn,'FLOAT')"),
      LastFloatValueWithTimeAggregationFunction.class, AggregationFunctionType.LASTWITHTIME);
  assertAggregationFunction(getFunction("LaStWiThTiMe", "(column,timeColumn,'DOUBLE')"),
      LastDoubleValueWithTimeAggregationFunction.class, AggregationFunctionType.LASTWITHTIME);
  assertAggregationFunction(getFunction("LaStWiThTiMe", "(column,timeColumn,'STRING')"),
      LastStringValueWithTimeAggregationFunction.class, AggregationFunctionType.LASTWITHTIME);

  // Range and distinct-count family.
  assertAggregationFunction(getFunction("MiNmAxRaNgE"), MinMaxRangeAggregationFunction.class,
      AggregationFunctionType.MINMAXRANGE);
  assertAggregationFunction(getFunction("DiStInCtCoUnT"), DistinctCountAggregationFunction.class,
      AggregationFunctionType.DISTINCTCOUNT);
  assertAggregationFunction(getFunction("DiStInCtCoUnThLl"), DistinctCountHLLAggregationFunction.class,
      AggregationFunctionType.DISTINCTCOUNTHLL);
  assertAggregationFunction(getFunction("DiStInCtCoUnTrAwHlL"), DistinctCountRawHLLAggregationFunction.class,
      AggregationFunctionType.DISTINCTCOUNTRAWHLL);
  assertAggregationFunction(getFunction("DiStInCtCoUnThLlPlUs"), DistinctCountHLLPlusAggregationFunction.class,
      AggregationFunctionType.DISTINCTCOUNTHLLPLUS);
  assertAggregationFunction(getFunction("DiStInCtCoUnTrAwHlLpLuS"),
      DistinctCountRawHLLPlusAggregationFunction.class, AggregationFunctionType.DISTINCTCOUNTRAWHLLPLUS);
  assertAggregationFunction(getFunction("FaStHlL"), FastHLLAggregationFunction.class,
      AggregationFunctionType.FASTHLL);

  // Percentiles with the percentile embedded in the function name (e.g. PERCENTILE5).
  assertAggregationFunction(getFunction("PeRcEnTiLe5"), PercentileAggregationFunction.class,
      AggregationFunctionType.PERCENTILE);
  assertAggregationFunction(getFunction("PeRcEnTiLeEsT50"), PercentileEstAggregationFunction.class,
      AggregationFunctionType.PERCENTILEEST);
  assertAggregationFunction(getFunction("PeRcEnTiLeRaWEsT50"), PercentileRawEstAggregationFunction.class,
      AggregationFunctionType.PERCENTILERAWEST);
  assertAggregationFunction(getFunction("PeRcEnTiLeTdIgEsT99"), PercentileTDigestAggregationFunction.class,
      AggregationFunctionType.PERCENTILETDIGEST);
  assertAggregationFunction(getFunction("PeRcEnTiLeRaWTdIgEsT99"), PercentileRawTDigestAggregationFunction.class,
      AggregationFunctionType.PERCENTILERAWTDIGEST);

  // Percentiles passed as a literal argument: the result column name normalizes integer
  // percentile literals to doubles (5 -> 5.0) and keeps non-default extra arguments.
  assertAggregationFunction(getFunction("PeRcEnTiLe", "(column, 5)"), PercentileAggregationFunction.class,
      AggregationFunctionType.PERCENTILE, "percentile(column, 5.0)");
  assertAggregationFunction(getFunction("PeRcEnTiLe", "(column, 5.5)"), PercentileAggregationFunction.class,
      AggregationFunctionType.PERCENTILE, "percentile(column, 5.5)");
  assertAggregationFunction(getFunction("PeRcEnTiLeEsT", "(column, 50)"), PercentileEstAggregationFunction.class,
      AggregationFunctionType.PERCENTILEEST, "percentileest(column, 50.0)");
  assertAggregationFunction(getFunction("PeRcEnTiLeRaWeSt", "(column, 50)"),
      PercentileRawEstAggregationFunction.class, AggregationFunctionType.PERCENTILERAWEST,
      "percentilerawest(column, 50.0)");
  assertAggregationFunction(getFunction("PeRcEnTiLeEsT", "(column, 55.555)"),
      PercentileEstAggregationFunction.class, AggregationFunctionType.PERCENTILEEST,
      "percentileest(column, 55.555)");
  assertAggregationFunction(getFunction("PeRcEnTiLeRaWeSt", "(column, 55.555)"),
      PercentileRawEstAggregationFunction.class, AggregationFunctionType.PERCENTILERAWEST,
      "percentilerawest(column, 55.555)");
  assertAggregationFunction(getFunction("PeRcEnTiLeTdIgEsT", "(column, 99)"),
      PercentileTDigestAggregationFunction.class, AggregationFunctionType.PERCENTILETDIGEST,
      "percentiletdigest(column, 99.0)");
  assertAggregationFunction(getFunction("PeRcEnTiLeTdIgEsT", "(column, 99.9999)"),
      PercentileTDigestAggregationFunction.class, AggregationFunctionType.PERCENTILETDIGEST,
      "percentiletdigest(column, 99.9999)");
  assertAggregationFunction(getFunction("PeRcEnTiLeTdIgEsT", "(column, 99.9999, 1000)"),
      PercentileTDigestAggregationFunction.class, AggregationFunctionType.PERCENTILETDIGEST,
      "percentiletdigest(column, 99.9999, 1000)");
  assertAggregationFunction(getFunction("PeRcEnTiLeRaWtDiGeSt", "(column, 99)"),
      PercentileRawTDigestAggregationFunction.class, AggregationFunctionType.PERCENTILERAWTDIGEST,
      "percentilerawtdigest(column, 99.0)");
  assertAggregationFunction(getFunction("PeRcEnTiLeRaWtDiGeSt", "(column, 99.9999)"),
      PercentileRawTDigestAggregationFunction.class, AggregationFunctionType.PERCENTILERAWTDIGEST,
      "percentilerawtdigest(column, 99.9999)");
  assertAggregationFunction(getFunction("PeRcEntiLEkll", "(column, 99.9999)"),
      PercentileKLLAggregationFunction.class, AggregationFunctionType.PERCENTILEKLL,
      "percentilekll(column, 99.9999)");
  assertAggregationFunction(getFunction("PeRcEnTiLeRaWtDiGeSt", "(column, 99.9999, 500)"),
      PercentileRawTDigestAggregationFunction.class, AggregationFunctionType.PERCENTILERAWTDIGEST,
      "percentilerawtdigest(column, 99.9999, 500)");
  // A third argument of 100 does not appear in the result column name (presumably the default
  // compression — confirm in PercentileRawTDigestAggregationFunction).
  assertAggregationFunction(getFunction("PeRcEnTiLeRaWtDiGeSt", "(column, 99.9999, 100)"),
      PercentileRawTDigestAggregationFunction.class, AggregationFunctionType.PERCENTILERAWTDIGEST,
      "percentilerawtdigest(column, 99.9999)");

  // Multi-value (MV) variants; underscored names (e.g. AvG_mV) verify that underscores are
  // ignored during resolution.
  assertAggregationFunction(getFunction("CoUnTmV"), CountMVAggregationFunction.class,
      AggregationFunctionType.COUNTMV);
  assertAggregationFunction(getFunction("MiNmV"), MinMVAggregationFunction.class, AggregationFunctionType.MINMV);
  assertAggregationFunction(getFunction("MaXmV"), MaxMVAggregationFunction.class, AggregationFunctionType.MAXMV);
  assertAggregationFunction(getFunction("SuMmV"), SumMVAggregationFunction.class, AggregationFunctionType.SUMMV);
  assertAggregationFunction(getFunction("AvGmV"), AvgMVAggregationFunction.class, AggregationFunctionType.AVGMV);
  assertAggregationFunction(getFunction("AvG_mV"), AvgMVAggregationFunction.class, AggregationFunctionType.AVGMV);
  assertAggregationFunction(getFunction("MiNmAxRaNgEmV"), MinMaxRangeMVAggregationFunction.class,
      AggregationFunctionType.MINMAXRANGEMV);
  assertAggregationFunction(getFunction("DiStInCtCoUnTmV"), DistinctCountMVAggregationFunction.class,
      AggregationFunctionType.DISTINCTCOUNTMV);
  assertAggregationFunction(getFunction("DiStInCtCoUnThLlMv"), DistinctCountHLLMVAggregationFunction.class,
      AggregationFunctionType.DISTINCTCOUNTHLLMV);
  assertAggregationFunction(getFunction("DiStInCt_CoUnT_hLl_Mv"), DistinctCountHLLMVAggregationFunction.class,
      AggregationFunctionType.DISTINCTCOUNTHLLMV);
  assertAggregationFunction(getFunction("DiStInCtCoUnTrAwHlLmV"), DistinctCountRawHLLMVAggregationFunction.class,
      AggregationFunctionType.DISTINCTCOUNTRAWHLLMV);
  assertAggregationFunction(getFunction("DiStInCt_CoUnT_hLl_PlUs_Mv"),
      DistinctCountHLLPlusMVAggregationFunction.class, AggregationFunctionType.DISTINCTCOUNTHLLPLUSMV);
  assertAggregationFunction(getFunction("DiStInCtCoUnTrAwHlLpLuS_mV"),
      DistinctCountRawHLLPlusMVAggregationFunction.class, AggregationFunctionType.DISTINCTCOUNTRAWHLLPLUSMV);
  assertAggregationFunction(getFunction("PeRcEnTiLe10Mv"), PercentileMVAggregationFunction.class,
      AggregationFunctionType.PERCENTILEMV);
  assertAggregationFunction(getFunction("PeRcEnTiLeEsT90mV"), PercentileEstMVAggregationFunction.class,
      AggregationFunctionType.PERCENTILEESTMV);
  assertAggregationFunction(getFunction("PeRcEnTiLeTdIgEsT95mV"), PercentileTDigestMVAggregationFunction.class,
      AggregationFunctionType.PERCENTILETDIGESTMV);
  assertAggregationFunction(getFunction("PeRcEnTiLe_TdIgEsT_95_mV"), PercentileTDigestMVAggregationFunction.class,
      AggregationFunctionType.PERCENTILETDIGESTMV);

  // MV percentiles with literal percentile arguments.
  assertAggregationFunction(getFunction("PeRcEnTiLeMv", "(column, 10)"), PercentileMVAggregationFunction.class,
      AggregationFunctionType.PERCENTILEMV, "percentilemv(column, 10.0)");
  assertAggregationFunction(getFunction("PeRcEnTiLeEsTmV", "(column, 90)"),
      PercentileEstMVAggregationFunction.class, AggregationFunctionType.PERCENTILEESTMV,
      "percentileestmv(column, 90.0)");
  assertAggregationFunction(getFunction("PeRcEnTiLeTdIgEsTmV", "(column, 95)"),
      PercentileTDigestMVAggregationFunction.class, AggregationFunctionType.PERCENTILETDIGESTMV,
      "percentiletdigestmv(column, 95.0)");
  assertAggregationFunction(getFunction("PeRcEnTiLeTdIgEsTmV", "(column, 95, 1000)"),
      PercentileTDigestMVAggregationFunction.class, AggregationFunctionType.PERCENTILETDIGESTMV,
      "percentiletdigestmv(column, 95.0, 1000)");
  assertAggregationFunction(getFunction("PeRcEnTiLe_TdIgEsT_mV", "(column, 95)"),
      PercentileTDigestMVAggregationFunction.class, AggregationFunctionType.PERCENTILETDIGESTMV,
      "percentiletdigestmv(column, 95.0)");
  assertAggregationFunction(getFunction("PeRcEnTiLe_TdIgEsT_mV", "(column, 95, 200)"),
      PercentileTDigestMVAggregationFunction.class, AggregationFunctionType.PERCENTILETDIGESTMV,
      "percentiletdigestmv(column, 95.0, 200)");

  // Boolean and higher-moment aggregations.
  assertAggregationFunction(getFunction("bool_and"), BooleanAndAggregationFunction.class,
      AggregationFunctionType.BOOLAND);
  assertAggregationFunction(getFunction("bool_or"), BooleanOrAggregationFunction.class,
      AggregationFunctionType.BOOLOR);
  assertAggregationFunction(getFunction("skewness"), FourthMomentAggregationFunction.class,
      AggregationFunctionType.SKEWNESS);
  assertAggregationFunction(getFunction("kurtosis"), FourthMomentAggregationFunction.class,
      AggregationFunctionType.KURTOSIS);
}

/**
 * Resolves {@code function} via the factory (null-handling disabled) and asserts the returned
 * aggregation function's concrete class, type, and that its result column name equals
 * {@code function.toString()}.
 */
private static void assertAggregationFunction(FunctionContext function, Class<?> expectedClass,
    AggregationFunctionType expectedType) {
  assertAggregationFunction(function, expectedClass, expectedType, function.toString());
}

/**
 * Resolves {@code function} via the factory (null-handling disabled) and asserts the returned
 * aggregation function's concrete class, type, and exact result column name.
 */
private static void assertAggregationFunction(FunctionContext function, Class<?> expectedClass,
    AggregationFunctionType expectedType, String expectedResultColumnName) {
  AggregationFunction aggregationFunction = AggregationFunctionFactory.getAggregationFunction(function, false);
  // Class.isInstance is the reflective equivalent of the original `instanceof` checks.
  assertTrue(expectedClass.isInstance(aggregationFunction));
  assertEquals(aggregationFunction.getType(), expectedType);
  assertEquals(aggregationFunction.getResultColumnName(), expectedResultColumnName);
}
/**
 * Lists the GitHub organizations (app installations) visible to the given user access token.
 *
 * @param appUrl GitHub API base URL
 * @param accessToken user access token used for the call
 * @param page 1-based page index
 * @param pageSize number of entries per page
 * @return the organizations for the requested page, with the total count
 * @throws IllegalStateException if the HTTP call fails
 */
@Override
public Organizations listOrganizations(String appUrl, AccessToken accessToken, int page, int pageSize) {
  checkPageArgs(page, pageSize);
  try {
    // Ask GitHub which installations this token can see; page/per_page drive pagination.
    GetResponse response = githubApplicationHttpClient.get(appUrl, accessToken,
      String.format("/user/installations?page=%s&per_page=%s", page, pageSize));
    Optional<GsonInstallations> gsonInstallations = response.getContent()
      .map(content -> GSON.fromJson(content, GsonInstallations.class));

    Organizations organizations = new Organizations();
    if (!gsonInstallations.isPresent()) {
      // No payload: report an empty result instead of failing.
      return organizations;
    }
    GsonInstallations installations = gsonInstallations.get();
    organizations.setTotal(installations.getTotalCount());
    if (installations.getInstallations() != null) {
      organizations.setOrganizations(installations.getInstallations().stream()
        .map(gsonInstallation -> new Organization(gsonInstallation.getAccount().getId(), gsonInstallation.getAccount().getLogin(),
          null, null, null, null, null, gsonInstallation.getTargetType()))
        .toList());
    }
    return organizations;
  } catch (IOException e) {
    throw new IllegalStateException(format("Failed to list all organizations accessible by user access token on %s", appUrl), e);
  }
}
// A page index of 0 must be rejected before any HTTP call is attempted.
@Test
public void listOrganizations_fail_if_pageIndex_out_of_bounds() {
  UserAccessToken accessToken = new UserAccessToken("token");

  assertThatThrownBy(() -> underTest.listOrganizations(appUrl, accessToken, 0, 100))
    .isInstanceOf(IllegalArgumentException.class)
    .hasMessage("'page' must be larger than 0.");
}
/**
 * Returns an iterator positioned at the current bookmark and promotes that
 * iterator to be the set's new bookmark, so subsequent calls resume from here.
 */
public Iterator<T> getBookmark() {
  LinkedSetIterator iterator = new LinkedSetIterator();
  // Resume from wherever the previous bookmark pointed.
  iterator.next = this.bookmark.next;
  this.bookmark = iterator;
  return iterator;
}
@Test(timeout=60000)
public void testBookmarkSetToHeadOnAddToEmpty() {
  LOG.info("Test bookmark is set after adding to previously empty set.");
  // An empty set yields an exhausted bookmark iterator.
  Iterator<Integer> bookmarkIt = set.getBookmark();
  assertFalse(bookmarkIt.hasNext());

  // After two inserts, a fresh bookmark walks both elements in insertion order.
  set.add(list.get(0));
  set.add(list.get(1));
  bookmarkIt = set.getBookmark();
  assertTrue(bookmarkIt.hasNext());
  assertEquals(bookmarkIt.next(), list.get(0));
  assertEquals(bookmarkIt.next(), list.get(1));
  assertFalse(bookmarkIt.hasNext());
}
/**
 * Sets both reader and writer indices at once, enforcing the invariant
 * {@code 0 <= readerIndex <= writerIndex <= capacity()}.
 *
 * @param readerIndex new reader index
 * @param writerIndex new writer index
 * @throws IndexOutOfBoundsException if the invariant would be violated
 */
@Override
public void setIndex(int readerIndex, int writerIndex) {
    if (readerIndex < 0 || readerIndex > writerIndex || writerIndex > capacity()) {
        // Include the offending values so failures are diagnosable (original threw bare).
        throw new IndexOutOfBoundsException(String.format(
                "readerIndex: %d, writerIndex: %d (expected: 0 <= readerIndex <= writerIndex <= capacity(%d))",
                readerIndex, writerIndex, capacity()));
    }
    this.readerIndex = readerIndex;
    this.writerIndex = writerIndex;
}
// A negative readerIndex must be rejected by setIndex.
@Test
void setIndexBoundaryCheck1() {
    Assertions.assertThrows(IndexOutOfBoundsException.class, () -> buffer.setIndex(-1, CAPACITY));
}
/**
 * Decides whether the given statement can run as a scalable push query (SPQ).
 * Requires the push-v2 feature to be enabled, a plain (non-join) push query on a
 * single CTAS/CSAS-fed source reading from "latest", with EMIT CHANGES and none
 * of the unsupported clauses (GROUP BY, WINDOW, HAVING, PARTITION BY) or
 * disallowed pseudo columns.
 */
@SuppressWarnings({"BooleanExpressionComplexity", "CyclomaticComplexity"})
public static boolean isScalablePushQuery(
    final Statement statement,
    final KsqlExecutionContext ksqlEngine,
    final KsqlConfig ksqlConfig,
    final Map<String, Object> overrides
) {
  // Feature flag: scalable push must be enabled via config or request override.
  if (!isPushV2Enabled(ksqlConfig, overrides)) {
    return false;
  }
  if (! (statement instanceof Query)) {
    return false;
  }
  final Query query = (Query) statement;
  final SourceFinder sourceFinder = new SourceFinder();
  sourceFinder.process(query.getFrom(), null);
  // It will be present if it's not a join, which we don't handle
  if (!sourceFinder.getSourceName().isPresent()) {
    return false;
  }
  // Find all of the writers to this particular source.
  final SourceName sourceName = sourceFinder.getSourceName().get();
  final Set<QueryId> upstreamQueries = ksqlEngine.getQueriesWithSink(sourceName);
  // See if the config or override have set the stream to be "latest"
  final boolean isLatest = isLatest(ksqlConfig, overrides);
  // Cannot be a pull query, i.e. must be a push
  return !query.isPullQuery()
      // Group by is not supported
      && !query.getGroupBy().isPresent()
      // Windowing is not supported
      && !query.getWindow().isPresent()
      // Having clause is not supported
      && !query.getHaving().isPresent()
      // Partition by is not supported
      && !query.getPartitionBy().isPresent()
      // There must be an EMIT CHANGES clause
      && (query.getRefinement().isPresent()
          && query.getRefinement().get().getOutputRefinement() == OutputRefinement.CHANGES)
      // Must be reading from "latest"
      && isLatest
      // We only handle a single sink source at the moment from a CTAS/CSAS
      && upstreamQueries.size() == 1
      // ROWPARTITION and ROWOFFSET are not currently supported in SPQs
      && !containsDisallowedColumns(query);
}
@Test public void shouldNotMakeQueryWithRowpartitionInWhereClauseScalablePush() { try(MockedStatic<ColumnExtractor> columnExtractor = mockStatic(ColumnExtractor.class)) { // Given: expectIsSPQ(ColumnName.of("foo"), columnExtractor); givenWhereClause(SystemColumns.ROWPARTITION_NAME, columnExtractor); // When: final boolean isScalablePush = ScalablePushUtil.isScalablePushQuery( query, ksqlEngine, ksqlConfig, overrides ); // Then: assert(!isScalablePush); } }
/**
 * Builds a group key from dataId and group, using an empty datumId.
 */
public static String getKey(String dataId, String group) {
    // Delegate to the three-argument overload with an empty datumId.
    final String emptyDatumId = "";
    return getKey(dataId, group, emptyDatumId);
}
// A blank group must be rejected with IllegalArgumentException.
@Test
void testGetKeyGroupParam() {
    assertThrows(IllegalArgumentException.class, () -> {
        GroupKey.getKey("a", "");
    });
}
/**
 * Entry point of the Avro "cat" tool: concatenates one or more input files into a
 * single output file (or stdout when the last argument is "-"), honoring
 * --offset (records to skip), --limit (max records written) and --samplerate
 * (probability of keeping each record). Returns 0 on success / help, 1 on
 * invalid option values.
 */
@Override
public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
  // Command-line options: --offset, --limit and --samplerate with safe defaults.
  OptionParser optParser = new OptionParser();
  OptionSpec<Long> offsetOpt = optParser.accepts("offset", "offset for reading input").withRequiredArg()
      .ofType(Long.class).defaultsTo(Long.valueOf(0));
  OptionSpec<Long> limitOpt = optParser.accepts("limit", "maximum number of records in the outputfile")
      .withRequiredArg().ofType(Long.class).defaultsTo(Long.MAX_VALUE);
  OptionSpec<Double> fracOpt = optParser.accepts("samplerate", "rate at which records will be collected")
      .withRequiredArg().ofType(Double.class).defaultsTo(Double.valueOf(1));

  OptionSet opts = optParser.parse(args.toArray(new String[0]));
  List<String> nargs = (List<String>) opts.nonOptionArguments();
  // At least one input plus the output target are required; otherwise print usage.
  if (nargs.size() < 2) {
    printHelp(out);
    return 0;
  }

  inFiles = Util.getFiles(nargs.subList(0, nargs.size() - 1));
  System.out.println("List of input files:");
  for (Path p : inFiles) {
    System.out.println(p);
  }
  currentInput = -1;
  nextInput();

  // "-" as the last argument means stdout; otherwise open the named file.
  OutputStream output = out;
  String lastArg = nargs.get(nargs.size() - 1);
  if (nargs.size() > 1 && !lastArg.equals("-")) {
    output = Util.createFromFS(lastArg);
  }
  writer = new DataFileWriter<>(new GenericDatumWriter<>());

  // Mirror the input file's codec and non-reserved metadata onto the output.
  String codecName = reader.getMetaString(DataFileConstants.CODEC);
  CodecFactory codec = (codecName == null) ? CodecFactory.fromString(DataFileConstants.NULL_CODEC)
      : CodecFactory.fromString(codecName);
  writer.setCodec(codec);
  for (String key : reader.getMetaKeys()) {
    if (!DataFileWriter.isReservedMeta(key)) {
      writer.setMeta(key, reader.getMeta(key));
    }
  }
  writer.create(schema, output);

  long offset = opts.valueOf(offsetOpt);
  long limit = opts.valueOf(limitOpt);
  double samplerate = opts.valueOf(fracOpt);
  sampleCounter = 1;
  totalCopied = 0;
  reuse = null;
  // NOTE(review): option validation happens after the output file has already been
  // created above; consider validating before writer.create to avoid leaving an
  // empty output file behind on bad options — confirm before changing behavior.
  if (limit < 0) {
    System.out.println("limit has to be non-negative");
    this.printHelp(out);
    return 1;
  }
  if (offset < 0) {
    System.out.println("offset has to be non-negative");
    this.printHelp(out);
    return 1;
  }
  if (samplerate < 0 || samplerate > 1) {
    System.out.println("samplerate has to be a number between 0 and 1");
    this.printHelp(out);
    return 1;
  }

  // Skip, then copy (with sampling) until the limit is reached.
  skip(offset);
  writeRecords(limit, samplerate);
  System.out.println(totalCopied + " records written.");

  writer.flush();
  writer.close();
  Util.close(out);
  return 0;
}
@Test
void helpfulMessageWhenNoArgsGiven() throws Exception {
    // Running cat with no arguments should print usage text and exit 0.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream(1024);
    int returnCode;
    try (PrintStream out = new PrintStream(buffer)) {
        returnCode = new CatTool().run(System.in, out, System.err, Collections.emptyList());
    }

    assertEquals(0, returnCode);
    // The usage dump is long; 200 characters is a robust lower bound.
    String help = buffer.toString().trim();
    assertTrue(help.length() > 200, "should have lots of help");
}
// Asynchronous removal is intentionally unsupported by this adapter.
@Override
@MethodNotAvailable
public CompletionStage<V> removeAsync(K key) {
    throw new MethodNotAvailableException();
}
// Calling the unavailable removeAsync must throw MethodNotAvailableException.
@Test(expected = MethodNotAvailableException.class)
public void testRemoveAsync() {
    adapter.removeAsync(23);
}
/**
 * Updates a reward activity after validating it exists, is not closed, and its
 * SPUs do not conflict with other activities.
 */
@Override
public void updateRewardActivity(RewardActivityUpdateReqVO updateReqVO) {
    // Validate that the activity exists
    RewardActivityDO dbRewardActivity = validateRewardActivityExists(updateReqVO.getId());
    if (dbRewardActivity.getStatus().equals(PromotionActivityStatusEnum.CLOSE.getStatus())) { // A closed activity cannot be modified
        throw exception(REWARD_ACTIVITY_UPDATE_FAIL_STATUS_CLOSED);
    }
    // Validate that the SPUs do not conflict with other activities
    validateRewardActivitySpuConflicts(updateReqVO.getId(), updateReqVO.getProductSpuIds());
    // Perform the update; status is recomputed from the activity's end time
    RewardActivityDO updateObj = RewardActivityConvert.INSTANCE.convert(updateReqVO)
        .setStatus(PromotionUtils.calculateActivityStatus(updateReqVO.getEndTime()));
    rewardActivityMapper.updateById(updateObj);
}
@Test public void testUpdateRewardActivity_notExists() { // 准备参数 RewardActivityUpdateReqVO reqVO = randomPojo(RewardActivityUpdateReqVO.class); // 调用, 并断言异常 assertServiceException(() -> rewardActivityService.updateRewardActivity(reqVO), REWARD_ACTIVITY_NOT_EXISTS); }
/**
 * Validates SASL extension names and values; a null extensions object is allowed.
 *
 * @throws SaslException if the reserved "auth" key is used as an extension name,
 *         or any name/value fails its syntax pattern
 */
public static void validateExtensions(SaslExtensions extensions) throws SaslException {
    if (extensions == null)
        return;

    // The reserved auth key may never appear as an extension name.
    if (extensions.map().containsKey(OAuthBearerClientInitialResponse.AUTH_KEY))
        throw new SaslException("Extension name " + OAuthBearerClientInitialResponse.AUTH_KEY + " is invalid");

    // Every remaining name/value pair must match the allowed grammars.
    for (Map.Entry<String, String> extension : extensions.map().entrySet()) {
        String name = extension.getKey();
        String value = extension.getValue();

        if (!EXTENSION_KEY_PATTERN.matcher(name).matches())
            throw new SaslException("Extension name " + name + " is invalid");
        if (!EXTENSION_VALUE_PATTERN.matcher(value).matches())
            throw new SaslException("Extension value (" + value + ") for extension " + name + " is invalid");
    }
}
// Null extensions are explicitly allowed and must not raise.
@Test
public void testValidateNullExtensions() throws Exception {
    OAuthBearerClientInitialResponse.validateExtensions(null);
}
/**
 * Applies this function after rejecting null input, so {@code doApply}
 * implementations can always assume a non-null argument.
 *
 * @throws IllegalArgumentException if {@code input} is null
 */
@Override
public final T apply(@Nullable F input) {
  if (input == null) {
    throw new IllegalArgumentException("Null inputs are not allowed in this function");
  }
  return doApply(input);
}
// apply(null) must be rejected with the documented message.
@Test
public void fail_if_null_input() {
  try {
    underTest.apply(null);
    fail();
  } catch (IllegalArgumentException e) {
    assertThat(e).hasMessage("Null inputs are not allowed in this function");
  }
}
/**
 * Returns the connection properties parsed from the JDBC URL.
 */
public Properties getProperties() {
    return properties;
}
@Test
public void testUriWithSessionProperties() throws SQLException {
    // Two session properties separated by ';', each in "name:value" form.
    String sessionProperties = "sessionProp1:sessionValue1;sessionProp2:sessionValue2";
    PrestoDriverUri parameters = createDriverUri("presto://localhost:8080?sessionProperties=" + sessionProperties);

    // The raw string must round-trip into the parsed driver properties.
    Properties properties = parameters.getProperties();
    assertEquals(properties.getProperty(SESSION_PROPERTIES.getKey()), sessionProperties);
}
/**
 * Upgrades a rule-node configuration between versions. Only version 0 needs
 * migration: the legacy boolean "addToMetadata" is converted to the
 * METADATA/DATA fetch-to property. Any other version is returned unchanged.
 */
@Override
public TbPair<Boolean, JsonNode> upgrade(int fromVersion, JsonNode oldConfiguration) throws TbNodeException {
    if (fromVersion == 0) {
        return upgradeRuleNodesWithOldPropertyToUseFetchTo(
                oldConfiguration, "addToMetadata", TbMsgSource.METADATA.name(), TbMsgSource.DATA.name());
    }
    return new TbPair<>(false, oldConfiguration);
}
@Test
public void givenOldConfig_whenUpgrade_thenShouldReturnTrueResultWithNewConfig() throws Exception {
    // A version-0 config still using the legacy "addToMetadata" flag...
    String oldConfig = "{\"detailsList\":[],\"addToMetadata\":false}";
    var node = new TbGetCustomerDetailsNode();

    TbPair<Boolean, JsonNode> upgrade = node.upgrade(0, JacksonUtil.toJsonNode(oldConfig));

    // ...must be reported as migrated and equal the node's current default config.
    var defaultConfig = new TbGetCustomerDetailsNodeConfiguration().defaultConfiguration();
    Assertions.assertTrue(upgrade.getFirst());
    Assertions.assertEquals(defaultConfig, JacksonUtil.treeToValue(upgrade.getSecond(), defaultConfig.getClass()));
}
/**
 * Clears the thread-local request data after the intercepted call completes,
 * preventing leakage across pooled threads.
 *
 * @param context execute context of the intercepted method
 * @return the unchanged context
 */
@Override
public ExecuteContext after(ExecuteContext context) {
    ThreadLocalUtils.removeRequestData();
    return context;
}
// after() must clear previously-stored thread-local request data.
@Test
public void testAfter() {
    ThreadLocalUtils.setRequestData(new RequestData(null, "", ""));
    interceptor.after(context);
    Assert.assertNull(ThreadLocalUtils.getRequestData());
}
/**
 * Schedules an asynchronous UDP send for the given ack entry; the callback is
 * notified of the outcome. Null entries are silently ignored.
 */
public void sendDataWithCallback(AckEntry ackEntry, PushCallBack pushCallBack) {
    if (ackEntry == null) {
        // Nothing to send.
        return;
    }
    // Hand the send off to the shared scheduler so the caller never blocks.
    GlobalExecutor.scheduleUdpSender(new UdpAsyncSender(ackEntry, pushCallBack), 0L, TimeUnit.MILLISECONDS);
}
// Sending through the connector must eventually put the entry's packet on the socket.
@Test
void testSendDataWithCallback() throws IOException, InterruptedException {
    when(udpSocket.isClosed()).thenReturn(false);
    AckEntry ackEntry = new AckEntry("A", new DatagramPacket(new byte[2], 2));
    udpConnector.sendDataWithCallback(ackEntry, new PushCallBack() {
        @Override
        public long getTimeout() {
            return 0;
        }

        @Override
        public void onSuccess() {
        }

        @Override
        public void onFail(Throwable e) {
            fail(e.getMessage());
        }
    });
    // NOTE(review): fixed sleep to let the async sender run — potentially flaky
    // on slow machines; a latch signaled from the callback would be more robust.
    Thread.sleep(100);
    Mockito.verify(udpSocket).send(ackEntry.getOrigin());
}
/**
 * Prepares the consumer for (re)joining the group: a best-effort asynchronous
 * auto-commit of offsets bounded by the rebalance timeout, followed by
 * partition-revocation callbacks according to the rebalance protocol.
 * Returns false while the auto-commit is still in flight (the caller retries);
 * returns true once revocation is complete.
 *
 * @throws KafkaException if a user rebalance callback throws
 */
@Override
protected boolean onJoinPrepare(Timer timer, int generation, String memberId) {
    log.debug("Executing onJoinPrepare with generation {} and memberId {}", generation, memberId);
    if (joinPrepareTimer == null) {
        // We should complete onJoinPrepare before rebalanceTimeoutMs,
        // and continue to join group to avoid member got kicked out from group
        joinPrepareTimer = time.timer(rebalanceConfig.rebalanceTimeoutMs);
    } else {
        joinPrepareTimer.update();
    }

    // async commit offsets prior to rebalance if auto-commit enabled
    // and there is no in-flight offset commit request
    if (autoCommitEnabled && autoCommitOffsetRequestFuture == null) {
        maybeMarkPartitionsPendingRevocation();
        autoCommitOffsetRequestFuture = maybeAutoCommitOffsetsAsync();
    }

    // wait for commit offset response before timer expired
    if (autoCommitOffsetRequestFuture != null) {
        // Poll against whichever deadline is closer: the caller's timer or the join-prepare timer.
        Timer pollTimer = timer.remainingMs() < joinPrepareTimer.remainingMs() ? timer : joinPrepareTimer;
        client.poll(autoCommitOffsetRequestFuture, pollTimer);
        joinPrepareTimer.update();

        // Keep retrying/waiting the offset commit when:
        // 1. offset commit haven't done (and joinPrepareTimer not expired)
        // 2. failed with retriable exception (and joinPrepareTimer not expired)
        // Otherwise, continue to revoke partitions, ex:
        // 1. if joinPrepareTimer has expired
        // 2. if offset commit failed with non-retriable exception
        // 3. if offset commit success
        boolean onJoinPrepareAsyncCommitCompleted = true;
        if (joinPrepareTimer.isExpired()) {
            log.error("Asynchronous auto-commit of offsets failed: joinPrepare timeout. Will continue to join group");
        } else if (!autoCommitOffsetRequestFuture.isDone()) {
            onJoinPrepareAsyncCommitCompleted = false;
        } else if (autoCommitOffsetRequestFuture.failed() && autoCommitOffsetRequestFuture.isRetriable()) {
            log.debug("Asynchronous auto-commit of offsets failed with retryable error: {}. Will retry it.",
                autoCommitOffsetRequestFuture.exception().getMessage());
            onJoinPrepareAsyncCommitCompleted = false;
        } else if (autoCommitOffsetRequestFuture.failed() && !autoCommitOffsetRequestFuture.isRetriable()) {
            log.error("Asynchronous auto-commit of offsets failed: {}. Will continue to join group.",
                autoCommitOffsetRequestFuture.exception().getMessage());
        }
        if (autoCommitOffsetRequestFuture.isDone()) {
            autoCommitOffsetRequestFuture = null;
        }
        if (!onJoinPrepareAsyncCommitCompleted) {
            // Back off briefly before the caller retries onJoinPrepare.
            pollTimer.sleep(Math.min(pollTimer.remainingMs(), rebalanceConfig.retryBackoffMs));
            timer.update();
            return false;
        }
    }

    // the generation / member-id can possibly be reset by the heartbeat thread
    // upon getting errors or heartbeat timeouts; in this case whatever is previously
    // owned partitions would be lost, we should trigger the callback and cleanup the assignment;
    // otherwise we can proceed normally and revoke the partitions depending on the protocol,
    // and in that case we should only change the assignment AFTER the revoke callback is triggered
    // so that users can still access the previously owned partitions to commit offsets etc.
    Exception exception = null;
    final SortedSet<TopicPartition> revokedPartitions = new TreeSet<>(COMPARATOR);
    if (generation == Generation.NO_GENERATION.generationId || memberId.equals(Generation.NO_GENERATION.memberId)) {
        revokedPartitions.addAll(subscriptions.assignedPartitions());

        if (!revokedPartitions.isEmpty()) {
            log.info("Giving away all assigned partitions as lost since generation/memberID has been reset,"
                + "indicating that consumer is in old state or no longer part of the group");
            exception = rebalanceListenerInvoker.invokePartitionsLost(revokedPartitions);

            subscriptions.assignFromSubscribed(Collections.emptySet());
        }
    } else {
        switch (protocol) {
            case EAGER:
                // revoke all partitions
                revokedPartitions.addAll(subscriptions.assignedPartitions());
                exception = rebalanceListenerInvoker.invokePartitionsRevoked(revokedPartitions);

                subscriptions.assignFromSubscribed(Collections.emptySet());

                break;

            case COOPERATIVE:
                // only revoke those partitions that are not in the subscription anymore.
                Set<TopicPartition> ownedPartitions = new HashSet<>(subscriptions.assignedPartitions());
                revokedPartitions.addAll(ownedPartitions.stream()
                    .filter(tp -> !subscriptions.subscription().contains(tp.topic()))
                    .collect(Collectors.toSet()));

                if (!revokedPartitions.isEmpty()) {
                    exception = rebalanceListenerInvoker.invokePartitionsRevoked(revokedPartitions);

                    ownedPartitions.removeAll(revokedPartitions);
                    subscriptions.assignFromSubscribed(ownedPartitions);
                }

                break;
        }
    }

    isLeader = false;
    subscriptions.resetGroupSubscription();
    joinPrepareTimer = null;
    autoCommitOffsetRequestFuture = null;
    timer.update();

    if (exception != null) {
        throw new KafkaException("User rebalance callback throws an error", exception);
    }
    return true;
}
@Test
public void testJoinPrepareWithDisableAutoCommit() {
    // With auto-commit disabled, onJoinPrepare must complete immediately and
    // leave the prepared offset-commit response untouched.
    try (ConsumerCoordinator coordinator = prepareCoordinatorForCloseTest(true, false, Optional.of("group-id"), true)) {
        coordinator.ensureActiveGroup();
        prepareOffsetCommitRequest(singletonMap(t1p, 100L), Errors.NONE);

        boolean res = coordinator.onJoinPrepare(time.timer(0L), 42, "consumer-42");

        assertTrue(res);
        assertTrue(client.hasPendingResponses());
        assertFalse(client.hasInFlightRequests());
        assertFalse(coordinator.coordinatorUnknown());
    }
}
/**
 * Multimap-aware equality assertion following {@code Multimap.equals}. On
 * mismatch, produces a more descriptive failure: cross-kind comparisons
 * (ListMultimap vs SetMultimap) get a dedicated message, same-kind comparisons
 * are delegated to {@code containsExactlyEntriesIn} (order-sensitive for
 * ListMultimaps), and anything else falls back to the generic subject.
 */
@Override
public final void isEqualTo(@Nullable Object other) {
  @SuppressWarnings("UndefinedEquals") // the contract of this method is to follow Multimap.equals
  boolean isEqual = Objects.equal(actual, other);
  if (isEqual) {
    return;
  }

  // Fail but with a more descriptive message:
  if ((actual instanceof ListMultimap && other instanceof SetMultimap)
      || (actual instanceof SetMultimap && other instanceof ListMultimap)) {
    // A ListMultimap and a SetMultimap can only be equal when both are empty.
    String actualType = (actual instanceof ListMultimap) ? "ListMultimap" : "SetMultimap";
    String otherType = (other instanceof ListMultimap) ? "ListMultimap" : "SetMultimap";
    failWithoutActual(
        fact("expected", other),
        fact("an instance of", otherType),
        fact("but was", actualCustomStringRepresentationForPackageMembersToCall()),
        fact("an instance of", actualType),
        simpleFact(
            lenientFormat(
                "a %s cannot equal a %s if either is non-empty", actualType, otherType)));
  } else if (actual instanceof ListMultimap) {
    // ListMultimap equality is order-sensitive per key.
    containsExactlyEntriesIn((Multimap<?, ?>) checkNotNull(other)).inOrder();
  } else if (actual instanceof SetMultimap) {
    containsExactlyEntriesIn((Multimap<?, ?>) checkNotNull(other));
  } else {
    super.isEqualTo(other);
  }
}
// Two ListMultimaps with the same entries but differently-ordered values per
// key are NOT equal; the failure must name the out-of-order keys.
@Test
public void listMultimapIsEqualTo_fails() {
  ImmutableListMultimap<String, String> multimapA =
      ImmutableListMultimap.<String, String>builder()
          .putAll("kurt", "kluever", "russell", "cobain")
          .build();
  ImmutableListMultimap<String, String> multimapB =
      ImmutableListMultimap.<String, String>builder()
          .putAll("kurt", "kluever", "cobain", "russell")
          .build();

  expectFailureWhenTestingThat(multimapA).isEqualTo(multimapB);

  assertFailureKeys(
      "contents match, but order was wrong",
      "keys with out-of-order values",
      "---",
      "expected",
      "but was");
  assertFailureValue("keys with out-of-order values", "[kurt]");
  assertFailureValue("expected", "{kurt=[kluever, cobain, russell]}");
  assertFailureValue("but was", "{kurt=[kluever, russell, cobain]}");
}
/**
 * Sends this request synchronously through the underlying web3j service.
 *
 * @return the deserialized response of this request's response type
 * @throws IOException if the service call fails
 */
public T send() throws IOException {
    return web3jService.send(this, responseType);
}
// txPoolStatus must serialize to the expected txpool_status JSON-RPC payload.
@Test
public void testTxPoolStatus() throws Exception {
    web3j.txPoolStatus().send();

    verifyResult("{\"jsonrpc\":\"2.0\",\"method\":\"txpool_status\",\"params\":[],\"id\":1}");
}
/**
 * Returns the decoder used for map values.
 */
@Override
public Decoder<Object> getMapValueDecoder() {
    return mapValueDecoder;
}
// The codec's map-value decoder must turn JSON-encoded bytes back into the string.
@Test
public void shouldDeserializeTheStringCorrectly() throws Exception {
    ByteBuf buf = ByteBufAllocator.DEFAULT.buffer();
    try {
        buf.writeBytes(new ObjectMapper().writeValueAsBytes("axk"));

        assertThat(stringCodec.getMapValueDecoder().decode(buf, new State()))
            .isInstanceOf(String.class)
            .isEqualTo("axk");
    } finally {
        // Release in finally so the buffer is not leaked when an assertion fails
        // (the original only released on the success path).
        buf.release();
    }
}
/**
 * Builds a BigQuery-backed SQL table from the table definition, deriving
 * conversion options from the table's properties.
 */
@Override
public BeamSqlTable buildBeamSqlTable(Table table) {
    return new BigQueryTable(table, getConversionOptions(table.getProperties()));
}
@Test
public void testBuildBeamSqlTable() throws Exception {
    Table table = fakeTable("hello");

    BeamSqlTable sqlTable = provider.buildBeamSqlTable(table);

    // The provider must hand back a BigQueryTable pointing at the fake location.
    assertNotNull(sqlTable);
    assertTrue(sqlTable instanceof BigQueryTable);
    assertEquals("project:dataset.table", ((BigQueryTable) sqlTable).bqLocation);
}
/**
 * Registers a per-topic translation function, wrapping it in a
 * SimpleRecordTranslator with the given output fields.
 *
 * @param topic topic the translator applies to
 * @param func function converting a consumer record into emitted values
 * @param fields output fields produced by the function
 * @return this translator, for chaining
 */
public ByTopicRecordTranslator<K, V> forTopic(String topic, Func<ConsumerRecord<K, V>, List<Object>> func, Fields fields) {
    return forTopic(topic, new SimpleRecordTranslator<>(func, fields));
}
@Test
public void testFieldCollision() {
    // Registering a topic translator whose output stream collides with the
    // default translator's must be rejected.
    ByTopicRecordTranslator<String, String> translator =
        new ByTopicRecordTranslator<>((r) -> new Values(r.key()), new Fields("key"));
    assertThrows(IllegalArgumentException.class,
        () -> translator.forTopic("foo", (r) -> new Values(r.value()), new Fields("value")));
}
/**
 * Reads the value at {@code columnIndex} from the underlying ResultSet using
 * the type-specific accessor matching {@code type}; any unrecognized type
 * falls back to {@code getObject}.
 *
 * @throws SQLException if the underlying ResultSet access fails
 */
@Override
public Object getValue(final int columnIndex, final Class<?> type) throws SQLException {
    if (boolean.class == type) {
        return resultSet.getBoolean(columnIndex);
    }
    if (byte.class == type) {
        return resultSet.getByte(columnIndex);
    }
    if (short.class == type) {
        return resultSet.getShort(columnIndex);
    }
    if (int.class == type) {
        return resultSet.getInt(columnIndex);
    }
    if (long.class == type) {
        return resultSet.getLong(columnIndex);
    }
    if (float.class == type) {
        return resultSet.getFloat(columnIndex);
    }
    if (double.class == type) {
        return resultSet.getDouble(columnIndex);
    }
    if (String.class == type) {
        return resultSet.getString(columnIndex);
    }
    if (BigDecimal.class == type) {
        return resultSet.getBigDecimal(columnIndex);
    }
    if (byte[].class == type) {
        return resultSet.getBytes(columnIndex);
    }
    if (Date.class == type) {
        return resultSet.getDate(columnIndex);
    }
    if (Time.class == type) {
        return resultSet.getTime(columnIndex);
    }
    if (Timestamp.class == type) {
        return resultSet.getTimestamp(columnIndex);
    }
    if (Blob.class == type) {
        return resultSet.getBlob(columnIndex);
    }
    if (Clob.class == type) {
        return resultSet.getClob(columnIndex);
    }
    if (Array.class == type) {
        return resultSet.getArray(columnIndex);
    }
    // Unknown type: let the driver decide the Java representation.
    return resultSet.getObject(columnIndex);
}
@Test
void assertGetValueByInt() throws SQLException {
    // A mocked ResultSet returns 1 for column 1; int.class must route to getInt.
    ResultSet mockedResultSet = mock(ResultSet.class);
    when(mockedResultSet.getInt(1)).thenReturn(1);

    assertThat(new JDBCStreamQueryResult(mockedResultSet).getValue(1, int.class), is(1));
}
/**
 * Clears both the thread-local request tag and request data once the
 * intercepted call finishes, preventing leakage across pooled threads.
 *
 * @param context execute context of the intercepted method
 * @return the unchanged context
 */
@Override
public ExecuteContext after(ExecuteContext context) {
    ThreadLocalUtils.removeRequestTag();
    ThreadLocalUtils.removeRequestData();
    return context;
}
// after() must remove the request data stored in the thread local.
@Test
public void testAfter() {
    ThreadLocalUtils.setRequestData(new RequestData(Collections.emptyMap(), "", ""));
    interceptor.after(context);
    Assert.assertNull(ThreadLocalUtils.getRequestData());
}
/**
 * Sends a heartbeat to the broker at {@code addr} and returns the broker's
 * version from a successful response.
 *
 * @throws MQBrokerException on any non-success response code
 */
public int sendHeartbeat(
    final String addr,
    final HeartbeatData heartbeatData,
    final long timeoutMillis
) throws RemotingException, MQBrokerException, InterruptedException {
    // Build the HEART_BEAT command carrying the client language and encoded payload.
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.HEART_BEAT, new HeartbeatRequestHeader());
    request.setLanguage(clientConfig.getLanguage());
    request.setBody(heartbeatData.encode());

    RemotingCommand response = this.remotingClient.invokeSync(addr, request, timeoutMillis);
    assert response != null;

    // Success carries the broker version; anything else is surfaced as a broker error.
    if (response.getCode() == ResponseCode.SUCCESS) {
        return response.getVersion();
    }
    throw new MQBrokerException(response.getCode(), response.getRemark(), addr);
}
// A mocked SUCCESS response carries version 1, which sendHeartbeat must return.
@Test
public void assertSendHeartbeat() throws MQBrokerException, RemotingException, InterruptedException {
    mockInvokeSync();
    HeartbeatData heartbeatData = new HeartbeatData();
    assertEquals(1, mqClientAPI.sendHeartbeat(defaultBrokerAddr, heartbeatData, defaultTimeout));
}
/**
 * Persists a release message for the release topic, recording the outcome in a
 * tracer transaction. Messages for any other channel are logged and dropped.
 */
@Override
@Transactional
public void sendMessage(String message, String channel) {
    logger.info("Sending message {} to channel {}", message, channel);
    // Only the release topic is handled by this sender.
    if (!Objects.equals(channel, Topics.APOLLO_RELEASE_TOPIC)) {
        logger.warn("Channel {} not supported by DatabaseMessageSender!", channel);
        return;
    }

    Tracer.logEvent("Apollo.AdminService.ReleaseMessage", message);
    Transaction transaction = Tracer.newTransaction("Apollo.AdminService", "sendMessage");
    try {
        // Persist the message; its id is queued for later cleanup (best effort).
        ReleaseMessage savedMessage = releaseMessageRepository.save(new ReleaseMessage(message));
        if (!toClean.offer(savedMessage.getId())) {
            logger.warn("Queue is full, Failed to add message {} to clean queue", savedMessage.getId());
        }
        transaction.setStatus(Transaction.SUCCESS);
    } catch (Throwable ex) {
        logger.error("Sending message to database failed", ex);
        transaction.setStatus(ex);
        throw ex;
    } finally {
        transaction.complete();
    }
}
@Test
public void testSendMessage() throws Exception {
    String someMessage = "some-message";
    long someId = 1;
    ReleaseMessage someReleaseMessage = mock(ReleaseMessage.class);
    when(someReleaseMessage.getId()).thenReturn(someId);
    when(releaseMessageRepository.save(any(ReleaseMessage.class))).thenReturn(someReleaseMessage);

    messageSender.sendMessage(someMessage, Topics.APOLLO_RELEASE_TOPIC);

    // Exactly one save must happen, and its payload must be the message text.
    ArgumentCaptor<ReleaseMessage> captor = ArgumentCaptor.forClass(ReleaseMessage.class);
    verify(releaseMessageRepository, times(1)).save(captor.capture());
    assertEquals(someMessage, captor.getValue().getMessage());
}
/**
 * Recursively merges {@code paramsToMerge} into {@code params} in place.
 * Literal MAP params are merged key-by-key (recursively), literal STRING_MAP
 * params merge entries with incoming values overwriting base values, and all
 * other params are replaced wholesale by the incoming definition. A null
 * {@code paramsToMerge} is a no-op.
 */
public static void mergeParams(
    Map<String, ParamDefinition> params,
    Map<String, ParamDefinition> paramsToMerge,
    MergeContext context) {
  if (paramsToMerge == null) {
    return;
  }
  // Visit the union of keys; only names present in paramsToMerge are merged.
  Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream())
      .forEach(
          name -> {
            ParamDefinition paramToMerge = paramsToMerge.get(name);
            if (paramToMerge == null) {
              return;
            }
            if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) {
              // Literal MAP: merge nested definitions recursively, propagating
              // the parent's mode into the child merge context.
              Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name);
              Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name);
              mergeParams(
                  baseMap,
                  toMergeMap,
                  MergeContext.copyWithParentMode(
                      context, params.getOrDefault(name, paramToMerge).getMode()));
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, baseMap));
            } else if (paramToMerge.getType() == ParamType.STRING_MAP && paramToMerge.isLiteral()) {
              // Literal STRING_MAP: incoming entries overwrite base entries.
              Map<String, String> baseMap = stringMapValueOrEmpty(params, name);
              Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name);
              baseMap.putAll(toMergeMap);
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, baseMap));
            } else {
              // All other types: the incoming value replaces the base value.
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, paramToMerge.getValue()));
            }
          });
}
// Params with CONSTANT or IMMUTABLE mode must reject any attempt to overwrite
// their value during a merge, even when the incoming definition has no source.
@Test
public void testMergeDisallowUpstreamChangesNoSource() throws JsonProcessingException {
  for (ParamMode mode : Arrays.asList(ParamMode.CONSTANT, ParamMode.IMMUTABLE)) {
    Map<String, ParamDefinition> allParams =
        parseParamDefMap(
            String.format(
                "{'tomerge': {'type': 'STRING','value': 'hello', 'mode': '%s'}}", mode.toString()));
    Map<String, ParamDefinition> paramsToMergeNoSource =
        parseParamDefMap("{'tomerge': {'type': 'STRING', 'value': 'goodbye'}}");
    AssertHelper.assertThrows(
        String.format("Should not allow modifying reserved modes, mode [%s]", mode),
        MaestroValidationException.class,
        String.format("Cannot modify param with mode [%s] for parameter [tomerge]", mode),
        () ->
            ParamsMergeHelper.mergeParams(allParams, paramsToMergeNoSource, upstreamMergeContext));
  }
}
/**
 * Maps each given user UUID to whether the user is managed by an external
 * identity provider. Delegates to the managed-instance service when one is
 * registered; otherwise every user is reported as non-managed.
 *
 * @param dbSession open DB session used by the delegate
 * @param userUuids UUIDs of the users to classify
 * @return map of user UUID to managed flag
 */
@Override
public Map<String, Boolean> getUserUuidToManaged(DbSession dbSession, Set<String> userUuids) {
  return findManagedInstanceService()
    .map(managedInstanceService -> managedInstanceService.getUserUuidToManaged(dbSession, userUuids))
    // orElseGet: don't eagerly build the fallback map when a delegate exists
    // (Optional.orElse evaluates its argument unconditionally).
    .orElseGet(() -> returnNonManagedForAll(userUuids));
}
@Test
public void getUserUuidToManaged_whenNoDelegates_setAllUsersAsNonManaged() {
    // With no delegate services registered, every uuid maps to "not managed".
    Map<String, Boolean> userUuidToManaged = NO_MANAGED_SERVICES.getUserUuidToManaged(dbSession, Set.of("a", "b"));

    assertThat(userUuidToManaged).containsExactlyInAnyOrderEntriesOf(Map.of("a", false, "b", false));
}
/**
 * Creates an authentication failure exception with no detail message.
 */
public AuthenticationFailedException() {
    super();
}
// Every constructor overload must yield exactly AuthenticationFailedException.
@Test
public void testAuthenticationFailedException() {
    Assertions.assertThrowsExactly(AuthenticationFailedException.class, () -> {
        throw new AuthenticationFailedException();
    });
    Assertions.assertThrowsExactly(AuthenticationFailedException.class, () -> {
        throw new AuthenticationFailedException("error");
    });
    Assertions.assertThrowsExactly(AuthenticationFailedException.class, () -> {
        throw new AuthenticationFailedException(new Throwable("error"));
    });
    Assertions.assertThrowsExactly(AuthenticationFailedException.class, () -> {
        throw new AuthenticationFailedException("error", new Throwable("error"));
    });
}
public ValidationResult validateMessagesAndAssignOffsets(PrimitiveRef.LongRef offsetCounter, MetricsRecorder metricsRecorder, BufferSupplier bufferSupplier) { if (sourceCompressionType == CompressionType.NONE && targetCompression.type() == CompressionType.NONE) { // check the magic value if (!records.hasMatchingMagic(toMagic)) return convertAndAssignOffsetsNonCompressed(offsetCounter, metricsRecorder); else // Do in-place validation, offset assignment and maybe set timestamp return assignOffsetsNonCompressed(offsetCounter, metricsRecorder); } else return validateMessagesAndAssignOffsetsCompressed(offsetCounter, metricsRecorder, bufferSupplier); }
// Records written with an idempotent producer require message format v2;
// down-converting them to magic v1 must be rejected.
@Test
public void testDownConversionOfIdempotentRecordsNotPermitted() {
    long offset = 1234567;
    long producerId = 1344L;
    short producerEpoch = 16;
    int sequence = 0;
    MemoryRecords records = MemoryRecords.withIdempotentRecords(Compression.NONE, producerId, producerEpoch, sequence,
        new SimpleRecord("hello".getBytes()), new SimpleRecord("there".getBytes()), new SimpleRecord("beautiful".getBytes()));
    assertThrows(UnsupportedForMessageFormatException.class, () -> new LogValidator(
        records,
        new TopicPartition("topic", 0),
        time,
        CompressionType.GZIP,
        Compression.gzip().build(),
        false,
        RecordBatch.MAGIC_VALUE_V1,
        TimestampType.CREATE_TIME,
        5000L,
        5000L,
        RecordBatch.NO_PARTITION_LEADER_EPOCH,
        AppendOrigin.CLIENT,
        MetadataVersion.latestTesting()
    ).validateMessagesAndAssignOffsets(
        PrimitiveRef.ofLong(offset),
        metricsRecorder,
        RequestLocal.withThreadConfinedCaching().bufferSupplier()
    ));
}
/**
 * Returns the maximum length in bytes of a single message for this publication.
 */
public int maxMessageLength() {
    return maxMessageLength;
}
// The publication's max message length must match the frame-descriptor formula.
@Test
void shouldReportMaxMessageLength() {
    assertEquals(FrameDescriptor.computeMaxMessageLength(TERM_LENGTH), publication.maxMessageLength());
}
/**
 * Returns the submatrix spanning rows i..k and columns j..l (both inclusive).
 *
 * @throws IllegalArgumentException if the range is out of bounds or empty
 */
public Matrix submatrix(int i, int j, int k, int l) {
    // Bounds: 0 <= i <= k < m and 0 <= j <= l < n (inclusive corner indices).
    boolean rowsOk = i >= 0 && i < m && k >= i && k < m;
    boolean colsOk = j >= 0 && j < n && l >= j && l < n;
    if (!rowsOk || !colsOk) {
        throw new IllegalArgumentException(String.format("Invalid submatrix range (%d:%d, %d:%d) of %d x %d", i, k, j, l, m, n));
    }

    Matrix sub = new Matrix(k - i + 1, l - j + 1);
    // Column-major copy, matching the original traversal order.
    for (int col = j; col <= l; col++) {
        for (int row = i; row <= k; row++) {
            sub.set(row - i, col - j, get(row, col));
        }
    }
    return sub;
}
@Test
public void testSubmatrix() {
    Matrix sub = matrix.submatrix(0, 1, 2, 2);
    assertEquals(3, sub.nrow());
    assertEquals(2, sub.ncol());
    assertEquals(0.4, sub.get(0,0), 1E-7);
    assertEquals(0.8, sub.get(2,1), 1E-7);

    // Taking a submatrix of a submatrix must also work.
    Matrix sub2 = sub.submatrix(0, 0, 1, 1);
    assertEquals(2, sub2.nrow());
    assertEquals(2, sub2.ncol());
    // BUG FIX: the original asserted against `sub` here, so sub2's contents
    // were never checked. sub2 is the top-left 2x2 of sub, so the expected
    // values at (0,0) and (1,1) are unchanged.
    assertEquals(0.4, sub2.get(0,0), 1E-7);
    assertEquals(0.3, sub2.get(1,1), 1E-7);
}
/**
 * Scans an edit-log stream to determine how much of it is scannable, without
 * fully decoding each op. Skips past corrupt ops via resync when possible.
 * Returns the position after the last scannable op and the highest txid seen
 * (INVALID_TXID when no op could be read); the corrupt-header flag is always
 * false from this path.
 */
static EditLogValidation scanEditLog(EditLogInputStream in, long maxTxIdToScan) {
  long lastPos;
  long lastTxId = HdfsServerConstants.INVALID_TXID;
  long numValid = 0;
  while (true) {
    long txid;
    lastPos = in.getPosition();
    try {
      if ((txid = in.scanNextOp()) == HdfsServerConstants.INVALID_TXID) {
        break;
      }
    } catch (Throwable t) {
      // A corrupt op: log, resync past it, and keep scanning — unless resync
      // made no forward progress, in which case give up on the rest.
      FSImage.LOG.warn("Caught exception after scanning through " + numValid + " ops from " + in
          + " while determining its valid length. Position was " + lastPos, t);
      in.resync();
      FSImage.LOG.warn("After resync, position is " + in.getPosition());
      if (in.getPosition() <= lastPos) {
        FSImage.LOG.warn("After resync, the position, {} is not greater " +
            "than the previous position {}. Skipping remainder of this log.",
            in.getPosition(), lastPos);
        break;
      }
      continue;
    }
    // Track the highest txid encountered so far.
    if (lastTxId == HdfsServerConstants.INVALID_TXID || txid > lastTxId) {
      lastTxId = txid;
    }
    // Stop early once the caller's txid bound has been reached.
    if (lastTxId >= maxTxIdToScan) {
      break;
    }
    numValid++;
  }
  return new EditLogValidation(lastPos, lastTxId, false);
}
// Scanning a log truncated down to just the header must report no corrupt header and INVALID_TXID as end txid.
@Test public void testValidateEmptyEditLog() throws IOException { File testDir = new File(TEST_DIR, "testValidateEmptyEditLog"); SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap(); File logFile = prepareUnfinalizedTestEditLog(testDir, 0, offsetToTxId); // Truncate the file so that there is nothing except the header and // layout flags section. truncateFile(logFile, 8); EditLogValidation validation = EditLogFileInputStream.scanEditLog(logFile, Long.MAX_VALUE, true); assertTrue(!validation.hasCorruptHeader()); assertEquals(HdfsServerConstants.INVALID_TXID, validation.getEndTxId()); }
/**
 * Writes a string value, or a JSON null when the argument is null.
 *
 * @param value the literal string value, or null to encode a null literal
 * @return this writer, for chaining
 */
@CanIgnoreReturnValue
public JsonWriter value(String value) throws IOException {
    if (value != null) {
        writeDeferredName();
        beforeValue();
        string(value);
        return this;
    }
    // Null strings are encoded as the JSON null literal.
    return nullValue();
}
// Writing a LazilyParsedNumber whose text is not a valid JSON number must fail with a descriptive IllegalArgumentException.
@Test public void testMalformedNumbers() throws IOException { String[] malformedNumbers = { "some text", "", ".", "00", "01", "-00", "-", "--1", "+1", // plus sign is not allowed for integer part "+", "1,0", "1,000", "0.", // decimal digit is required ".1", // integer part is required "e1", ".e1", ".1e1", "1e-", "1e+", "1e--1", "1e+-1", "1e1e1", "1+e1", "1e1.0", }; for (String malformedNumber : malformedNumbers) { JsonWriter jsonWriter = new JsonWriter(new StringWriter()); var e = assertThrows( IllegalArgumentException.class, () -> jsonWriter.value(new LazilyParsedNumber(malformedNumber))); assertThat(e) .hasMessageThat() .isEqualTo( "String created by class com.google.gson.internal.LazilyParsedNumber is not a valid" + " JSON number: " + malformedNumber); } }
@Override public boolean prepareFormat() throws Exception { boolean ledgerRootExists = store.exists(ledgersRootPath).get(BLOCKING_CALL_TIMEOUT, MILLISECONDS); boolean availableNodeExists = store.exists(bookieRegistrationPath).get(BLOCKING_CALL_TIMEOUT, MILLISECONDS); // Create ledgers root node if not exists if (!ledgerRootExists) { store.put(ledgersRootPath, new byte[0], Optional.empty()) .get(BLOCKING_CALL_TIMEOUT, MILLISECONDS); } // create available bookies node if not exists if (!availableNodeExists) { store.put(bookieRegistrationPath, new byte[0], Optional.empty()) .get(BLOCKING_CALL_TIMEOUT, MILLISECONDS); } // create readonly bookies node if not exists if (!store.exists(bookieReadonlyRegistrationPath).get(BLOCKING_CALL_TIMEOUT, MILLISECONDS)) { store.put(bookieReadonlyRegistrationPath, new byte[0], Optional.empty()) .get(BLOCKING_CALL_TIMEOUT, MILLISECONDS); } return ledgerRootExists; }
// On a fresh cluster the ledgers root does not exist yet, so prepareFormat() must return false.
@Test(dataProvider = "impl") public void testPrepareFormatNonExistingCluster(String provider, Supplier<String> urlSupplier) throws Exception { methodSetup(urlSupplier); assertFalse(registrationManager.prepareFormat()); }
/** Convenience overload delegating with an unbounded missing-resource default of Long.MAX_VALUE. */
public static ConfigurableResource parseResourceConfigValue(String value) throws AllocationConfigurationException { return parseResourceConfigValue(value, Long.MAX_VALUE); }
// Negative percentage values for memory and cpu must be rejected (expected failure set up by the helper).
@Test public void testMemoryAndCpuPercentageNegativeValue() throws Exception { expectNegativePercentageOldStyle(); parseResourceConfigValue("-20% memory, -10% cpu"); }
/**
 * Handles a PostgreSQL Bind message: looks up the prepared statement, reorders the supplied
 * parameters, creates and binds a portal under the requested name, and replies with a single
 * BindComplete packet.
 */
@Override public Collection<DatabasePacket> execute() throws SQLException { PostgreSQLServerPreparedStatement preparedStatement = connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(packet.getStatementId()); ProxyDatabaseConnectionManager databaseConnectionManager = connectionSession.getDatabaseConnectionManager(); List<Object> parameters = preparedStatement.adjustParametersOrder(packet.readParameters(preparedStatement.getParameterTypes())); Portal portal = new Portal(packet.getPortal(), preparedStatement, parameters, packet.readResultFormats(), databaseConnectionManager); portalContext.add(portal); portal.bind(); return Collections.singleton(PostgreSQLBindCompletePacket.getInstance()); }
// Binding parameters to a registered prepared statement must produce exactly one BindComplete packet.
@Test void assertExecuteBindParameters() throws SQLException { String databaseName = "postgres"; ShardingSphereDatabase database = mock(ShardingSphereDatabase.class); when(database.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")); when(connectionSession.getServerPreparedStatementRegistry()).thenReturn(new ServerPreparedStatementRegistry()); ProxyDatabaseConnectionManager databaseConnectionManager = mock(ProxyDatabaseConnectionManager.class); when(databaseConnectionManager.getConnectionSession()).thenReturn(connectionSession); when(connectionSession.getDatabaseConnectionManager()).thenReturn(databaseConnectionManager); when(connectionSession.getCurrentDatabaseName()).thenReturn(databaseName); ConnectionContext connectionContext = mockConnectionContext(); when(connectionSession.getConnectionContext()).thenReturn(connectionContext); String statementId = "S_1"; List<Object> parameters = Arrays.asList(1, "updated_name"); PostgreSQLServerPreparedStatement serverPreparedStatement = new PostgreSQLServerPreparedStatement("update test set name = $2 where id = $1", new UnknownSQLStatementContext(new PostgreSQLEmptyStatement()), new HintValueContext(), Arrays.asList(PostgreSQLColumnType.VARCHAR, PostgreSQLColumnType.INT4), Arrays.asList(1, 0)); connectionSession.getServerPreparedStatementRegistry().addPreparedStatement(statementId, serverPreparedStatement); when(bindPacket.getStatementId()).thenReturn(statementId); when(bindPacket.getPortal()).thenReturn("C_1"); when(bindPacket.readParameters(anyList())).thenReturn(parameters); when(bindPacket.readResultFormats()).thenReturn(Collections.emptyList()); ContextManager contextManager = mock(ContextManager.class, Answers.RETURNS_DEEP_STUBS); when(contextManager.getDatabase(databaseName)).thenReturn(database); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); Collection<DatabasePacket> actual = executor.execute(); assertThat(actual.size(), is(1)); 
assertThat(actual.iterator().next(), is(PostgreSQLBindCompletePacket.getInstance())); }
/**
 * Null-safe check for {@code Boolean.TRUE}: returns true only when the boxed value
 * is non-null and true.
 */
public static boolean isTrue(Boolean value) {
    return Boolean.TRUE.equals(value);
}
// Covers all three ternary states: false, true and null (null is not true).
@SuppressWarnings({"ConstantConditions", "SimplifiableJUnitAssertion"}) @Test public void testIsTrue() { assertEquals(false, TernaryLogic.isTrue(false)); assertEquals(true, TernaryLogic.isTrue(true)); assertEquals(false, TernaryLogic.isTrue(null)); }
/**
 * Computes preferred executor locations for each task group, sharing one partition-hash
 * cache across all groups. Returns one location array per task group, index-aligned.
 */
public static String[][] assignExecutors( List<? extends ScanTaskGroup<?>> taskGroups, List<String> executorLocations) { Map<Integer, JavaHash<StructLike>> partitionHashes = Maps.newHashMap(); String[][] locations = new String[taskGroups.size()][]; for (int index = 0; index < taskGroups.size(); index++) { locations[index] = assign(taskGroups.get(index), executorLocations, partitionHashes); } return locations; }
// A group made only of unknown task types must yield an entry with no assigned executors.
@Test public void testUnknownTasks() { List<ScanTask> tasks = ImmutableList.of(new UnknownScanTask(), new UnknownScanTask()); ScanTaskGroup<ScanTask> taskGroup = new BaseScanTaskGroup<>(tasks); List<ScanTaskGroup<ScanTask>> taskGroups = ImmutableList.of(taskGroup); String[][] locations = SparkPlanningUtil.assignExecutors(taskGroups, EXECUTOR_LOCATIONS); // should not assign executors for unknown tasks assertThat(locations.length).isEqualTo(1); assertThat(locations[0]).isEmpty(); }
/** Demo entry point: exercises a mutable Stew and an ImmutableStew guarded by the Private Class Data pattern. */
public static void main(String[] args) { // stew is mutable var stew = new Stew(1, 2, 3, 4); stew.mix(); stew.taste(); stew.mix(); // immutable stew protected with Private Class Data pattern var immutableStew = new ImmutableStew(2, 4, 3, 6); immutableStew.mix(); }
// Smoke test: the demo main must run to completion without throwing.
@Test void shouldExecuteApplicationWithoutException() { assertDoesNotThrow(() -> App.main(new String[]{})); }
/**
 * Configures which CSV fields to parse via a boolean mask. Trailing {@code false}
 * entries are trimmed away since the mask only needs to reach the last included field.
 *
 * @param fields per-field inclusion flags; must be non-null, non-empty, and include at least one field
 * @return this reader, for chaining
 * @throws IllegalArgumentException if the mask is null/empty or excludes every field
 */
public CsvReader includeFields(boolean... fields) {
    if (fields == null || fields.length == 0) {
        throw new IllegalArgumentException(
                "The set of included fields must not be null or empty.");
    }

    // Walk backwards to find the highest index that is marked as included.
    int lastIncluded = fields.length - 1;
    while (lastIncluded >= 0 && !fields[lastIncluded]) {
        lastIncluded--;
    }

    if (lastIncluded < 0) {
        throw new IllegalArgumentException(
                "The description of fields to parse excluded all fields. At least one fields must be included.");
    }

    this.includedMask =
            (lastIncluded == fields.length - 1)
                    ? fields
                    : Arrays.copyOfRange(fields, 0, lastIncluded + 1);
    return this;
}
// All mask notations (boolean varargs, t/f strings in either case, 0/1 strings, mixed, and bit mask) must yield the same trimmed mask.
@Test void testIncludeFieldsSparse() { CsvReader reader = getCsvReader(); reader.includeFields(false, true, true, false, false, true, false, false); assertThat(reader.includedMask).containsExactly(false, true, true, false, false, true); reader = getCsvReader(); reader.includeFields("fttfftff"); assertThat(reader.includedMask).containsExactly(false, true, true, false, false, true); reader = getCsvReader(); reader.includeFields("FTTFFTFF"); assertThat(reader.includedMask).containsExactly(false, true, true, false, false, true); reader = getCsvReader(); reader.includeFields("01100100"); assertThat(reader.includedMask).containsExactly(false, true, true, false, false, true); reader = getCsvReader(); reader.includeFields("0t1f0TFF"); assertThat(reader.includedMask).containsExactly(false, true, true, false, false, true); reader = getCsvReader(); reader.includeFields(0x26L); assertThat(reader.includedMask).containsExactly(false, true, true, false, false, true); }
/**
 * Loads the InjectorSource class named by the deprecated property, or returns null when the
 * property is absent. Logs a deprecation warning before loading; any loading failure is
 * wrapped in InjectorSourceInstantiationFailed with the offending class name.
 */
@Deprecated static Class<?> loadInjectorSourceFromProperties(Map<String, String> properties) { String injectorSourceClassName = properties.get(GUICE_INJECTOR_SOURCE_KEY); if (injectorSourceClassName == null) { return null; } log.warn( () -> format("The '%s' property has been deprecated." + "Add a class implementing '%s' on the glue path instead", GUICE_INJECTOR_SOURCE_KEY, InjectorSource.class.getName())); try { return Class.forName(injectorSourceClassName, true, Thread.currentThread().getContextClassLoader()); } catch (Exception e) { String message = format("Instantiation of '%s' failed. Check the caused by exception and ensure your " + "InjectorSource implementation is accessible and has a public zero args constructor.", injectorSourceClassName); throw new InjectorSourceInstantiationFailed(message, e); } }
// A fully-qualified class name in the property must resolve to that exact class.
@Test void instantiatesInjectorSourceByFullyQualifiedName() { Map<String, String> properties = new HashMap<>(); properties.put(GUICE_INJECTOR_SOURCE_KEY, CustomInjectorSource.class.getName()); Class<?> aClass = InjectorSourceFactory.loadInjectorSourceFromProperties(properties); assertThat(aClass, is(CustomInjectorSource.class)); }
/** Returns the cookie's max-age value as stored; negative values are passed through unchanged. */
public long getMaxAge() { return maxAge; }
// A negative max-age supplied to the builder must be preserved verbatim by the getter.
@Test public void can_use_negative_values_as_max_age() { // When Cookie cookie = new Cookie.Builder("hello", "world").setMaxAge(-3600L).build(); // Then assertThat(cookie.getMaxAge()).isEqualTo(-3600L); }
/**
 * Parses a string into a schema-and-value pair. Null maps to the shared null
 * constant, the empty string maps to a plain STRING value, and anything else is
 * handed to the value parser.
 */
public static SchemaAndValue parseString(String value) {
    if (value == null) {
        return NULL_SCHEMA_AND_VALUE;
    }
    if (value.isEmpty()) {
        return new SchemaAndValue(Schema.STRING_SCHEMA, value);
    }
    // Non-trivial input: run the full value parser (non-embedded mode).
    return new ValueParser(new Parser(value)).parse(false);
}
// Parsing null must return the null schema-and-value pair (both schema and value are null).
@Test public void shouldParseNullString() { SchemaAndValue schemaAndValue = Values.parseString(null); assertNull(schemaAndValue.schema()); assertNull(schemaAndValue.value()); }
/** Delegates destruction to the underlying cache. */
@Override public void destroy() { cache.destroy(); }
// Destroying the adapter must propagate to the wrapped cache.
@Test public void testDestroy() { adapter.destroy(); assertTrue(cache.isDestroyed()); }
/**
 * Resolves a username hash to the owning account's ACI service identifier.
 * Rejects hashes with the wrong length (INVALID_ARGUMENT), rate-limits by remote
 * address, and responds NOT_FOUND when no account owns the hash.
 */
@Override public Mono<LookupUsernameHashResponse> lookupUsernameHash(final LookupUsernameHashRequest request) { if (request.getUsernameHash().size() != AccountController.USERNAME_HASH_LENGTH) { throw Status.INVALID_ARGUMENT .withDescription(String.format("Illegal username hash length; expected %d bytes, but got %d bytes", AccountController.USERNAME_HASH_LENGTH, request.getUsernameHash().size())) .asRuntimeException(); } return RateLimitUtil.rateLimitByRemoteAddress(rateLimiters.getUsernameLookupLimiter()) .then(Mono.fromFuture(() -> accountsManager.getByUsernameHash(request.getUsernameHash().toByteArray()))) .map(maybeAccount -> maybeAccount.orElseThrow(Status.NOT_FOUND::asRuntimeException)) .map(account -> LookupUsernameHashResponse.newBuilder() .setServiceIdentifier(ServiceIdentifierUtil.toGrpcServiceIdentifier(new AciServiceIdentifier(account.getUuid()))) .build()); }
// A known hash resolves to the account's service identifier; an unknown hash yields NOT_FOUND.
@Test void lookupUsernameHash() { final UUID accountIdentifier = UUID.randomUUID(); final byte[] usernameHash = TestRandomUtil.nextBytes(AccountController.USERNAME_HASH_LENGTH); final Account account = mock(Account.class); when(account.getUuid()).thenReturn(accountIdentifier); when(accountsManager.getByUsernameHash(usernameHash)) .thenReturn(CompletableFuture.completedFuture(Optional.of(account))); assertEquals(ServiceIdentifierUtil.toGrpcServiceIdentifier(new AciServiceIdentifier(accountIdentifier)), unauthenticatedServiceStub().lookupUsernameHash(LookupUsernameHashRequest.newBuilder() .setUsernameHash(ByteString.copyFrom(usernameHash)) .build()) .getServiceIdentifier()); //noinspection ResultOfMethodCallIgnored GrpcTestUtils.assertStatusException(Status.NOT_FOUND, () -> unauthenticatedServiceStub().lookupUsernameHash(LookupUsernameHashRequest.newBuilder() .setUsernameHash(ByteString.copyFrom(new byte[AccountController.USERNAME_HASH_LENGTH])) .build())); }
/**
 * Builds a WriteRecords transform with library defaults: default producer properties,
 * exactly-once disabled, no sharding, the standard consumer factory, a throwing
 * bad-record router and the default error handler.
 */
public static <K, V> WriteRecords<K, V> writeRecords() { return new AutoValue_KafkaIO_WriteRecords.Builder<K, V>() .setProducerConfig(WriteRecords.DEFAULT_PRODUCER_PROPERTIES) .setEOS(false) .setNumShards(0) .setConsumerFactoryFn(KafkaIOUtils.KAFKA_CONSUMER_FACTORY_FN) .setBadRecordRouter(BadRecordRouter.THROWING_ROUTER) .setBadRecordErrorHandler(new DefaultErrorHandler<>()) .build(); }
// Records routed through a custom partition must reach the mock producer with the expected topic, partition, key and value.
@Test public void testSinkProducerRecordsWithCustomPartition() throws Exception { int numElements = 1000; try (MockProducerWrapper producerWrapper = new MockProducerWrapper(new LongSerializer())) { ProducerSendCompletionThread completionThread = new ProducerSendCompletionThread(producerWrapper.mockProducer).start(); final String defaultTopic = "test"; final Integer partition = 1; p.apply(mkKafkaReadTransform(numElements, new ValueAsTimestampFn()).withoutMetadata()) .apply(ParDo.of(new KV2ProducerRecord(defaultTopic, partition))) .setCoder(ProducerRecordCoder.of(VarIntCoder.of(), VarLongCoder.of())) .apply( KafkaIO.<Integer, Long>writeRecords() .withBootstrapServers("none") .withKeySerializer(IntegerSerializer.class) .withValueSerializer(LongSerializer.class) .withProducerFactoryFn(new ProducerFactoryFn(producerWrapper.producerKey))); p.run(); completionThread.shutdown(); // Verify topic, partition, key and value of every record sent to the mock producer List<ProducerRecord<Integer, Long>> sent = producerWrapper.mockProducer.history(); for (int i = 0; i < numElements; i++) { ProducerRecord<Integer, Long> record = sent.get(i); assertEquals(defaultTopic, record.topic()); assertEquals(partition, record.partition()); assertEquals(i, record.key().intValue()); assertEquals(i, record.value().longValue()); } } }
/**
 * Serializes a GraphQL request body with "query", "operationName" and "variables"
 * entries. A null variables object is replaced with an empty one; a null
 * operationName is kept as an explicit JSON null.
 */
protected static String buildRequestBody(String query, String operationName, JsonObject variables) {
    JsonObject body = new JsonObject();
    body.put("query", query);
    body.put("operationName", operationName);
    JsonObject vars = (variables == null) ? new JsonObject() : variables;
    body.put("variables", vars);
    return body.toJson();
}
// With only a query supplied, the body must carry a null operationName and an empty variables object.
@Test public void shouldBuildRequestBodyWithQuery() { String query = "queryText"; String body = GraphqlProducer.buildRequestBody(query, null, null); String expectedBody = "{" + "\"query\":\"queryText\"," + "\"operationName\":null," + "\"variables\":{}" + "}"; assertEquals(expectedBody, body); }
/**
 * Decodes a single Set-Cookie header into a Cookie, or null for an empty header or an
 * invalid name-value pair. Only the first cookie is parsed: a ',' before the name ends
 * the loop (multiple cookies per header is deprecated), and a trailing ',' after a value
 * (the old multi-cookie separator) is stripped. The first name/value pair becomes the
 * cookie itself; subsequent pairs are treated as attributes on the builder.
 */
public Cookie decode(String header) { final int headerLen = checkNotNull(header, "header").length(); if (headerLen == 0) { return null; } CookieBuilder cookieBuilder = null; loop: for (int i = 0;;) { // Skip spaces and separators. for (;;) { if (i == headerLen) { break loop; } char c = header.charAt(i); if (c == ',') { // Having multiple cookies in a single Set-Cookie header is // deprecated, modern browsers only parse the first one break loop; } else if (c == '\t' || c == '\n' || c == 0x0b || c == '\f' || c == '\r' || c == ' ' || c == ';') { i++; continue; } break; } int nameBegin = i; int nameEnd; int valueBegin; int valueEnd; for (;;) { char curChar = header.charAt(i); if (curChar == ';') { // NAME; (no value till ';') nameEnd = i; valueBegin = valueEnd = -1; break; } else if (curChar == '=') { // NAME=VALUE nameEnd = i; i++; if (i == headerLen) { // NAME= (empty value, i.e. nothing after '=') valueBegin = valueEnd = 0; break; } valueBegin = i; // NAME=VALUE; int semiPos = header.indexOf(';', i); valueEnd = i = semiPos > 0 ? semiPos : headerLen; break; } else { i++; } if (i == headerLen) { // NAME (no value till the end of string) nameEnd = headerLen; valueBegin = valueEnd = -1; break; } } if (valueEnd > 0 && header.charAt(valueEnd - 1) == ',') { // old multiple cookies separator, skipping it valueEnd--; } if (cookieBuilder == null) { // cookie name-value pair DefaultCookie cookie = initCookie(header, nameBegin, nameEnd, valueBegin, valueEnd); if (cookie == null) { return null; } cookieBuilder = new CookieBuilder(cookie, header); } else { // cookie attribute cookieBuilder.appendAttribute(nameBegin, nameEnd, valueBegin, valueEnd); } } return cookieBuilder != null ? cookieBuilder.cookie() : null; }
// A full Set-Cookie string (expires, path, domain, secure, SameSite, Partitioned) must decode into a DefaultCookie with all attributes populated and maxAge derived from expires.
@Test public void testDecodingSingleCookieV0() { String cookieString = "myCookie=myValue;expires=" + DateFormatter.format(new Date(System.currentTimeMillis() + 50000)) + ";path=/apathsomewhere;domain=.adomainsomewhere;secure;SameSite=None;Partitioned"; Cookie cookie = ClientCookieDecoder.STRICT.decode(cookieString); assertNotNull(cookie); assertEquals("myValue", cookie.value()); assertEquals(".adomainsomewhere", cookie.domain()); assertNotEquals(Long.MIN_VALUE, cookie.maxAge(), "maxAge should be defined when parsing cookie " + cookieString); assertTrue(cookie.maxAge() >= 40 && cookie.maxAge() <= 60, "maxAge should be about 50ms when parsing cookie " + cookieString); assertEquals("/apathsomewhere", cookie.path()); assertTrue(cookie.isSecure()); assertThat(cookie, is(instanceOf(DefaultCookie.class))); DefaultCookie c = (DefaultCookie) cookie; assertEquals(SameSite.None, c.sameSite()); assertTrue(c.isPartitioned()); }
/** Logs the call (with the service version) and delegates the summation to the configured source. */
public int sum(int... nums) { LOGGER.info("Arithmetic sum {}", VERSION); return source.accumulateSum(nums); }
// -1 + 0 + 1 must sum to zero.
@Test void testSum() { assertEquals(0, arithmetic.sum(-1, 0, 1)); }
/**
 * Marks the manager closed and force-closes handlers then connections, all under the
 * instance lock so concurrent callers observe a consistent shutdown.
 */
public void closeAllResources() { synchronized (this) { closed.set(true); closeHandlers(true); closeConnections(true); } }
// Closing all resources inside a transaction must mark the manager closed and roll back cached connections.
@Test void assertCloseAllResourcesInTransaction() throws SQLException { connectionSession.getTransactionStatus().setInTransaction(true); Connection cachedConnection = prepareCachedConnections(); databaseConnectionManager.closeAllResources(); assertTrue(databaseConnectionManager.getClosed().get()); verify(cachedConnection).rollback(); }
/**
 * Parses every object contained in this object stream, keyed by object key.
 * The stream's internal index is only consulted when at least one object number
 * appears more than once (indexNeeded); otherwise the object number alone selects
 * the object, which tolerates malformed PDFs with bogus indices. Offsets are
 * reached by skipping forward only. The source is always closed and the document
 * reference released in the finally block.
 */
public Map<COSObjectKey, COSBase> parseAllObjects() throws IOException { Map<COSObjectKey, COSBase> allObjects = new HashMap<>(); try { Map<Integer, Long> objectNumbers = privateReadObjectOffsets(); // count the number of object numbers eliminating double entries long numberOfObjNumbers = objectNumbers.values().stream().distinct().count(); // the usage of the index should be restricted to cases where more than one // object use the same object number. // there are malformed pdfs in the wild which would lead to false results if // pdfbox always relies on the index if available. In most cases the object number // is sufficient to choose the correct object boolean indexNeeded = objectNumbers.size() > numberOfObjNumbers; long currentPosition = source.getPosition(); if (firstObject > 0 && currentPosition < firstObject) { source.skip(firstObject - (int) currentPosition); } int index = 0; for (Entry<Integer, Long> entry : objectNumbers.entrySet()) { COSObjectKey objectKey = getObjectKey(entry.getValue(), 0); // skip object if the index doesn't match if (indexNeeded && objectKey.getStreamIndex() > -1 && objectKey.getStreamIndex() != index) { index++; continue; } int finalPosition = firstObject + entry.getKey(); currentPosition = source.getPosition(); if (finalPosition > 0 && currentPosition < finalPosition) { // jump to the offset of the object to be parsed source.skip(finalPosition - (int) currentPosition); } COSBase streamObject = parseDirObject(); if (streamObject != null) { streamObject.setDirect(false); } allObjects.put(objectKey, streamObject); index++; } } finally { source.close(); document = null; } return allObjects; }
// With unique object numbers, a bogus xref stream index must be ignored and all three objects parsed.
@Test void testParseAllObjectsSkipMalformedIndex() throws IOException { COSStream stream = new COSStream(); stream.setItem(COSName.N, COSInteger.THREE); stream.setItem(COSName.FIRST, COSInteger.get(13)); OutputStream outputStream = stream.createOutputStream(); outputStream.write("6 0 4 5 5 11 true false true".getBytes()); outputStream.close(); COSDocument cosDoc = new COSDocument(); Map<COSObjectKey, Long> xrefTable = cosDoc.getXrefTable(); // add an index for each object key which doesn't match with the index of the object stream xrefTable.put(new COSObjectKey(6, 0, 10), -1L); xrefTable.put(new COSObjectKey(4, 0, 11), -1L); xrefTable.put(new COSObjectKey(5, 0, 12), -1L); PDFObjectStreamParser objectStreamParser = new PDFObjectStreamParser(stream, cosDoc); // the index isn't taken into account as all object numbers of the stream are unique // none of the objects is skipped so that all objects are read and available Map<COSObjectKey, COSBase> objectNumbers = objectStreamParser.parseAllObjects(); assertEquals(3, objectNumbers.size()); assertEquals(COSBoolean.TRUE, objectNumbers.get(new COSObjectKey(6, 0))); assertEquals(COSBoolean.FALSE, objectNumbers.get(new COSObjectKey(4, 0))); assertEquals(COSBoolean.TRUE, objectNumbers.get(new COSObjectKey(5, 0))); }
/** Returns a new indicator computing the element-wise difference between this indicator and {@code other}. */
public NumericIndicator minus(Indicator<Num> other) { return NumericIndicator.of(BinaryOperation.difference(this, other)); }
// Subtraction must work both with a constant operand and with another indicator, at first and later bars.
@Test public void minus() { final NumericIndicator numericIndicator = NumericIndicator.of(cp1); final NumericIndicator staticOp = numericIndicator.minus(5); assertNumEquals(1 - 5, staticOp.getValue(0)); assertNumEquals(9 - 5, staticOp.getValue(8)); final NumericIndicator dynamicOp = numericIndicator.minus(ema); assertNumEquals(cp1.getValue(0).minus(ema.getValue(0)), dynamicOp.getValue(0)); assertNumEquals(cp1.getValue(8).minus(ema.getValue(8)), dynamicOp.getValue(8)); }
/**
 * Creates a Kafka topic under a generated unique name (replication factor 1) and records it
 * for cleanup. If a topic with the generated name already exists, creation is skipped and the
 * name is returned as-is — NOTE(review): in that case the requested partition count is not
 * reconciled with the existing topic; confirm this is intended.
 *
 * @param topicName base name used to derive the unique topic name
 * @param partitions number of partitions; must be positive
 * @throws KafkaResourceManagerException if listing or creating topics fails
 */
public synchronized String createTopic(String topicName, int partitions) throws KafkaResourceManagerException { checkArgument(partitions > 0, "partitions must be positive."); String uniqueName = KafkaResourceManagerUtils.generateTopicName(topicName); try { Set<String> currentTopics = kafkaClient.listTopics().names().get(); if (!currentTopics.contains(uniqueName)) { kafkaClient .createTopics( Collections.singletonList(new NewTopic(uniqueName, partitions, (short) 1))) .all() .get(); topicNames.add(uniqueName); } } catch (Exception e) { throw new KafkaResourceManagerException("Error creating topics.", e); } LOG.info("Successfully created topic {}.", uniqueName); return uniqueName; }
// With a stubbed successful createTopics call, a non-null unique topic name must be returned.
@Test public void testCreateTopicShouldWork() throws ExecutionException, InterruptedException { when(kafkaClient.createTopics(anyCollection()).all().get()).thenReturn(null); assertNotNull(testManager.createTopic(TOPIC_NAME, 1)); }
/**
 * Replaces the stored configuration for a group with one built from the supplied
 * properties layered over the defaults.
 *
 * @param groupId the group to update; must be non-null and non-empty
 * @param newGroupConfig property overrides for the group
 * @throws InvalidRequestException if the group id is null or empty
 */
public void updateGroupConfig(String groupId, Properties newGroupConfig) {
    if (groupId == null || groupId.isEmpty()) {
        throw new InvalidRequestException("Group name can't be empty.");
    }
    configMap.put(groupId, GroupConfig.fromProps(defaultConfig.originals(), newGroupConfig));
}
// After updating a group's config, both overridden properties must be readable back from the stored GroupConfig.
@Test public void testUpdateGroupConfig() { String groupId = "foo"; Properties props = new Properties(); props.put(CONSUMER_SESSION_TIMEOUT_MS_CONFIG, 50000); props.put(CONSUMER_HEARTBEAT_INTERVAL_MS_CONFIG, 6000); configManager.updateGroupConfig(groupId, props); Optional<GroupConfig> configOptional = configManager.groupConfig(groupId); assertTrue(configOptional.isPresent()); GroupConfig config = configOptional.get(); assertEquals(50000, config.getInt(CONSUMER_SESSION_TIMEOUT_MS_CONFIG)); assertEquals(6000, config.getInt(CONSUMER_HEARTBEAT_INTERVAL_MS_CONFIG)); }
/**
 * Returns whether the given exception class is (a subtype of) any registered
 * expected exception type.
 */
public static boolean isExpected(final Class<?> exceptionClass) {
    for (Class<?> expected : EXCEPTIONS) {
        if (expected.isAssignableFrom(exceptionClass)) {
            return true;
        }
    }
    return false;
}
// Generic and unregistered exception types must not be considered expected.
@Test void assertIsNotExpected() { assertFalse(ExpectedExceptions.isExpected(Exception.class)); assertFalse(ExpectedExceptions.isExpected(IllegalArgumentException.class)); assertFalse(ExpectedExceptions.isExpected(ShardingSphereInternalException.class)); }
/** Asserts the subject string has exactly {@code expectedLength} characters; the expectation must be non-negative. */
public void hasLength(int expectedLength) { checkArgument(expectedLength >= 0, "expectedLength(%s) must be >= 0", expectedLength); check("length()").that(checkNotNull(actual).length()).isEqualTo(expectedLength); }
// A wrong expected length must fail with a message attributing the value to string.length().
@Test public void hasLengthFails() { expectFailureWhenTestingThat("kurt").hasLength(5); assertFailureValue("value of", "string.length()"); }
/**
 * Fits an elastic-net linear model using hyper-parameters read from properties.
 * lambda1 and lambda2 are mandatory (no default supplied) — NOTE(review): a missing
 * property makes Double.parseDouble receive null and throw NPE; confirm that is the
 * intended failure mode. Tolerance defaults to 1E-4 and max iterations to 1000.
 */
public static LinearModel fit(Formula formula, DataFrame data, Properties params) { double lambda1 = Double.parseDouble(params.getProperty("smile.elastic_net.lambda1")); double lambda2 = Double.parseDouble(params.getProperty("smile.elastic_net.lambda2")); double tol = Double.parseDouble(params.getProperty("smile.elastic_net.tolerance", "1E-4")); int maxIter = Integer.parseInt(params.getProperty("smile.elastic_net.iterations", "1000")); return fit(formula, data, lambda1, lambda2, tol, maxIter); }
// Elastic net on the Abalone dataset must reproduce the reference RMSE within 1E-4.
@Test public void tesAbalone() { System.out.println("Abalone"); RegressionValidation<LinearModel> result = RegressionValidation.of(Abalone.formula, Abalone.train, Abalone.test, (formula, data) -> ElasticNet.fit(formula, data, 0.8, 0.2)); System.out.println(result.model); System.out.println(result); assertEquals(2.1263, result.metrics.rmse, 1E-4); }
/**
 * Decides whether a CORS origin is acceptable: allowed when origins contain the
 * wildcard, when the origin host equals this machine's hostname or "localhost",
 * or when the raw origin string is explicitly whitelisted.
 *
 * @param sourceHost the Origin header value, may be null or empty
 * @param zConf configuration providing the allowed-origins list
 */
public static boolean isValidOrigin(String sourceHost, ZeppelinConfiguration zConf)
    throws UnknownHostException, URISyntaxException {
  String originHost = "";
  if (sourceHost != null && !sourceHost.isEmpty()) {
    String parsedHost = new URI(sourceHost).getHost();
    if (parsedHost != null) {
      originHost = parsedHost.toLowerCase();
    }
  }
  String currentHost = InetAddress.getLocalHost().getHostName().toLowerCase();
  return zConf.getAllowedOrigins().contains("*")
      || currentHost.equals(originHost)
      || "localhost".equals(originHost)
      || zConf.getAllowedOrigins().contains(sourceHost);
}
// An origin matching the local machine's hostname must always be considered valid.
@Test void isLocalMachine() throws URISyntaxException, UnknownHostException { String origin = "http://" + InetAddress.getLocalHost().getHostName(); assertTrue(CorsUtils.isValidOrigin(origin, ZeppelinConfiguration.load()), "Origin " + origin + " is not allowed. Please check your hostname."); }
/**
 * Commits the given offsets on behalf of a consumer group by driving an
 * AlterConsumerGroupOffsetsHandler through the coordinator-aware admin API machinery,
 * and wraps the per-partition error future in the public result type.
 */
@Override public AlterConsumerGroupOffsetsResult alterConsumerGroupOffsets( String groupId, Map<TopicPartition, OffsetAndMetadata> offsets, AlterConsumerGroupOffsetsOptions options ) { SimpleAdminApiFuture<CoordinatorKey, Map<TopicPartition, Errors>> future = AlterConsumerGroupOffsetsHandler.newFuture(groupId); AlterConsumerGroupOffsetsHandler handler = new AlterConsumerGroupOffsetsHandler(groupId, offsets, logContext); invokeDriver(handler, future, options.timeoutMs); return new AlterConsumerGroupOffsetsResult(future.get(CoordinatorKey.byGroupId(groupId))); }
// A mixed per-partition response must succeed for the clean partition while the errored partition and the aggregate future fail with UnknownTopicOrPartitionException.
@Test public void testOffsetCommitWithMultipleErrors() throws Exception { final Cluster cluster = mockCluster(3, 0); final Time time = new MockTime(); try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(time, cluster, AdminClientConfig.RETRIES_CONFIG, "0")) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); final TopicPartition foo0 = new TopicPartition("foo", 0); final TopicPartition foo1 = new TopicPartition("foo", 1); env.kafkaClient().prepareResponse( prepareFindCoordinatorResponse(Errors.NONE, env.cluster().controller())); Map<TopicPartition, Errors> responseData = new HashMap<>(); responseData.put(foo0, Errors.NONE); responseData.put(foo1, Errors.UNKNOWN_TOPIC_OR_PARTITION); env.kafkaClient().prepareResponse(new OffsetCommitResponse(0, responseData)); Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); offsets.put(foo0, new OffsetAndMetadata(123L)); offsets.put(foo1, new OffsetAndMetadata(456L)); final AlterConsumerGroupOffsetsResult result = env.adminClient() .alterConsumerGroupOffsets(GROUP_ID, offsets); assertNull(result.partitionResult(foo0).get()); TestUtils.assertFutureError(result.partitionResult(foo1), UnknownTopicOrPartitionException.class); TestUtils.assertFutureError(result.all(), UnknownTopicOrPartitionException.class); } }
/**
 * Validates that the list itself is non-null and that every element is non-empty.
 * Element failures report the list name and the offending index.
 *
 * @param list the list to validate
 * @param name descriptive name used in failure messages
 * @return the validated list, for chaining
 */
public static List<String> notEmptyElements(List<String> list, String name) {
    notNull(list, name);
    int size = list.size();
    for (int index = 0; index < size; index++) {
        String label = MessageFormat.format("list [{0}] element [{1}]", name, index);
        notEmpty(list.get(index), label);
    }
    return list;
}
// A list containing an empty element must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class) public void notEmptyElementsEmptyElements() { Check.notEmptyElements(Arrays.asList("a", ""), "name"); }
/**
 * A path is treated as a directory iff it ends with the platform separator.
 * NOTE(review): File.separator makes this platform-dependent (the companion test is
 * skipped on Windows, see https://github.com/apache/beam/issues/20480) — confirm
 * whether a fixed '/' is intended for these resource ids.
 */
@Override public boolean isDirectory() { return pathString.endsWith(File.separator); }
// Paths with a trailing slash are directories; paths without one are not. Skipped on Windows (separator differs).
@Test public void testIsDirectory() { // TODO: Java core test failing on windows, https://github.com/apache/beam/issues/20480 assumeFalse(SystemUtils.IS_OS_WINDOWS); assertTrue(toResourceIdentifier("/").isDirectory()); assertTrue(toResourceIdentifier("/root/tmp/").isDirectory()); assertFalse(toResourceIdentifier("/root").isDirectory()); }
/** Returns the protocol prefix, built from HubicProtocol's package name plus the literal "Hubic". */
@Override public String getPrefix() { return String.format("%s.%s", HubicProtocol.class.getPackage().getName(), "Hubic"); }
// The prefix must be the fully qualified package name followed by ".Hubic".
@Test public void testPrefix() { assertEquals("ch.cyberduck.core.hubic.Hubic", new HubicProtocol().getPrefix()); }
/**
 * Rewrites a notify map from plain service names to group/name:version keys.
 * Only entries whose key contains neither ':' nor '/' are rewritten (those already
 * carrying group/version markers are copied through unchanged); group and version
 * are read from each URL's query string and folded into the new key. Returns the
 * input unchanged when it is null or empty.
 */
public static Map<String, Map<String, String>> revertNotify(Map<String, Map<String, String>> notify) { if (notify != null && notify.size() > 0) { Map<String, Map<String, String>> newNotify = new HashMap<>(); for (Map.Entry<String, Map<String, String>> entry : notify.entrySet()) { String serviceName = entry.getKey(); Map<String, String> serviceUrls = entry.getValue(); if (StringUtils.isNotContains(serviceName, ':') && StringUtils.isNotContains(serviceName, '/')) { if (CollectionUtils.isNotEmptyMap(serviceUrls)) { for (Map.Entry<String, String> entry2 : serviceUrls.entrySet()) { String url = entry2.getKey(); String query = entry2.getValue(); Map<String, String> params = StringUtils.parseQueryString(query); String group = params.get(GROUP_KEY); String version = params.get(VERSION_KEY); // params.remove("group"); // params.remove("version"); String name = serviceName; if (StringUtils.isNotEmpty(group)) { name = group + "/" + name; } if (StringUtils.isNotEmpty(version)) { name = name + ":" + version; } Map<String, String> newUrls = newNotify.computeIfAbsent(name, k -> new HashMap<>()); newUrls.put(url, StringUtils.toQueryString(params)); } } } else { newNotify.put(serviceName, serviceUrls); } } return newNotify; } return notify; }
// A plain service-name key with group=perf and version=1.0.0 in the URL query must be rewritten to "perf/<service>:1.0.0".
@Test void testRevertNotify() { String key = "dubbo.test.api.HelloService"; Map<String, Map<String, String>> notify = new HashMap<String, Map<String, String>>(); Map<String, String> service = new HashMap<String, String>(); service.put("dubbo://127.0.0.1:20880/com.xxx.XxxService", "group=perf&version=1.0.0"); notify.put(key, service); Map<String, Map<String, String>> newRegister = UrlUtils.revertNotify(notify); Map<String, Map<String, String>> expectedRegister = new HashMap<String, Map<String, String>>(); service.put("dubbo://127.0.0.1:20880/com.xxx.XxxService", "group=perf&version=1.0.0"); expectedRegister.put("perf/dubbo.test.api.HelloService:1.0.0", service); assertEquals(expectedRegister, newRegister); }
/** Finds the single post with the given id, or errors with PostNotFoundException when absent. */
Maybe<Post> findById(UUID id) { return findAll().filter(p -> p.getId().equals(id)) .singleElement() .switchIfEmpty(Maybe.error(new PostNotFoundException(id))); }
// Looking up a random (unknown) id must terminate with PostNotFoundException, not completion.
@Test void findById() { var testObserver = new TestObserver<Post>(); this.posts.findById(UUID.randomUUID()).subscribe(testObserver); testObserver.assertError(PostNotFoundException.class); testObserver.assertNotComplete(); }
/**
 * Throws EsIndexSyncInProgressException if issue indexing is still in progress for any of the
 * given components. For applications/views/subviews any branch needing sync anywhere blocks;
 * otherwise only branches belonging to the listed components are consulted.
 */
public void checkIfAnyComponentsNeedIssueSync(DbSession dbSession, List<String> componentKeys) { boolean isAppOrViewOrSubview = dbClient.componentDao().existAnyOfComponentsWithQualifiers(dbSession, componentKeys, APP_VIEW_OR_SUBVIEW); boolean needIssueSync; if (isAppOrViewOrSubview) { needIssueSync = dbClient.branchDao().hasAnyBranchWhereNeedIssueSync(dbSession, true); } else { needIssueSync = dbClient.branchDao().doAnyOfComponentsNeedIssueSync(dbSession, componentKeys); } if (needIssueSync) { throw new EsIndexSyncInProgressException(IssueIndexDefinition.TYPE_ISSUE.getMainType(), "Results are temporarily unavailable. Indexing of issues is in progress."); } }
// When every project branch has need_issue_sync=false, the check must not throw
// (verified both for an empty key list and for real project keys).
@Test
public void checkIfAnyComponentsNeedIssueSync_does_not_throw_exception_if_all_components_have_need_issue_sync_FALSE() {
    underTest.checkIfAnyComponentsNeedIssueSync(db.getSession(), Collections.emptyList());

    ProjectData projectData1 = insertProjectWithBranches(false, 0);
    ProjectData projectData2 = insertProjectWithBranches(false, 0);

    underTest.checkIfAnyComponentsNeedIssueSync(db.getSession(),
        Arrays.asList(projectData1.getProjectDto().getKey(), projectData2.getProjectDto().getKey()));
}
// Initializes server address, namespace, endpoint details and paths
// from the supplied client properties.
private void initParam(NacosClientProperties properties) {
    initServerAddr(properties);
    initNameSpace(properties);
    initEndpoint(properties);
    initEndpointPort(properties);
    initEndpointContextPath(properties);
    initContextPath(properties);
    initServerListName(properties);
}
// Verifies via reflection that endpoint, endpoint port, endpoint context path and
// content path fields are populated from the corresponding PropertyKeyConst properties.
@Test
void testInitParam() throws NacosException, NoSuchFieldException, IllegalAccessException {
    Properties properties = new Properties();
    String endpoint = "127.0.0.1";
    properties.setProperty(PropertyKeyConst.ENDPOINT, endpoint);
    String endpointPort = "9090";
    properties.setProperty(PropertyKeyConst.ENDPOINT_PORT, endpointPort);
    String endpointContextPath = "/endpointContextPath";
    properties.setProperty(PropertyKeyConst.ENDPOINT_CONTEXT_PATH, endpointContextPath);
    String contextPath = "/contextPath";
    properties.setProperty(PropertyKeyConst.CONTEXT_PATH, contextPath);
    final NacosClientProperties clientProperties = NacosClientProperties.PROTOTYPE.derive(properties);
    ServerListManager serverListManager = new ServerListManager(clientProperties);

    Field endpointField = ServerListManager.class.getDeclaredField("endpoint");
    endpointField.setAccessible(true);
    String fieldEndpoint = (String) endpointField.get(serverListManager);
    assertEquals(endpoint, fieldEndpoint);

    Field endpointPortField = ServerListManager.class.getDeclaredField("endpointPort");
    endpointPortField.setAccessible(true);
    String fieldEndpointPort = String.valueOf(endpointPortField.get(serverListManager));
    assertEquals(endpointPort, fieldEndpointPort);

    Field endpointContextPathField = ServerListManager.class.getDeclaredField("endpointContextPath");
    endpointContextPathField.setAccessible(true);
    String fieldEndpointContextPath = String.valueOf(endpointContextPathField.get(serverListManager));
    assertEquals(endpointContextPath, fieldEndpointContextPath);

    Field contentPathField = ServerListManager.class.getDeclaredField("contentPath");
    contentPathField.setAccessible(true);
    String fieldContentPath = String.valueOf(contentPathField.get(serverListManager));
    assertEquals(fieldContentPath, contextPath);
}
/**
 * Returns whether the column is a key column or a pseudo (system) column.
 * With no source qualifier, any known source matching suffices; with a qualifier,
 * the source must exist or an {@link IllegalArgumentException} is thrown.
 */
boolean matchesNonValueField(final Optional<SourceName> source, final ColumnName column) {
    if (!source.isPresent()) {
        // Note: with an empty sourceSchemas map this returns false even for pseudo columns.
        return sourceSchemas.values().stream()
            .anyMatch(schema -> SystemColumns.isPseudoColumn(column) || schema.isKeyColumn(column));
    }
    final SourceName sourceName = source.get();
    final LogicalSchema sourceSchema = sourceSchemas.get(sourceName);
    if (sourceSchema == null) {
        throw new IllegalArgumentException("Unknown source: " + sourceName);
    }
    return sourceSchema.isKeyColumn(column) || SystemColumns.isPseudoColumn(column);
}
// ROWTIME is a pseudo column, so an unqualified lookup must match.
@Test
public void shouldMatchNonValueFieldNameIfMetaField() {
    assertThat(sourceSchemas.matchesNonValueField(Optional.empty(), SystemColumns.ROWTIME_NAME), is(true));
}
/**
 * Checks whether the string is a valid Java number: decimal, hex ("0x..."),
 * scientific notation ("1e3"), with an optional type-qualifier suffix
 * (d/D/f/F/l/L). A single leading '-' is allowed; null/empty returns false.
 */
public static boolean isNumber(String str) {
    if (str == null || str.length() == 0) {
        return false;
    }
    int sz = str.length();
    boolean hasExp = false;
    boolean hasDecPoint = false;
    boolean allowSigns = false;
    boolean foundDigit = false;
    // deal with any possible sign up front
    int start = (str.charAt(0) == '-') ? 1 : 0;
    if (sz > start + 1) {
        if (str.charAt(start) == '0' && str.charAt(start + 1) == 'x') {
            int i = start + 2;
            if (i == sz) {
                return false; // str == "0x"
            }
            // checking hex (it can't be anything else)
            for (; i < str.length(); i++) {
                char ch = str.charAt(i);
                if ((ch < '0' || ch > '9') && (ch < 'a' || ch > 'f') && (ch < 'A' || ch > 'F')) {
                    return false;
                }
            }
            return true;
        }
    }
    sz--; // don't want to loop to the last char, check it afterwords
    // for type qualifiers
    int i = start;
    // loop to the next to last char or to the last char if we need another digit to
    // make a valid number (e.g. chars[0..5] = "1234E")
    while (i < sz || (i < sz + 1 && allowSigns && !foundDigit)) {
        char ch = str.charAt(i);
        if (ch >= '0' && ch <= '9') {
            foundDigit = true;
            allowSigns = false;
        } else if (ch == '.') {
            if (hasDecPoint || hasExp) {
                // two decimal points or dec in exponent
                return false;
            }
            hasDecPoint = true;
        } else if (ch == 'e' || ch == 'E') {
            // we've already taken care of hex.
            if (hasExp) {
                // two E's
                return false;
            }
            if (!foundDigit) {
                return false;
            }
            hasExp = true;
            allowSigns = true;
        } else if (ch == '+' || ch == '-') {
            // a sign is only legal directly after an exponent marker
            if (!allowSigns) {
                return false;
            }
            allowSigns = false;
            foundDigit = false; // we need a digit after the E
        } else {
            return false;
        }
        i++;
    }
    if (i < str.length()) {
        char ch = str.charAt(i);
        if (ch >= '0' && ch <= '9') {
            // no type qualifier, OK
            return true;
        }
        if (ch == 'e' || ch == 'E') {
            // can't have an E at the last byte
            return false;
        }
        if (!allowSigns && (ch == 'd' || ch == 'D' || ch == 'f' || ch == 'F')) {
            return foundDigit;
        }
        if (ch == 'l' || ch == 'L') {
            // not allowing L with an exponent
            return foundDigit && !hasExp;
        }
        // last character is illegal
        return false;
    }
    // allowSigns is true iff the val ends in 'E'
    // found digit it to make sure weird stuff like '.' and '1E-' doesn't pass
    return !allowSigns && foundDigit;
}
// Exercises the char[] overload of isNumber: valid hex/decimal/exponent forms,
// then a battery of malformed inputs (bad suffixes, double dots, trailing signs).
@Test
public void testIsNumberCharArrayInput() {
    Assert.assertTrue( StringUtils.isNumber(new char[]{'-', '0', 'x', 'a'}));
    Assert.assertTrue(StringUtils.isNumber(new char[]{'-', '.', '1'}));
    Assert.assertTrue(StringUtils.isNumber(new char[]{'6', 'l'}));
    Assert.assertTrue( StringUtils.isNumber(new char[]{'0', 'e', '+', '3'}));
    Assert.assertFalse(StringUtils.isNumber(new char[0]));
    Assert.assertFalse(StringUtils.isNumber(new char[]{'0', 'x'}));
    Assert.assertFalse( StringUtils.isNumber(new char[]{'-', '0', 'x', '9', ' '}));
    Assert.assertFalse( StringUtils.isNumber(new char[]{'-', '0', 'x', '9', 'i'}));
    Assert.assertFalse( StringUtils.isNumber(new char[]{'-', '.', '.', 'a'}));
    Assert.assertFalse(StringUtils.isNumber( new char[]{'-', '1', 'e', 'E', '\u0000', '\u0000', '\u0000'}));
    Assert.assertFalse( StringUtils.isNumber(new char[]{'-', '.', 'E', 'a'}));
    Assert.assertFalse(StringUtils.isNumber(new char[]{'+', '\u0016'}));
    Assert.assertFalse(StringUtils.isNumber(new char[]{';', '\u0016'}));
    Assert.assertFalse(StringUtils.isNumber(new char[]{'-', '9', 'e'}));
    Assert.assertFalse(StringUtils.isNumber(new char[]{'-', '.', 'f'}));
    Assert.assertFalse(StringUtils.isNumber(new char[]{'-', '.', 'F'}));
    Assert.assertFalse(StringUtils.isNumber(new char[]{'-', '.', 'd'}));
    Assert.assertFalse(StringUtils.isNumber(new char[]{'-', '.', 'D'}));
    Assert.assertFalse(StringUtils.isNumber(new char[]{'-', '.', 'l'}));
    Assert.assertFalse(StringUtils.isNumber(new char[]{'5', 't'}));
    Assert.assertFalse(StringUtils.isNumber(new char[]{'-'}));
}
// Convenience overload: selects a healthy instance with no cluster filter.
@Override
public Instance selectOneHealthyInstance(String serviceName) throws NacosException {
    return selectOneHealthyInstance(serviceName, new ArrayList<>());
}
// Verifies that selecting a healthy instance queries the naming proxy exactly once
// with the default group and an empty cluster filter.
@Test
void testSelectOneHealthyInstance3() throws NacosException {
    //given
    Instance healthyInstance = new Instance();
    healthyInstance.setIp("1.1.1.1");
    healthyInstance.setPort(1000);
    List<Instance> hosts = new ArrayList<>();
    hosts.add(healthyInstance);
    ServiceInfo infoWithHealthyInstance = new ServiceInfo();
    infoWithHealthyInstance.setHosts(hosts);
    when(proxy.queryInstancesOfService(anyString(), anyString(), anyString(), anyBoolean())).thenReturn(
            infoWithHealthyInstance);
    String serviceName = "service1";
    //when
    client.selectOneHealthyInstance(serviceName, false);
    //then
    verify(proxy, times(1)).queryInstancesOfService(serviceName, Constants.DEFAULT_GROUP, "", false);
}
// True only when capital mode is enabled AND the word itself is in capital style
// (as judged by StringUtils.isCapitalMode).
public boolean isCapitalModeNaming(@NotNull String word) {
    return isCapitalMode && StringUtils.isCapitalMode(word);
}
// Capital-mode naming requires both the flag enabled and an all-caps word.
@Test
void isCapitalModeNamingTest() {
    Assertions.assertFalse(GeneratorBuilder.strategyConfig().isCapitalModeNaming("T_USER"));
    Assertions.assertFalse(GeneratorBuilder.strategyConfigBuilder().enableCapitalMode().build().isCapitalModeNaming("user"));
    Assertions.assertFalse(GeneratorBuilder.strategyConfigBuilder().enableCapitalMode().build().isCapitalModeNaming("user_name"));
    Assertions.assertTrue(GeneratorBuilder.strategyConfigBuilder().enableCapitalMode().build().isCapitalModeNaming("USER_NAME"));
    Assertions.assertTrue(GeneratorBuilder.strategyConfigBuilder().enableCapitalMode().build().isCapitalModeNaming("T_USER"));
    Assertions.assertTrue(GeneratorBuilder.strategyConfigBuilder().enableCapitalMode().build().isCapitalModeNaming("NAME"));
}
// Encodes the source strings using the platform default charset and the default allocator.
public static ByteBufFlux fromString(Publisher<? extends String> source) {
    return fromString(source, Charset.defaultCharset(), ByteBufAllocator.DEFAULT);
}
// Round-trips a Flux of strings through ByteBufFlux and back, releasing each buffer.
@Test
void testFromString_Flux() {
    List<String> original = Arrays.asList("1", "2", "3");
    StepVerifier.create(ByteBufFlux.fromString(Flux.fromIterable(original)).collectList())
            .expectNextMatches(list -> {
                List<String> newList = list.stream()
                        .map(b -> {
                            String result = b.toString(Charset.defaultCharset());
                            b.release();  // avoid leaking the ByteBuf
                            return result;
                        })
                        .collect(Collectors.toList());
                return Objects.equals(original, newList);
            })
            .expectComplete()
            .verify(Duration.ofSeconds(30));
}
/**
 * Returns {@code true} when the given collection is {@code null} or contains no elements.
 *
 * @param list the collection to check; may be {@code null}
 * @return {@code true} if {@code list} is {@code null} or empty
 */
public static boolean isNullOrEmpty(final Collection<?> list) {
    // isEmpty() is the idiomatic emptiness check (and can be cheaper than size() for some collections).
    return list == null || list.isEmpty();
}
// Exercises the String overload: null/empty are "empty", any non-empty text is not.
@Test
public void testIsNullOrEmptyString() {
    assertTrue(Utils.isNullOrEmpty((String) null));
    assertTrue(Utils.isNullOrEmpty(""));
    assertFalse(Utils.isNullOrEmpty("null"));
    assertFalse(Utils.isNullOrEmpty("empty"));
    assertFalse(Utils.isNullOrEmpty("this is a string"));
}
// Whether a sample keyed by this identifier has been recorded in this bucket.
boolean hasIdentifier(Identifier id) {
    return values.containsKey(id);
}
// Puts four samples with distinct identifiers, then checks each identifier
// (rebuilt from scratch, relying on Identifier equality) is found.
@Test
final void testHasIdentifier() {
    for (int i = 0; i < 4; ++i) {
        bucket.put(new Sample(new Measurement(i), new Identifier("nalle_" + i, new Point(
                new ImmutableMap.Builder<String, Integer>().put(String.valueOf(i), Integer.valueOf(i)).build())),
                AssumedType.GAUGE));
    }
    for (int i = 0; i < 4; ++i) {
        assertTrue(bucket.hasIdentifier(new Identifier("nalle_" + i,
                new Point(new ImmutableMap.Builder<String, Integer>().put(
                        String.valueOf(i), Integer.valueOf(i)).build()))));
    }
}
/**
 * Adds a Bearer token obtained from the configured OAuth2 client registration
 * to the outgoing request, unless an Authorization header is already present.
 */
@Override
public ClientHttpResponse intercept(HttpRequest request, byte[] body, ClientHttpRequestExecution execution)
        throws IOException {
    if (!request.getHeaders().containsKey(HttpHeaders.AUTHORIZATION)) {
        OAuth2AuthorizedClient authorizedClient = this.authorizedClientManager.authorize(
                OAuth2AuthorizeRequest.withClientRegistrationId(this.registrationId)
                        .principal(this.securityContextHolderStrategy.getContext().getAuthentication())
                        .build());
        // NOTE(review): authorize(...) is assumed to return non-null here; an NPE
        // would result otherwise — confirm against the manager's contract.
        request.getHeaders().setBearerAuth(authorizedClient.getAccessToken().getTokenValue());
    }
    return execution.execute(request, body);
}
// When no Authorization header is present, the interceptor must obtain a token for
// the current principal and registration id, set it as a Bearer header, and delegate.
@Test
void intercept_AuthorizationHeaderIsNotSet_AddsAuthorizationHeader() throws IOException {
    // given
    var request = new MockClientHttpRequest();
    var body = new byte[0];
    var execution = mock(ClientHttpRequestExecution.class);
    var response = new MockClientHttpResponse();
    var authentication = new TestingAuthenticationToken("j.dewar", "password");
    SecurityContextHolder.getContext().setAuthentication(authentication);
    var authorizedClient = new OAuth2AuthorizedClient(mock(), "j.dewar",
            new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "token", Instant.now(), Instant.MAX));
    doReturn(authorizedClient).when(this.authorizedClientManager)
            .authorize(argThat(authorizationRequest -> authorizationRequest.getPrincipal().equals(authentication)
                    && authorizationRequest.getClientRegistrationId().equals("test")));
    doReturn(response).when(execution).execute(request, body);
    // when
    var result = this.interceptor.intercept(request, body, execution);
    // then
    assertEquals(response, result);
    assertEquals("Bearer token", request.getHeaders().getFirst(HttpHeaders.AUTHORIZATION));
    verify(execution).execute(request, body);
    verifyNoMoreInteractions(execution);
}
/**
 * Binds this meta's value {@code data} to position {@code index} of the prepared
 * statement, choosing the JDBC setter (and SQL NULL type) from this meta's Kettle
 * type and the target database's capabilities.
 *
 * @throws KettleDatabaseException wrapping any conversion or JDBC error
 */
@Override
public void setPreparedStatementValue( DatabaseMeta databaseMeta, PreparedStatement preparedStatement, int index,
  Object data ) throws KettleDatabaseException {
  try {
    switch ( getType() ) {
      case ValueMetaInterface.TYPE_NUMBER:
        if ( !isNull( data ) ) {
          double num = getNumber( data ).doubleValue();
          if ( databaseMeta.supportsFloatRoundingOnUpdate() && getPrecision() >= 0 ) {
            num = Const.round( num, getPrecision() );
          }
          preparedStatement.setDouble( index, num );
        } else {
          preparedStatement.setNull( index, java.sql.Types.DOUBLE );
        }
        break;
      case ValueMetaInterface.TYPE_INTEGER:
        if ( !isNull( data ) ) {
          if ( databaseMeta.supportsSetLong() ) {
            preparedStatement.setLong( index, getInteger( data ).longValue() );
          } else {
            // Fallback for databases without setLong support: bind as double.
            double d = getNumber( data ).doubleValue();
            if ( databaseMeta.supportsFloatRoundingOnUpdate() && getPrecision() >= 0 ) {
              preparedStatement.setDouble( index, d );
            } else {
              preparedStatement.setDouble( index, Const.round( d, getPrecision() ) );
            }
          }
        } else {
          preparedStatement.setNull( index, java.sql.Types.INTEGER );
        }
        break;
      case ValueMetaInterface.TYPE_STRING:
        if ( !isNull( data ) ) {
          if ( getLength() == DatabaseMeta.CLOB_LENGTH ) {
            setLength( databaseMeta.getMaxTextFieldLength() );
          }
          if ( getLength() <= databaseMeta.getMaxTextFieldLength() ) {
            preparedStatement.setString( index, getString( data ) );
          } else {
            String string = getString( data );

            int maxlen = databaseMeta.getMaxTextFieldLength();
            int len = string.length();

            // Take the last maxlen characters of the string...
            int begin = Math.max( len - maxlen, 0 );
            if ( begin > 0 ) {
              // Truncate if logging result if it exceeds database maximum string field length
              log.logMinimal( String.format(
                "Truncating %d symbols of original message in '%s' field", begin, getName() ) );
              string = string.substring( begin );
            }

            if ( databaseMeta.supportsSetCharacterStream() ) {
              preparedStatement.setCharacterStream( index, new StringReader( string ), string.length() );
            } else {
              preparedStatement.setString( index, string );
            }
          }
        } else {
          preparedStatement.setNull( index, java.sql.Types.VARCHAR );
        }
        break;
      case ValueMetaInterface.TYPE_DATE:
        if ( !isNull( data ) ) {
          // Environment variable to disable timezone setting for the database updates
          // When it is set, timezone will not be taken into account and the value will be converted
          // into the local java timezone
          if ( getPrecision() == 1 || !databaseMeta.supportsTimeStampToDateConversion() ) {
            // Convert to DATE!
            long dat = getInteger( data ).longValue(); // converts using Date.getTime()
            java.sql.Date ddate = new java.sql.Date( dat );
            if ( ignoreTimezone || this.getDateFormatTimeZone() == null ) {
              preparedStatement.setDate( index, ddate );
            } else {
              preparedStatement.setDate( index, ddate, Calendar.getInstance( this.getDateFormatTimeZone() ) );
            }
          } else {
            if ( data instanceof java.sql.Timestamp ) {
              // Preserve ns precision!
              //
              if ( ignoreTimezone || this.getDateFormatTimeZone() == null ) {
                preparedStatement.setTimestamp( index, (java.sql.Timestamp) data );
              } else {
                preparedStatement.setTimestamp( index, (java.sql.Timestamp) data, Calendar.getInstance( this
                  .getDateFormatTimeZone() ) );
              }
            } else {
              long dat = getInteger( data ).longValue(); // converts using Date.getTime()
              java.sql.Timestamp sdate = new java.sql.Timestamp( dat );
              if ( ignoreTimezone || this.getDateFormatTimeZone() == null ) {
                preparedStatement.setTimestamp( index, sdate );
              } else {
                preparedStatement.setTimestamp( index, sdate, Calendar.getInstance( this.getDateFormatTimeZone() ) );
              }
            }
          }
        } else {
          // NULL date: the SQL type must mirror the non-null branch's choice.
          if ( getPrecision() == 1 || !databaseMeta.supportsTimeStampToDateConversion() ) {
            preparedStatement.setNull( index, java.sql.Types.DATE );
          } else {
            preparedStatement.setNull( index, java.sql.Types.TIMESTAMP );
          }
        }
        break;
      case ValueMetaInterface.TYPE_BOOLEAN:
        if ( databaseMeta.supportsBooleanDataType() ) {
          if ( !isNull( data ) ) {
            preparedStatement.setBoolean( index, getBoolean( data ).booleanValue() );
          } else {
            preparedStatement.setNull( index, java.sql.Types.BOOLEAN );
          }
        } else {
          // No native boolean: encode as "Y"/"N" characters.
          if ( !isNull( data ) ) {
            preparedStatement.setString( index, getBoolean( data ).booleanValue() ? "Y" : "N" );
          } else {
            preparedStatement.setNull( index, java.sql.Types.CHAR );
          }
        }
        break;
      case ValueMetaInterface.TYPE_BIGNUMBER:
        if ( !isNull( data ) ) {
          preparedStatement.setBigDecimal( index, getBigNumber( data ) );
        } else {
          preparedStatement.setNull( index, java.sql.Types.DECIMAL );
        }
        break;
      case ValueMetaInterface.TYPE_BINARY:
        if ( !isNull( data ) ) {
          preparedStatement.setBytes( index, getBinary( data ) );
        } else {
          preparedStatement.setNull( index, java.sql.Types.BINARY );
        }
        break;
      default:
        // placeholder
        preparedStatement.setNull( index, java.sql.Types.VARCHAR );
        break;
    }
  } catch ( Exception e ) {
    throw new KettleDatabaseException( "Error setting value #" + index + " [" + toStringMeta()
      + "] on prepared statement", e );
  }
}
// A string exactly at the database's max field length must be bound without
// any truncation log message being emitted.
@Test
public void testSetPreparedStatementStringValueDontLogTruncated() throws KettleDatabaseException {
    ValueMetaBase valueMetaString = new ValueMetaBase( "LOG_FIELD", ValueMetaInterface.TYPE_STRING, LOG_FIELD.length(), 0 );

    DatabaseMeta databaseMeta = mock( DatabaseMeta.class );
    PreparedStatement preparedStatement = mock( PreparedStatement.class );
    when( databaseMeta.getMaxTextFieldLength() ).thenReturn( LOG_FIELD.length() );
    List<KettleLoggingEvent> events = listener.getEvents();
    assertEquals( 0, events.size() );

    valueMetaString.setPreparedStatementValue( databaseMeta, preparedStatement, 0, LOG_FIELD );

    //no logging occurred as max string length equals to logging text length
    assertEquals( 0, events.size() );
}
/**
 * Provider-side auth filter: when service auth is enabled on the URL, runs the
 * configured {@link Authenticator}; a failure is returned as an error result
 * rather than thrown, so the consumer receives it as a normal RPC response.
 */
@Override
public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException {
    URL url = invoker.getUrl();
    boolean shouldAuth = url.getParameter(Constants.SERVICE_AUTH, false);
    if (shouldAuth) {
        Authenticator authenticator = applicationModel
                .getExtensionLoader(Authenticator.class)
                .getExtension(url.getParameter(Constants.AUTHENTICATOR, Constants.DEFAULT_AUTHENTICATOR));
        try {
            authenticator.authenticate(invocation, url);
        } catch (Exception e) {
            return AsyncRpcResult.newDefaultAsyncResult(e, invocation);
        }
    }
    return invoker.invoke(invocation);
}
// With auth enabled but no request signature attached, the filter must return
// a result carrying the authentication exception instead of invoking the service.
@Test
void testAuthFailed() {
    URL url = URL.valueOf("dubbo://10.10.10.10:2181")
            .addParameter(Constants.ACCESS_KEY_ID_KEY, "ak")
            .addParameter(Constants.SECRET_ACCESS_KEY_KEY, "sk")
            .addParameter(CommonConstants.APPLICATION_KEY, "test")
            .addParameter(Constants.SERVICE_AUTH, true);
    Invoker invoker = mock(Invoker.class);
    Invocation invocation = mock(RpcInvocation.class);
    when(invocation.getAttachment(Constants.REQUEST_SIGNATURE_KEY)).thenReturn(null);
    when(invoker.getUrl()).thenReturn(url);
    ProviderAuthFilter providerAuthFilter = new ProviderAuthFilter(ApplicationModel.defaultModel());
    Result result = providerAuthFilter.invoke(invoker, invocation);
    assertTrue(result.hasException());
}
/** Empties this configuration: removes every voting peer and every learner. */
public void reset() {
    peers.clear();
    learners.clear();
}
// After reset(), a non-empty configuration must report empty overall and per-set.
@Test
public void testReset() {
    final Configuration conf = JRaftUtils.getConfiguration("localhost:8081,localhost:8082,localhost:8083");
    assertFalse(conf.isEmpty());
    conf.reset();
    assertTrue(conf.isEmpty());
    assertTrue(conf.getPeerSet().isEmpty());
}
/**
 * Evaluates this row against the given column/value pairs; when every pair matches
 * (exact or regex, depending on the regex flag stored under {@code regexField}),
 * returns the value of {@code outputColumn}.
 *
 * @param columnPairsMap column name -> expected value (pattern strings in regex mode)
 * @param outputColumn the column whose value is returned on a full match
 * @param regexField name of the boolean column enabling regex mode; may be null
 * @return the output column's value, or empty when any pair fails to match
 */
public Optional<Object> evaluate(final Map<String, Object> columnPairsMap, final String outputColumn,
                                 final String regexField) {
    boolean matching = true;
    boolean isRegex = regexField != null && columnValues.containsKey(regexField) && (boolean) columnValues.get(regexField);
    for (Map.Entry<String, Object> columnPairEntry : columnPairsMap.entrySet()) {
        // NOTE(review): value is null when the key is absent from columnValues; in regex
        // mode value.toString() would then NPE — confirm callers guarantee presence.
        Object value = columnValues.get(columnPairEntry.getKey());
        matching = isRegex ? isRegexMatching(value.toString(), (String) columnPairEntry.getValue())
                : isMatching(value, columnPairEntry.getValue());
        if (!matching) {
            break;
        }
    }
    return matching ? Optional.ofNullable(columnValues.get(outputColumn)) : Optional.empty();
}
// When every column pair matches, the value stored under the output column is returned.
@Test
void evaluateKeyFoundMultipleMatching() {
    KiePMMLRow kiePMMLRow = new KiePMMLRow(COLUMN_VALUES);
    Map<String, Object> columnPairsMap = IntStream.range(0, 3).boxed()
            .collect(Collectors.toMap(i -> "KEY-" + i, integer -> integer));
    Optional<Object> retrieved = kiePMMLRow.evaluate(columnPairsMap, "KEY-0", null);
    assertThat(retrieved).isPresent();
    assertThat(retrieved.get()).isEqualTo(COLUMN_VALUES.get("KEY-0"));
}
/**
 * Verifies the MAC over the authenticate result using secure messaging keyed from
 * the first 16 bytes of {@code seed}, then decrypts and returns the 32-byte payload.
 *
 * @throws RdaException with {@code RdaError.AUTHENTICATE} when the MAC does not match
 */
public byte[] verifyAuthenticate(byte[] seed, byte[] result) throws RdaException {
    final SecureMessaging sm = new TDEASecureMessaging(seed, 0, 16, null);
    // MAC is computed over the first 32 bytes of result.
    final byte[] calculatedMac = sm.mac( m -> m.update(result, 0, 32));
    // Assumes CryptoUtils.compare checks calculatedMac against result starting at
    // offset 32 — confirm against its implementation.
    if (!CryptoUtils.compare(calculatedMac, result, 32)) {
        throw new RdaException(RdaError.AUTHENTICATE, "Invalid MAC");
    }
    return sm.decrypt(false, false, result, 0, 32);
}
// Feeds a fixed seed and authenticate result through verifyAuthenticate and
// pins the expected decrypted payload (hex-encoded fixtures).
@Test
public void shouldVerifyAuthenticate() throws Exception {
    final CardVerifier verifier = verifier(null, null);
    final byte[] seed = Hex.decode("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS");
    final byte[] result = Hex.decode(
        "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS"
    );
    assertEquals("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSX".substring(0, 0) + "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS",
        Hex.toHexString(verifier.verifyAuthenticate(seed, result))
    );
}
/**
 * Returns the most relevant lifecycle timestamp: last sign-in when present,
 * otherwise activation time, otherwise creation time.
 */
public ZonedDateTime getLastSignInOrActivatedAtOrCreatedAt() {
    ZonedDateTime lastSignIn = this.getLastSignInAt();
    if (lastSignIn != null) {
        return lastSignIn;
    }
    ZonedDateTime activated = this.getActivatedAt();
    return activated != null ? activated : this.getCreatedAt();
}
// With only createdAt populated, the fallback chain must return createdAt.
@Test
void testGetLastSignInOrActivatedAtOrCreatedAt() {
    var currentDate = ZonedDateTime.now();
    var appAuthenticator = new AppAuthenticator();
    appAuthenticator.setCreatedAt(currentDate);

    assertEquals(currentDate, appAuthenticator.getLastSignInOrActivatedAtOrCreatedAt());
}
/**
 * Assigns the segment to instances using replica-group assignment. Without a
 * partition column (or with a single partition) everything maps to partition 0;
 * otherwise the segment's partition id (offline vs realtime lookup) is taken
 * modulo the number of instance partitions.
 */
@Override
public List<String> assignSegment(String segmentName, Map<String, Map<String, String>> currentAssignment,
    InstancePartitions instancePartitions, InstancePartitionsType instancePartitionsType) {
  int numPartitions = instancePartitions.getNumPartitions();
  checkReplication(instancePartitions, _replication, _tableName);

  int partitionId;
  if (_partitionColumn == null || numPartitions == 1) {
    partitionId = 0;
  } else {
    // Uniformly spray the segment partitions over the instance partitions
    if (_tableConfig.getTableType() == TableType.OFFLINE) {
      partitionId = SegmentAssignmentUtils
          .getOfflineSegmentPartitionId(segmentName, _tableName, _helixManager, _partitionColumn) % numPartitions;
    } else {
      partitionId = SegmentAssignmentUtils
          .getRealtimeSegmentPartitionId(segmentName, _tableName, _helixManager, _partitionColumn) % numPartitions;
    }
  }
  return SegmentAssignmentUtils.assignSegmentWithReplicaGroup(currentAssignment, instancePartitions, partitionId);
}
// Assigns all segments without a partition column and checks: replica count per
// segment, even distribution across instances, and that rebalancing is a no-op.
@Test
public void testTableBalancedWithoutPartition() {
    Map<String, Map<String, String>> currentAssignment = new TreeMap<>();
    for (String segmentName : SEGMENTS) {
        List<String> instancesAssigned = _segmentAssignmentWithoutPartition
            .assignSegment(segmentName, currentAssignment, _instancePartitionsMapWithoutPartition);
        currentAssignment
            .put(segmentName, SegmentAssignmentUtils.getInstanceStateMap(instancesAssigned, SegmentStateModel.ONLINE));
    }

    assertEquals(currentAssignment.size(), NUM_SEGMENTS);
    // Each segment should have 3 replicas
    for (Map<String, String> instanceStateMap : currentAssignment.values()) {
        assertEquals(instanceStateMap.size(), NUM_REPLICAS);
    }
    int[] numSegmentsAssignedPerInstance =
        SegmentAssignmentUtils.getNumSegmentsAssignedPerInstance(currentAssignment, INSTANCES);
    int[] expectedNumSegmentsAssignedPerInstance = new int[NUM_INSTANCES];
    int numSegmentsPerInstance = NUM_SEGMENTS * NUM_REPLICAS / NUM_INSTANCES;
    Arrays.fill(expectedNumSegmentsAssignedPerInstance, numSegmentsPerInstance);
    assertEquals(numSegmentsAssignedPerInstance, expectedNumSegmentsAssignedPerInstance);
    // Current assignment should already be balanced
    assertEquals(
        _segmentAssignmentWithoutPartition.rebalanceTable(currentAssignment, _instancePartitionsMapWithoutPartition,
            null, null, new RebalanceConfig()), currentAssignment);
}
/**
 * Reads the jetty options sub-node of the given slave server config node into a
 * map of Carte/Jetty option keys to their configured values.
 *
 * @param node the slave server config node to inspect
 * @return the parsed options (possibly empty), or {@code null} when no jetty
 *         options node is present
 */
protected Map<String, String> parseJettyOptions( Node node ) {
  Map<String, String> jettyOptions = null;
  Node jettyOptionsNode = XMLHandler.getSubNode( node, XML_TAG_JETTY_OPTIONS );
  if ( jettyOptionsNode != null ) {
    jettyOptions = new HashMap<>();
    // Read each tag once instead of twice (null check + put previously re-parsed the node).
    String acceptors = XMLHandler.getTagValue( jettyOptionsNode, XML_TAG_ACCEPTORS );
    if ( acceptors != null ) {
      jettyOptions.put( Const.KETTLE_CARTE_JETTY_ACCEPTORS, acceptors );
    }
    String acceptQueueSize = XMLHandler.getTagValue( jettyOptionsNode, XML_TAG_ACCEPT_QUEUE_SIZE );
    if ( acceptQueueSize != null ) {
      jettyOptions.put( Const.KETTLE_CARTE_JETTY_ACCEPT_QUEUE_SIZE, acceptQueueSize );
    }
    String lowResMaxIdleTime = XMLHandler.getTagValue( jettyOptionsNode, XML_TAG_LOW_RES_MAX_IDLE_TIME );
    if ( lowResMaxIdleTime != null ) {
      jettyOptions.put( Const.KETTLE_CARTE_JETTY_RES_MAX_IDLE_TIME, lowResMaxIdleTime );
    }
  }
  return jettyOptions;
}
// With all three jetty options present in the config XML, the parsed map must
// contain exactly the three expected key/value pairs.
@Test
public void testParseJettyOption_AllOptions() throws KettleXMLException {
    Node configNode = getConfigNode( getConfigWithAllOptions() );
    Map<String, String> parseJettyOptions = slServerConfig.parseJettyOptions( configNode );
    assertNotNull( parseJettyOptions );
    assertEquals( 3, parseJettyOptions.size() );
    assertTrue( "Expected containing key=" + EXPECTED_ACCEPTORS_KEY, parseJettyOptions
        .containsKey( EXPECTED_ACCEPTORS_KEY ) );
    assertEquals( EXPECTED_ACCEPTORS_VALUE, parseJettyOptions.get( EXPECTED_ACCEPTORS_KEY ) );
    assertTrue( "Expected containing key=" + EXPECTED_ACCEPT_QUEUE_SIZE_KEY, parseJettyOptions
        .containsKey( EXPECTED_ACCEPT_QUEUE_SIZE_KEY ) );
    assertEquals( EXPECTED_ACCEPT_QUEUE_SIZE_VALUE, parseJettyOptions.get( EXPECTED_ACCEPT_QUEUE_SIZE_KEY ) );
    assertTrue( "Expected containing key=" + EXPECTED_LOW_RES_MAX_IDLE_TIME_KEY, parseJettyOptions
        .containsKey( EXPECTED_LOW_RES_MAX_IDLE_TIME_KEY ) );
    assertEquals( EXPECTED_LOW_RES_MAX_IDLE_TIME_VALUE, parseJettyOptions.get( EXPECTED_LOW_RES_MAX_IDLE_TIME_KEY ) );
}
// Blocking variant: delegates to the async call and waits for its result.
@Override
public int getPermits() {
    return get(getPermitsAsync());
}
// getPermits() reports the configured total (2) regardless of how many leases
// are currently held or have expired.
@Test
public void testGetPermits() throws InterruptedException {
    RPermitExpirableSemaphore semaphore = redisson.getPermitExpirableSemaphore("test-semaphore");
    assertThat(semaphore.trySetPermits(2)).isTrue();
    Assertions.assertEquals(2, semaphore.getPermits());

    String acquire1 = semaphore.tryAcquire(200, 1000, TimeUnit.MILLISECONDS);
    assertThat(acquire1).isNotNull();
    String acquire2 = semaphore.tryAcquire(200, 1000, TimeUnit.MILLISECONDS);
    assertThat(acquire2).isNotNull();
    String acquire3 = semaphore.tryAcquire(200, 1000, TimeUnit.MILLISECONDS);
    assertThat(acquire3).isNull();
    Assertions.assertEquals(2, semaphore.getPermits());

    Thread.sleep(1100);
    String acquire4 = semaphore.tryAcquire(200, 1000, TimeUnit.MILLISECONDS);
    assertThat(acquire4).isNotNull();
    Thread.sleep(1100);
    Assertions.assertEquals(2, semaphore.getPermits());
}
/**
 * Creates a reader seeded with empty include/exclude sets, then immediately
 * refreshes from the given include and exclude files.
 *
 * @param inFile path of the includes file
 * @param exFile path of the excludes file
 * @throws IOException if either file cannot be read during the initial refresh
 */
public HostsFileReader(String inFile, String exFile) throws IOException {
    HostDetails hostDetails = new HostDetails(
        inFile, Collections.emptySet(),
        exFile, Collections.emptyMap());
    current = new AtomicReference<>(hostDetails);
    refresh(inFile, exFile);
}
// Writes include/exclude files (with comments and multi-host lines), verifies
// parsing, then refreshes with new files and verifies the reader picks them up.
@Test
public void testHostsFileReader() throws Exception {
    FileWriter efw = new FileWriter(excludesFile);
    FileWriter ifw = new FileWriter(includesFile);

    efw.write("#DFS-Hosts-excluded\n");
    efw.write("somehost1\n");
    efw.write("#This-is-comment\n");
    efw.write("somehost2\n");
    efw.write("somehost3 # host3\n");
    efw.write("somehost4\n");
    efw.write("somehost4 somehost5\n");
    efw.close();

    ifw.write("#Hosts-in-DFS\n");
    ifw.write("somehost1\n");
    ifw.write("somehost2\n");
    ifw.write("somehost3\n");
    ifw.write("#This-is-comment\n");
    ifw.write("somehost4 # host4\n");
    ifw.write("somehost4 somehost5\n");
    ifw.close();

    HostsFileReader hfp = new HostsFileReader(includesFile, excludesFile);

    int includesLen = hfp.getHosts().size();
    int excludesLen = hfp.getExcludedHosts().size();

    assertEquals(5, includesLen);
    assertEquals(5, excludesLen);

    assertTrue(hfp.getHosts().contains("somehost5"));
    assertFalse(hfp.getHosts().contains("host3"));

    assertTrue(hfp.getExcludedHosts().contains("somehost5"));
    assertFalse(hfp.getExcludedHosts().contains("host4"));

    // test for refreshing hostreader wit new include/exclude host files
    String newExcludesFile = HOSTS_TEST_DIR + "/dfs1.exclude";
    String newIncludesFile = HOSTS_TEST_DIR + "/dfs1.include";

    efw = new FileWriter(newExcludesFile);
    ifw = new FileWriter(newIncludesFile);

    efw.write("#DFS-Hosts-excluded\n");
    efw.write("node1\n");
    efw.close();

    ifw.write("#Hosts-in-DFS\n");
    ifw.write("node2\n");
    ifw.close();

    hfp.refresh(newIncludesFile, newExcludesFile);

    assertTrue(hfp.getExcludedHosts().contains("node1"));
    assertTrue(hfp.getHosts().contains("node2"));

    HostDetails hostDetails = hfp.getHostDetails();
    assertTrue(hostDetails.getExcludedHosts().contains("node1"));
    assertTrue(hostDetails.getIncludedHosts().contains("node2"));
    assertEquals(newIncludesFile, hostDetails.getIncludesFile());
    assertEquals(newExcludesFile, hostDetails.getExcludesFile());
}
/**
 * Returns the distinct union of two arrays, preserving first-seen order:
 * all of {@code left}'s distinct elements followed by {@code right}'s new ones.
 *
 * @param left First array of values
 * @param right Second array of values
 * @return the de-duplicated union, or {@code null} when either input is {@code null}
 */
@Udf
public <T> List<T> union(
    @UdfParameter(description = "First array of values") final List<T> left,
    @UdfParameter(description = "Second array of values") final List<T> right) {
  if (left == null || right == null) {
    return null;
  }
  // LinkedHashSet keeps insertion order while dropping duplicates.
  final Set<T> combined = Sets.newLinkedHashSet(left);
  combined.addAll(right);
  // Copy into a resizable, properly-typed list; the previous
  // (List<T>) Arrays.asList(combined.toArray()) was an unchecked cast
  // over a fixed-size list backed by Object[].
  return new java.util.ArrayList<>(combined);
}
// NOTE(review): despite the test name, the expected result is a one-element list
// containing null (the de-duplicated union of all-null arrays), not a null result.
@Test
public void shouldReturnNullForArraysOfOnlyNulls() {
    final List<String> input1 = Arrays.asList(null, null);
    final List<String> input2 = Arrays.asList(null, null, null);
    final List<String> result = udf.union(input1, input2);
    assertThat(result, contains(nullValue()));
}