focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
/** A non-vararg candidate must win over a vararg one when all argument types are null/unknown. */
@Test
public void shouldChooseNonVarargWithNullValuesOfSameSchemas() {
  // Given: EXPECTED takes two STRING params; OTHER is a STRING vararg.
  givenFunctions(
      function(EXPECTED, -1, STRING, STRING),
      function(OTHER, 0, STRING_VARARGS)
  );
  // When: resolving with two completely untyped (null) arguments.
  final KsqlScalarFunction fun = udfIndex.getFunction(
      Arrays.asList(SqlArgument.of(null, null), SqlArgument.of(null, null)));
  // Then: the non-vararg overload is preferred.
  assertThat(fun.name(), equalTo(EXPECTED));
}
/**
 * Creates a fresh, empty {@link ConcurrentMap} backed by a {@link ConcurrentHashMap}.
 *
 * @return a new empty concurrent map
 */
public static <K, V> ConcurrentMap<K, V> newConcurrentMap() {
    final ConcurrentMap<K, V> map = new ConcurrentHashMap<>();
    return map;
}
/** newConcurrentMap must never return null, with or without a capacity argument. */
@Test
public void newConcurrentMap() {
    // NOTE(review): the int-argument overload is not visible in this file — presumably a sizing hint.
    Assert.assertNotNull(CollectionKit.newConcurrentMap(0));
    Assert.assertNotNull(CollectionKit.newConcurrentMap());
}
/**
 * Static factory for a {@link Message} wrapping a message template and its arguments.
 * Argument validation (e.g. rejecting a null msg) is presumably done by the
 * Message constructor — not visible here; TODO confirm.
 */
public static Message of(String msg, Object... arguments) {
    return new Message(msg, arguments);
}
/** A null message template must be rejected with an IllegalArgumentException. */
@Test
public void fail_when_message_is_null() {
    assertThatThrownBy(() -> Message.of(null))
        .isInstanceOf(IllegalArgumentException.class);
}
/**
 * Returns the attribute for the given key, lazily creating and inserting it on first access.
 * Lock-free: snapshots the attributes array and retries via CAS until the update wins.
 *
 * @param key the attribute key; must not be null
 * @return the existing (non-removed) attribute, or a newly created one
 */
@SuppressWarnings("unchecked")
@Override
public <T> Attribute<T> attr(AttributeKey<T> key) {
    ObjectUtil.checkNotNull(key, "key");
    DefaultAttribute newAttribute = null;
    for (;;) {
        // Snapshot the current array; the CAS below fails if it changed concurrently.
        final DefaultAttribute[] attributes = this.attributes;
        final int index = searchAttributeByKey(attributes, key);
        final DefaultAttribute[] newAttributes;
        if (index >= 0) {
            final DefaultAttribute attribute = attributes[index];
            assert attribute.key() == key;
            if (!attribute.isRemoved()) {
                return attribute;
            }
            // let's try replace the removed attribute with a new one
            if (newAttribute == null) {
                newAttribute = new DefaultAttribute<T>(this, key);
            }
            final int count = attributes.length;
            newAttributes = Arrays.copyOf(attributes, count);
            newAttributes[index] = newAttribute;
        } else {
            if (newAttribute == null) {
                newAttribute = new DefaultAttribute<T>(this, key);
            }
            final int count = attributes.length;
            newAttributes = new DefaultAttribute[count + 1];
            // Insert while preserving key order — presumably required by searchAttributeByKey; TODO confirm.
            orderedCopyOnInsert(attributes, count, newAttributes, newAttribute);
        }
        if (ATTRIBUTES_UPDATER.compareAndSet(this, attributes, newAttributes)) {
            // Publication succeeded; the freshly created attribute is now visible.
            return newAttribute;
        }
        // CAS lost: another thread changed the array — loop and retry (reusing newAttribute).
    }
}
/** getAndRemove detaches the attribute; a later attr() call must yield a fresh instance. */
@Test
public void testSetRemove() {
    AttributeKey<Integer> key = AttributeKey.valueOf("key");
    Attribute<Integer> attr = map.attr(key);
    attr.set(1);
    // getAndRemove returns the last value and removes the attribute from the map.
    assertSame(1, attr.getAndRemove());
    Attribute<Integer> attr2 = map.attr(key);
    attr2.set(2);
    assertSame(2, attr2.get());
    // The removed attribute instance must not be resurrected.
    assertNotSame(attr, attr2);
}
/**
 * Resolves the parameter type for a vararg Java parameter, delegating to
 * getSchemaFromType with the vararg-specific Java-to-arg-type mapping.
 */
public static ParamType getVarArgsSchemaFromType(final Type type) {
    return getSchemaFromType(type, VARARGS_JAVA_TO_ARG_TYPE);
}
/** A parameterized Map&lt;K, V&gt; type must resolve to MapType with generic K and V parameters. */
@Test
public void shouldGetGenericSchemaFromParameterizedTypeVariadic() throws NoSuchMethodException {
    // Given: the generic return type of a helper method declared on this test class.
    final Type genericType = getClass().getMethod("genericMapType").getGenericReturnType();
    // When:
    final ParamType returnType = UdfUtil.getVarArgsSchemaFromType(genericType);
    // Then:
    assertThat(returnType, is(MapType.of(GenericType.of("K"), GenericType.of("V"))));
}
/**
 * Verifies that a file named {@code filename} may be created inside {@code workdir},
 * failing fast before any write is attempted.
 *
 * @throws InvalidFilenameException if the name does not pass validation
 * @throws BackgroundException      if neither required permission is present on workdir
 */
@Override
public void preflight(final Path workdir, final String filename) throws BackgroundException {
    if(!validate(filename)) {
        throw new InvalidFilenameException(MessageFormat.format(LocaleFactory.localizedString("Cannot create {0}", "Error"), filename));
    }
    // File/directory creation summary:
    // - Directories with ctera:writepermission but no ctera:createdirectoriespermission allow for file creation only.
    // - Directories with ctera:createdirectoriespermission but no ctera:writepermission allow for directory and file creation.
    // - Directories with only ctera:readpermission do not allow for file nor directory creation, for listing only.
    // In other words:
    // - file creation is allowed if either ctera:createdirectoriespermission or ctera:writepermission is set or both are set
    // - directory creation is allowed if ctera:createdirectoriespermission is set.
    // ctera:createdirectoriespermission or ctera:writepermission
    try {
        assumeRole(workdir, WRITEPERMISSION);
    }
    catch(AccessDeniedException e) {
        // ignore and try second option
        assumeRole(workdir, CREATEDIRECTORIESPERMISSION);
    }
}
/**
 * preflight on the account's home directory must not throw for a random filename.
 * NOTE(review): relies on the test session's home granting the required CTERA permission.
 */
@Test
public void testPreflightFileAccessGrantedCustomProps() throws Exception {
    final Path file = new Path(new DefaultHomeFinderService(session).find(),
        new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory));
    new CteraTouchFeature(session).preflight(file, new AlphanumericRandomStringService().random());
    // assert no fail
}
/**
 * Writes a double as its IEEE-754 64-bit representation via {@link #writeLong}.
 *
 * @param v the value to write
 * @throws IOException if the underlying write fails
 */
@Override
public void writeDouble(final double v) throws IOException {
    final long bits = Double.doubleToLongBits(v);
    writeLong(bits);
}
/** writeDouble at an offset with LITTLE_ENDIAN must store the IEEE-754 bit pattern little-endian. */
@Test
public void testWriteDoubleForPositionVByteOrder() throws Exception {
    double v = 1.1d;
    out.writeDouble(1, v, LITTLE_ENDIAN);
    long theLong = Double.doubleToLongBits(v);
    // Read the raw buffer back little-endian and compare the bit patterns.
    long readLongB = Bits.readLongL(out.buffer, 1);
    assertEquals(theLong, readLongB);
}
/**
 * Waits up to the given timeout for all child event loops (active, then idle) to terminate.
 * The two identical wait loops of the original are factored into one helper.
 *
 * @return the group's termination state when the wait ends (also returned on timeout)
 * @throws InterruptedException if interrupted while waiting
 */
@Override
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
    long deadline = System.nanoTime() + unit.toNanos(timeout);
    // Only move on to the idle children if the active ones terminated before the deadline.
    if (awaitChildrenTermination(activeChildren, deadline)) {
        awaitChildrenTermination(idleChildren, deadline);
    }
    return isTerminated();
}

/**
 * Waits for each child to terminate until the absolute nanoTime deadline.
 *
 * @return true if every child terminated before the deadline, false on timeout
 */
private static boolean awaitChildrenTermination(Iterable<EventLoop> children, long deadline)
        throws InterruptedException {
    for (EventLoop l : children) {
        for (;;) {
            long timeLeft = deadline - System.nanoTime();
            if (timeLeft <= 0) {
                return false;
            }
            if (l.awaitTermination(timeLeft, TimeUnit.NANOSECONDS)) {
                break;
            }
        }
    }
    return true;
}
/**
 * Regression test: the group's termination future must be completed exactly once even
 * across repeated start/shutdown cycles. A double setSuccess would throw
 * IllegalStateException, which the instrumented promise below records.
 */
@Test
public void testTerminationFutureSuccessReflectively() throws Exception {
    // Swap in an instrumented promise via reflection, since the field is not exposed.
    Field terminationFutureField = ThreadPerChannelEventLoopGroup.class.getDeclaredField("terminationFuture");
    terminationFutureField.setAccessible(true);
    final Exception[] exceptionHolder = new Exception[1];
    for (int i = 0; i < 2; i++) {
        ThreadPerChannelEventLoopGroup loopGroup = new ThreadPerChannelEventLoopGroup(64);
        Promise<?> promise = new DefaultPromise<Void>(GlobalEventExecutor.INSTANCE) {
            @Override
            public Promise<Void> setSuccess(Void result) {
                try {
                    return super.setSuccess(result);
                } catch (IllegalStateException e) {
                    // Record a double-completion and rethrow so callers still see it.
                    exceptionHolder[0] = e;
                    throw e;
                }
            }
        };
        terminationFutureField.set(loopGroup, promise);
        runTest(loopGroup);
    }
    // The global event executor will not terminate, but this will give the test a chance to fail.
    GlobalEventExecutor.INSTANCE.awaitTermination(100, TimeUnit.MILLISECONDS);
    assertNull(exceptionHolder[0]);
}
/**
 * Parses a duration of the form "&lt;number&gt; &lt;unit&gt;" (whitespace-separated).
 * Any parsing problem is rewrapped as IllegalArgumentException naming the input.
 */
public static Duration parse(final String text) {
    try {
        final String[] tokens = text.split("\\s");
        if (tokens.length != 2) {
            throw new IllegalArgumentException("Expected 2 tokens, got: " + tokens.length);
        }
        return buildDuration(parseNumeric(tokens[0]), tokens[1]);
    } catch (final Exception e) {
        throw new IllegalArgumentException("Invalid duration: '" + text + "'. " + e.getMessage(), e);
    }
}
/** A single-token input must be rejected with a message naming the actual token count. */
@Test
public void shouldThrowOnTooFewTokens() {
    // When:
    final Exception e = assertThrows(
        IllegalArgumentException.class,
        () -> parse("10")
    );
    // Then:
    assertThat(e.getMessage(), containsString("Expected 2 tokens, got: 1"));
}
/**
 * Provisioning is on only when GitLab auth itself is enabled AND the provisioning
 * property is explicitly set to true (absent defaults to false).
 */
@Override
public boolean isProvisioningEnabled() {
    if (!isEnabled()) {
        return false;
    }
    return configuration.getBoolean(GITLAB_AUTH_PROVISIONING_ENABLED).orElse(false);
}
/**
 * Provisioning must be reported as disabled when GitLab authentication itself is
 * disabled, even if the provisioning property is explicitly set to true.
 * (Fixed copy-paste: the method name previously said "GithubAuthDisabled" in this
 * GitLab configuration test.)
 */
@Test
public void isProvisioningEnabled_ifProvisioningEnabledButGitlabAuthDisabled_returnsFalse() {
    // Only the provisioning flag is set; the GitLab auth enablement settings are not.
    settings.setProperty(GITLAB_AUTH_PROVISIONING_ENABLED, true);
    assertThat(config.isProvisioningEnabled()).isFalse();
}
/**
 * Builds the statement {@code java.util.Arrays.asList();} — a no-argument call
 * scoped on the fully qualified Arrays class — wrapped in an ExpressionStmt.
 */
public static ExpressionStmt createArraysAsListExpression() {
    final MethodCallExpr asListCall = new MethodCallExpr();
    asListCall.setScope(new NameExpr(new SimpleName(Arrays.class.getName())));
    asListCall.setName(new SimpleName("asList"));
    final ExpressionStmt statement = new ExpressionStmt();
    statement.setExpression(asListCall);
    return statement;
}
/** The generated statement must print exactly as a fully-qualified Arrays.asList() call. */
@Test
void createArraysAsListExpression() {
    ExpressionStmt retrieved = CommonCodegenUtils.createArraysAsListExpression();
    assertThat(retrieved).isNotNull();
    String expected = "java.util.Arrays.asList();";
    String retrievedString = retrieved.toString();
    assertThat(retrievedString).isEqualTo(expected);
}
/** Returns the configured maximum size of a single cache entry. */
public long maxEntrySize() {
    return maxEntrySize;
}
/** The executor-cache max entry size must be picked up from the Spark SQL conf. */
@Test
public void testMaxEntrySizeConfig() {
    withSQLConf(
        ImmutableMap.of(SparkSQLProperties.EXECUTOR_CACHE_MAX_ENTRY_SIZE, "128"),
        () -> {
            Conf conf = new Conf();
            assertThat(conf.maxEntrySize()).isEqualTo(128L);
        });
}
/**
 * Convenience overload of processValues that auto-generates the processor name
 * and delegates to the Named-accepting variant.
 */
@Override
public <VOut> KStream<K, VOut> processValues(
    final FixedKeyProcessorSupplier<? super K, ? super V, VOut> processorSupplier,
    final String... stateStoreNames
) {
    final Named generatedName = Named.as(builder.newProcessorName(PROCESSVALUES_NAME));
    return processValues(processorSupplier, generatedName, stateStoreNames);
}
/** Passing a null Named to processValues must fail fast with a descriptive NPE. */
@Test
public void shouldNotAllowNullNamedOnProcessValues() {
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.processValues(fixedKeyProcessorSupplier, (Named) null));
    assertThat(exception.getMessage(), equalTo("named can't be null"));
}
/**
 * Matches rules of the form "current" optionally followed by "|" and at least one
 * further character (e.g. "current|YYYY-MM-dd"); a bare trailing "|" does not match.
 */
@Override
public boolean match(final String rule) {
    final String pattern = "^current(\\|.+)?";
    return rule.matches(pattern);
}
/** "current" alone or with a non-empty suffix matches; a trailing bare '|' does not. */
@Test
public void testMatch() {
    Assertions.assertTrue(generator.match("current"));
    // the regex requires at least one character after the pipe
    Assertions.assertFalse(generator.match("current|"));
    Assertions.assertTrue(generator.match("current|YYYY-MM-dd"));
}
/**
 * Looks up and prints the route data of the topic given by the -t option.
 * Starts a throwaway admin client and always shuts it down, even on failure.
 *
 * @throws SubCommandException wrapping any failure, with the command class name as context
 */
@Override
public void execute(final CommandLine commandLine, final Options options, RPCHook rpcHook) throws SubCommandException {
    DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
    // Timestamp-based instance name avoids client-instance clashes when run repeatedly in one JVM.
    defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
    try {
        defaultMQAdminExt.start();
        String topic = commandLine.getOptionValue('t').trim();
        TopicRouteData topicRouteData = defaultMQAdminExt.examineTopicRouteInfo(topic);
        // -l toggles the output format; the exact formats live in printData.
        printData(topicRouteData, commandLine.hasOption('l'));
    } catch (Exception e) {
        throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
    } finally {
        // Always release the admin client's resources.
        defaultMQAdminExt.shutdown();
    }
}
/** The -t option value must round-trip through the command line parser (trimmed). */
@Test
public void testExecute() {
    TopicRouteSubCommand cmd = new TopicRouteSubCommand();
    Options options = ServerUtil.buildCommandlineOptions(new Options());
    // Single arg "-t unit-test": the parsed value carries a leading space, hence the trim below.
    String[] subargs = new String[] {"-t unit-test"};
    final CommandLine commandLine = ServerUtil.parseCmdLine(
        "mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new DefaultParser());
    assertThat(commandLine.getOptionValue('t').trim()).isEqualTo("unit-test");
}
/**
 * Diffs two collections and returns [createList, updateList, deleteList], where
 * {@code sameFunc} decides whether an old element and a new element are the same record.
 * New elements with no old match are "create"; matched pairs contribute the NEW element
 * to "update"; old elements with no new match are "delete".
 */
public static <T> List<List<T>> diffList(Collection<T> oldList, Collection<T> newList,
    BiFunction<T, T, Boolean> sameFunc) {
    List<T> createList = new LinkedList<>(newList); // assume everything is new; matches are removed below
    List<T> updateList = new ArrayList<>();
    List<T> deleteList = new ArrayList<>();
    // Drive the scan from oldList to discover updates and deletions
    for (T oldObj : oldList) {
        // 1. look for a matching new element
        T foundObj = null;
        for (Iterator<T> iterator = createList.iterator(); iterator.hasNext(); ) {
            T newObj = iterator.next();
            // 1.1 no match — keep scanning
            if (!sameFunc.apply(oldObj, newObj)) {
                continue;
            }
            // 1.2 match — remove it from the create list and stop searching
            iterator.remove();
            foundObj = newObj;
            break;
        }
        // 2. matched -> update; unmatched -> delete
        if (foundObj != null) {
            updateList.add(foundObj);
        } else {
            deleteList.add(oldObj);
        }
    }
    return asList(createList, updateList, deleteList);
}
/**
 * diffList must classify: unmatched new -> create, matched pairs -> update
 * (with the id carried over by sameFunc), unmatched old -> delete.
 */
@Test
public void testDiffList() {
    // Prepare arguments
    Collection<Dog> oldList = Arrays.asList(
        new Dog(1, "花花", "hh"),
        new Dog(2, "旺财", "wc")
    );
    Collection<Dog> newList = Arrays.asList(
        new Dog(null, "花花2", "hh"),
        new Dog(null, "小白", "xb")
    );
    BiFunction<Dog, Dog, Boolean> sameFunc = (oldObj, newObj) -> {
        boolean same = oldObj.getCode().equals(newObj.getCode());
        // When they match, carry the id over so the later update can be applied
        if (same) {
            newObj.setId(oldObj.getId());
        }
        return same;
    };
    // Invoke
    List<List<Dog>> result = CollectionUtils.diffList(oldList, newList, sameFunc);
    // Assert
    assertEquals(result.size(), 3);
    // Assert create
    assertEquals(result.get(0).size(), 1);
    assertEquals(result.get(0).get(0), new Dog(null, "小白", "xb"));
    // Assert update
    assertEquals(result.get(1).size(), 1);
    assertEquals(result.get(1).get(0), new Dog(1, "花花2", "hh"));
    // Assert delete
    assertEquals(result.get(2).size(), 1);
    assertEquals(result.get(2).get(0), new Dog(2, "旺财", "wc"));
}
/**
 * Fetches all windowed values for the given key, wrapping the inner iterator so that
 * latency metrics and open-iterator accounting are recorded.
 *
 * @param key the key to fetch; must not be null
 * @throws NullPointerException if key is null
 */
@Override
public KeyValueIterator<Windowed<K>, V> fetch(final K key) {
    Objects.requireNonNull(key, "key cannot be null");
    return new MeteredWindowedKeyValueIterator<>(
        wrapped().fetch(keyBytes(key)),
        fetchSensor,
        iteratorDurationSensor,
        streamsMetrics,
        serdes::keyFrom,
        serdes::valueFrom,
        time,
        numOpenIterators,
        openIterators);
}
/**
 * fetch(from, to) must reject a null 'from' key.
 * NOTE(review): this exercises the two-argument range overload, not single-key fetch.
 */
@Test
public void shouldThrowNullPointerOnFetchRangeIfFromIsNull() {
    setUpWithoutContext();
    assertThrows(NullPointerException.class, () -> store.fetch(null, "to"));
}
@Override public void endInput(int inputId) throws Exception { // sanity check. checkState(inputId >= 1 && inputId <= 2); if (inputId == 1) { userFunction.endFirstInput(nonPartitionedContext); } else { userFunction.endSecondInput(nonPartitionedContext); } }
/**
 * endInput must dispatch to endFirstInput / endSecondInput exactly once per side,
 * and records emitted from applyToAllPartitions must reach the operator output.
 */
@Test
void testEndInput() throws Exception {
    AtomicInteger firstInputCounter = new AtomicInteger();
    AtomicInteger secondInputCounter = new AtomicInteger();
    TwoInputNonBroadcastProcessOperator<Integer, Long, Long> processOperator =
        new TwoInputNonBroadcastProcessOperator<>(
            new TwoInputNonBroadcastStreamProcessFunction<Integer, Long, Long>() {
                @Override
                public void processRecordFromFirstInput(
                    Integer record, Collector<Long> output, PartitionedContext ctx) throws Exception {
                    // do nothing.
                }

                @Override
                public void processRecordFromSecondInput(
                    Long record, Collector<Long> output, PartitionedContext ctx) throws Exception {
                    // do nothing.
                }

                @Override
                public void endFirstInput(NonPartitionedContext<Long> ctx) {
                    // Count the callback and emit a marker record for the output assertion.
                    try {
                        ctx.applyToAllPartitions(
                            (out, context) -> {
                                firstInputCounter.incrementAndGet();
                                out.collect(1L);
                            });
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }

                @Override
                public void endSecondInput(NonPartitionedContext<Long> ctx) {
                    try {
                        ctx.applyToAllPartitions(
                            (out, context) -> {
                                secondInputCounter.incrementAndGet();
                                out.collect(2L);
                            });
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            });
    try (TwoInputStreamOperatorTestHarness<Integer, Long, Long> testHarness =
        new TwoInputStreamOperatorTestHarness<>(processOperator)) {
        testHarness.open();
        testHarness.endInput1();
        assertThat(firstInputCounter).hasValue(1);
        testHarness.endInput2();
        assertThat(secondInputCounter).hasValue(1);
        Collection<StreamRecord<Long>> recordOutput = testHarness.getRecordOutput();
        assertThat(recordOutput)
            .containsExactly(new StreamRecord<>(1L), new StreamRecord<>(2L));
    }
}
/**
 * Validates the ingestion section of a table config: batch, stream, filter, aggregation,
 * enrichment, transform, complex-type, and schema-conforming-transformer settings.
 * A null ingestion config is valid. All failures surface as IllegalStateException.
 *
 * @param tableConfig the table config to validate
 * @param schema      optional schema; schema-dependent checks are skipped when null
 */
@VisibleForTesting
public static void validateIngestionConfig(TableConfig tableConfig, @Nullable Schema schema) {
  IngestionConfig ingestionConfig = tableConfig.getIngestionConfig();
  if (ingestionConfig != null) {
    String tableNameWithType = tableConfig.getTableName();
    // Batch
    if (ingestionConfig.getBatchIngestionConfig() != null) {
      BatchIngestionConfig cfg = ingestionConfig.getBatchIngestionConfig();
      List<Map<String, String>> batchConfigMaps = cfg.getBatchConfigMaps();
      try {
        if (CollectionUtils.isNotEmpty(batchConfigMaps)) {
          // Validate that BatchConfig can be created
          batchConfigMaps.forEach(b -> new BatchConfig(tableNameWithType, b));
        }
      } catch (Exception e) {
        throw new IllegalStateException("Could not create BatchConfig using the batchConfig map", e);
      }
      if (tableConfig.isDimTable()) {
        Preconditions.checkState(cfg.getSegmentIngestionType().equalsIgnoreCase("REFRESH"),
            "Dimension tables must have segment ingestion type REFRESH");
      }
    }
    if (tableConfig.isDimTable()) {
      Preconditions.checkState(ingestionConfig.getBatchIngestionConfig() != null,
          "Dimension tables must have batch ingestion configuration");
    }
    // Stream
    // stream config map can either be in ingestion config or indexing config. cannot be in both places
    if (ingestionConfig.getStreamIngestionConfig() != null) {
      IndexingConfig indexingConfig = tableConfig.getIndexingConfig();
      Preconditions.checkState(indexingConfig == null || MapUtils.isEmpty(indexingConfig.getStreamConfigs()),
          "Should not use indexingConfig#getStreamConfigs if ingestionConfig#StreamIngestionConfig is provided");
      List<Map<String, String>> streamConfigMaps = ingestionConfig.getStreamIngestionConfig().getStreamConfigMaps();
      Preconditions.checkState(streamConfigMaps.size() == 1, "Only 1 stream is supported in REALTIME table");
    }
    // Filter config
    FilterConfig filterConfig = ingestionConfig.getFilterConfig();
    if (filterConfig != null) {
      String filterFunction = filterConfig.getFilterFunction();
      if (filterFunction != null) {
        if (_disableGroovy && FunctionEvaluatorFactory.isGroovyExpression(filterFunction)) {
          throw new IllegalStateException(
              "Groovy filter functions are disabled for table config. Found '" + filterFunction + "'");
        }
        try {
          // Compilation check only; the evaluator result is discarded.
          FunctionEvaluatorFactory.getExpressionEvaluator(filterFunction);
        } catch (Exception e) {
          throw new IllegalStateException("Invalid filter function " + filterFunction, e);
        }
      }
    }
    // Aggregation configs
    List<AggregationConfig> aggregationConfigs = ingestionConfig.getAggregationConfigs();
    Set<String> aggregationSourceColumns = new HashSet<>();
    if (!CollectionUtils.isEmpty(aggregationConfigs)) {
      Preconditions.checkState(!tableConfig.getIndexingConfig().isAggregateMetrics(),
          "aggregateMetrics cannot be set with AggregationConfig");
      Set<String> aggregationColumns = new HashSet<>();
      for (AggregationConfig aggregationConfig : aggregationConfigs) {
        String columnName = aggregationConfig.getColumnName();
        String aggregationFunction = aggregationConfig.getAggregationFunction();
        if (columnName == null || aggregationFunction == null) {
          throw new IllegalStateException(
              "columnName/aggregationFunction cannot be null in AggregationConfig " + aggregationConfig);
        }
        FieldSpec fieldSpec = null;
        if (schema != null) {
          fieldSpec = schema.getFieldSpecFor(columnName);
          Preconditions.checkState(fieldSpec != null,
              "The destination column '" + columnName + "' of the aggregation function must be present in the schema");
          Preconditions.checkState(fieldSpec.getFieldType() == FieldSpec.FieldType.METRIC,
              "The destination column '" + columnName + "' of the aggregation function must be a metric column");
        }
        if (!aggregationColumns.add(columnName)) {
          throw new IllegalStateException("Duplicate aggregation config found for column '" + columnName + "'");
        }
        ExpressionContext expressionContext;
        try {
          expressionContext = RequestContextUtils.getExpression(aggregationConfig.getAggregationFunction());
        } catch (Exception e) {
          throw new IllegalStateException(
              "Invalid aggregation function '" + aggregationFunction + "' for column '" + columnName + "'", e);
        }
        Preconditions.checkState(expressionContext.getType() == ExpressionContext.Type.FUNCTION,
            "aggregation function must be a function for: %s", aggregationConfig);
        FunctionContext functionContext = expressionContext.getFunction();
        AggregationFunctionType functionType =
            AggregationFunctionType.getAggregationFunctionType(functionContext.getFunctionName());
        validateIngestionAggregation(functionType);
        List<ExpressionContext> arguments = functionContext.getArguments();
        int numArguments = arguments.size();
        // Per-function argument arity and literal checks.
        if (functionType == DISTINCTCOUNTHLL) {
          Preconditions.checkState(numArguments >= 1 && numArguments <= 2,
              "DISTINCT_COUNT_HLL can have at most two arguments: %s", aggregationConfig);
          if (numArguments == 2) {
            ExpressionContext secondArgument = arguments.get(1);
            Preconditions.checkState(secondArgument.getType() == ExpressionContext.Type.LITERAL,
                "Second argument of DISTINCT_COUNT_HLL must be literal: %s", aggregationConfig);
            String literal = secondArgument.getLiteral().getStringValue();
            Preconditions.checkState(StringUtils.isNumeric(literal),
                "Second argument of DISTINCT_COUNT_HLL must be a number: %s", aggregationConfig);
          }
          if (fieldSpec != null) {
            DataType dataType = fieldSpec.getDataType();
            Preconditions.checkState(dataType == DataType.BYTES,
                "Result type for DISTINCT_COUNT_HLL must be BYTES: %s", aggregationConfig);
          }
        } else if (functionType == DISTINCTCOUNTHLLPLUS) {
          Preconditions.checkState(numArguments >= 1 && numArguments <= 3,
              "DISTINCT_COUNT_HLL_PLUS can have at most three arguments: %s", aggregationConfig);
          if (numArguments == 2) {
            ExpressionContext secondArgument = arguments.get(1);
            Preconditions.checkState(secondArgument.getType() == ExpressionContext.Type.LITERAL,
                "Second argument of DISTINCT_COUNT_HLL_PLUS must be literal: %s", aggregationConfig);
            String literal = secondArgument.getLiteral().getStringValue();
            Preconditions.checkState(StringUtils.isNumeric(literal),
                "Second argument of DISTINCT_COUNT_HLL_PLUS must be a number: %s", aggregationConfig);
          }
          if (numArguments == 3) {
            ExpressionContext thirdArgument = arguments.get(2);
            Preconditions.checkState(thirdArgument.getType() == ExpressionContext.Type.LITERAL,
                "Third argument of DISTINCT_COUNT_HLL_PLUS must be literal: %s", aggregationConfig);
            String literal = thirdArgument.getLiteral().getStringValue();
            Preconditions.checkState(StringUtils.isNumeric(literal),
                "Third argument of DISTINCT_COUNT_HLL_PLUS must be a number: %s", aggregationConfig);
          }
          if (fieldSpec != null) {
            DataType dataType = fieldSpec.getDataType();
            Preconditions.checkState(dataType == DataType.BYTES,
                "Result type for DISTINCT_COUNT_HLL_PLUS must be BYTES: %s", aggregationConfig);
          }
        } else if (functionType == SUMPRECISION) {
          Preconditions.checkState(numArguments >= 2 && numArguments <= 3,
              "SUM_PRECISION must specify precision (required), scale (optional): %s", aggregationConfig);
          ExpressionContext secondArgument = arguments.get(1);
          Preconditions.checkState(secondArgument.getType() == ExpressionContext.Type.LITERAL,
              "Second argument of SUM_PRECISION must be literal: %s", aggregationConfig);
          String literal = secondArgument.getLiteral().getStringValue();
          Preconditions.checkState(StringUtils.isNumeric(literal),
              "Second argument of SUM_PRECISION must be a number: %s", aggregationConfig);
          if (fieldSpec != null) {
            DataType dataType = fieldSpec.getDataType();
            // NOTE(review): message says DISTINCT_COUNT_HLL but this is the SUM_PRECISION branch.
            Preconditions.checkState(dataType == DataType.BIG_DECIMAL || dataType == DataType.BYTES,
                "Result type for DISTINCT_COUNT_HLL must be BIG_DECIMAL or BYTES: %s", aggregationConfig);
          }
        } else {
          Preconditions.checkState(numArguments == 1, "%s can only have one argument: %s", functionType,
              aggregationConfig);
        }
        ExpressionContext firstArgument = arguments.get(0);
        Preconditions.checkState(firstArgument.getType() == ExpressionContext.Type.IDENTIFIER,
            "First argument of aggregation function: %s must be identifier, got: %s", functionType,
            firstArgument.getType());
        aggregationSourceColumns.add(firstArgument.getIdentifier());
      }
      if (schema != null) {
        Preconditions.checkState(new HashSet<>(schema.getMetricNames()).equals(aggregationColumns),
            "all metric columns must be aggregated");
      }
      // This is required by MutableSegmentImpl.enableMetricsAggregationIfPossible().
      // That code will disable ingestion aggregation if all metrics aren't noDictionaryColumns.
      // But if you do that after the table is already created, all future aggregations will
      // just be the default value.
      Map<String, DictionaryIndexConfig> configPerCol = StandardIndexes.dictionary().getConfig(tableConfig, schema);
      aggregationColumns.forEach(column -> {
        DictionaryIndexConfig dictConfig = configPerCol.get(column);
        Preconditions.checkState(dictConfig != null && dictConfig.isDisabled(),
            "Aggregated column: %s must be a no-dictionary column", column);
      });
    }
    // Enrichment configs
    List<EnrichmentConfig> enrichmentConfigs = ingestionConfig.getEnrichmentConfigs();
    if (enrichmentConfigs != null) {
      for (EnrichmentConfig enrichmentConfig : enrichmentConfigs) {
        RecordEnricherRegistry.validateEnrichmentConfig(enrichmentConfig,
            new RecordEnricherValidationConfig(_disableGroovy));
      }
    }
    // Transform configs
    List<TransformConfig> transformConfigs = ingestionConfig.getTransformConfigs();
    if (transformConfigs != null) {
      Set<String> transformColumns = new HashSet<>();
      for (TransformConfig transformConfig : transformConfigs) {
        String columnName = transformConfig.getColumnName();
        String transformFunction = transformConfig.getTransformFunction();
        if (columnName == null || transformFunction == null) {
          throw new IllegalStateException(
              "columnName/transformFunction cannot be null in TransformConfig " + transformConfig);
        }
        if (!transformColumns.add(columnName)) {
          throw new IllegalStateException("Duplicate transform config found for column '" + columnName + "'");
        }
        if (schema != null) {
          Preconditions.checkState(
              schema.getFieldSpecFor(columnName) != null || aggregationSourceColumns.contains(columnName),
              "The destination column '" + columnName
                  + "' of the transform function must be present in the schema or as a source column for "
                  + "aggregations");
        }
        FunctionEvaluator expressionEvaluator;
        if (_disableGroovy && FunctionEvaluatorFactory.isGroovyExpression(transformFunction)) {
          throw new IllegalStateException(
              "Groovy transform functions are disabled for table config. Found '" + transformFunction
                  + "' for column '" + columnName + "'");
        }
        try {
          expressionEvaluator = FunctionEvaluatorFactory.getExpressionEvaluator(transformFunction);
        } catch (Exception e) {
          throw new IllegalStateException(
              "Invalid transform function '" + transformFunction + "' for column '" + columnName + "'", e);
        }
        // A transform must not read the column it writes (no self-reference).
        List<String> arguments = expressionEvaluator.getArguments();
        if (arguments.contains(columnName)) {
          throw new IllegalStateException(
              "Arguments of a transform function '" + arguments + "' cannot contain the destination column '"
                  + columnName + "'");
        }
      }
    }
    // Complex configs
    ComplexTypeConfig complexTypeConfig = ingestionConfig.getComplexTypeConfig();
    if (complexTypeConfig != null && schema != null) {
      Map<String, String> prefixesToRename = complexTypeConfig.getPrefixesToRename();
      if (MapUtils.isNotEmpty(prefixesToRename)) {
        Set<String> fieldNames = schema.getColumnNames();
        for (String prefix : prefixesToRename.keySet()) {
          for (String field : fieldNames) {
            Preconditions.checkState(!field.startsWith(prefix),
                "Fields in the schema may not begin with any prefix specified in the prefixesToRename"
                    + " config. Name conflict with field: " + field + " and prefix: " + prefix);
          }
        }
      }
    }
    SchemaConformingTransformerConfig schemaConformingTransformerConfig =
        ingestionConfig.getSchemaConformingTransformerConfig();
    if (null != schemaConformingTransformerConfig && null != schema) {
      SchemaConformingTransformer.validateSchema(schema, schemaConformingTransformerConfig);
    }
    SchemaConformingTransformerV2Config schemaConformingTransformerV2Config =
        ingestionConfig.getSchemaConformingTransformerV2Config();
    if (null != schemaConformingTransformerV2Config && null != schema) {
      SchemaConformingTransformerV2.validateSchema(schema, schemaConformingTransformerV2Config);
    }
  }
}
/**
 * End-to-end coverage of ingestion-config validation: filter functions, transform
 * configs (null/invalid/duplicate/self-referencing), Groovy disablement, aggregation
 * over a transformed column, peer download scheme vs replication, and
 * prefixesToRename name conflicts.
 */
@Test
public void validateIngestionConfig() {
  Schema schema = new Schema.SchemaBuilder().setSchemaName(TABLE_NAME).build();
  // null ingestion config
  TableConfig tableConfig =
      new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).setIngestionConfig(null).build();
  TableConfigUtils.validate(tableConfig, schema);
  // null filter config, transform config
  IngestionConfig ingestionConfig = new IngestionConfig();
  tableConfig.setIngestionConfig(ingestionConfig);
  TableConfigUtils.validate(tableConfig, schema);
  // null filter function
  ingestionConfig.setFilterConfig(new FilterConfig(null));
  TableConfigUtils.validate(tableConfig, schema);
  // valid filterFunction
  ingestionConfig.setFilterConfig(new FilterConfig("startsWith(columnX, \"myPrefix\")"));
  TableConfigUtils.validate(tableConfig, schema);
  // valid filterFunction
  ingestionConfig.setFilterConfig(new FilterConfig("Groovy({x == 10}, x)"));
  TableConfigUtils.validate(tableConfig, schema);
  // invalid filter function
  ingestionConfig.setFilterConfig(new FilterConfig("Groovy(badExpr)"));
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail on invalid filter function string");
  } catch (IllegalStateException e) {
    // expected
  }
  ingestionConfig.setFilterConfig(new FilterConfig("fakeFunction(xx)"));
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail for invalid filter function");
  } catch (IllegalStateException e) {
    // expected
  }
  // empty transform configs
  ingestionConfig.setFilterConfig(null);
  ingestionConfig.setTransformConfigs(Collections.emptyList());
  TableConfigUtils.validate(tableConfig, schema);
  // transformed column not in schema
  ingestionConfig.setTransformConfigs(Collections.singletonList(new TransformConfig("myCol", "reverse(anotherCol)")));
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail for transformedColumn not present in schema");
  } catch (IllegalStateException e) {
    // expected
  }
  // using a transformation column in an aggregation
  IndexingConfig indexingConfig = new IndexingConfig();
  indexingConfig.setNoDictionaryColumns(List.of("twiceSum"));
  tableConfig.setIndexingConfig(indexingConfig);
  schema =
      new Schema.SchemaBuilder().setSchemaName(TABLE_NAME).addMetric("twiceSum", FieldSpec.DataType.DOUBLE).build();
  ingestionConfig.setTransformConfigs(Collections.singletonList(new TransformConfig("twice", "col * 2")));
  ingestionConfig.setAggregationConfigs(Collections.singletonList(new AggregationConfig("twiceSum", "SUM(twice)")));
  TableConfigUtils.validate(tableConfig, schema);
  // valid transform configs
  schema =
      new Schema.SchemaBuilder().setSchemaName(TABLE_NAME).addSingleValueDimension("myCol", FieldSpec.DataType.STRING)
          .build();
  indexingConfig.setNoDictionaryColumns(List.of("myCol"));
  ingestionConfig.setAggregationConfigs(null);
  ingestionConfig.setTransformConfigs(Collections.singletonList(new TransformConfig("myCol", "reverse(anotherCol)")));
  TableConfigUtils.validate(tableConfig, schema);
  // valid transform configs
  schema =
      new Schema.SchemaBuilder().setSchemaName(TABLE_NAME).addSingleValueDimension("myCol", FieldSpec.DataType.STRING)
          .addMetric("transformedCol", FieldSpec.DataType.LONG).build();
  ingestionConfig.setTransformConfigs(Arrays.asList(new TransformConfig("myCol", "reverse(anotherCol)"),
      new TransformConfig("transformedCol", "Groovy({x+y}, x, y)")));
  TableConfigUtils.validate(tableConfig, schema);
  // invalid transform config since Groovy is disabled
  try {
    TableConfigUtils.setDisableGroovy(true);
    TableConfigUtils.validate(tableConfig, schema, null);
    // Reset to false
    TableConfigUtils.setDisableGroovy(false);
    Assert.fail("Should fail when Groovy functions disabled but found in transform config");
  } catch (IllegalStateException e) {
    // expected
  }
  // Using peer download scheme with replication of 1
  ingestionConfig.setTransformConfigs(null);
  SegmentsValidationAndRetentionConfig segmentsValidationAndRetentionConfig =
      new SegmentsValidationAndRetentionConfig();
  segmentsValidationAndRetentionConfig.setReplication("1");
  segmentsValidationAndRetentionConfig.setPeerSegmentDownloadScheme(CommonConstants.HTTP_PROTOCOL);
  tableConfig.setValidationConfig(segmentsValidationAndRetentionConfig);
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail when peer download scheme is used with replication of 1");
  } catch (IllegalStateException e) {
    // expected
    Assert.assertEquals(e.getMessage(), "peerSegmentDownloadScheme can't be used when replication is < 2");
  }
  segmentsValidationAndRetentionConfig.setReplication("2");
  tableConfig.setValidationConfig(segmentsValidationAndRetentionConfig);
  try {
    TableConfigUtils.validate(tableConfig, schema);
  } catch (IllegalStateException e) {
    // expected
    Assert.fail("Should not fail when peer download scheme is used with replication of > 1");
  }
  // invalid filter config since Groovy is disabled
  ingestionConfig.setFilterConfig(new FilterConfig("Groovy({timestamp > 0}, timestamp)"));
  try {
    TableConfigUtils.setDisableGroovy(true);
    TableConfigUtils.validate(tableConfig, schema, null);
    // Reset to false
    TableConfigUtils.setDisableGroovy(false);
    Assert.fail("Should fail when Groovy functions disabled but found in filter config");
  } catch (IllegalStateException e) {
    // expected
  }
  // null transform column name
  ingestionConfig.setFilterConfig(null);
  ingestionConfig.setTransformConfigs(Collections.singletonList(new TransformConfig(null, "reverse(anotherCol)")));
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail for null column name in transform config");
  } catch (IllegalStateException e) {
    // expected
  }
  // null transform function string
  ingestionConfig.setTransformConfigs(Collections.singletonList(new TransformConfig("myCol", null)));
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail for null transform function in transform config");
  } catch (IllegalStateException e) {
    // expected
  }
  // invalid function
  ingestionConfig.setTransformConfigs(Collections.singletonList(new TransformConfig("myCol", "fakeFunction(col)")));
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail for invalid transform function in transform config");
  } catch (IllegalStateException e) {
    // expected
  }
  // invalid function
  ingestionConfig.setTransformConfigs(Collections.singletonList(new TransformConfig("myCol", "Groovy(badExpr)")));
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail for invalid transform function in transform config");
  } catch (IllegalStateException e) {
    // expected
  }
  // input field name used as destination field
  ingestionConfig.setTransformConfigs(Collections.singletonList(new TransformConfig("myCol", "reverse(myCol)")));
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail due to use of myCol as arguments and columnName");
  } catch (IllegalStateException e) {
    // expected
  }
  // input field name used as destination field
  ingestionConfig.setTransformConfigs(
      Collections.singletonList(new TransformConfig("myCol", "Groovy({x + y + myCol}, x, myCol, y)")));
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail due to use of myCol as arguments and columnName");
  } catch (IllegalStateException e) {
    // expected
  }
  // duplicate transform config
  ingestionConfig.setTransformConfigs(
      Arrays.asList(new TransformConfig("myCol", "reverse(x)"), new TransformConfig("myCol", "lower(y)")));
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail due to duplicate transform config");
  } catch (IllegalStateException e) {
    // expected
  }
  // derived columns - should pass
  ingestionConfig.setTransformConfigs(Arrays.asList(new TransformConfig("transformedCol", "reverse(x)"),
      new TransformConfig("myCol", "lower(transformedCol)")));
  TableConfigUtils.validate(tableConfig, schema);
  // invalid field name in schema with matching prefix from complexConfigType's prefixesToRename
  ingestionConfig.setTransformConfigs(null);
  ingestionConfig.setComplexTypeConfig(
      new ComplexTypeConfig(null, ".", null, Collections.singletonMap("after.", "")));
  schema = new Schema.SchemaBuilder().setSchemaName(TABLE_NAME)
      .addMultiValueDimension("after.test", FieldSpec.DataType.STRING).build();
  try {
    TableConfigUtils.validate(tableConfig, schema);
    Assert.fail("Should fail due to name conflict from field name in schema with a prefix in prefixesToRename");
  } catch (IllegalStateException e) {
    // expected
  }
}
@Override
public void writeTo(ByteBuf byteBuf) throws LispWriterException {
    // Delegate serialization of this message to the shared WRITER instance.
    WRITER.writeTo(byteBuf, this);
}
@Test
public void testSerialization() throws LispReaderException, LispWriterException, LispParseError {
    // Round-trip check: serialize register1 into a buffer, read it back, and
    // verify the deserialized message is equal to the original.
    ByteBuf byteBuf = Unpooled.buffer();
    RegisterWriter writer = new RegisterWriter();
    writer.writeTo(byteBuf, register1);
    RegisterReader reader = new RegisterReader();
    LispMapRegister deserialized = reader.readFrom(byteBuf);
    new EqualsTester().addEqualityGroup(register1, deserialized).testEquals();
}
@Override
public void onMatch(RelOptRuleCall call) {
    // Rewrites Sort-over-SortExchange by copying the sort (with a combined
    // offset+fetch limit) below the exchange, so each input sorts/limits locally.
    final Sort sort = call.rel(0);
    final SortExchange exchange = call.rel(1);
    final RelMetadataQuery metadataQuery = call.getMetadataQuery();
    if (RelMdUtil.checkInputForCollationAndLimit(
        metadataQuery,
        exchange.getInput(),
        sort.getCollation(),
        sort.offset,
        sort.fetch)) {
        // Don't rewrite anything if the input is already sorted AND the
        // input node would already return fewer than sort.offset + sort.fetch
        // rows (e.g. there is already an inner limit applied)
        return;
    }
    RelCollation collation = sort.getCollation();
    Preconditions.checkArgument(
        collation.equals(exchange.getCollation()),
        "Expected collation on exchange and sort to be the same"
    );
    // The copy pushed below the exchange must fetch offset+fetch rows, because
    // the offset can only be applied after the exchange merges the inputs.
    final RexNode fetch;
    if (sort.fetch == null) {
        fetch = null;
    } else if (sort.offset == null) {
        fetch = sort.fetch;
    } else {
        int total = RexExpressionUtils.getValueAsInt(sort.fetch) + RexExpressionUtils.getValueAsInt(sort.offset);
        fetch = REX_BUILDER.makeLiteral(total, TYPE_FACTORY.createSqlType(SqlTypeName.INTEGER));
    }
    // do not transform sort-exchange copy when there's no fetch limit, or fetch amount is larger than threshold
    if (!collation.getFieldCollations().isEmpty()
        && (fetch == null || RexExpressionUtils.getValueAsInt(fetch) > DEFAULT_SORT_EXCHANGE_COPY_THRESHOLD)) {
        return;
    }
    // New input: the sort (without offset) applied below the exchange.
    final RelNode newExchangeInput = sort.copy(sort.getTraitSet(), exchange.getInput(), collation, null, fetch);
    final RelNode exchangeCopy = exchange.copy(exchange.getTraitSet(), newExchangeInput, exchange.getDistribution());
    // Top-level sort keeps the original offset/fetch; offset defaults to zero.
    final RelNode sortCopy = sort.copy(sort.getTraitSet(), exchangeCopy, collation,
        sort.offset == null ? REX_ZERO : sort.offset, sort.fetch);
    call.transformTo(sortCopy);
}
@Test
public void shouldNotMatchOffsetNoLimitNoSort() {
    // A sort with only an offset (no fetch, no collation) must not be rewritten.
    // Given:
    SortExchange exchange = PinotLogicalSortExchange.create(_input, RelDistributions.SINGLETON,
        RelCollations.EMPTY, false, true);
    Sort sort = LogicalSort.create(exchange, RelCollations.EMPTY, literal(1), null);
    Mockito.when(_call.rel(0)).thenReturn(sort);
    Mockito.when(_call.rel(1)).thenReturn(exchange);
    // When:
    PinotSortExchangeCopyRule.SORT_EXCHANGE_COPY.onMatch(_call);
    // Then: the rule must not produce any transformation.
    Mockito.verify(_call, Mockito.never()).transformTo(Mockito.any(), Mockito.anyMap());
}
public void reset() {
    // Return the modifier-key state machine to its initial, inactive configuration.
    mPhysicalState = RELEASING;
    mMomentaryPress = false;
    mLogicalState = INACTIVE;
    mActiveStateStartTime = 0L;
    mConsumed = false;
}
@Test
public void testReset() throws Exception {
    // Drive the state through press/release, reset it, and verify the key can
    // go through a fresh press/release cycle as if newly constructed.
    long millis = 1000;
    setCurrentTimeMillis(++millis);
    ModifierKeyState state = new ModifierKeyState(true);
    Assert.assertFalse(state.isActive());
    Assert.assertFalse(state.isLocked());
    Assert.assertFalse(state.isPressed());
    state.onPress();
    Assert.assertTrue(state.isActive());
    Assert.assertFalse(state.isLocked());
    Assert.assertTrue(state.isPressed());
    setCurrentTimeMillis(++millis);
    state.onRelease(DOUBLE_TAP_TIMEOUT, LONG_PRESS_TIMEOUT);
    Assert.assertTrue(state.isActive());
    Assert.assertFalse(state.isLocked());
    Assert.assertFalse(state.isPressed());
    // reset() should drop all activity.
    state.reset();
    Assert.assertFalse(state.isActive());
    Assert.assertFalse(state.isLocked());
    Assert.assertFalse(state.isPressed());
    // A second cycle after reset behaves exactly like the first one.
    state.onPress();
    Assert.assertTrue(state.isActive());
    Assert.assertFalse(state.isLocked());
    Assert.assertTrue(state.isPressed());
    setCurrentTimeMillis(++millis);
    state.onRelease(DOUBLE_TAP_TIMEOUT, LONG_PRESS_TIMEOUT);
    Assert.assertTrue(state.isActive());
    Assert.assertFalse(state.isLocked());
    Assert.assertFalse(state.isPressed());
}
/**
 * Delegates reservation creation to the configured planner and logs the
 * outcome. The outcome line was previously built three separate times with
 * identical concatenation; it is now produced by a single private helper.
 *
 * @throws PlanningException propagated from the planner after the failure
 *         outcome has been logged
 */
@Override
public boolean createReservation(ReservationId reservationId, String user,
    Plan plan, ReservationDefinition contract) throws PlanningException {
    LOG.info("placing the following ReservationRequest: " + contract);
    try {
        boolean res = planner.createReservation(reservationId, user, plan, contract);
        logOutcome(res, reservationId, contract);
        return res;
    } catch (PlanningException e) {
        // Record the failure before re-throwing so every attempt leaves an
        // outcome line in the log.
        logOutcome(false, reservationId, contract);
        throw e;
    }
}

// Emits the reservation outcome in one consistent format (shared by the
// success, rejection and exception paths).
private void logOutcome(boolean success, ReservationId reservationId,
    ReservationDefinition contract) {
    LOG.info("OUTCOME: " + (success ? "SUCCESS" : "FAILURE")
        + ", Reservation ID: " + reservationId.toString()
        + ", Contract: " + contract.toString());
}
@Test
public void testSingleSliding() throws PlanningException {
    prepareBasicPlan();
    // create a single request for which we need subsequent (tight) packing.
    ReservationDefinition rr = new ReservationDefinitionPBImpl();
    rr.setArrival(100 * step);
    rr.setDeadline(120 * step);
    rr.setRecurrenceExpression(recurrenceExpression);
    ReservationRequests reqs = new ReservationRequestsPBImpl();
    reqs.setInterpreter(ReservationRequestInterpreter.R_ALL);
    ReservationRequest r = ReservationRequest.newInstance(
        Resource.newInstance(1024, 1), 200, 10, 10 * step);
    List<ReservationRequest> list = new ArrayList<ReservationRequest>();
    list.add(r);
    reqs.setReservationResources(list);
    rr.setReservationRequests(reqs);
    // submit to agent
    ReservationId reservationID = ReservationSystemTestUtil
        .getNewReservationId();
    agent.createReservation(reservationID, "u1", plan, rr);
    // validate results, we expect the second one to be accepted
    // NOTE(review): reservationID is assigned locally above, so this null
    // check is trivially true; assertNotNull on the plan lookup would be
    // a stronger assertion — TODO confirm intent.
    assertTrue("Agent-based allocation failed", reservationID != null);
    assertTrue("Agent-based allocation failed", plan.getAllReservations()
        .size() == 3);
    ReservationAllocation cs = plan.getReservationById(reservationID);
    assertTrue(cs.toString(), check(cs, 100 * step, 120 * step, 100, 1024, 1));
    // Debug output of the resulting plan state.
    System.out.println("--------AFTER packed ALLOCATION (queue: " + reservationID
        + ")----------");
    System.out.println(plan.toString());
    System.out.println(plan.toCumulativeString());
}
static Collection<TopicPartition> getMatchingTopicPartitions( Admin adminClient, String topicRegex, int startPartition, int endPartition) throws Throwable { final Pattern topicNamePattern = Pattern.compile(topicRegex); // first get list of matching topics List<String> matchedTopics = new ArrayList<>(); ListTopicsResult res = adminClient.listTopics( new ListTopicsOptions().timeoutMs(ADMIN_REQUEST_TIMEOUT)); Map<String, TopicListing> topicListingMap = res.namesToListings().get(); for (Map.Entry<String, TopicListing> topicListingEntry: topicListingMap.entrySet()) { if (!topicListingEntry.getValue().isInternal() && topicNamePattern.matcher(topicListingEntry.getKey()).matches()) { matchedTopics.add(topicListingEntry.getKey()); } } // create a list of topic/partitions List<TopicPartition> out = new ArrayList<>(); DescribeTopicsResult topicsResult = adminClient.describeTopics( matchedTopics, new DescribeTopicsOptions().timeoutMs(ADMIN_REQUEST_TIMEOUT)); Map<String, TopicDescription> topicDescriptionMap = topicsResult.allTopicNames().get(); for (TopicDescription desc: topicDescriptionMap.values()) { List<TopicPartitionInfo> partitions = desc.partitions(); for (TopicPartitionInfo info: partitions) { if ((info.partition() >= startPartition) && (info.partition() <= endPartition)) { out.add(new TopicPartition(desc.name(), info.partition())); } } } return out; }
@Test
public void testGetMatchingTopicPartitionsCorrectlyMatchesTopics() throws Throwable {
    // Only topics whose names end in "-topic" should match; partitions are
    // restricted to the [0, 1] range regardless of how many each topic has.
    final String topic1 = "test-topic";
    final String topic2 = "another-test-topic";
    final String topic3 = "one-more";
    makeExistingTopicWithOneReplica(topic1, 10);
    makeExistingTopicWithOneReplica(topic2, 20);
    makeExistingTopicWithOneReplica(topic3, 30);
    Collection<TopicPartition> topicPartitions =
        WorkerUtils.getMatchingTopicPartitions(adminClient, ".*-topic$", 0, 1);
    assertEquals(
        Utils.mkSet(
            new TopicPartition(topic1, 0), new TopicPartition(topic1, 1),
            new TopicPartition(topic2, 0), new TopicPartition(topic2, 1)
        ),
        new HashSet<>(topicPartitions)
    );
}
@Override
public RedisClusterNode clusterGetNodeForKey(byte[] key) {
    // Map the key to its cluster hash slot, then resolve the node for that slot.
    int slot = executorService.getConnectionManager().calcSlot(key);
    return clusterGetNodeForSlot(slot);
}
@Test
public void testClusterGetNodeForKey() {
    // Any key must resolve to some cluster node.
    RedisClusterNode node = connection.clusterGetNodeForKey("123".getBytes());
    assertThat(node).isNotNull();
}
@Override
public synchronized void write(int b) throws IOException {
    // Forward the byte to the under-file-system stream, then count it.
    mUfsOutStream.write(b);
    mBytesWritten++;
}
@Test
public void writeIncreasingBytes() throws IOException, AlluxioException {
    // Write CHUNK_SIZE single bytes (0, 1, 2, ...) and verify the file content.
    AlluxioURI ufsPath = getUfsPath();
    try (FileOutStream outStream = mFileSystem.createFile(ufsPath)) {
        for (int i = 0; i < CHUNK_SIZE; i++) {
            outStream.write(i);
        }
    }
    verifyIncreasingBytesWritten(ufsPath, CHUNK_SIZE);
}
@Override
public PageResult<MailTemplateDO> getMailTemplatePage(MailTemplatePageReqVO pageReqVO) {
    // Straight delegation to the mapper's paged query.
    return mailTemplateMapper.selectPage(pageReqVO);
}
@Test
public void testGetMailTemplatePage() {
    // Insert one matching row plus one mismatching row per filter field, then
    // verify only the matching row comes back. (Original comments translated.)
    // mock data
    MailTemplateDO dbMailTemplate = randomPojo(MailTemplateDO.class, o -> { // will be found by the query below
        o.setName("源码");
        o.setCode("test_01");
        o.setAccountId(1L);
        o.setStatus(CommonStatusEnum.ENABLE.getStatus());
        o.setCreateTime(buildTime(2023, 2, 3));
    });
    mailTemplateMapper.insert(dbMailTemplate);
    // name does not match
    mailTemplateMapper.insert(cloneIgnoreId(dbMailTemplate, o -> o.setName("芋道")));
    // code does not match
    mailTemplateMapper.insert(cloneIgnoreId(dbMailTemplate, o -> o.setCode("test_02")));
    // accountId does not match
    mailTemplateMapper.insert(cloneIgnoreId(dbMailTemplate, o -> o.setAccountId(2L)));
    // status does not match
    mailTemplateMapper.insert(cloneIgnoreId(dbMailTemplate, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())));
    // createTime does not match
    mailTemplateMapper.insert(cloneIgnoreId(dbMailTemplate, o -> o.setCreateTime(buildTime(2023, 1, 5))));
    // prepare request
    MailTemplatePageReqVO reqVO = new MailTemplatePageReqVO();
    reqVO.setName("源");
    reqVO.setCode("est_01");
    reqVO.setAccountId(1L);
    reqVO.setStatus(CommonStatusEnum.ENABLE.getStatus());
    reqVO.setCreateTime(buildBetweenTime(2023, 2, 1, 2023, 2, 5));
    // invoke
    PageResult<MailTemplateDO> pageResult = mailTemplateService.getMailTemplatePage(reqVO);
    // assert: exactly the one matching row is returned
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(dbMailTemplate, pageResult.getList().get(0));
}
@Override
public ReadWriteBuffer onCall(Command command, ReadWriteBuffer parameter) throws IOException {
    // Dispatches native-side commands: path-allocation commands return the
    // allocated path as a string buffer; GET_COMBINE_HANDLER returns the
    // combiner's id (or null when no combiner is configured).
    Path p = null;
    if (null == command) {
        return null;
    }
    if (command.equals(GET_OUTPUT_PATH)) {
        p = output.getOutputFileForWrite(-1);
    } else if (command.equals(GET_OUTPUT_INDEX_PATH)) {
        p = output.getOutputIndexFileForWrite(-1);
    } else if (command.equals(GET_SPILL_PATH)) {
        // Each spill request consumes the next spill number (side effect).
        p = output.getSpillFileForWrite(spillNumber++, -1);
    } else if (command.equals(GET_COMBINE_HANDLER)) {
        if (null == combinerHandler) {
            return null;
        }
        final ReadWriteBuffer result = new ReadWriteBuffer(8);
        result.writeLong(combinerHandler.getId());
        return result;
    } else {
        throw new IOException("Illegal command: " + command.toString());
    }
    if (p != null) {
        // Path-allocation commands fall through to here.
        final ReadWriteBuffer result = new ReadWriteBuffer();
        result.writeString(p.toUri().getPath());
        return result;
    } else {
        throw new IOException("MapOutputFile can't allocate spill/output file");
    }
}
@Test
public void testGetCombiner() throws IOException {
    // GET_COMBINE_HANDLER must return a buffer carrying the combiner's id.
    this.handler = new NativeCollectorOnlyHandler(taskContext, nativeHandler, pusher, combiner);
    Mockito.when(combiner.getId()).thenReturn(100L);
    final ReadWriteBuffer result = handler.onCall(
        NativeCollectorOnlyHandler.GET_COMBINE_HANDLER, null);
    Assert.assertEquals(100L, result.readLong());
}
@Override
public void upgrade() {
    // Idempotent: a completed migration is never re-run.
    if (hasBeenRunSuccessfully()) {
        LOG.debug("Migration already completed.");
        return;
    }
    // Collectors for the id mappings produced as a side effect of migration;
    // migrateDashboard invokes these consumers while converting each dashboard.
    final Set<String> dashboardIdToViewId = new HashSet<>();
    final Consumer<String> recordMigratedDashboardIds = dashboardIdToViewId::add;
    final Map<String, Set<String>> widgetIdMigrationMapping = new HashMap<>();
    final Consumer<Map<String, Set<String>>> recordMigratedWidgetIds = widgetIdMigrationMapping::putAll;
    // Sorting by dashboard id makes the migration order deterministic.
    final Map<View, Search> newViews = this.dashboardsService.streamAll()
        .sorted(Comparator.comparing(Dashboard::id))
        .map(dashboard -> migrateDashboard(dashboard, recordMigratedDashboardIds, recordMigratedWidgetIds))
        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    writeViews(newViews);
    // Persist the collected mappings so hasBeenRunSuccessfully() is true next time.
    final MigrationCompleted migrationCompleted = MigrationCompleted.create(dashboardIdToViewId, widgetIdMigrationMapping);
    writeMigrationCompleted(migrationCompleted);
}
@Test
@MongoDBFixtures("dashboard_with_minimal_quickvalues_widget.json")
public void migrateDashboardWithMinimalQuickvaluesWidget() throws Exception {
    // Migrate the fixture dashboard and compare the recorded id mappings plus
    // the written views/searches against the expected JSON resources.
    this.migration.upgrade();
    final MigrationCompleted migrationCompleted = captureMigrationCompleted();
    assertThat(migrationCompleted.migratedDashboardIds()).containsExactly("5c7fc3f9f38ed741ac154697");
    assertThat(migrationCompleted.widgetMigrationIds()).containsAllEntriesOf(
        ImmutableMap.<String, Set<String>>builder()
            .put("6f2cc355-bcbb-4b3f-be01-bfba299aa51a",
                ImmutableSet.of("0000016e-b690-426f-0000-016eb690426f", "0000016e-b690-4270-0000-016eb690426f"))
            .build()
    );
    assertViewsWritten(1, resourceFile("dashboard_with_minimal_quickvalues_widget-expected_views.json"));
    assertSearchesWritten(1, resourceFile("dashboard_with_minimal_quickvalues_widget-expected_searches.json"));
}
@Override
public void close() throws IOException {
    // Idempotent close: subsequent calls are no-ops.
    if (closed) {
        return;
    }
    // try-with-resources guarantees `out` is closed even when flush() or the
    // CRC allocator's close() throws.
    try (PositionOutputStream temp = out) {
        temp.flush();
        if (crcAllocator != null) {
            crcAllocator.close();
        }
    } finally {
        // Marked closed regardless of outcome so close() is never retried.
        closed = true;
    }
}
@Test
public void testWriteReadStatisticsAllNulls() throws Exception {
    // Writes a single record with a missing required field and verifies the
    // footer statistics report exactly one null.
    // this test assumes statistics will be read
    Assume.assumeTrue(!shouldIgnoreStatistics(Version.FULL_VERSION, BINARY));
    File testFile = temp.newFile();
    testFile.delete();
    writeSchema = "message example {\n" + "required binary content (UTF8);\n" + "}";
    Path path = new Path(testFile.toURI());
    MessageType schema = MessageTypeParser.parseMessageType(writeSchema);
    Configuration configuration = getTestConfiguration();
    configuration.setBoolean("parquet.strings.signed-min-max.enabled", true);
    GroupWriteSupport.setSchema(schema, configuration);
    // close any filesystems to ensure that the FS used by the writer picks up the configuration
    FileSystem.closeAll();
    ParquetWriter<Group> writer = new ParquetWriter<Group>(path, configuration, new GroupWriteSupport());
    Group r1 = new SimpleGroup(schema);
    writer.write(r1);
    writer.close();
    ParquetMetadata readFooter = ParquetFileReader.readFooter(configuration, path);
    // assert the statistics object is not empty
    org.apache.parquet.column.statistics.Statistics stats =
        readFooter.getBlocks().get(0).getColumns().get(0).getStatistics();
    assertFalse("is empty: " + stats, stats.isEmpty());
    // assert the number of nulls are correct for the first block
    assertEquals("nulls: " + stats, 1, stats.getNumNulls());
}
/**
 * Extracts the screen-width value (e.g. 320 for "w320dp") from a
 * dash-separated qualifier string, or -1 when no width qualifier is present.
 */
@Deprecated
public static int getScreenWidth(String qualifiers) {
    final String[] parts = qualifiers.split("-", 0);
    for (int i = 0; i < parts.length; i++) {
        final Matcher widthMatcher = SCREEN_WIDTH_PATTERN.matcher(parts[i]);
        if (widthMatcher.find()) {
            return Integer.parseInt(widthMatcher.group(1));
        }
    }
    return -1;
}
@Test
public void getScreenWidth() {
    // "wNNNdp" qualifiers yield NNN regardless of surrounding qualifiers;
    // "swNNNdp" (smallest-width) must NOT be treated as screen width.
    assertThat(Qualifiers.getScreenWidth("w320dp")).isEqualTo(320);
    assertThat(Qualifiers.getScreenWidth("w320dp-v7")).isEqualTo(320);
    assertThat(Qualifiers.getScreenWidth("en-rUS-w320dp")).isEqualTo(320);
    assertThat(Qualifiers.getScreenWidth("en-rUS-w320dp-v7")).isEqualTo(320);
    assertThat(Qualifiers.getScreenWidth("en-rUS-v7")).isEqualTo(-1);
    assertThat(Qualifiers.getScreenWidth("de-v23-sw320dp-w1024dp")).isEqualTo(1024);
    assertThat(Qualifiers.getScreenWidth("en-rUS-sw320dp-v7")).isEqualTo(-1);
}
public static PredicateTreeAnalyzerResult analyzePredicateTree(Predicate predicate) {
    AnalyzerContext context = new AnalyzerContext();
    // First pass collects per-subtree statistics and the overall tree size.
    int treeSize = aggregatePredicateStatistics(predicate, false, context);
    // minFeature is rounded up, plus one extra feature when the tree contains
    // any negation predicate.
    int minFeature = ((int) Math.ceil(findMinFeature(predicate, false, context)))
        + (context.hasNegationPredicate ? 1 : 0);
    return new PredicateTreeAnalyzerResult(minFeature, treeSize, context.subTreeSizes);
}
@Test
void require_that_minfeature_is_min_for_or() {
    // For an OR of two ANDs, the expected minFeature is the minimum over the
    // branches (2 here), with every leaf contributing size 1 to the size map.
    Predicate p = or(
        and(
            feature("foo").inSet("bar"),
            feature("baz").inSet("qux"),
            feature("quux").inSet("corge")),
        and(
            feature("grault").inSet("garply"),
            feature("waldo").inSet("fred")));
    PredicateTreeAnalyzerResult r = PredicateTreeAnalyzer.analyzePredicateTree(p);
    assertEquals(2, r.minFeature);
    assertEquals(5, r.treeSize);
    assertEquals(5, r.sizeMap.size());
    assertSizeMapContains(r, pred(p).child(0).child(0), 1);
    assertSizeMapContains(r, pred(p).child(0).child(1), 1);
    assertSizeMapContains(r, pred(p).child(0).child(2), 1);
    assertSizeMapContains(r, pred(p).child(1).child(0), 1);
    assertSizeMapContains(r, pred(p).child(1).child(1), 1);
}
/**
 * Converts an Iceberg namespace plus metadata into a Glue DatabaseInput.
 * The description and location keys map to dedicated fields; every other
 * metadata entry is carried over as a Glue parameter.
 */
static DatabaseInput toDatabaseInput(
    Namespace namespace, Map<String, String> metadata, boolean skipNameValidation) {
    final DatabaseInput.Builder builder =
        DatabaseInput.builder().name(toDatabaseName(namespace, skipNameValidation));
    final Map<String, String> parameters = Maps.newHashMap();
    for (Map.Entry<String, String> metadataEntry : metadata.entrySet()) {
        final String key = metadataEntry.getKey();
        final String value = metadataEntry.getValue();
        if (GLUE_DESCRIPTION_KEY.equals(key)) {
            builder.description(value);
        } else if (GLUE_DB_LOCATION_KEY.equals(key)) {
            builder.locationUri(value);
        } else {
            parameters.put(key, value);
        }
    }
    return builder.parameters(parameters).build();
}
@Test
public void testToDatabaseInputEmptyLocation() {
    // Without a location key in the metadata, locationUri stays null while
    // description and plain parameters are still mapped.
    Map<String, String> properties =
        ImmutableMap.of(IcebergToGlueConverter.GLUE_DESCRIPTION_KEY, "description", "key", "val");
    DatabaseInput databaseInput =
        IcebergToGlueConverter.toDatabaseInput(Namespace.of("ns"), properties, false);
    assertThat(databaseInput.locationUri()).as("Location should not be set").isNull();
    assertThat(databaseInput.description())
        .as("Description should be set")
        .isEqualTo("description");
    assertThat(databaseInput.parameters())
        .as("Parameters should be set")
        .isEqualTo(ImmutableMap.of("key", "val"));
    assertThat(databaseInput.name()).as("Database name should be set").isEqualTo("ns");
}
/**
 * Returns whether the given JID belongs to a registered user.
 *
 * Local users are checked against the local user store. For remote users
 * (only when checkRemoteDomains is true) a disco#info request is sent to the
 * remote server and the result is cached; the calling thread blocks for at
 * most the configured timeout waiting for the answer.
 */
public boolean isRegisteredUser(@Nonnull final JID user, final boolean checkRemoteDomains) {
    if (xmppServer.isLocal(user)) {
        // Local users: registered iff the user store can resolve the node.
        try {
            getUser(user.getNode());
            return true;
        } catch (final UserNotFoundException e) {
            return false;
        }
    } else if (!checkRemoteDomains) {
        // Remote user but caller opted out of remote lookups.
        return false;
    } else {
        // Look up in the cache using the full JID
        Boolean isRegistered = remoteUsersCache.get(user.toString());
        if (isRegistered == null) {
            // Check if the bare JID of the user is cached
            isRegistered = remoteUsersCache.get(user.toBareJID());
            if (isRegistered == null) {
                // No information is cached so check user identity and cache it
                // A disco#info is going to be sent to the bare JID of the user. This packet
                // is going to be handled by the remote server.
                final IQ iq = new IQ(IQ.Type.get);
                iq.setFrom(xmppServer.getServerInfo().getXMPPDomain());
                iq.setTo(user.toBareJID());
                iq.setChildElement("query", "http://jabber.org/protocol/disco#info");
                final Semaphore completionSemaphore = new Semaphore(0);
                // Send the disco#info request to the remote server.
                final IQRouter iqRouter = xmppServer.getIQRouter();
                final long timeoutInMillis = REMOTE_DISCO_INFO_TIMEOUT.getValue().toMillis();
                iqRouter.addIQResultListener(iq.getID(), new IQResultListener() {
                    @Override
                    public void receivedAnswer(final IQ packet) {
                        final JID from = packet.getFrom();
                        // Assume that the user is not a registered user
                        Boolean isRegistered = Boolean.FALSE;
                        // Analyze the disco result packet
                        if (IQ.Type.result == packet.getType()) {
                            final Element child = packet.getChildElement();
                            if (child != null) {
                                // A "registered" or "admin" identity marks a registered account.
                                for (final Iterator it = child.elementIterator("identity"); it.hasNext();) {
                                    final Element identity = (Element) it.next();
                                    final String accountType = identity.attributeValue("type");
                                    if ("registered".equals(accountType) || "admin".equals(accountType)) {
                                        isRegistered = Boolean.TRUE;
                                        break;
                                    }
                                }
                            }
                        }
                        // Update cache of remote registered users
                        remoteUsersCache.put(from.toBareJID(), isRegistered);
                        completionSemaphore.release();
                    }

                    @Override
                    public void answerTimeout(final String packetId) {
                        Log.warn("The result from the disco#info request was never received. request: {}", iq);
                        completionSemaphore.release();
                    }
                }, timeoutInMillis);
                // Send the request
                iqRouter.route(iq);
                // Wait for the response
                try {
                    completionSemaphore.tryAcquire(timeoutInMillis, TimeUnit.MILLISECONDS);
                } catch (final InterruptedException e) {
                    // Restore the interrupt flag and fall through to the cache read.
                    Thread.currentThread().interrupt();
                    Log.warn("Interrupted whilst waiting for response from remote server", e);
                }
                // On timeout/interrupt the cache may still be empty: default to FALSE.
                isRegistered = remoteUsersCache.computeIfAbsent(user.toBareJID(), ignored -> Boolean.FALSE);
            }
        }
        return isRegistered;
    }
}
@Test
public void isRegisteredUserFalseWillReturnTrueForLocalUsers() {
    // checkRemoteDomains=false must not affect lookups of local users.
    final boolean result = userManager.isRegisteredUser(new JID(USER_ID, Fixtures.XMPP_DOMAIN, null), false);
    assertThat(result, is(true));
}
// Private constructor: delegates to the superclass, so construction (and any
// URI validation) goes through the class's own creation methods.
private AlarmEntityId(final URI uri) {
    super(uri);
}
// A URI with an unsupported scheme must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void verifyUnexpectedSchemaRejected() {
    alarmEntityId("junk:foo");
}
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
@Test
public void shouldNotMatchIfParamLengthDiffers() {
    // Given: only a single-parameter overload is registered.
    givenFunctions(
        function(EXPECTED, -1, STRING)
    );
    // When: resolving with two arguments.
    final Exception e = assertThrows(
        KsqlException.class,
        () -> udfIndex.getFunction(ImmutableList.of(SqlArgument.of(SqlTypes.STRING),
            SqlArgument.of(SqlTypes.STRING)))
    );
    // Then: resolution fails with the standard no-matching-function message.
    assertThat(e.getMessage(), containsString("Function 'name' does not accept parameters "
        + "(STRING, STRING)"));
}
/**
 * A device is reachable iff the controller currently tracks the XMPP JID
 * encoded in the scheme-specific part of the device URI.
 */
@Override
public boolean isReachable(DeviceId deviceId) {
    final JID deviceJid = new JID(deviceId.uri().getSchemeSpecificPart());
    return controller.getDevice(new XmppDeviceId(deviceJid)) != null;
}
@Test
public void testIsReachable() {
    // Devices known to the controller are reachable; unknown ones are not.
    assertTrue(provider.isReachable(DeviceId.deviceId("reachable@xmpp.org")));
    assertFalse(provider.isReachable(DeviceId.deviceId("non-reachable@xmpp.org")));
}
public static ListenableFuture<CustomerId> findEntityIdAsync(TbContext ctx, EntityId originator) {
    // Resolves the customer associated with the originator, per entity type.
    switch (originator.getEntityType()) {
        case CUSTOMER:
            // Already a customer id — return it as-is.
            return Futures.immediateFuture((CustomerId) originator);
        case USER:
            return toCustomerIdAsync(ctx, ctx.getUserService().findUserByIdAsync(ctx.getTenantId(), (UserId) originator));
        case ASSET:
            return toCustomerIdAsync(ctx, ctx.getAssetService().findAssetByIdAsync(ctx.getTenantId(), (AssetId) originator));
        case DEVICE:
            // Device lookup is synchronous, so wrap the result in an immediate future.
            return toCustomerIdAsync(ctx, Futures.immediateFuture(ctx.getDeviceService().findDeviceById(ctx.getTenantId(), (DeviceId) originator)));
        default:
            // Unsupported types surface as a failed future rather than a thrown exception.
            return Futures.immediateFailedFuture(new TbNodeException("Unexpected originator EntityType: " + originator.getEntityType()));
    }
}
@Test
public void givenCustomerEntityType_whenFindEntityIdAsync_thenOK() throws ExecutionException, InterruptedException {
    // GIVEN: the originator is itself a customer id.
    var customer = new Customer(new CustomerId(UUID.randomUUID()));
    // WHEN
    var actualCustomerId = EntitiesCustomerIdAsyncLoader.findEntityIdAsync(ctxMock, customer.getId()).get();
    // THEN: the same id is returned unchanged.
    assertEquals(customer.getId(), actualCustomerId);
}
/**
 * Two mapping values are equal when both their action and their treatments
 * are equal. Non-DefaultMappingValue objects (including null) never compare equal.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof DefaultMappingValue)) {
        return false;
    }
    DefaultMappingValue other = (DefaultMappingValue) obj;
    return Objects.equals(action, other.action)
        && Objects.equals(treatments, other.treatments);
}
@Test
public void testEquals() {
    // Values built from the same action+treatment are equal; differing ones are not.
    MappingTreatment treatment1 = DefaultMappingTreatment.builder()
        .withAddress(ma1)
        .setUnicastPriority(10)
        .setUnicastWeight(10)
        .build();
    MappingTreatment treatment2 = DefaultMappingTreatment.builder()
        .withAddress(ma2)
        .setUnicastPriority(20)
        .setUnicastWeight(20)
        .build();
    MappingAction noAction = MappingActions.noAction();
    MappingAction forward = MappingActions.forward();
    MappingValue value1 = DefaultMappingValue.builder()
        .withAction(noAction)
        .add(treatment1)
        .build();
    MappingValue sameAsValue1 = DefaultMappingValue.builder()
        .withAction(noAction)
        .add(treatment1)
        .build();
    MappingValue value2 = DefaultMappingValue.builder()
        .withAction(forward)
        .add(treatment2)
        .build();
    // EqualsTester also validates hashCode consistency within each group.
    new EqualsTester()
        .addEqualityGroup(value1, sameAsValue1)
        .addEqualityGroup(value2)
        .testEquals();
}
@Override
public boolean accept(ProcessingEnvironment processingEnv, DeclaredType type) {
    // This builder handles enum types only.
    return isEnumType(type);
}
@Test
void testAccept() {
    // An enum type (Color) must be accepted by the builder.
    TypeElement typeElement = getType(Color.class);
    assertTrue(builder.accept(processingEnv, typeElement.asType()));
}
@CanIgnoreReturnValue @SuppressWarnings("deprecation") // TODO(b/134064106): design an alternative to no-arg check() public final Ordered containsExactly() { return check().about(iterableEntries()).that(checkNotNull(actual).entries()).containsExactly(); }
@Test
public void containsExactlyVarargFailureExtra() {
    // Extra entries in the actual multimap must produce a failure that lists
    // exactly the unexpected entries.
    ImmutableMultimap<Integer, String> expected =
        ImmutableMultimap.of(3, "one", 3, "six", 3, "two", 4, "five", 4, "four");
    ListMultimap<Integer, String> actual = LinkedListMultimap.create(expected);
    actual.put(4, "nine");
    actual.put(5, "eight");
    expectFailureWhenTestingThat(actual)
        .containsExactly(3, "one", 3, "six", 3, "two", 4, "five", 4, "four");
    assertFailureKeys("unexpected", "---", "expected", "but was");
    assertFailureValue("unexpected", "{4=[nine], 5=[eight]}");
}
// Exposes the tracker's mapping of file path -> set of changed line numbers.
Map<Path, Set<Integer>> changedLines() {
    return tracker.changedLines();
}
@Test
public void count_single_added_line() throws IOException {
    // A unified diff adding one line to "sample1" must record line 1 as changed.
    String example = "Index: sample1\n"
        + "===================================================================\n"
        + "--- a/sample1\n"
        + "+++ b/sample1\n"
        + "@@ -0,0 +1 @@\n"
        + "+added line\n";
    printDiff(example);
    assertThat(underTest.changedLines())
        .isEqualTo(Collections.singletonMap(rootBaseDir.resolve("sample1"), singleton(1)));
}
/**
 * Returns the default host name: the cached local hostname when the
 * interface is null or "default", otherwise the first host resolved for the
 * given interface/nameserver pair (a "default" nameserver is treated as none).
 */
public static String getDefaultHost(@Nullable String strInterface,
    @Nullable String nameserver, boolean tryfallbackResolution)
    throws UnknownHostException {
    if (strInterface == null || "default".equals(strInterface)) {
        // Fast path: no interface selection requested.
        return cachedHostname;
    }
    // "default".equals(null) is false, so this also covers a null nameserver.
    final String effectiveNameserver = "default".equals(nameserver) ? null : nameserver;
    final String[] resolvedHosts =
        getHosts(strInterface, effectiveNameserver, tryfallbackResolution);
    return resolvedHosts[0];
}
@Test
public void testGetLocalHostIsFast() throws Exception {
    // Repeated lookups must return the same cached value quickly.
    String hostname1 = DNS.getDefaultHost(DEFAULT);
    assertNotNull(hostname1);
    String hostname2 = DNS.getDefaultHost(DEFAULT);
    long t1 = Time.now();
    String hostname3 = DNS.getDefaultHost(DEFAULT);
    long t2 = Time.now();
    assertEquals(hostname3, hostname2);
    assertEquals(hostname2, hostname1);
    long interval = t2 - t1;
    // NOTE(review): wall-clock-based threshold — potentially flaky on very
    // slow/overloaded machines, though 20s is generous.
    assertTrue(
        "Took too long to determine local host - caching is not working",
        interval < 20000);
}
/**
 * Iteratively visits a schema graph (cycle-safe via an identity set) using an
 * explicit work deque instead of recursion. Non-terminal schemas push a
 * post-visit callback (a Supplier) whose returned action controls traversal.
 */
public static <T> T visit(final Schema start, final SchemaVisitor<T> visitor) {
    // Set of visited schemas (identity semantics: revisits become terminal visits).
    IdentityHashMap<Schema, Schema> visited = new IdentityHashMap<>();
    // Stack that contains the schemas to process and afterVisitNonTerminal
    // functions.
    // Deque<Either<Schema, Supplier<SchemaVisitorAction>>>
    // Using either has a cost which we want to avoid...
    Deque<Object> dq = new ArrayDeque<>();
    dq.addLast(start);
    Object current;
    while ((current = dq.pollLast()) != null) {
        if (current instanceof Supplier) {
            // we are executing a non terminal post visit.
            SchemaVisitorAction action = ((Supplier<SchemaVisitorAction>) current).get();
            switch (action) {
                case CONTINUE:
                    break;
                case SKIP_SUBTREE:
                    // Skipping a subtree after it was visited is meaningless.
                    throw new UnsupportedOperationException();
                case SKIP_SIBLINGS:
                    // Drop all pending sibling schemas up to the next post-visit marker.
                    while (dq.getLast() instanceof Schema) {
                        dq.removeLast();
                    }
                    break;
                case TERMINATE:
                    return visitor.get();
                default:
                    throw new UnsupportedOperationException("Invalid action " + action);
            }
        } else {
            Schema schema = (Schema) current;
            boolean terminate;
            if (!visited.containsKey(schema)) {
                Schema.Type type = schema.getType();
                switch (type) {
                    case ARRAY:
                        terminate = visitNonTerminal(visitor, schema, dq,
                            Collections.singleton(schema.getElementType()));
                        visited.put(schema, schema);
                        break;
                    case RECORD:
                        // Fields are pushed in reverse so they pop in declaration order.
                        terminate = visitNonTerminal(visitor, schema, dq,
                            () -> schema.getFields().stream().map(Field::schema)
                                .collect(Collectors.toCollection(ArrayDeque::new)).descendingIterator());
                        visited.put(schema, schema);
                        break;
                    case UNION:
                        terminate = visitNonTerminal(visitor, schema, dq, schema.getTypes());
                        visited.put(schema, schema);
                        break;
                    case MAP:
                        terminate = visitNonTerminal(visitor, schema, dq,
                            Collections.singleton(schema.getValueType()));
                        visited.put(schema, schema);
                        break;
                    case NULL:
                    case BOOLEAN:
                    case BYTES:
                    case DOUBLE:
                    case ENUM:
                    case FIXED:
                    case FLOAT:
                    case INT:
                    case LONG:
                    case STRING:
                        // Primitive/leaf schemas have no children.
                        terminate = visitTerminal(visitor, schema, dq);
                        break;
                    default:
                        throw new UnsupportedOperationException("Invalid type " + type);
                }
            } else {
                // Already-visited schema (shared or recursive reference):
                // treat as a terminal visit.
                terminate = visitTerminal(visitor, schema, dq);
            }
            if (terminate) {
                return visitor.get();
            }
        }
    }
    return visitor.get();
}
@Test
void visit12() {
    // TERMINATE from the first terminal visit ("int") must stop the traversal,
    // so only the path down to the first leaf is recorded.
    String s12 = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
        + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"ct2\", \"fields\": "
        + "[{\"name\": \"f11\", \"type\": \"int\"}]}},"
        + "{\"name\": \"f2\", \"type\": \"long\"}"
        + "]}";
    assertEquals("c1.ct2.\"int\".", Schemas.visit(new Schema.Parser().parse(s12), new TestVisitor() {
        public SchemaVisitorAction visitTerminal(Schema terminal) {
            sb.append(terminal).append('.');
            return SchemaVisitorAction.TERMINATE;
        }
    }));
}
void startVulnerabilityDetecting(
    ImmutableList<PluginMatchingResult<VulnDetector>> selectedVulnDetectors) {
    // Legal only while fingerprinting is in progress with its timer running.
    checkState(currentExecutionStage.equals(ExecutionStage.SERVICE_FINGERPRINTING));
    checkState(serviceFingerprintingTimer.isRunning());
    // Swap timers and advance the stage.
    this.serviceFingerprintingTimer.stop();
    this.vulnerabilityDetectingTimer.start();
    this.currentExecutionStage = ExecutionStage.VULNERABILITY_DETECTING;
    this.selectedVulnDetectors = checkNotNull(selectedVulnDetectors);
}
@Test
public void startVulnerabilityDetecting_whenStageNotFingerprinting_throwsException() {
    // A freshly constructed tracer is not in the fingerprinting stage, so the
    // transition must be rejected.
    ExecutionTracer executionTracer =
        new ExecutionTracer(
            portScanningTimer, serviceFingerprintingTimer, vulnerabilityDetectingTimer);
    assertThrows(
        IllegalStateException.class,
        () ->
            executionTracer.startVulnerabilityDetecting(
                pluginManager.getVulnDetectors(ReconnaissanceReport.getDefaultInstance())));
}
@Override
public CompletableFuture<String> getLeaderAddressFuture() {
    // Future holding the leader's address; its completion is managed elsewhere
    // in this class.
    return leaderAddressFuture;
}
@Test
void testLeaderAddressGetsForwarded() {
    // Completing the job master service future must complete the leader
    // address future with that service's address.
    final CompletableFuture<JobMasterService> jobMasterServiceFuture =
        new CompletableFuture<>();
    DefaultJobMasterServiceProcess serviceProcess = createTestInstance(jobMasterServiceFuture);
    String testingAddress = "yolohost";
    TestingJobMasterService testingJobMasterService =
        new TestingJobMasterService(testingAddress, null, null);
    jobMasterServiceFuture.complete(testingJobMasterService);
    assertThat(serviceProcess.getLeaderAddressFuture()).isCompletedWithValue(testingAddress);
}
public void launch(Monitored mp) {
    // Guard against double-start: the lifecycle transition succeeds only once.
    if (!lifecycle.tryToMoveTo(Lifecycle.State.STARTING)) {
        throw new IllegalStateException("Already started");
    }
    monitored = mp;
    Logger logger = LoggerFactory.getLogger(getClass());
    try {
        launch(logger);
    } catch (Exception e) {
        // Any startup failure is logged and triggers a hard stop instead of
        // propagating to the caller.
        logger.warn("Fail to start {}", processId.getHumanReadableName(), e);
        hardStop();
    }
}
@Test
public void terminate_if_unexpected_shutdown() throws Exception {
    // Simulates a JVM shutdown signal and verifies the process reaches STOPPED.
    Props props = createProps();
    final ProcessEntryPoint entryPoint = new ProcessEntryPoint(props, exit, commands, runtime);
    final StandardProcess process = new StandardProcess();
    Thread runner = new Thread(() -> {
        // starts and waits until terminated
        entryPoint.launch(process);
    });
    runner.start();
    waitForOperational(process, commands);
    // emulate signal to shutdown process
    ArgumentCaptor<Thread> shutdownHookCaptor = ArgumentCaptor.forClass(Thread.class);
    verify(runtime).addShutdownHook(shutdownHookCaptor.capture());
    shutdownHookCaptor.getValue().start();
    // NOTE(review): unbounded polling loop — relies on the test framework's
    // global timeout if the process never stops.
    while (process.getState() != State.STOPPED) {
        Thread.sleep(10L);
    }
    // exit before test timeout, ok !
}
public static RectL getBounds(final RectL pIn, final long pCenterX, final long pCenterY, final double pDegrees, final RectL pReuse) { final RectL out = pReuse != null ? pReuse : new RectL(); if (pDegrees == 0) { // optimization out.top = pIn.top; out.left = pIn.left; out.bottom = pIn.bottom; out.right = pIn.right; return out; } final double radians = pDegrees * Math.PI / 180.; final double cos = Math.cos(radians); final double sin = Math.sin(radians); long inputX; long inputY; long outputX; long outputY; inputX = pIn.left; // corner 1 inputY = pIn.top; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); out.top = out.bottom = outputY; out.left = out.right = outputX; inputX = pIn.right; // corner 2 inputY = pIn.top; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); if (out.top > outputY) { out.top = outputY; } if (out.bottom < outputY) { out.bottom = outputY; } if (out.left > outputX) { out.left = outputX; } if (out.right < outputX) { out.right = outputX; } inputX = pIn.right; // corner 3 inputY = pIn.bottom; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); if (out.top > outputY) { out.top = outputY; } if (out.bottom < outputY) { out.bottom = outputY; } if (out.left > outputX) { out.left = outputX; } if (out.right < outputX) { out.right = outputX; } inputX = pIn.left; // corner 4 inputY = pIn.bottom; outputX = getRotatedX(inputX, inputY, pCenterX, pCenterY, cos, sin); outputY = getRotatedY(inputX, inputY, pCenterX, pCenterY, cos, sin); if (out.top > outputY) { out.top = outputY; } if (out.bottom < outputY) { out.bottom = outputY; } if (out.left > outputX) { out.left = outputX; } if (out.right < outputX) { out.right = outputX; } return out; }
// Spot-checks getBounds around the origin for 180, 90 and 45 degree rotations,
// including the sqrt(2) expansion of a square rotated by 45 degrees.
@Test
public void testGetBoundsSamplesRect() {
    final Rect in = new Rect();
    final Rect out = new Rect();
    in.top = 0; // lousy member setting for Rect - see javadoc comments on class
    in.left = 0;
    in.bottom = 6;
    in.right = 4;
    RectL.getBounds(in, 0, 0, 180, out);
    Assert.assertEquals(-6, out.top);
    Assert.assertEquals(0, out.bottom);
    Assert.assertEquals(-4, out.left);
    Assert.assertEquals(0, out.right);
    in.bottom = 7;
    in.right = 5;
    RectL.getBounds(in, 0, 0, 90, out);
    Assert.assertEquals(0, out.top);
    Assert.assertEquals(-7, out.left);
    Assert.assertEquals(5, out.bottom);
    Assert.assertEquals(0, out.right);
    in.bottom = 8;
    in.right = 8;
    RectL.getBounds(in, 0, 0, 45, out);
    Assert.assertEquals(0, out.top);
    Assert.assertEquals(-Math.round(8 * Math.sqrt(2) / 2.), out.left);
    Assert.assertEquals(Math.round(8 * Math.sqrt(2)), out.bottom);
    Assert.assertEquals(Math.round(8 * Math.sqrt(2) / 2.), out.right);
}
// Parses a locale-formatted numeric string via parse() and narrows to float.
float parseFloat(String s) {
    return parse(s).floatValue();
}
// Locale-aware float parsing: grouping/decimal separators are swapped between
// English and German. (Removed a leftover debug println of available locales.)
@Test
void can_parse_float() {
    assertEquals(1042.2f, english.parseFloat("1,042.2"), 0);
    assertEquals(1042.2f, german.parseFloat("1.042,2"), 0);
}
/**
 * Bulk-indexes the given tuples into Elasticsearch. A transport error or a bulk
 * response containing item-level errors fails the whole batch with
 * FailedException so Trident replays it.
 *
 * @param tuples the batch of tuples to index
 * @throws FailedException when the bulk request fails or reports errors
 */
public void updateState(List<TridentTuple> tuples) {
    try {
        String bulkRequest = buildRequest(tuples);
        final Request request = new Request("post", "_bulk");
        request.setEntity(new StringEntity(bulkRequest));
        Response response = client.performRequest(request);
        BulkIndexResponse bulkResponse = objectMapper.readValue(response.getEntity().getContent(),
                BulkIndexResponse.class);
        if (bulkResponse.hasErrors()) {
            // Parameterized logging instead of string concatenation.
            LOG.warn("failed processing bulk index requests: {}: {}",
                    bulkResponse.getFirstError(), bulkResponse.getFirstResult());
            throw new FailedException();
        }
    } catch (IOException e) {
        // Log the full throwable (stack trace) rather than only e.toString().
        LOG.warn("failed processing bulk index requests", e);
        throw new FailedException(e);
    }
}
// Smoke test: indexing a single-tuple batch must complete without throwing.
@Test
public void updateState() throws Exception {
    List<TridentTuple> tuples = tuples(index, type, documentId, source);
    state.updateState(tuples);
}
/**
 * Copies the active http/https proxies from Maven settings.xml into the JVM
 * proxy system properties, skipping any protocol the user already configured
 * via -D properties. Credentials are decrypted first; decryption errors abort.
 */
static void activateHttpAndHttpsProxies(Settings settings, SettingsDecrypter decrypter)
    throws MojoExecutionException {
  List<Proxy> candidates = new ArrayList<>(2);
  for (String protocol : ImmutableList.of("http", "https")) {
    // Explicit JVM proxy properties for this protocol win over settings.xml.
    if (areProxyPropertiesSet(protocol)) {
      continue;
    }
    for (Proxy proxy : settings.getProxies()) {
      if (proxy.isActive() && protocol.equals(proxy.getProtocol())) {
        candidates.add(proxy);
        break; // first matching active proxy wins, as before
      }
    }
  }
  if (candidates.isEmpty()) {
    return;
  }
  SettingsDecryptionRequest request = new DefaultSettingsDecryptionRequest().setProxies(candidates);
  SettingsDecryptionResult result = decrypter.decrypt(request);
  for (SettingsProblem problem : result.getProblems()) {
    SettingsProblem.Severity severity = problem.getSeverity();
    if (severity == SettingsProblem.Severity.ERROR || severity == SettingsProblem.Severity.FATAL) {
      throw new MojoExecutionException("Unable to decrypt proxy info from settings.xml: " + problem);
    }
  }
  result.getProxies().forEach(MavenSettingsProxyProvider::setProxyProperties);
}
// When the user already set https.proxyHost, only the http proxy from
// settings.xml may be applied; https proxy properties stay untouched.
@Test
public void testActivateHttpAndHttpsProxies_dontOverwriteUserHttps() throws MojoExecutionException {
  System.setProperty("https.proxyHost", "host");
  MavenSettingsProxyProvider.activateHttpAndHttpsProxies(
      mixedProxyEncryptedSettings, settingsDecrypter);
  Assert.assertEquals("password1", System.getProperty("http.proxyPassword"));
  Assert.assertNull(System.getProperty("https.proxyPassword"));
}
// Generic parameter lookup backed by the internal param map; null when absent.
@Override
public Object getParameter(String key) {
    return param.get(key);
}
// Each ConfigRequest field must be exposed through getParameter under its
// property name.
@Test
void testGetParameter() {
    ConfigRequest configRequest = new ConfigRequest();
    configRequest.setDataId("id");
    configRequest.setGroup("group");
    configRequest.setTenant("n");
    configRequest.setContent("abc");
    assertEquals("id", configRequest.getParameter("dataId"));
    assertEquals("group", configRequest.getParameter("group"));
    assertEquals("n", configRequest.getParameter("tenant"));
    assertEquals("abc", configRequest.getParameter("content"));
}
// Classpath URLs of the user job; exposed for subclasses and tests.
protected Collection<URL> getUserClassPaths() {
    return userClassPaths;
}
// A null job directory must yield an empty user classpath, not null or a failure.
@Test
public void testGetUserClassPathReturnEmptyListIfJobDirIsNull() throws IOException {
    final TestJobGraphRetriever testJobGraphRetriever = new TestJobGraphRetriever(null);
    assertTrue(testJobGraphRetriever.getUserClassPaths().isEmpty());
}
// The configured hosts-file resolver; null when none was set on the builder.
@Nullable
public HostsFileEntriesResolver hostsFileEntriesResolver() {
    return hostsFileEntriesResolver;
}
// The builder must reject a null resolver eagerly with NullPointerException.
@Test
void hostsFileEntriesResolverBadValues() {
    assertThatExceptionOfType(NullPointerException.class)
            .isThrownBy(() -> builder.hostsFileEntriesResolver(null));
}
// Static factory wrapping the given Retry in a reactive RetryTransformer.
public static <T> RetryTransformer<T> of(Retry retry) {
    return new RetryTransformer<>(retry);
}
// With failAfterMaxAttempts(true), a result that keeps matching the retry
// predicate must end in MaxRetriesExceededException after maxAttempts calls.
@Test
public void shouldThrowMaxRetriesExceptionAfterRetriesExhaustedWhenConfigured() throws InterruptedException {
    RetryConfig config = RetryConfig.<String>custom()
        .retryOnResult("retry"::equals)
        .waitDuration(Duration.ofMillis(50))
        .maxAttempts(3)
        .failAfterMaxAttempts(true)
        .build();
    Retry retry = Retry.of("testName", config);
    given(helloWorldService.returnHelloWorld())
        .willReturn("retry");
    Flowable.fromCallable(helloWorldService::returnHelloWorld)
        .compose(RetryTransformer.of(retry))
        .test()
        .await()
        .assertFailureAndMessage(MaxRetriesExceededException.class,
            "Retry 'testName' has exhausted all attempts (3)",
            "retry");
    // exactly maxAttempts invocations of the underlying service
    then(helloWorldService).should(times(3)).returnHelloWorld();
}
/**
 * Resolves the client address from the X-Forwarded-For values, trusting at most
 * {@code maxTrustedIndex} hops counted from the end of the list. Falls back to
 * the connection's remote address when no forwarded values are present.
 */
@Override
public InetSocketAddress resolve(ServerWebExchange exchange) {
    List<String> forwarded = extractXForwardedValues(exchange);
    if (forwarded.isEmpty()) {
        return defaultRemoteIpResolver.resolve(exchange);
    }
    // Pick the first entry we trust: maxTrustedIndex hops from the end,
    // clamped so we never index before the start of the list.
    int index = Math.max(0, forwarded.size() - maxTrustedIndex);
    return new InetSocketAddress(forwarded.get(index), 0);
}
// With no X-Forwarded-For values present, the resolver must fall back to the
// connection's remote address.
@Test
public void maxIndexOneFallsBackToRemoteIp() {
    ServerWebExchange exchange = buildExchange(remoteAddressOnlyBuilder());
    InetSocketAddress address = trustOne.resolve(exchange);
    assertThat(address.getHostName()).isEqualTo("0.0.0.0");
}
/**
 * Clones the given JobContext (or subtype) into a new context backed by
 * {@code conf}. MapContexts are cloned via cloneMapContext; ReduceContexts
 * cannot be cloned; other TaskAttemptContexts and plain JobContexts are
 * re-created reflectively.
 *
 * @throws IllegalArgumentException if the context is a ReduceContext or the
 *     reflective construction fails (original cause preserved)
 */
@SuppressWarnings("unchecked")
public static JobContext cloneContext(JobContext original, Configuration conf)
    throws IOException, InterruptedException {
  try {
    if (original instanceof MapContext<?, ?, ?, ?>) {
      return cloneMapContext((Mapper.Context) original, conf, null, null);
    } else if (original instanceof ReduceContext<?, ?, ?, ?>) {
      throw new IllegalArgumentException("can't clone ReduceContext");
    } else if (original instanceof TaskAttemptContext) {
      TaskAttemptContext spec = (TaskAttemptContext) original;
      return (JobContext) TASK_CONTEXT_CONSTRUCTOR.newInstance(conf, spec.getTaskAttemptID());
    } else {
      return (JobContext) JOB_CONTEXT_CONSTRUCTOR.newInstance(conf, original.getJobID());
    }
  } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
    // Collapsed three identical reflective-failure handlers into one multi-catch.
    throw new IllegalArgumentException("Can't clone object", e);
  }
}
// Smoke test: cloning a plain JobContext must not throw.
@Test
public void testCloneContext() throws Exception {
    ContextFactory.cloneContext(jobContext, conf);
}
/**
 * Builds the JMS destinations for this client from the configured destination
 * name. A name containing the separator either becomes one composite
 * destination (when isDestComposite() and destCount == 1) or is split and each
 * part expanded destCount times via addDestinations.
 */
public Destination[] createDestinations(int destCount) throws JMSException {
    final String destName = getClient().getDestName();
    ArrayList<Destination> destinations = new ArrayList<>();
    if (destName.contains(DESTINATION_SEPARATOR)) {
        if (getClient().isDestComposite() && (destCount == 1)) {
            // user was explicit about which destinations to make composite
            String[] simpleNames = mapToSimpleNames(destName.split(DESTINATION_SEPARATOR));
            String joinedSimpleNames = join(simpleNames, DESTINATION_SEPARATOR);
            // use the type of the 1st destination for the Destination instance
            byte destinationType = getDestinationType(destName);
            destinations.add(createCompositeDestination(destinationType, joinedSimpleNames, 1));
        } else {
            LOG.info("User requested multiple destinations, splitting: {}", destName);
            // either composite with multiple destinations to be suffixed
            // or multiple non-composite destinations
            String[] destinationNames = destName.split(DESTINATION_SEPARATOR);
            for (String splitDestName : destinationNames) {
                addDestinations(destinations, splitDestName, destCount);
            }
        }
    } else {
        // single plain destination name, expanded destCount times
        addDestinations(destinations, destName, destCount);
    }
    return destinations.toArray(new Destination[] {});
}
// An explicit composite request (destComposite=true, destCount=1) must produce a
// single destination joining the user-specified names.
@Test
public void testCreateDestinations_multipleComposite() throws JMSException {
    clientProperties.setDestComposite(true);
    clientProperties.setDestName("queue://foo,queue://cheese");
    Destination[] destinations = jmsClient.createDestinations(1);
    assertEquals(1, destinations.length);
    // no per-destination suffixes expected here: the composite keeps the
    // user-specified simple names joined as-is
    assertDestinationNameType("foo,cheese", QUEUE_TYPE, asAmqDest(destinations[0]));
}
// Upper bound (inclusive, microseconds) of the given latency bucket:
// one less than the next bucket's lower bound.
static int bucketMaxUs(int bucket) {
    return bucketMinUs(bucket + 1) - 1;
}
// Bucket upper bounds follow 2^(b+1)-1: 1, 3, 7, 15, 31, 63 for buckets 0..5.
@Test
public void bucketMaxUs() {
    for (int bucket = 0; bucket <= 5; bucket++) {
        assertEquals((1 << (bucket + 1)) - 1, LatencyDistribution.bucketMaxUs(bucket));
    }
}
// Number of distinct bigrams, i.e. the size of the bigram frequency table.
@Override
public long nbigram() {
    return freq2.size();
}
// The corpus fixture must report the expected number of distinct bigrams.
@Test
public void testGetNumBigrams() {
    System.out.println("getNumBigrams");
    assertEquals(18303, corpus.nbigram());
}
/**
 * Upper-cases every entry of the given array into a Set.
 *
 * @param array source strings; may be null
 * @return the set of upper-cased strings, or an immutable empty set when
 *     {@code array} is null
 */
static Set<String> convertToUpperCaseSet( String[] array ) {
  if ( array == null ) {
    return Collections.emptySet();
  }
  // Diamond operator instead of the explicit type argument; presize to the
  // array length to avoid rehashing.
  Set<String> strings = new HashSet<>( array.length );
  for ( String currentString : array ) {
    // NOTE(review): toUpperCase() is default-locale sensitive (e.g. Turkish
    // dotless i); confirm whether Locale.ROOT is wanted before changing.
    strings.add( currentString.toUpperCase() );
  }
  return strings;
}
// A null input array must map to an empty (non-null) set.
@Test
public void convertToUpperCaseSet_null_array() {
  Set<String> actualResult = MetaInject.convertToUpperCaseSet( null );
  assertNotNull( actualResult );
  assertTrue( actualResult.isEmpty() );
}
/**
 * Resolves the SQLite database file. A non-blank filename is used as given;
 * otherwise (or when the given path is a directory) the default "logback.db"
 * inside the app's database directory is used.
 */
public File getDatabaseFile(String filename) {
    File dbFile = null;
    boolean hasExplicitName = filename != null && filename.trim().length() > 0;
    if (hasExplicitName) {
        dbFile = new File(filename);
    }
    if (dbFile == null || dbFile.isDirectory()) {
        // Fall back to the platform default database location.
        dbFile = new File(new AndroidContextUtil().getDatabasePath("logback.db"));
    }
    return dbFile;
}
// An empty filename must fall back to the default "logback.db" database file.
@Test
public void emptyFilenameResultsInDefault() throws IOException {
    final File file = appender.getDatabaseFile("");
    assertThat(file, is(notNullValue()));
    assertThat(file.getName(), is("logback.db"));
}
/**
 * Looks up a counter by group and name, transparently mapping legacy names:
 * the deprecated MAP_INPUT_BYTES counter is redirected to
 * FileInputFormatCounter.BYTES_READ, and legacy group names are rewritten via
 * getNewGroupKey before the lookup.
 */
public synchronized Counter findCounter(String group, String name) {
    if (name.equals("MAP_INPUT_BYTES")) {
        LOG.warn("Counter name MAP_INPUT_BYTES is deprecated. " +
            "Use FileInputFormatCounters as group name and " +
            " BYTES_READ as counter name instead");
        return findCounter(FileInputFormatCounter.BYTES_READ);
    }
    String newGroupKey = getNewGroupKey(group);
    if (newGroupKey != null) {
        // legacy group name: substitute the canonical group key
        group = newGroupKey;
    }
    return getGroup(group).getCounterForName(name);
}
// File-system counter groups: a known counter name is resolved, and an unknown
// name is expected to yield null rather than an exception.
@Test
public void testFilesystemCounter() {
    GroupFactory groupFactory = new GroupFactoryForTest();
    Group fsGroup = groupFactory.newFileSystemGroup();
    org.apache.hadoop.mapreduce.Counter count1 = fsGroup.findCounter("ANY_BYTES_READ");
    Assert.assertNotNull(count1);
    // Verify no exception get thrown when finding an unknown counter
    org.apache.hadoop.mapreduce.Counter count2 = fsGroup.findCounter("Unknown");
    Assert.assertNull(count2);
}
// The function's registered name.
@Override
public String getName() {
    return _name;
}
// split_part(col, 'ab', 1) must match StringUtils.splitByWholeSeparator and
// return the literal string "null" when the requested part does not exist.
@Test
public void testStringSplitPartTransformFunction() {
    int index = 1;
    ExpressionContext expression = RequestContextUtils.getExpression(
        String.format("split_part(%s, 'ab', %d)", STRING_ALPHANUM_SV_COLUMN, index));
    TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    assertTrue(transformFunction instanceof ScalarTransformFunctionWrapper);
    assertEquals(transformFunction.getName(), "splitPart");
    String[] expectedValues = new String[NUM_ROWS];
    for (int i = 0; i < NUM_ROWS; i++) {
        String[] splitString = StringUtils.splitByWholeSeparator(_stringAlphaNumericSVValues[i], "ab");
        if (splitString.length > index) {
            expectedValues[i] = splitString[index];
        } else {
            // missing part is represented by the string "null"
            expectedValues[i] = "null";
        }
    }
    testTransformFunction(transformFunction, expectedValues);
}
/**
 * Loads a resource honoring the class-loading strategy configured for this
 * name: each source (application / plugin / dependencies) is consulted in
 * strategy order and the first hit wins; null when no source has it.
 */
@Override
public URL getResource(String name) {
    ClassLoadingStrategy loadingStrategy = getClassLoadingStrategy(name);
    log.trace("Received request to load resource '{}'", name);
    for (ClassLoadingStrategy.Source source : loadingStrategy.getSources()) {
        final URL url;
        switch (source) {
            case APPLICATION:
                url = super.getResource(name);
                break;
            case PLUGIN:
                url = findResource(name);
                break;
            case DEPENDENCIES:
                url = findResourceFromDependencies(name);
                break;
            default:
                url = null;
                break;
        }
        if (url == null) {
            log.trace("Couldn't find resource '{}' in {}", name, source);
            continue;
        }
        log.trace("Found resource '{}' in {} classpath", name, source);
        return url;
    }
    return null;
}
// With parent-last ordering, a resource present only in the plugin must be
// served from the plugin classpath.
@Test
void parentLastGetResourceExistsOnlyInPlugin() throws IOException, URISyntaxException {
    URL resource = parentLastPluginClassLoader.getResource("META-INF/plugin-file");
    assertFirstLine("plugin", resource);
}
// The notification type this component handles.
@Override
public Class<ReportAnalysisFailureNotification> getNotificationClass() {
    return ReportAnalysisFailureNotification.class;
}
// Sanity check of the declared notification class.
@Test
public void getNotificationClass_is_ReportAnalysisFailureNotification() {
    assertThat(underTest.getNotificationClass()).isEqualTo(ReportAnalysisFailureNotification.class);
}
/**
 * Initializes the node: converts the raw configuration into
 * TbMqttNodeConfiguration and connects the MQTT client. Any connection
 * failure is wrapped in a TbNodeException.
 */
@Override
public void init(TbContext ctx, TbNodeConfiguration configuration) throws TbNodeException {
    super.init(ctx);
    this.mqttNodeConfiguration = TbNodeUtils.convert(configuration, TbMqttNodeConfiguration.class);
    try {
        this.mqttClient = initClient(ctx);
    } catch (Exception e) {
        throw new TbNodeException(e);
    }
}
// When the MQTT connect future times out, init must surface a TbNodeException
// carrying the broker host/port message and marked as recoverable.
@Test
public void givenFailedByTimeoutConnectResult_whenInit_thenThrowsException() throws ExecutionException, InterruptedException, TimeoutException {
    mqttNodeConfig.setHost("localhost");
    mqttNodeConfig.setClientId("bfrbTESTmfkr23");
    mqttNodeConfig.setCredentials(new CertPemCredentials());
    mockConnectClient();
    given(promiseMock.get(anyLong(), any(TimeUnit.class))).willThrow(new TimeoutException("Failed to connect"));
    assertThatThrownBy(() -> mqttNode.init(ctxMock, new TbNodeConfiguration(JacksonUtil.valueToTree(mqttNodeConfig))))
            .isInstanceOf(TbNodeException.class)
            .hasMessage("java.lang.RuntimeException: Failed to connect to MQTT broker at localhost:1883.")
            .extracting(e -> ((TbNodeException) e).isUnrecoverable())
            .isEqualTo(false);
}
@Override
public void handleRequest(RestRequest request, RequestContext requestContext, Callback<RestResponse> callback) {
    //This code path cannot accept content types or accept types that contain
    //multipart/related. This is because these types of requests will usually have very large payloads and therefore
    //would degrade server performance since RestRequest reads everything into memory.
    // NOTE(review): when the request IS multipart, nothing is delegated here —
    // presumably isMultipart() reports the rejection via the callback; confirm.
    if (!isMultipart(request, requestContext, callback)) {
        _restRestLiServer.handleRequest(request, requestContext, callback);
    }
}
// Routes a set of URIs through the server with documentation, custom and debug
// request handlers installed, asserting each handler's canned response is
// returned for both the Rest and Stream code paths.
@Test(dataProvider = "requestHandlersData")
public void testRequestHandlers(URI uri, String expectedResponse) {
    RestLiConfig config = new RestLiConfig();
    config.addResourcePackageNames("com.linkedin.restli.server.twitter");
    config.setDocumentationRequestHandler(new DefaultDocumentationRequestHandler() {
        @Override
        public void initialize(RestLiConfig config, Map<String, ResourceModel> rootResources) {/* no-op */}
        @Override
        public void handleRequest(RestRequest req, RequestContext ctx, Callback<RestResponse> cb) {
            cb.onSuccess(new RestResponseBuilder().setEntity(toByteString(DOCUMENTATION_RESPONSE)).build());
        }
    });
    config.addCustomRequestHandlers(new CustomRequestHandler());
    config.addDebugRequestHandlers(new DebugRequestHandler("a", DEBUG_HANDLER_RESPONSE_A),
        new DebugRequestHandler("b", DEBUG_HANDLER_RESPONSE_B));
    RestLiServer server = new RestLiServer(config, new EasyMockResourceFactory(), createMock(Engine.class));
    RestRequest restReq = new RestRequestBuilder(uri).build();
    server.handleRequest(restReq, createMock(RequestContext.class),
        new RestResponseAssertionCallback(expectedResponse));
    StreamRequest streamReq = new StreamRequestBuilder(uri).build(EntityStreams.emptyStream());
    server.handleRequest(streamReq, createMock(RequestContext.class),
        new StreamResponseAssertionCallback(expectedResponse));
}
/**
 * Decides whether the Sentinel filter applies to this invoker: a non-blank
 * per-config "enabled" parameter wins; otherwise the global RpcConfigs value
 * is used, defaulting to true.
 */
@Override
public boolean needToLoad(FilterInvoker invoker) {
    AbstractInterfaceConfig<?, ?> config = invoker.getConfig();
    String flag = config.getParameter(SentinelConstants.SOFA_RPC_SENTINEL_ENABLED);
    return StringUtils.isNotBlank(flag)
        ? Boolean.parseBoolean(flag)
        : RpcConfigs.getOrDefaultValue(SentinelConstants.SOFA_RPC_SENTINEL_ENABLED, true);
}
// Consumer filter loading: a blank per-config parameter counts as unset, and
// the global RpcConfigs value is consulted as the fallback.
@Test
public void testNeedToLoadConsumer() {
    SentinelSofaRpcConsumerFilter consumerFilter = new SentinelSofaRpcConsumerFilter();
    ConsumerConfig consumerConfig = new ConsumerConfig();
    consumerConfig.setInterfaceId(Serializer.class.getName());
    consumerConfig.setId("BBB");
    FilterInvoker invoker = new FilterInvoker(null, null, consumerConfig);
    assertTrue(consumerFilter.needToLoad(invoker));
    consumerConfig.setParameter(SentinelConstants.SOFA_RPC_SENTINEL_ENABLED, "false");
    assertFalse(consumerFilter.needToLoad(invoker));
    // blank value means "unset" and falls through to the global config
    consumerConfig.setParameter(SentinelConstants.SOFA_RPC_SENTINEL_ENABLED, "");
    assertTrue(consumerFilter.needToLoad(invoker));
    RpcConfigs.putValue(SentinelConstants.SOFA_RPC_SENTINEL_ENABLED, "false");
    assertFalse(consumerFilter.needToLoad(invoker));
}
/**
 * Truncates the string so its UTF-8 encoding fits within maxBytes,
 * delegating to truncateByByteLength with a trailing marker enabled.
 */
public static String truncateUtf8(String str, int maxBytes) {
    Charset charset = StandardCharsets.UTF_8;
    // A single character is at most 4 bytes long in UTF-8.
    return truncateByByteLength(str, charset, maxBytes, 4, true);
}
// "一二三四" is 3 bytes per char in UTF-8; truncating to 11 bytes keeps two
// chars plus the "..." marker.
@Test
public void truncateUtf8Test3() {
    final String str = "一二三四";
    final String ret = StrUtil.truncateUtf8(str, 11);
    assertEquals("一二...", ret);
}
// Synchronously sets the latch count by blocking on the async variant.
@Override
public boolean trySetCount(long count) {
    return get(trySetCountAsync(count));
}
// A latch whose count was set must be deletable.
@Test
public void testDelete() throws Exception {
    RCountDownLatch latch = redisson.getCountDownLatch("latch");
    latch.trySetCount(1);
    Assertions.assertTrue(latch.delete());
}
private static Timestamp fromLong(Long elapsedSinceEpoch, TimestampPrecise precise) throws IllegalArgumentException { final long seconds; final int nanos; switch (precise) { case Millis: seconds = Math.floorDiv(elapsedSinceEpoch, (long) THOUSAND); nanos = (int) Math.floorMod(elapsedSinceEpoch, (long) THOUSAND) * MILLION; break; case Micros: seconds = Math.floorDiv(elapsedSinceEpoch, (long) MILLION); nanos = (int) Math.floorMod(elapsedSinceEpoch, (long) MILLION) * THOUSAND; break; case Nanos: seconds = Math.floorDiv(elapsedSinceEpoch, (long) BILLION); nanos = (int) Math.floorMod(elapsedSinceEpoch, (long) BILLION); break; default: throw new IllegalArgumentException("Unknown precision: " + precise); } if (seconds < SECONDS_LOWERLIMIT || seconds > SECONDS_UPPERLIMIT) { throw new IllegalArgumentException("given seconds is out of range"); } if (nanos < NANOSECONDS_LOWERLIMIT || nanos > NANOSECONDS_UPPERLIMIT) { // NOTE here is unexpected cases because exceeded part is // moved to seconds by floor methods throw new IllegalArgumentException("given nanos is out of range"); } return Timestamp.newBuilder().setSeconds(seconds).setNanos(nanos).build(); }
// A micros value whose seconds component is below the supported lower limit
// must be rejected.
@Test
void timestampMicrosConversionSecondsLowerLimit() throws Exception {
    assertThrows(IllegalArgumentException.class, () -> {
        TimestampMicrosConversion conversion = new TimestampMicrosConversion();
        long exceeded = (ProtoConversions.SECONDS_LOWERLIMIT - 1) * 1000000;
        conversion.fromLong(exceeded, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
    });
}
// A slice that carries its own retained reference, unlike slice() whose
// lifetime is tied to this buffer.
@Override
public ByteBuf retainedSlice() {
    return slice().retain();
}
// Operating on a retained slice after release must fail.
@Test
public void testRetainedSliceAfterReleaseRetainedSlice() {
    ByteBuf buf = newBuffer(1);
    ByteBuf buf2 = buf.retainedSlice(0, 1);
    assertRetainedSliceFailAfterRelease(buf, buf2);
}
/**
 * Reports the current per-vertex resource requirements: each vertex's
 * configured minimum parallelism and its current parallelism as upper bound.
 */
@Override
public JobResourceRequirements requestJobResourceRequirements() {
    final JobResourceRequirements.Builder requirements = JobResourceRequirements.newBuilder();
    jobInformation
            .getVertices()
            .forEach(
                    vertex ->
                            requirements.setParallelismForJobVertex(
                                    vertex.getJobVertexID(),
                                    vertex.getMinParallelism(),
                                    vertex.getParallelism()));
    return requirements.build();
}
// In reactive mode, the default requirements span from parallelism 1 up to the
// vertex's default max parallelism.
@Test
void testRequestDefaultResourceRequirementsInReactiveMode() throws Exception {
    final JobGraph jobGraph = createJobGraph();
    final Configuration configuration = new Configuration();
    configuration.set(JobManagerOptions.SCHEDULER_MODE, SchedulerExecutionMode.REACTIVE);
    final AdaptiveScheduler scheduler =
            new AdaptiveSchedulerBuilder(
                            jobGraph, mainThreadExecutor, EXECUTOR_RESOURCE.getExecutor())
                    .setJobMasterConfiguration(configuration)
                    .build();
    assertThat(scheduler.requestJobResourceRequirements())
            .isEqualTo(
                    JobResourceRequirements.newBuilder()
                            .setParallelismForJobVertex(
                                    JOB_VERTEX.getID(),
                                    1,
                                    SchedulerBase.getDefaultMaxParallelism(JOB_VERTEX))
                            .build());
}
/**
 * Parses DistCp command-line arguments into a DistCpOptions instance.
 * Boolean switches are applied first, then each value-carrying option is
 * validated and set on the builder; deprecated options are warned and ignored.
 *
 * @throws IllegalArgumentException on unparsable arguments or invalid values
 */
public static DistCpOptions parse(String[] args)
    throws IllegalArgumentException {
  CommandLineParser parser = new CustomParser();
  CommandLine command;
  try {
    command = parser.parse(cliOptions, args, true);
  } catch (ParseException e) {
    throw new IllegalArgumentException("Unable to parse arguments. " + Arrays.toString(args), e);
  }
  DistCpOptions.Builder builder = parseSourceAndTargetPaths(command);
  // All boolean switches in one chained call.
  builder
      .withAtomicCommit(
          command.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()))
      .withSyncFolder(
          command.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()))
      .withDeleteMissing(
          command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()))
      .withIgnoreFailures(
          command.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()))
      .withOverwrite(
          command.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()))
      .withAppend(
          command.hasOption(DistCpOptionSwitch.APPEND.getSwitch()))
      .withSkipCRC(
          command.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()))
      .withBlocking(
          !command.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch()))
      .withVerboseLog(
          command.hasOption(DistCpOptionSwitch.VERBOSE_LOG.getSwitch()))
      .withDirectWrite(
          command.hasOption(DistCpOptionSwitch.DIRECT_WRITE.getSwitch()))
      .withUseIterator(
          command.hasOption(DistCpOptionSwitch.USE_ITERATOR.getSwitch()))
      .withUpdateRoot(
          command.hasOption(DistCpOptionSwitch.UPDATE_ROOT.getSwitch()));
  if (command.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
    String[] snapshots = getVals(command, DistCpOptionSwitch.DIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    builder.withUseDiff(snapshots[0], snapshots[1]);
  }
  if (command.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
    String[] snapshots = getVals(command, DistCpOptionSwitch.RDIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    builder.withUseRdiff(snapshots[0], snapshots[1]);
  }
  if (command.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
    builder.withFiltersFile(
        getVal(command, DistCpOptionSwitch.FILTERS.getSwitch()));
  }
  if (command.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
    builder.withLogPath(
        new Path(getVal(command, DistCpOptionSwitch.LOG_PATH.getSwitch())));
  }
  if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())) {
    final String workPath = getVal(command, DistCpOptionSwitch.WORK_PATH.getSwitch());
    if (workPath != null && !workPath.isEmpty()) {
      builder.withAtomicWorkPath(new Path(workPath));
    }
  }
  if (command.hasOption(DistCpOptionSwitch.TRACK_MISSING.getSwitch())) {
    builder.withTrackMissing(
        new Path(getVal(
            command,
            DistCpOptionSwitch.TRACK_MISSING.getSwitch())));
  }
  if (command.hasOption(DistCpOptionSwitch.BANDWIDTH.getSwitch())) {
    try {
      final Float mapBandwidth = Float.parseFloat(
          getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()));
      builder.withMapBandwidth(mapBandwidth);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Bandwidth specified is invalid: " +
          getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()), e);
    }
  }
  if (command.hasOption(
      DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch())) {
    try {
      final Integer numThreads = Integer.parseInt(getVal(command,
          DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()));
      builder.withNumListstatusThreads(numThreads);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException(
          "Number of liststatus threads is invalid: " + getVal(command,
              DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()), e);
    }
  }
  if (command.hasOption(DistCpOptionSwitch.MAX_MAPS.getSwitch())) {
    try {
      final Integer maps = Integer.parseInt(
          getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()));
      builder.maxMaps(maps);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Number of maps is invalid: " +
          getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()), e);
    }
  }
  if (command.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
    builder.withCopyStrategy(
        getVal(command, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
  }
  if (command.hasOption(DistCpOptionSwitch.PRESERVE_STATUS.getSwitch())) {
    builder.preserve(
        getVal(command, DistCpOptionSwitch.PRESERVE_STATUS.getSwitch()));
  }
  if (command.hasOption(DistCpOptionSwitch.FILE_LIMIT.getSwitch())) {
    LOG.warn(DistCpOptionSwitch.FILE_LIMIT.getSwitch() + " is a deprecated" +
        " option. Ignoring.");
  }
  if (command.hasOption(DistCpOptionSwitch.SIZE_LIMIT.getSwitch())) {
    LOG.warn(DistCpOptionSwitch.SIZE_LIMIT.getSwitch() + " is a deprecated" +
        " option. Ignoring.");
  }
  if (command.hasOption(DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch())) {
    // NOTE(review): trim() is applied to the switch NAME, not to the option
    // VALUE — looks like a misplaced parenthesis (probably meant
    // getVal(...).trim()); same pattern below for COPY_BUFFER_SIZE. Confirm.
    final String chunkSizeStr = getVal(command,
        DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch().trim());
    try {
      int csize = Integer.parseInt(chunkSizeStr);
      csize = csize > 0 ? csize : 0;
      LOG.info("Set distcp blocksPerChunk to " + csize);
      builder.withBlocksPerChunk(csize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("blocksPerChunk is invalid: " +
          chunkSizeStr, e);
    }
  }
  if (command.hasOption(DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch())) {
    final String copyBufferSizeStr = getVal(command,
        DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch().trim());
    try {
      int copyBufferSize = Integer.parseInt(copyBufferSizeStr);
      builder.withCopyBufferSize(copyBufferSize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("copyBufferSize is invalid: " +
          copyBufferSizeStr, e);
    }
  }
  return builder.build();
}
// Default bandwidth is 0 (unset); an explicit -bandwidth value is parsed as float.
@Test
public void testParsebandwidth() {
    DistCpOptions options = OptionsParser.parse(new String[] {
        "hdfs://localhost:8020/source/first",
        "hdfs://localhost:8020/target/"});
    assertThat(options.getMapBandwidth()).isCloseTo(0f, within(DELTA));
    options = OptionsParser.parse(new String[] {
        "-bandwidth",
        "11.2",
        "hdfs://localhost:8020/source/first",
        "hdfs://localhost:8020/target/"});
    assertThat(options.getMapBandwidth()).isCloseTo(11.2f, within(DELTA));
}
/**
 * Removes the backend from the id-to-backend map and publishes a fresh
 * immutable report-version snapshot without its entry (copy-on-write).
 */
public void dropBackend(Backend backend) {
    long backendId = backend.getId();
    idToBackendRef.remove(backendId);
    // Copy-on-write: mutate a copy, then swap in an immutable snapshot.
    Map<Long, AtomicLong> updatedReportVersions = Maps.newHashMap(idToReportVersionRef);
    updatedReportVersions.remove(backendId);
    idToReportVersionRef = ImmutableMap.copyOf(updatedReportVersions);
}
// In shared-data run mode, dropping a backend by host/port must make it
// unresolvable via getBackendWithHeartbeatPort.
@Test
public void testDropBackend() throws Exception {
    new MockUp<RunMode>() {
        @Mock
        public RunMode getCurrentRunMode() {
            return RunMode.SHARED_DATA;
        }
    };
    Backend be = new Backend(10001, "newHost", 1000);
    service.addBackend(be);
    LocalMetastore localMetastore = new LocalMetastore(globalStateMgr, null, null);
    new Expectations() {
        {
            service.getBackendWithHeartbeatPort("newHost", 1000);
            minTimes = 0;
            result = be;
            globalStateMgr.getLocalMetastore();
            minTimes = 0;
            result = localMetastore;
        }
    };
    service.addBackend(be);
    be.setStarletPort(1001);
    service.dropBackend("newHost", 1000, false);
    Backend beIP = service.getBackendWithHeartbeatPort("newHost", 1000);
    Assert.assertTrue(beIP == null);
}
/**
 * Creates a file using the default mode derived from the configured
 * permission umask, delegating to the FsPermission-based create overload.
 */
@Override
public FSDataOutputStream create(Path path, boolean overwrite, int bufferSize, short replication,
    long blockSize, Progressable progress) throws IOException {
    String confUmask = mAlluxioConf.getString(PropertyKey.SECURITY_AUTHORIZATION_PERMISSION_UMASK);
    // Apply the configured umask to the default file mode.
    Mode mode = ModeUtils.applyFileUMask(Mode.defaults(), confUmask);
    return this.create(path, new FsPermission(mode.toShort()), overwrite, bufferSize, replication,
        blockSize, progress);
}
// Both comma- and semicolon-separated ZooKeeper authority lists must produce a
// filesystem configured with the normalized (comma-separated) ZK address.
@Test
public void hadoopShouldLoadFileSystemWithMultipleZkUri() throws Exception {
    org.apache.hadoop.conf.Configuration conf = getConf();
    URI uri = URI.create(Constants.HEADER + "zk@host1:2181,host2:2181,host3:2181/tmp/path.txt");
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(uri, conf);
    FileSystem hfs = getHadoopFilesystem(fs);
    assertTrue(hfs.mFileSystem.getConf().getBoolean(PropertyKey.ZOOKEEPER_ENABLED));
    assertEquals("host1:2181,host2:2181,host3:2181",
        hfs.mFileSystem.getConf().get(PropertyKey.ZOOKEEPER_ADDRESS));
    uri = URI.create(Constants.HEADER + "zk@host1:2181;host2:2181;host3:2181/tmp/path.txt");
    // BUG FIX: reassign hfs (previously only `fs` was reassigned), so the
    // assertions below check the filesystem created from the semicolon-separated
    // URI instead of re-checking the first one.
    hfs = getHadoopFilesystem(org.apache.hadoop.fs.FileSystem.get(uri, conf));
    assertTrue(hfs.mFileSystem.getConf().getBoolean(PropertyKey.ZOOKEEPER_ENABLED));
    assertEquals("host1:2181,host2:2181,host3:2181",
        hfs.mFileSystem.getConf().get(PropertyKey.ZOOKEEPER_ADDRESS));
}
// Drains the tracked NamespaceWatchers, removing each one server-side.
// Working on a local copy while confirming membership via entries.remove()
// ensures each watcher is removed exactly once even under concurrent calls.
void removeWatchers() {
    List<NamespaceWatcher> localEntries = Lists.newArrayList(entries);
    while (localEntries.size() > 0) {
        NamespaceWatcher watcher = localEntries.remove(0);
        // only the caller that wins the entries.remove() race performs removal
        if (entries.remove(watcher)) {
            try {
                log.debug("Removing watcher for path: " + watcher.getUnfixedPath());
                RemoveWatchesBuilderImpl builder = new RemoveWatchesBuilderImpl(client);
                builder.internalRemoval(watcher, watcher.getUnfixedPath());
            } catch (Exception e) {
                // best-effort: log and continue with the remaining watchers
                log.error("Could not remove watcher for path: " + watcher.getUnfixedPath());
            }
        }
    }
}
// Setting a watcher on a missing node throws NoNodeException, and a subsequent
// removeWatchers() must still succeed (nothing was registered).
@Test
public void testMissingNode() throws Exception {
    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), new RetryOneTime(1));
    try {
        client.start();
        WatcherRemovalFacade removerClient = (WatcherRemovalFacade) client.newWatcherRemoveCuratorFramework();
        Watcher w = new Watcher() {
            @Override
            public void process(WatchedEvent event) {
                // NOP
            }
        };
        try {
            removerClient.getData().usingWatcher(w).forPath("/one/two/three");
            fail("Should have thrown NoNodeException");
        } catch (KeeperException.NoNodeException expected) {
            // expected
        }
        removerClient.removeWatchers();
    } finally {
        TestCleanState.closeAndTestClean(client);
    }
}
// Looks up the data source group rule registered under the given name.
public Optional<ReadwriteSplittingDataSourceGroupRule> findDataSourceGroupRule(final String dataSourceName) {
    return Optional.ofNullable(dataSourceRuleGroups.get(dataSourceName));
}
@Test
void assertFindDataSourceGroupRule() {
    // The group rule named "readwrite" must be present and match the expected config.
    Optional<ReadwriteSplittingDataSourceGroupRule> actual = createReadwriteSplittingRule().findDataSourceGroupRule("readwrite");
    assertTrue(actual.isPresent());
    assertDataSourceGroupRule(actual.get());
}
@Override
public Iterable<Token> tokenize(String input, Language language, StemMode stemMode, boolean removeAccents) {
    // Empty input yields an immutable empty list without touching the analyzer.
    if (input.isEmpty()) {
        return List.of();
    }
    // Pick the analyzer for this language/stemming configuration and tokenize.
    List<Token> tokenList =
            textToTokens(input, analyzerFactory.getAnalyzer(language, stemMode, removeAccents));
    // Lazily-built FINEST trace of the tokenization result.
    log.log(Level.FINEST,
            () -> "Tokenized '" + language + "' text='" + input + "' into: n=" + tokenList.size()
                    + ", tokens=" + tokenList);
    return tokenList;
}
@Test
public void testLithuanianTokenizer() {
    // Lithuanian stemming (StemMode.ALL) should reduce words to their stems and
    // keep the numeric token intact.
    String text = "Žalgirio mūšio data yra 1410 metai";
    Iterable<Token> tokens = luceneLinguistics().getTokenizer()
        .tokenize(text, Language.LITHUANIAN, StemMode.ALL, true);
    assertEquals(List.of("žalgir", "mūš", "dat", "1410", "met"), tokenStrings(tokens));
}
Map<TaskId, Task> allTasks() { // not bothering with an unmodifiable map, since the tasks themselves are mutable, but // if any outside code modifies the map or the tasks, it would be a severe transgression. if (stateUpdater != null) { final Map<TaskId, Task> ret = stateUpdater.getTasks().stream().collect(Collectors.toMap(Task::id, x -> x)); ret.putAll(tasks.allTasksPerId()); ret.putAll(tasks.pendingTasksToInit().stream().collect(Collectors.toMap(Task::id, x -> x))); return ret; } else { return tasks.allTasksPerId(); } }
@Test
public void shouldReturnRunningTasksStateUpdaterTasksAndTasksToInitInAllTasks() {
    // allTasks() must union three sources: registered tasks, state-updater tasks,
    // and tasks still pending initialization.
    // NOTE(review): activeTaskToInit (taskId01) is built with taskId03Partitions —
    // possibly a copy-paste slip; confirm whether taskId01Partitions was intended.
    final StreamTask activeTaskToInit = statefulTask(taskId01, taskId01ChangelogPartitions)
        .inState(State.CREATED)
        .withInputPartitions(taskId03Partitions).build();
    final StreamTask runningActiveTask = statefulTask(taskId03, taskId03ChangelogPartitions)
        .inState(State.RUNNING)
        .withInputPartitions(taskId03Partitions).build();
    final StandbyTask standbyTaskInStateUpdater = standbyTask(taskId02, taskId02ChangelogPartitions)
        .inState(State.RUNNING)
        .withInputPartitions(taskId02Partitions).build();
    final TasksRegistry tasks = mock(TasksRegistry.class);
    final TaskManager taskManager = setUpTaskManager(ProcessingMode.AT_LEAST_ONCE, tasks, true);
    when(stateUpdater.getTasks()).thenReturn(mkSet(standbyTaskInStateUpdater));
    when(tasks.allTasksPerId()).thenReturn(mkMap(mkEntry(taskId03, runningActiveTask)));
    when(tasks.pendingTasksToInit()).thenReturn(mkSet(activeTaskToInit));
    assertEquals(
        taskManager.allTasks(),
        mkMap(
            mkEntry(taskId03, runningActiveTask),
            mkEntry(taskId02, standbyTaskInStateUpdater),
            mkEntry(taskId01, activeTaskToInit)
        )
    );
}
@Override
public Iterable<K> get() {
    // Lazily reads and decodes all keys of the multimap side input, resuming
    // from cached pages when available.
    return StateFetchingIterators.readAllAndDecodeStartingFrom(
        cache, beamFnStateClient, keysRequest, keyCoder);
}
@Test
public void testGetCached() throws Exception {
    // First side input populates the shared cache from the fake state client;
    // a second side input backed by a client that always throws must be served
    // entirely from the cache.
    FakeBeamFnStateClient fakeBeamFnStateClient =
        new FakeBeamFnStateClient(
            ImmutableMap.of(
                keysStateKey(), KV.of(ByteArrayCoder.of(), asList(A, B)),
                key(A), KV.of(StringUtf8Coder.of(), asList("A1", "A2", "A3")),
                key(B), KV.of(StringUtf8Coder.of(), asList("B1", "B2"))));
    Cache<?, ?> cache = Caches.eternal();
    {
        // The first side input will populate the cache.
        MultimapSideInput<byte[], String> multimapSideInput =
            new MultimapSideInput<>(
                cache,
                fakeBeamFnStateClient,
                "instructionId",
                keysStateKey(),
                ByteArrayCoder.of(),
                StringUtf8Coder.of(),
                true);
        assertArrayEquals(
            new String[] {"A1", "A2", "A3"},
            Iterables.toArray(multimapSideInput.get(A), String.class));
        assertArrayEquals(
            new String[] {"B1", "B2"},
            Iterables.toArray(multimapSideInput.get(B), String.class));
        assertArrayEquals(
            new String[] {},
            Iterables.toArray(multimapSideInput.get(UNKNOWN), String.class));
        assertArrayEquals(
            new byte[][] {A, B},
            Iterables.toArray(multimapSideInput.get(), byte[].class));
    }
    {
        // The next side input will load all of its contents from the cache.
        MultimapSideInput<byte[], String> multimapSideInput =
            new MultimapSideInput<>(
                cache,
                requestBuilder -> {
                    throw new IllegalStateException("Unexpected call for test.");
                },
                "instructionId",
                keysStateKey(),
                ByteArrayCoder.of(),
                StringUtf8Coder.of(),
                true);
        assertArrayEquals(
            new String[] {"A1", "A2", "A3"},
            Iterables.toArray(multimapSideInput.get(A), String.class));
        assertArrayEquals(
            new String[] {"B1", "B2"},
            Iterables.toArray(multimapSideInput.get(B), String.class));
        assertArrayEquals(
            new String[] {},
            Iterables.toArray(multimapSideInput.get(UNKNOWN), String.class));
        assertArrayEquals(
            new byte[][] {A, B},
            Iterables.toArray(multimapSideInput.get(), byte[].class));
    }
}
public static List<Path> pluginUrls(Path topPath) throws IOException { boolean containsClassFiles = false; Set<Path> archives = new TreeSet<>(); LinkedList<DirectoryEntry> dfs = new LinkedList<>(); Set<Path> visited = new HashSet<>(); if (isArchive(topPath)) { return Collections.singletonList(topPath); } DirectoryStream<Path> topListing = Files.newDirectoryStream( topPath, PLUGIN_PATH_FILTER ); dfs.push(new DirectoryEntry(topListing)); visited.add(topPath); try { while (!dfs.isEmpty()) { Iterator<Path> neighbors = dfs.peek().iterator; if (!neighbors.hasNext()) { dfs.pop().stream.close(); continue; } Path adjacent = neighbors.next(); if (Files.isSymbolicLink(adjacent)) { try { Path symlink = Files.readSymbolicLink(adjacent); // if symlink is absolute resolve() returns the absolute symlink itself Path parent = adjacent.getParent(); if (parent == null) { continue; } Path absolute = parent.resolve(symlink).toRealPath(); if (Files.exists(absolute)) { adjacent = absolute; } else { continue; } } catch (IOException e) { // See https://issues.apache.org/jira/browse/KAFKA-6288 for a reported // failure. Such a failure at this stage is not easily reproducible and // therefore an exception is caught and ignored after issuing a // warning. This allows class scanning to continue for non-broken plugins. log.warn( "Resolving symbolic link '{}' failed. Ignoring this path.", adjacent, e ); continue; } } if (!visited.contains(adjacent)) { visited.add(adjacent); if (isArchive(adjacent)) { archives.add(adjacent); } else if (isClassFile(adjacent)) { containsClassFiles = true; } else { DirectoryStream<Path> listing = Files.newDirectoryStream( adjacent, PLUGIN_PATH_FILTER ); dfs.push(new DirectoryEntry(listing)); } } } } finally { while (!dfs.isEmpty()) { dfs.pop().stream.close(); } } if (containsClassFiles) { if (archives.isEmpty()) { return Collections.singletonList(topPath); } log.warn("Plugin path contains both java archives and class files. 
Returning only the" + " archives"); } return Arrays.asList(archives.toArray(new Path[0])); }
@Test
public void testEmptyStructurePluginUrls() throws Exception {
    // A plugin path with only empty directories must yield no plugin locations.
    createBasicDirectoryLayout();
    assertEquals(Collections.emptyList(), PluginUtils.pluginUrls(pluginPath));
}
@Override
public ImmutableList<String> computeEntrypoint(List<String> jvmFlags) throws IOException {
    // Builds the container entrypoint `java [jvmFlags] -jar <APP_ROOT>/<jar>` using
    // the Main-Class attribute of the input JAR's manifest.
    try (JarFile jarFile = new JarFile(jarPath.toFile())) {
        // FIX: JarFile.getManifest() returns null when the JAR has no manifest at all;
        // previously that caused an uninformative NullPointerException. Treat a missing
        // manifest the same as a missing Main-Class attribute.
        java.util.jar.Manifest manifest = jarFile.getManifest();
        String mainClass =
            manifest == null ? null : manifest.getMainAttributes().getValue(Attributes.Name.MAIN_CLASS);
        if (mainClass == null) {
            throw new IllegalArgumentException(
                "`Main-Class:` attribute for an application main class not defined in the input JAR's "
                    + "manifest (`META-INF/MANIFEST.MF` in the JAR).");
        }
        ImmutableList.Builder<String> entrypoint = ImmutableList.builder();
        entrypoint.add("java");
        entrypoint.addAll(jvmFlags);
        entrypoint.add("-jar");
        entrypoint.add(JarLayers.APP_ROOT + "/" + jarPath.getFileName().toString());
        return entrypoint.build();
    }
}
@Test
public void testComputeEntrypoint_noMainClass() throws URISyntaxException {
    // A JAR whose manifest lacks Main-Class must fail with a descriptive message.
    Path standardJar = Paths.get(Resources.getResource(STANDARD_JAR_EMPTY).toURI());
    StandardPackagedProcessor standardPackagedModeProcessor =
        new StandardPackagedProcessor(standardJar, JAR_JAVA_VERSION);
    IllegalArgumentException exception =
        assertThrows(
            IllegalArgumentException.class,
            () -> standardPackagedModeProcessor.computeEntrypoint(ImmutableList.of()));
    assertThat(exception)
        .hasMessageThat()
        .isEqualTo(
            "`Main-Class:` attribute for an application main class not defined in the input JAR's manifest "
                + "(`META-INF/MANIFEST.MF` in the JAR).");
}
// Returns all known implementation classes of the given SPI, resolved through the
// caller's class loader; delegates to the cached per-service loader.
static <S> List<Class<S>> getAllExtensionClass(Class<S> service) {
    return InnerEnhancedServiceLoader.getServiceLoader(service).getAllExtensionClass(findClassLoader(), true);
}
@Test
public void getAllExtensionClass() {
    // The loader must return every Hello implementation; the asserted index order
    // reflects the loader's deterministic ordering (e.g. by priority/registration).
    List<Class<Hello>> allExtensionClass = EnhancedServiceLoader.getAllExtensionClass(Hello.class);
    assertThat(allExtensionClass.get(3).getSimpleName()).isEqualTo((LatinHello.class.getSimpleName()));
    assertThat(allExtensionClass.get(2).getSimpleName()).isEqualTo((FrenchHello.class.getSimpleName()));
    assertThat(allExtensionClass.get(1).getSimpleName()).isEqualTo((EnglishHello.class.getSimpleName()));
    assertThat(allExtensionClass.get(0).getSimpleName()).isEqualTo((ChineseHello.class.getSimpleName()));
}
@Override
public void close() {
    // Close immediately: delegate to the timed variant with a zero timeout.
    close(Duration.ZERO);
}
@Test
public void shouldThrowOnBeginTransactionIfProducerIsClosed() {
    // beginTransaction() on a closed producer must fail with IllegalStateException.
    buildMockProducer(true);
    producer.close();
    assertThrows(IllegalStateException.class, producer::beginTransaction);
}
public static void addSecurityProvider(Properties properties) {
    // Installs every provider configured as `security.provider` or
    // `security.provider.<n>`, in ascending numeric position order.
    properties.keySet().stream()
        .map(Object::toString)
        .filter(key -> key.matches("security\\.provider(\\.\\d+)?"))
        // FIX: the previous lexicographic sort ordered security.provider.10 before
        // security.provider.2; sort by the numeric suffix instead (the bare
        // `security.provider` key sorts first as position 0).
        .sorted(Comparator.comparingInt(key -> {
            int dot = key.lastIndexOf('.');
            String tail = key.substring(dot + 1);
            return "provider".equals(tail) ? 0 : Integer.parseInt(tail);
        }))
        .forEach(key -> addSecurityProvider(properties.get(key).toString()));
}
@Test
void addSecurityProviderTestWithConfigForUnconfigurableProvider() {
    // A provider spec with position and config args must still install the provider,
    // appending it at the end of the provider list.
    removeAllDummyProviders();
    int providersCountBefore = Security.getProviders().length;
    SecurityProviderLoader.addSecurityProvider(DummyProvider.class.getName()+":0:Configure");
    Provider[] providersAfter = Security.getProviders();
    Provider provider = Security.getProvider(DummyProvider.PROVIDER_NAME);
    assertEquals(providersCountBefore + 1, providersAfter.length);
    assertNotNull(provider, "Provider not installed.");
    assertEquals(DummyProvider.class, provider.getClass());
    assertEquals(provider, providersAfter[providersAfter.length - 1]);
}
protected static void parse(OldExcelExtractor extractor, XHTMLContentHandler xhtml) throws TikaException, IOException, SAXException { // Get the whole text, as a single string String text = extractor.getText(); // Split and output String line; BufferedReader reader = new BufferedReader(new StringReader(text)); while ((line = reader.readLine()) != null) { xhtml.startElement("p"); xhtml.characters(line); xhtml.endElement("p"); } }
@Test
@Disabled
public void testMetadata() throws Exception {
    // Legacy Excel 4 files carry a detectable content type but no document metadata.
    TikaInputStream stream = getTestFile(file);
    Metadata metadata = new Metadata();
    ContentHandler handler = new BodyContentHandler();
    OldExcelParser parser = new OldExcelParser();
    parser.parse(stream, handler, metadata, new ParseContext());
    // We can get the content type
    assertEquals("application/vnd.ms-excel.sheet.4", metadata.get(Metadata.CONTENT_TYPE));
    // But no other metadata
    assertEquals(null, metadata.get(TikaCoreProperties.TITLE));
    assertEquals(null, metadata.get(TikaCoreProperties.SUBJECT));
}
public void removeJobMetricsGroup(JobID jobId) { if (jobId != null) { TaskManagerJobMetricGroup groupToClose; synchronized (this) { // synchronization isn't strictly necessary as of FLINK-24864 groupToClose = jobs.remove(jobId); } if (groupToClose != null) { groupToClose.close(); } } }
@Test
void testRemoveInvalidJobID() {
    // Removing a job that was never registered must be a silent no-op.
    metricGroup.removeJobMetricsGroup(JOB_ID);
}
// Static-utility class: private constructor prevents instantiation.
private RabinFingerprint() {
}
@Test
public void testRabinFingerprint() {
    // The Rabin fingerprint of a schema must be stable: the asserted constant pins
    // the schema-id algorithm against accidental changes.
    SchemaWriter writer = new SchemaWriter("SomeType");
    writer.writeInt32("id", 0);
    writer.writeString("name", null);
    writer.writeInt8("age", (byte) 0);
    writer.writeArrayOfTimestamp("times", null);
    Schema schema = writer.build();
    assertEquals(3662264393229655598L, schema.getSchemaId());
}
// Shrinks `suggestions` to at most `maxSuggestions` entries; removeSuggestion()
// recycles removed StringBuilder entries back into `stringsPool`.
// NOTE(review): the name looks like a typo for "trimSuggestions", but callers and
// tests use "tripSuggestions", so it must stay for compatibility.
public static void tripSuggestions(
    List<CharSequence> suggestions, final int maxSuggestions, List<CharSequence> stringsPool) {
    while (suggestions.size() > maxSuggestions) {
        removeSuggestion(suggestions, maxSuggestions, stringsPool);
    }
}
@Test
public void testTrimSuggestionsWithMultipleRecycleBackToPool() {
    // Trimming to 2 must keep the first two entries and recycle every removed
    // StringBuilder (but not plain Strings) into the pool, preserving order.
    ArrayList<CharSequence> list =
        new ArrayList<>(
            Arrays.<CharSequence>asList(
                "typed",
                "something",
                "duped",
                new StringBuilder("duped"),
                new StringBuilder("new"),
                new StringBuilder("car"),
                "something"));
    Assert.assertEquals(0, mStringPool.size());
    IMEUtil.tripSuggestions(list, 2, mStringPool);
    Assert.assertEquals(2, list.size());
    Assert.assertEquals("typed", list.get(0));
    Assert.assertEquals("something", list.get(1));
    Assert.assertEquals(3, mStringPool.size());
    Assert.assertEquals("duped", mStringPool.get(0).toString());
    Assert.assertTrue(mStringPool.get(0) instanceof StringBuilder);
    Assert.assertEquals("new", mStringPool.get(1).toString());
    Assert.assertTrue(mStringPool.get(1) instanceof StringBuilder);
    Assert.assertEquals("car", mStringPool.get(2).toString());
    Assert.assertTrue(mStringPool.get(2) instanceof StringBuilder);
}
@Override
public void suspend() {
    // Tears down all tracking state and releases leadership references.
    // Idempotent: a no-op when the slot manager was never started.
    if (!started) {
        return;
    }
    LOG.info("Suspending the slot manager.");
    slotManagerMetricGroup.close();
    // stop the timeout checks for the TaskManagers
    if (clusterReconciliationCheck != null) {
        clusterReconciliationCheck.cancel(false);
        clusterReconciliationCheck = null;
    }
    slotStatusSyncer.close();
    taskManagerTracker.clear();
    resourceTracker.clear();
    unfulfillableJobs.clear();
    // Drop the per-leadership references so a later start() must re-supply them.
    resourceManagerId = null;
    resourceAllocator = null;
    resourceEventListener = null;
    started = false;
}
@Test
void testCloseAfterSuspendDoesNotThrowException() throws Exception {
    // runTest closes the slot manager afterwards; suspending first must not make
    // that close fail (e.g. via double-closing internal resources).
    new Context() {
        {
            runTest(() -> getSlotManager().suspend());
        }
    };
}
public int distanceOf(PartitionTableView partitionTableView) {
    // Total distance = sum of per-partition replica distances between the two views.
    int total = 0;
    final int partitionCount = partitions.length;
    for (int index = 0; index < partitionCount; index++) {
        total += distanceOf(partitions[index], partitionTableView.partitions[index]);
    }
    return total;
}
@Test
public void testDistance_whenSomeReplicasNull() throws Exception {
    // distanceOf([A, B, C, D...], [A, B, null...]) == count(null) * MAX_REPLICA_COUNT
    PartitionTableView table1 = createRandomPartitionTable();
    InternalPartition[] partitions = extractPartitions(table1);
    PartitionReplica[] replicas = partitions[0].getReplicasCopy();
    // Null out replicas 3..MAX in partition 0 only; each null costs MAX_REPLICA_COUNT.
    for (int i = 3; i < MAX_REPLICA_COUNT; i++) {
        replicas[i] = null;
    }
    partitions[0] = new ReadonlyInternalPartition(replicas, 0, partitions[0].version());
    PartitionTableView table2 = new PartitionTableView(partitions);
    assertEquals((MAX_REPLICA_COUNT - 3) * MAX_REPLICA_COUNT, table2.distanceOf(table1));
}
public static String formatSql(final AstNode root) {
    // Render the AST to SQL text, then drop any trailing newline characters.
    final StringBuilder buffer = new StringBuilder();
    new Formatter(buffer).process(root, 0);
    final String rendered = buffer.toString();
    return StringUtils.stripEnd(rendered, "\n");
}
@Test
public void shouldFormatDescribeTables() {
    // Given: a non-extended DESCRIBE TABLES statement
    final DescribeTables describeTables = new DescribeTables(Optional.empty(), false);
    // When:
    final String formatted = SqlFormatter.formatSql(describeTables);
    // Then:
    assertThat(formatted, is("DESCRIBE TABLES"));
}
@Override
public InterpreterResult interpret(String cypherQuery, InterpreterContext interpreterContext) {
    LOGGER.info("Opening session");
    // Blank input is treated as a successful no-op.
    if (StringUtils.isBlank(cypherQuery)) {
        return new InterpreterResult(Code.SUCCESS);
    }
    // NOTE(review): the split regex matches ';' plus ONE following character that is
    // not a quote, word char, backtick, '(', '+', '|' or '^' — so it only splits when
    // the semicolon is followed by e.g. whitespace, and it consumes that character.
    // This appears to be a heuristic to avoid splitting inside quoted/backticked
    // strings; confirm against the intended statement grammar before changing it.
    final List<String> queries = isMultiStatementEnabled ? Arrays.asList(cypherQuery.split(";[^'|^\"|^(\\w+`)]")) : Arrays.asList(cypherQuery);
    if (queries.size() == 1) {
        final String query = queries.get(0);
        return runQuery(query, interpreterContext);
    } else {
        // Run all but the last statement for side effects; return the last result.
        final int lastIndex = queries.size() - 1;
        final List<String> subQueries = queries.subList(0, lastIndex);
        for (String query : subQueries) {
            runQuery(query, interpreterContext);
        }
        return runQuery(queries.get(lastIndex), interpreterContext);
    }
}
@Test
void testMultiLineInterpreter() {
    // Multi-statement mode must split on statement-terminating semicolons while
    // leaving semicolons inside quotes and backticked identifiers untouched.
    Properties p = new Properties();
    p.setProperty(Neo4jConnectionManager.NEO4J_SERVER_URL, neo4jContainer.getBoltUrl());
    p.setProperty(Neo4jConnectionManager.NEO4J_AUTH_TYPE, Neo4jAuthType.NONE.toString());
    p.setProperty(Neo4jConnectionManager.NEO4J_MAX_CONCURRENCY, "50");
    p.setProperty(Neo4jCypherInterpreter.NEO4J_MULTI_STATEMENT, "true");
    Neo4jCypherInterpreter multiLineInterpreter = new Neo4jCypherInterpreter(p);
    context = InterpreterContext.builder()
        .setInterpreterOut(new InterpreterOutput())
        .build();
    // Semicolon inside single quotes must not split the statement.
    InterpreterResult result = multiLineInterpreter.interpret("CREATE (n:Node{name: ';'});"
        + "\nRETURN 1 AS val;", context);
    assertEquals(Code.SUCCESS, result.code());
    assertEquals("val\n1\n", result.toString().replace(TABLE_RESULT_PREFIX, StringUtils.EMPTY));
    // Semicolon inside double quotes and inside a backticked alias must survive.
    result = multiLineInterpreter.interpret("CREATE (n:Node{name: \";\"}); "
        + "RETURN 2 AS `other;Val`;", context);
    assertEquals(Code.SUCCESS, result.code());
    assertEquals("other;Val\n2\n", result.toString().replace(TABLE_RESULT_PREFIX, StringUtils.EMPTY));
    result = multiLineInterpreter.interpret("match (n:Node{name: ';'}) "
        + "return count(n) AS count", context);
    assertEquals("count\n2\n", result.toString().replace(TABLE_RESULT_PREFIX, StringUtils.EMPTY));
    // Cleanup: both nodes deleted, count drops to zero.
    result = multiLineInterpreter.interpret("match (n:Node) detach delete n; "
        + "match (n:Node) return count(n) AS count", context);
    assertEquals("count\n0\n", result.toString().replace(TABLE_RESULT_PREFIX, StringUtils.EMPTY));
}
@Nullable
public static URI getUriWithScheme(@Nullable final URI uri, final String scheme) {
    // Returns a copy of {@code uri} with its scheme replaced; null input yields null.
    if (null == uri) {
        return null;
    }
    try {
        final URI rewritten = new URI(
                scheme,
                uri.getUserInfo(),
                uri.getHost(),
                uri.getPort(),
                uri.getPath(),
                uri.getQuery(),
                uri.getFragment());
        return rewritten;
    } catch (URISyntaxException e) {
        // All components came from a valid URI, so this should be unreachable.
        throw new RuntimeException("Could not parse URI.", e);
    }
}
@Test
public void testGetUriWithScheme() throws Exception {
    // Scheme replacement, null-scheme passthrough, and null-URI short-circuit.
    assertEquals("gopher", Tools.getUriWithScheme(new URI("http://example.com"), "gopher").getScheme());
    assertNull(Tools.getUriWithScheme(new URI("http://example.com"), null).getScheme());
    assertNull(Tools.getUriWithScheme(null, "http"));
}
// Double dispatch: the protocol implementation decides whether these credentials
// are acceptable under the given login options.
public boolean validate(final Protocol protocol, final LoginOptions options) {
    return protocol.validate(this, options);
}
@Test
public void testLoginWithoutPass() {
    // FTP requires a password: credentials with a null password must not validate.
    Credentials credentials = new Credentials("guest", null);
    assertFalse(credentials.validate(new TestProtocol(Scheme.ftp), new LoginOptions()));
}
@Override
public CRMaterial deserialize(JsonElement json, Type type, JsonDeserializationContext context) throws JsonParseException {
    // Polymorphic dispatch: choose the concrete CRMaterial subtype based on the
    // TYPE / ARTIFACT_ORIGIN discriminator fields in the JSON object.
    return determineJsonElementForDistinguishingImplementers(json, context, TYPE, ARTIFACT_ORIGIN);
}
@Test
public void shouldDeserializeGitMaterialType() {
    // A material JSON with type "git" must be delegated to CRGitMaterial.
    JsonObject jsonObject = new JsonObject();
    jsonObject.addProperty("type", "git");
    materialTypeAdapter.deserialize(jsonObject, type, jsonDeserializationContext);
    verify(jsonDeserializationContext).deserialize(jsonObject, CRGitMaterial.class);
}
@Override
public Character getCharAndRemove(K name) {
    // Always null: this implementation never holds any values, so there is nothing
    // to remove. Presumably an empty/no-op Headers variant — confirm against the
    // enclosing class; callers using the (name, default) overload then receive
    // their fallback value.
    return null;
}
@Test
public void testGetCharAndRemoveDefault() {
    // On empty headers the (name, default) overload must return the supplied default.
    assertEquals('x', HEADERS.getCharAndRemove("name1", 'x'));
}
@Override
public T build(ConfigurationSourceProvider provider, String path) throws IOException, ConfigurationException {
    // Open the configuration source, parse it into a JSON tree, and delegate the
    // actual binding/validation to build(node, path).
    try (InputStream input = provider.open(requireNonNull(path))) {
        final JsonNode node = mapper.readTree(createParser(input));
        // readTree returns null for a completely empty stream; surface that as a
        // dedicated configuration error rather than failing later with an NPE.
        if (node == null) {
            throw ConfigurationParsingException
                .builder("Configuration at " + path + " must not be empty")
                .build(path);
        }
        return build(node, path);
    } catch (JsonParseException e) {
        // Wrap low-level parser failures with the source location and detail so the
        // user sees where the file is malformed.
        throw ConfigurationParsingException
            .builder("Malformed " + formatName)
            .setCause(e)
            .setLocation(e.getLocation())
            .setDetail(e.getMessage())
            .build(path);
    }
}
@Test
void handlesSingleElementArrayOverride() throws Exception {
    // A system-property override (dw.type) targeting an array field must replace it
    // with a single-element array containing the override value.
    System.setProperty("dw.type", "overridden");
    final Example example = factory.build(configurationSourceProvider, validFile);
    assertThat(example.getType())
        .singleElement()
        .isEqualTo("overridden");
}