focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Applies the given query parameters to the URI without any prefix filtering.
 * Delegates to the three-argument overload with an empty prefix, so every entry
 * of {@code queryParameters} is applied.
 *
 * @param uri the URI to append parameters to
 * @param queryParameters parameters to apply
 * @return the URI with the parameters applied
 * @throws URISyntaxException if the resulting URI cannot be constructed
 */
public static URI applyParameters(URI uri, Map<String, String> queryParameters) throws URISyntaxException {
    final String noPrefix = "";
    return applyParameters(uri, queryParameters, noPrefix);
}
/**
 * Verifies URISupport.applyParameters: with no prefix every parameter is applied,
 * with a non-matching prefix nothing is applied, and with prefix "t." only the
 * matching parameters are applied (checked via verifyParams).
 */
@Test
public void testApplyParameters() throws Exception {
    URI uri = new URI("http://0.0.0.0:61616");
    Map<String,String> parameters = new HashMap<String, String>();
    parameters.put("t.proxyHost", "localhost");
    parameters.put("t.proxyPort", "80");

    // No prefix: both entries are appended to the query string.
    uri = URISupport.applyParameters(uri, parameters);
    Map<String,String> appliedParameters = URISupport.parseParameters(uri);
    assertEquals("all params applied with no prefix", 2, appliedParameters.size());

    // strip off params again
    uri = URISupport.createURIWithQuery(uri, null);

    // Prefix "joe" matches no key, so nothing gets applied.
    uri = URISupport.applyParameters(uri, parameters, "joe");
    appliedParameters = URISupport.parseParameters(uri);
    assertTrue("no params applied as none match joe", appliedParameters.isEmpty());

    // Prefix "t." matches both keys; verifyParams checks the expected values.
    uri = URISupport.applyParameters(uri, parameters, "t.");
    verifyParams(URISupport.parseParameters(uri));
}
/**
 * Calculates the next trigger time that lies strictly in the future.
 * The schedule is first evaluated from the trigger's last execution and last
 * nextTime; if that result is not after "now", the schedule is re-evaluated
 * repeatedly until a future time is found or the schedule is exhausted.
 *
 * @param trigger the trigger whose schedule is evaluated
 * @return the next future time, or empty when the schedule yields no more times
 */
public Optional<DateTime> nextFutureTime(JobTriggerDto trigger) {
    final DateTime now = clock.nowUTC();
    final DateTime lastNextTime = trigger.nextTime();
    final DateTime lastExecutionTime = trigger.lock().lastLockTime();
    final JobSchedule schedule = trigger.schedule();

    // This is using nextTime to make sure we take the runtime into account and schedule at
    // exactly after the last nextTime.
    final Optional<DateTime> optionalNextTime = schedule.calculateNextTime(lastExecutionTime, lastNextTime, clock);

    if (!optionalNextTime.isPresent()) {
        return Optional.empty();
    }

    DateTime nextTime = optionalNextTime.get();

    // If calculated nextTime is in the past, calculate next time until it is in the future
    // TODO: Is this something we should notify the user about? If a job is using this helper method it probably
    //       doesn't care about this situation. Jobs where it's important that the time doesn't automatically
    //       advance, should probably use a different helper method.
    while (!nextTime.isAfter(now)) {
        LOG.debug("New nextTime <{}> is in the past, re-calculating again", nextTime);
        // A schedule that stops producing times ends the loop with an empty result.
        nextTime = schedule.calculateNextTime(lastExecutionTime, nextTime, clock).orElse(null);
        if (nextTime == null) {
            return Optional.empty();
        }
    }

    return Optional.of(nextTime);
}
/**
 * Verifies that nextFutureTime always returns a time after "now": immediately
 * after creation it is now+1s, and after advancing the clock by 10s the schedule
 * is re-evaluated forward until it is again now+1s.
 */
@Test
public void nextFutureTime() {
    final JobTriggerDto trigger = JobTriggerDto.builderWithClock(clock)
        .jobDefinitionId("abc-123")
        .jobDefinitionType("event-processor-execution-v1")
        .schedule(IntervalJobSchedule.builder()
            .interval(1)
            .unit(TimeUnit.SECONDS)
            .build())
        .build();

    // Fresh trigger: the next time is exactly one interval in the future.
    final DateTime nextFutureTime1 = strategies.nextFutureTime(trigger).orElse(null);
    assertThat(nextFutureTime1)
        .isNotNull()
        .isGreaterThanOrEqualTo(clock.nowUTC())
        .isEqualByComparingTo(clock.nowUTC().plusSeconds(1));

    // After jumping 10s ahead, stale times are skipped until one is in the future.
    clock.plus(10, TimeUnit.SECONDS);

    final DateTime nextFutureTime2 = strategies.nextFutureTime(trigger).orElse(null);
    assertThat(nextFutureTime2)
        .isNotNull()
        .isGreaterThanOrEqualTo(clock.nowUTC())
        .isEqualByComparingTo(clock.nowUTC().plusSeconds(1));
}
/**
 * Returns a Values transform that produces the given elements. No coder or type
 * hint is set here (both Optionals absent); the trailing boolean flag is false —
 * NOTE(review): its meaning is not visible from this block, confirm against the
 * Values constructor before relying on it.
 */
public static <T> Values<T> of(Iterable<T> elems) {
    return new Values<>(elems, Optional.absent(), Optional.absent(), false);
}
/**
 * Verifies Create.of with a parameterized element type (TimestampedValue&lt;String&gt;):
 * the produced PCollection contains exactly the supplied timestamped values.
 */
@Test
@Category(NeedsRunner.class)
public void testCreateParameterizedType() throws Exception {
    PCollection<TimestampedValue<String>> output =
        p.apply(
            Create.of(
                TimestampedValue.of("a", new Instant(0)),
                TimestampedValue.of("b", new Instant(0))));

    PAssert.that(output)
        .containsInAnyOrder(
            TimestampedValue.of("a", new Instant(0)),
            TimestampedValue.of("b", new Instant(0)));

    // PAssert checks run when the pipeline executes.
    p.run();
}
/** Creates the PickTableLayoutForPredicate rule backed by this instance's metadata. */
public PickTableLayoutForPredicate pickTableLayoutForPredicate() {
    return new PickTableLayoutForPredicate(metadata);
}
/**
 * When the scan's constraint is TupleDomain.none() (no layout can satisfy it),
 * the rule replaces the filtered scan with an empty values node.
 * NOTE(review): the two sections cover the same scenario, differing only in how
 * the variable reference is built (p.variable(...) vs the static variable(...)
 * helper) — confirm both forms are intentionally exercised.
 */
@Test
public void replaceWithExistsWhenNoLayoutExist() {
    ColumnHandle columnHandle = new TpchColumnHandle("nationkey", BIGINT);

    tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
        .on(p -> {
            p.variable("nationkey", BIGINT);
            return p.filter(p.rowExpression("nationkey = BIGINT '44'"),
                p.tableScan(
                    nationTableHandle,
                    ImmutableList.of(p.variable("nationkey", BIGINT)),
                    ImmutableMap.of(p.variable("nationkey", BIGINT), columnHandle),
                    TupleDomain.none(),
                    TupleDomain.none()));
        })
        .matches(values("A"));

    tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
        .on(p -> {
            p.variable("nationkey");
            return p.filter(p.rowExpression("nationkey = BIGINT '44'"),
                p.tableScan(
                    nationTableHandle,
                    ImmutableList.of(variable("nationkey", BIGINT)),
                    ImmutableMap.of(variable("nationkey", BIGINT), columnHandle),
                    TupleDomain.none(),
                    TupleDomain.none()));
        })
        .matches(values("A"));
}
static <T> T getWildcardMappedObject(final Map<String, T> mapping, final String query) { T value = mapping.get(query); if (value == null) { for (String key : mapping.keySet()) { // Turn the search key into a regex, using all characters but the * as a literal. String regex = Arrays.stream(key.split("\\*")) // split in parts that do not have a wildcard in them .map(Pattern::quote) // each part should be used as a literal (not as a regex or partial regex) .collect(Collectors.joining(".*")); // join all literal parts with a regex representation on the wildcard. if (key.endsWith("*")) { // the 'split' will have removed any trailing wildcard characters. Correct for that. regex += ".*"; } if (query.matches(regex)) { value = mapping.get(key); break; } } } return value; }
@Test public void testExactFalse() throws Exception { // Setup test fixture. final Map<String, Object> haystack = Map.of("myplugin/foo", new Object()); // Execute system under test. final Object result = PluginServlet.getWildcardMappedObject(haystack, "myplugin/bar"); // Verify results. assertNull(result); }
/**
 * Converts a Flink table expression into an Iceberg filter expression.
 * Only CallExpressions whose function is in the FILTERS map are handled; all
 * other inputs yield Optional.empty(). For comparisons, the second converter
 * argument is the mirrored operator used when the literal appears on the left
 * (e.g. {@code 5 < x} becomes {@code x > 5}).
 */
public static Optional<Expression> convert(
    org.apache.flink.table.expressions.Expression flinkExpression) {
  if (!(flinkExpression instanceof CallExpression)) {
    return Optional.empty();
  }

  CallExpression call = (CallExpression) flinkExpression;
  Operation op = FILTERS.get(call.getFunctionDefinition());
  if (op != null) {
    switch (op) {
      case IS_NULL:
        return onlyChildAs(call, FieldReferenceExpression.class)
            .map(FieldReferenceExpression::getName)
            .map(Expressions::isNull);

      case NOT_NULL:
        return onlyChildAs(call, FieldReferenceExpression.class)
            .map(FieldReferenceExpression::getName)
            .map(Expressions::notNull);

      case LT:
        return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call);

      case LT_EQ:
        return convertFieldAndLiteral(
            Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call);

      case GT:
        return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call);

      case GT_EQ:
        return convertFieldAndLiteral(
            Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call);

      case EQ:
        // NaN never compares equal, so equality against NaN maps to isNaN.
        return convertFieldAndLiteral(
            (ref, lit) -> {
              if (NaNUtil.isNaN(lit)) {
                return Expressions.isNaN(ref);
              } else {
                return Expressions.equal(ref, lit);
              }
            },
            call);

      case NOT_EQ:
        // Mirrors the EQ case: inequality against NaN maps to notNaN.
        return convertFieldAndLiteral(
            (ref, lit) -> {
              if (NaNUtil.isNaN(lit)) {
                return Expressions.notNaN(ref);
              } else {
                return Expressions.notEqual(ref, lit);
              }
            },
            call);

      case NOT:
        // Recursively convert the single child, then negate it.
        return onlyChildAs(call, CallExpression.class)
            .flatMap(FlinkFilters::convert)
            .map(Expressions::not);

      case AND:
        return convertLogicExpression(Expressions::and, call);

      case OR:
        return convertLogicExpression(Expressions::or, call);

      case STARTS_WITH:
        return convertLike(call);
    }
  }

  return Optional.empty();
}
/**
 * Verifies notEqual conversion for every field/value pair, in both operand
 * orders: (field != literal) and the flipped (literal != field) must convert to
 * the same Iceberg notEqual predicate.
 */
@Test
public void testNotEquals() {
    for (Pair<String, Object> pair : FIELD_VALUE_LIST) {
        UnboundPredicate<?> expected =
            org.apache.iceberg.expressions.Expressions.notEqual(pair.first(), pair.second());

        // field on the left of the comparison
        Optional<org.apache.iceberg.expressions.Expression> actual =
            FlinkFilters.convert(
                resolve(Expressions.$(pair.first()).isNotEqual(Expressions.lit(pair.second()))));
        assertThat(actual).isPresent();
        assertPredicatesMatch(expected, actual.get());

        // literal on the left; the converter must normalize to the same predicate
        Optional<org.apache.iceberg.expressions.Expression> actual1 =
            FlinkFilters.convert(
                resolve(Expressions.lit(pair.second()).isNotEqual(Expressions.$(pair.first()))));
        assertThat(actual1).isPresent();
        assertPredicatesMatch(expected, actual1.get());
    }
}
/**
 * Sets the ramp-up percentage for this sampler.
 *
 * @param rampUpPercent percentage, must lie in the inclusive range [0, 100]
 * @throws IllegalArgumentException if the value is outside [0, 100]
 */
@Override
public void setRampUpPercent(long rampUpPercent) {
    final boolean inRange = rampUpPercent >= 0 && rampUpPercent <= 100;
    Validate.isTrue(inRange, "rampUpPercent must be a value between 0 and 100");
    this.rampUpPercent = rampUpPercent;
}
/** A ramp-up percentage below zero must be rejected. */
@Test(expected = IllegalArgumentException.class)
public void testSetRampUpPercent_lessThan0() {
    final long belowMinimum = -1;
    sampler.setRampUpPercent(belowMinimum);
}
/** Returns a fresh Builder for constructing instances of this class. */
public static Builder builder() {
    return new Builder();
}
/**
 * Building a ComponentQuery that sets partialMatchOnKey but provides no query
 * must fail with a descriptive IllegalArgumentException.
 * NOTE(review): the flag is set to false here yet the build still fails — the
 * builder apparently rejects the flag being specified at all without a query;
 * confirm against ComponentQuery.Builder.build().
 */
@Test
void fail_if_partial_match_on_key_without_a_query() {
    assertThatThrownBy(() -> ComponentQuery.builder().setQualifiers(PROJECT).setPartialMatchOnKey(false).build())
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("A query must be provided if a partial match on key is specified.");
}
/**
 * Returns the "no method" wrapper: the wrapped method is null and the second
 * constructor flag is false (presumably the "present" flag — matches the
 * isPresent()/getMethod() expectations exercised by its test).
 */
static MethodWrapper none() {
    return new MethodWrapper(null, false);
}
/** The "none" wrapper holds no method and reports itself as absent. */
@Test
public void testNone() {
    final MethodWrapper wrapper = MethodWrapper.none();

    assertThat(wrapper.getMethod()).isNull();
    assertThat(wrapper.isPresent()).isFalse();
}
/**
 * Looks up a key version through the extension's cache.
 *
 * @return the cached or freshly loaded version, or null when the provider
 *         reports the version as unknown (KeyNotFoundException)
 * @throws IOException rethrown unchanged when the loader failed with one, or
 *         wrapping any other loader failure
 */
@Override
public KeyVersion getKeyVersion(String versionName) throws IOException {
    try {
        return getExtension().keyVersionCache.get(versionName);
    } catch (ExecutionException ex) {
        // The cache wraps loader failures in ExecutionException; unwrap to honor
        // the provider contract. Order matters: KeyNotFoundException is checked
        // before the broader IOException branch.
        Throwable cause = ex.getCause();
        if (cause instanceof KeyNotFoundException) {
            return null;
        } else if (cause instanceof IOException) {
            throw (IOException) cause;
        } else {
            throw new IOException(cause);
        }
    }
}
/**
 * Verifies CachingKeyProvider.getKeyVersion: a known version is served from the
 * cache within the expiry window, is re-fetched after expiry, and a null
 * (unknown) version is never cached.
 */
@Test
public void testKeyVersion() throws Exception {
    KeyProvider.KeyVersion mockKey = Mockito.mock(KeyProvider.KeyVersion.class);
    KeyProvider mockProv = Mockito.mock(KeyProvider.class);
    Mockito.when(mockProv.getKeyVersion(Mockito.eq("k1@0")))
        .thenReturn(mockKey);
    Mockito.when(mockProv.getKeyVersion(Mockito.eq("k2@0"))).thenReturn(null);
    Mockito.when(mockProv.getConf()).thenReturn(new Configuration());
    KeyProvider cache = new CachingKeyProvider(mockProv, 100, 100);

    // asserting caching: the second lookup must not hit the underlying provider
    Assert.assertEquals(mockKey, cache.getKeyVersion("k1@0"));
    Mockito.verify(mockProv, Mockito.times(1))
        .getKeyVersion(Mockito.eq("k1@0"));
    Assert.assertEquals(mockKey, cache.getKeyVersion("k1@0"));
    Mockito.verify(mockProv, Mockito.times(1))
        .getKeyVersion(Mockito.eq("k1@0"));

    // NOTE(review): sleep-based expiry (100ms timeout, 200ms sleep) can be flaky
    // on slow/loaded CI hosts — consider an injectable clock.
    Thread.sleep(200);
    Assert.assertEquals(mockKey, cache.getKeyVersion("k1@0"));
    Mockito.verify(mockProv, Mockito.times(2))
        .getKeyVersion(Mockito.eq("k1@0"));

    // asserting no caching when key is not known
    cache = new CachingKeyProvider(mockProv, 100, 100);
    Assert.assertEquals(null, cache.getKeyVersion("k2@0"));
    Mockito.verify(mockProv, Mockito.times(1))
        .getKeyVersion(Mockito.eq("k2@0"));
    Assert.assertEquals(null, cache.getKeyVersion("k2@0"));
    Mockito.verify(mockProv, Mockito.times(2))
        .getKeyVersion(Mockito.eq("k2@0"));
}
/**
 * Parses a dynamic voter specification of the form
 * {@code <nodeId>@<host>:<port>:<directoryId>}, where the host may be a
 * bracketed IPv6 literal such as {@code [::1]}.
 *
 * @param input the voter string (surrounding whitespace is ignored)
 * @return the parsed voter
 * @throws IllegalArgumentException when any section is missing or malformed
 */
public static DynamicVoter parse(String input) {
    input = input.trim();
    // The node id is everything before the first '@'.
    int atIndex = input.indexOf("@");
    if (atIndex < 0) {
        throw new IllegalArgumentException("No @ found in dynamic voter string.");
    }
    if (atIndex == 0) {
        throw new IllegalArgumentException("Invalid @ at beginning of dynamic voter string.");
    }
    String idString = input.substring(0, atIndex);
    int nodeId;
    try {
        nodeId = Integer.parseInt(idString);
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Failed to parse node id in dynamic voter string.", e);
    }
    if (nodeId < 0) {
        throw new IllegalArgumentException("Invalid negative node id " + nodeId + " in dynamic voter string.");
    }
    input = input.substring(atIndex + 1);
    if (input.isEmpty()) {
        throw new IllegalArgumentException("No hostname found after node id.");
    }
    String host;
    if (input.startsWith("[")) {
        // Bracketed IPv6 literal: the host is the text inside the brackets.
        int endBracketIndex = input.indexOf("]");
        if (endBracketIndex < 0) {
            throw new IllegalArgumentException("Hostname began with left bracket, but no right " +
                "bracket was found.");
        }
        host = input.substring(1, endBracketIndex);
        input = input.substring(endBracketIndex + 1);
    } else {
        // Plain hostname: ends at the first colon. The colon is deliberately kept
        // in 'input' so the shared "starts with ':'" check below applies to both branches.
        int endColonIndex = input.indexOf(":");
        if (endColonIndex < 0) {
            throw new IllegalArgumentException("No colon following hostname could be found.");
        }
        host = input.substring(0, endColonIndex);
        input = input.substring(endColonIndex);
    }
    if (!input.startsWith(":")) {
        throw new IllegalArgumentException("Port section must start with a colon.");
    }
    input = input.substring(1);
    // The port is delimited by the next colon; the directory id follows it.
    int endColonIndex = input.indexOf(":");
    if (endColonIndex < 0) {
        throw new IllegalArgumentException("No colon following port could be found.");
    }
    String portString = input.substring(0, endColonIndex);
    int port;
    try {
        port = Integer.parseInt(portString);
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Failed to parse port in dynamic voter string.", e);
    }
    if (port < 0 || port > 65535) {
        throw new IllegalArgumentException("Invalid port " + port + " in dynamic voter string.");
    }
    String directoryIdString = input.substring(endColonIndex + 1);
    Uuid directoryId;
    try {
        directoryId = Uuid.fromString(directoryIdString);
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Failed to parse directory ID in dynamic voter string.", e);
    }
    return new DynamicVoter(directoryId, nodeId, host, port);
}
/** Parsing a voter string with a plain IPv4 host yields the expected voter. */
@Test
public void testParseDynamicVoter2() {
    DynamicVoter expected = new DynamicVoter(
        Uuid.fromString("__0IZ-0DRNazJ49kCZ1EMQ"),
        100,
        "192.128.0.100",
        (short) 800);
    assertEquals(expected, DynamicVoter.parse("100@192.128.0.100:800:__0IZ-0DRNazJ49kCZ1EMQ"));
}
public static Env addEnvironment(String name) { if (StringUtils.isBlank(name)) { throw new RuntimeException("Cannot add a blank environment: " + "[" + name + "]"); } name = getWellFormName(name); if (STRING_ENV_MAP.containsKey(name)) { // has been existed logger.debug("{} already exists.", name); } else { // not existed STRING_ENV_MAP.put(name, new Env(name)); } return STRING_ENV_MAP.get(name); }
/** Adding an environment whose name is only whitespace must be rejected. */
@Test(expected = RuntimeException.class)
public void testAddEnvironmentSpacesString() {
    final String blankName = " ";
    Env.addEnvironment(blankName);
}
/**
 * Starts the task without a completion callback; delegates to the two-argument
 * overload with a null callback.
 */
@Override
public <T> AsyncResult<T> startProcess(Callable<T> task) {
    return startProcess(task, null);
}
/**
 * A successful task must complete its AsyncResult with the task's value, invoke
 * the callback's onComplete exactly once with that value, and never call onError.
 */
@Test
void testSuccessfulTaskWithCallback() {
    assertTimeout(ofMillis(3000), () -> {
        // Instantiate a new executor and start a new 'null' task ...
        final var executor = new ThreadAsyncExecutor();

        final var result = new Object();
        when(task.call()).thenReturn(result);

        final var asyncResult = executor.startProcess(task, callback);
        assertNotNull(asyncResult);
        asyncResult.await(); // Prevent timing issues, and wait until the result is available
        assertTrue(asyncResult.isCompleted());

        // Our task should only execute once ...
        verify(task, times(1)).call();

        // ... same for the callback, we expect our object
        verify(callback, times(1)).onComplete(eq(result));
        verify(callback, times(0)).onError(exceptionCaptor.capture());

        // ... and the result should be exactly the same object
        assertSame(result, asyncResult.getValue());
    });
}
/**
 * Returns the JRE metadata entries matching the given OS and architecture.
 * A blank (null/empty) filter value means "no filtering" on that dimension.
 * Unsupported os/arch values surface as errors from OS.from / Arch.from while
 * the stream is evaluated.
 */
@Override
public List<JreInfoRestResponse> getJresMetadata(@Nullable String os, @Nullable String arch) {
    return metadata.values().stream()
        .filter(jre -> isBlank(os) || OS.from(jre.os()) == OS.from(os))
        .filter(jre -> isBlank(arch) || Arch.from(jre.arch()) == Arch.from(arch))
        .toList();
}
/** An arch value outside the supported set must be rejected with a clear error. */
@Test
void getJresMetadata_shouldFail_whenFilteredWithUnsupportedArchValue() {
    String unsupportedArch = "not-supported";

    assertThatThrownBy(() -> jresHandler.getJresMetadata(null, unsupportedArch))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessageStartingWith("Unsupported architecture: '" + unsupportedArch + "'");
}
/** Returns the intermediate schema computed for this projection node. */
public LogicalSchema getIntermediateSchema() {
    return intermediateSchema;
}
@Test public void shouldBuildPullQueryIntermediateSchemaSelectKeyNonWindowed() { // Given: selects = ImmutableList.of(new SingleColumn(K_REF, Optional.of(ALIAS))); when(keyFormat.isWindowed()).thenReturn(false); when(analysis.getSelectColumnNames()).thenReturn(ImmutableSet.of(ColumnName.of("K"))); // When: final QueryProjectNode projectNode = new QueryProjectNode( NODE_ID, source, selects, metaStore, ksqlConfig, analysis, false, plannerOptions, false ); // Then: final LogicalSchema expectedSchema = QueryLogicalPlanUtil.buildIntermediateSchema( INPUT_SCHEMA, true, false); assertThat(expectedSchema, is(projectNode.getIntermediateSchema())); }
@VisibleForTesting public void validateDictDataValueUnique(Long id, String dictType, String value) { DictDataDO dictData = dictDataMapper.selectByDictTypeAndValue(dictType, value); if (dictData == null) { return; } // 如果 id 为空,说明不用比较是否为相同 id 的字典数据 if (id == null) { throw exception(DICT_DATA_VALUE_DUPLICATE); } if (!dictData.getId().equals(id)) { throw exception(DICT_DATA_VALUE_DUPLICATE); } }
/**
 * Creating (id == null) with a value that already exists for the dict type must
 * raise DICT_DATA_VALUE_DUPLICATE.
 */
@Test
public void testValidateDictDataValueUnique_valueDuplicateForCreate() {
    // Prepare parameters
    String dictType = randomString();
    String value = randomString();
    // Mock data: insert a record already holding this type/value pair
    dictDataMapper.insert(randomDictDataDO(o -> {
        o.setDictType(dictType);
        o.setValue(value);
    }));
    // Invoke and verify the expected exception
    assertServiceException(() -> dictDataService.validateDictDataValueUnique(null, dictType, value),
        DICT_DATA_VALUE_DUPLICATE);
}
/**
 * Creates a Hoodie table sink from the catalog table's options and resolved schema.
 * Fails fast when the mandatory 'path' option is missing, then runs the sanity
 * checks and normalizes table/conf/sort options before constructing the sink.
 */
@Override
public DynamicTableSink createDynamicTableSink(Context context) {
    Configuration conf = FlinkOptions.fromMap(context.getCatalogTable().getOptions());
    checkArgument(!StringUtils.isNullOrEmpty(conf.getString(FlinkOptions.PATH)),
        "Option [path] should not be empty.");
    setupTableOptions(conf.getString(FlinkOptions.PATH), conf);
    ResolvedSchema schema = context.getCatalogTable().getResolvedSchema();
    // Validates schema/option combinations (e.g. index type) before any setup.
    sanityCheck(conf, schema);
    setupConfOptions(conf, context.getObjectIdentifier(), context.getCatalogTable(), schema);
    setupSortOptions(conf, context.getConfiguration());
    return new HoodieTableSink(conf, schema);
}
/**
 * Verifies index-type validation when creating the sink: the default (unset)
 * value passes, an unknown value ("BUCKET_AA") is rejected, and a valid value
 * ("BUCKET") passes.
 */
@Test
void testIndexTypeCheck() {
    ResolvedSchema schema = SchemaBuilder.instance()
        .field("f0", DataTypes.INT().notNull())
        .field("f1", DataTypes.VARCHAR(20))
        .field("f2", DataTypes.TIMESTAMP(3))
        .field("ts", DataTypes.TIMESTAMP(3))
        .primaryKey("f0")
        .build();

    // Index type unset. The default value will be ok
    final MockContext sourceContext1 = MockContext.getInstance(this.conf, schema, "f2");
    assertDoesNotThrow(() -> new HoodieTableFactory().createDynamicTableSink(sourceContext1));

    // Invalid index type will throw exception
    this.conf.set(FlinkOptions.INDEX_TYPE, "BUCKET_AA");
    final MockContext sourceContext2 = MockContext.getInstance(this.conf, schema, "f2");
    assertThrows(IllegalArgumentException.class, () -> new HoodieTableFactory().createDynamicTableSink(sourceContext2));

    // Valid index type will be ok
    this.conf.set(FlinkOptions.INDEX_TYPE, "BUCKET");
    final MockContext sourceContext3 = MockContext.getInstance(this.conf, schema, "f2");
    assertDoesNotThrow(() -> new HoodieTableFactory().createDynamicTableSink(sourceContext3));
}
/**
 * Builds the final parameter map for a step by merging, in precedence order:
 * global step defaults, step-runtime injected (template schema) params, per-type
 * defaults, system-injected workflow/step info, user run/restart params, and
 * finally the step definition itself — with special handling so that reserved
 * system-injected params surviving a restart are re-tagged with source RESTART
 * and applied last. Later merges win over earlier ones within the rules of
 * ParamsMergeHelper.
 *
 * @return the merged, cleaned-up parameters keyed by name
 */
public Map<String, Parameter> generateMergedStepParams(
    WorkflowSummary workflowSummary,
    Step stepDefinition,
    StepRuntime stepRuntime,
    StepRuntimeSummary runtimeSummary) {
  Map<String, ParamDefinition> allParamDefs = new LinkedHashMap<>();
  // Start with default step level params if present
  Map<String, ParamDefinition> globalDefault = defaultParamManager.getDefaultStepParams();
  if (globalDefault != null) {
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        globalDefault,
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_DEFAULT));
  }
  // Merge in injected params returned by step if present (template schema)
  Map<String, ParamDefinition> injectedParams =
      stepRuntime.injectRuntimeParams(workflowSummary, stepDefinition);
  maybeOverrideParamType(allParamDefs);
  if (injectedParams != null) {
    maybeOverrideParamType(injectedParams);
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        injectedParams,
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.TEMPLATE_SCHEMA));
  }
  // Merge in params applicable to step type
  Optional<Map<String, ParamDefinition>> defaultStepTypeParams =
      defaultParamManager.getDefaultParamsForType(stepDefinition.getType());
  if (defaultStepTypeParams.isPresent()) {
    LOG.debug("Merging step level default for {}", stepDefinition.getType());
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        defaultStepTypeParams.get(),
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_DEFAULT));
  }
  // Merge in workflow and step info
  ParamsMergeHelper.mergeParams(
      allParamDefs,
      injectWorkflowAndStepInfoParams(workflowSummary, runtimeSummary),
      ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_INJECTED));
  // merge step run param and user provided restart step run params
  // first to get undefined params from both run param and restart params
  Map<String, ParamDefinition> undefinedRestartParams = new LinkedHashMap<>();
  Optional<Map<String, ParamDefinition>> stepRestartParams =
      getUserStepRestartParam(workflowSummary, runtimeSummary);
  stepRestartParams.ifPresent(undefinedRestartParams::putAll);
  Optional<Map<String, ParamDefinition>> stepRunParams =
      getStepRunParams(workflowSummary, runtimeSummary);
  Map<String, ParamDefinition> systemInjectedRestartRunParams = new LinkedHashMap<>();
  // During a restart, reserved CONSTANT system-injected run params are pulled
  // out, re-tagged with source RESTART, and applied after all other merges.
  stepRunParams.ifPresent(
      params -> {
        params.forEach(
            (key, val) -> {
              if (runtimeSummary.getRestartConfig() != null
                  && Constants.RESERVED_PARAM_NAMES.contains(key)
                  && val.getMode() == ParamMode.CONSTANT
                  && val.getSource() == ParamSource.SYSTEM_INJECTED) {
                ((AbstractParamDefinition) val)
                    .getMeta()
                    .put(Constants.METADATA_SOURCE_KEY, ParamSource.RESTART.name());
                systemInjectedRestartRunParams.put(key, val);
              }
            });
        // Remove the extracted params so they are not merged twice below.
        systemInjectedRestartRunParams.keySet().forEach(params::remove);
      });
  stepRunParams.ifPresent(undefinedRestartParams::putAll);
  // Params declared in the step definition are not "undefined" — drop them.
  Optional.ofNullable(stepDefinition.getParams())
      .ifPresent(
          stepDefParams ->
              stepDefParams.keySet().stream()
                  .filter(undefinedRestartParams::containsKey)
                  .forEach(undefinedRestartParams::remove));
  // Then merge undefined restart params
  if (!undefinedRestartParams.isEmpty()) {
    mergeUserProvidedStepParams(allParamDefs, undefinedRestartParams, workflowSummary);
  }
  // Final merge from step definition
  if (stepDefinition.getParams() != null) {
    maybeOverrideParamType(stepDefinition.getParams());
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        stepDefinition.getParams(),
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.DEFINITION));
  }
  // merge step run params
  stepRunParams.ifPresent(
      stepParams -> mergeUserProvidedStepParams(allParamDefs, stepParams, workflowSummary));
  // merge all user provided restart step run params
  stepRestartParams.ifPresent(
      stepParams -> mergeUserProvidedStepParams(allParamDefs, stepParams, workflowSummary));
  // merge all system injected restart step run params with mode and source already set.
  allParamDefs.putAll(systemInjectedRestartRunParams);
  // Cleanup any params that are missing and convert to params
  return ParamsMergeHelper.convertToParameters(ParamsMergeHelper.cleanupParams(allParamDefs));
}
/**
 * During RESTART_FROM_SPECIFIC, both the original step run params and the
 * restart-config step params must be merged, both tagged with source RESTART,
 * and the restart params must land after the run params in iteration order.
 */
@Test
public void testRestartConfigStepRunParamMerge() {
    Map<String, Map<String, ParamDefinition>> stepRunParams =
        singletonMap(
            "stepid", singletonMap("p1", ParamDefinition.buildParamDefinition("p1", "d1")));
    Map<String, Map<String, ParamDefinition>> stepRestartParams =
        singletonMap(
            "stepid", singletonMap("p2", ParamDefinition.buildParamDefinition("p2", "d2")));
    ManualInitiator manualInitiator = new ManualInitiator();
    workflowSummary.setInitiator(manualInitiator);
    workflowSummary.setStepRunParams(stepRunParams);
    workflowSummary.setRestartConfig(
        RestartConfig.builder()
            .addRestartNode("sample-wf-map-params", 1, "foo")
            .stepRestartParams(stepRestartParams)
            .build());
    workflowSummary.setRunPolicy(RunPolicy.RESTART_FROM_SPECIFIC);
    Map<String, Parameter> stepParams =
        paramsManager.generateMergedStepParams(workflowSummary, step, stepRuntime, runtimeSummary);
    Assert.assertEquals("d1", stepParams.get("p1").asStringParam().getValue());
    Assert.assertEquals("d2", stepParams.get("p2").asStringParam().getValue());
    Assert.assertEquals(ParamSource.RESTART, stepParams.get("p1").getSource());
    Assert.assertEquals(ParamSource.RESTART, stepParams.get("p2").getSource());
    // The last two keys in merge order are the run param then the restart param.
    Assert.assertEquals(
        Arrays.asList("p2", "p1"),
        new ArrayList<>(stepParams.keySet()).subList(stepParams.size() - 2, stepParams.size()));
}
/**
 * Creates an ObjectEncoder that converts values produced by the given Hive
 * ObjectInspector into the engine's internal representation for the given type
 * (integral types widen to long, REAL becomes raw float bits, text/binary
 * become Slices, and ROW/ARRAY/MAP delegate to their dedicated encoders).
 * Throws an unsupported-type error when no mapping exists.
 */
public static ObjectEncoder createEncoder(Type type, ObjectInspector inspector) {
    String base = type.getTypeSignature().getBase();
    switch (base) {
        case BIGINT:
            checkArgument(inspector instanceof PrimitiveObjectInspector);
            return compose(primitive(inspector), o -> ((Long) o));
        case INTEGER:
            checkArgument(inspector instanceof PrimitiveObjectInspector);
            return compose(primitive(inspector), o -> ((Integer) o).longValue());
        case SMALLINT:
            checkArgument(inspector instanceof PrimitiveObjectInspector);
            return compose(primitive(inspector), o -> ((Short) o).longValue());
        case TINYINT:
            checkArgument(inspector instanceof PrimitiveObjectInspector);
            return compose(primitive(inspector), o -> ((Byte) o).longValue());
        case BOOLEAN:
            checkArgument(inspector instanceof PrimitiveObjectInspector);
            return compose(primitive(inspector), o -> ((Boolean) o));
        case DATE:
            checkArgument(inspector instanceof PrimitiveObjectInspector);
            // Encodes java.sql.Date as epoch millis via getTime().
            return compose(primitive(inspector), o -> ((Date) o).getTime());
        case DECIMAL:
            // Short decimals fit in a long; long decimals are encoded into a Slice.
            if (Decimals.isShortDecimal(type)) {
                DecimalType decimalType = (DecimalType) type;
                return compose(decimal(inspector), o -> DecimalUtils.encodeToLong((BigDecimal) o, decimalType));
            }
            else if (Decimals.isLongDecimal(type)) {
                DecimalType decimalType = (DecimalType) type;
                return compose(decimal(inspector), o -> DecimalUtils.encodeToSlice((BigDecimal) o, decimalType));
            }
            break;
        case REAL:
            checkArgument(inspector instanceof PrimitiveObjectInspector);
            // REAL is stored as the raw IEEE-754 bits of the float, in a long slot.
            return compose(primitive(inspector), o -> floatToRawIntBits(((Number) o).floatValue()));
        case DOUBLE:
            checkArgument(inspector instanceof PrimitiveObjectInspector);
            return compose(primitive(inspector), o -> (Double) o);
        case TIMESTAMP:
            checkArgument(inspector instanceof PrimitiveObjectInspector);
            return compose(primitive(inspector), o -> ((Timestamp) o).getTime());
        case VARBINARY:
            if (inspector instanceof BinaryObjectInspector) {
                return compose(primitive(inspector), o -> Slices.wrappedBuffer(((byte[]) o)));
            }
            break;
        case VARCHAR:
            // Hive may hand back either a plain String or a HiveVarchar wrapper.
            if (inspector instanceof StringObjectInspector) {
                return compose(primitive(inspector), o -> Slices.utf8Slice(o.toString()));
            }
            else if (inspector instanceof HiveVarcharObjectInspector) {
                return compose(o -> ((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(o).getValue(), o -> Slices.utf8Slice(((String) o)));
            }
            break;
        case CHAR:
            if (inspector instanceof StringObjectInspector) {
                return compose(primitive(inspector), o -> Slices.utf8Slice(o.toString()));
            }
            else if (inspector instanceof HiveCharObjectInspector) {
                return compose(o -> ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(o).getValue(), o -> Slices.utf8Slice(((String) o)));
            }
            break;
        case ROW:
            return StructObjectEncoder.create(type, inspector);
        case ARRAY:
            return ListObjectEncoder.create(type, inspector);
        case MAP:
            return MapObjectEncoder.create(type, inspector);
    }
    throw unsupportedType(type);
}
/**
 * Verifies the Java type produced by each primitive encoder: integral and
 * date/decimal(short) inputs encode to Long, booleans to Boolean, doubles to
 * Double, and long decimals to a Slice.
 */
@Test
public void testPrimitiveObjectEncoders() {
    ObjectInspector inspector;
    ObjectEncoder encoder;

    inspector = writableLongObjectInspector;
    encoder = createEncoder(BIGINT, inspector);
    assertTrue(encoder.encode(new LongWritable(123456L)) instanceof Long);

    inspector = writableIntObjectInspector;
    encoder = createEncoder(INTEGER, inspector);
    assertTrue(encoder.encode(new IntWritable(12345)) instanceof Long);

    inspector = writableShortObjectInspector;
    encoder = createEncoder(SMALLINT, inspector);
    assertTrue(encoder.encode(new ShortWritable((short) 1234)) instanceof Long);

    inspector = writableByteObjectInspector;
    encoder = createEncoder(TINYINT, inspector);
    assertTrue(encoder.encode(new ByteWritable((byte) 123)) instanceof Long);

    inspector = writableBooleanObjectInspector;
    encoder = createEncoder(BOOLEAN, inspector);
    assertTrue(encoder.encode(new BooleanWritable(true)) instanceof Boolean);

    inspector = writableDoubleObjectInspector;
    encoder = createEncoder(DOUBLE, inspector);
    assertTrue(encoder.encode(new DoubleWritable(0.1)) instanceof Double);

    inspector = writableDateObjectInspector;
    encoder = createEncoder(DATE, inspector);
    assertTrue(encoder.encode(new DateWritable(DateTimeUtils.createDate(18380L))) instanceof Long);

    inspector = writableHiveDecimalObjectInspector;
    // Short decimal (precision 11) encodes to a Long ...
    encoder = createEncoder(createDecimalType(11, 10), inspector);
    assertTrue(encoder.encode(new HiveDecimalWritable("1.2345678910")) instanceof Long);
    // ... while a long decimal (precision 34) encodes to a Slice.
    encoder = createEncoder(createDecimalType(34, 33), inspector);
    assertTrue(encoder.encode(new HiveDecimalWritable("1.281734081274028174012432412423134")) instanceof Slice);
}
/**
 * Feeds a job-level checkpoint failure into the failure counter, possibly
 * failing the job via the callback. Savepoint failures are ignored — they never
 * count toward the tolerable-failure threshold.
 */
void handleJobLevelCheckpointException(
        CheckpointProperties checkpointProperties,
        CheckpointException exception,
        long checkpointId) {
    if (checkpointProperties.isSavepoint()) {
        return;
    }
    checkFailureAgainstCounter(exception, checkpointId, failureCallback::failJob);
}
/**
 * With zero tolerable failures, feeding every CheckpointFailureReason into the
 * manager must fail the job only for the counted reasons (five of them).
 */
@Test
void testTotalCountValue() {
    TestFailJobCallback callback = new TestFailJobCallback();
    CheckpointProperties checkpointProperties = forCheckpoint(NEVER_RETAIN_AFTER_TERMINATION);
    CheckpointFailureManager failureManager = new CheckpointFailureManager(0, callback);
    for (CheckpointFailureReason reason : CheckpointFailureReason.values()) {
        failureManager.handleJobLevelCheckpointException(
            checkpointProperties, new CheckpointException(reason), -2);
    }

    // IO_EXCEPTION, CHECKPOINT_DECLINED, FINALIZE_CHECKPOINT_FAILURE, CHECKPOINT_EXPIRED and
    // CHECKPOINT_ASYNC_EXCEPTION
    assertThat(callback.getInvokeCounter()).isEqualTo(5);
}
/**
 * Reports whether this model belongs to the named environment.
 * A model with no environment matches nothing, and a null argument never
 * matches (String.equals(null) is false).
 */
public boolean isAssociatedWithEnvironment(String environmentName) {
    if (this.environmentName == null) {
        return false;
    }
    return this.environmentName.equals(environmentName);
}
/**
 * A model constructed with an environment matches only that environment;
 * a model constructed without one matches nothing, including null.
 */
@Test
public void shouldUnderstandWhenAssociatedWithGivenEnvironment() {
    EnvironmentPipelineModel withEnv = new EnvironmentPipelineModel("foo", "env");
    assertThat(withEnv.isAssociatedWithEnvironment("env"), is(true));
    assertThat(withEnv.isAssociatedWithEnvironment("env2"), is(false));
    assertThat(withEnv.isAssociatedWithEnvironment(null), is(false));

    EnvironmentPipelineModel withoutEnv = new EnvironmentPipelineModel("foo");
    assertThat(withoutEnv.isAssociatedWithEnvironment("env"), is(false));
    assertThat(withoutEnv.isAssociatedWithEnvironment("env2"), is(false));
    assertThat(withoutEnv.isAssociatedWithEnvironment(null), is(false));
}
/**
 * Searches the change payloads for the model with the given id.
 * Each payload is a DiffPayload holding either a single changed model or a map
 * of changed models keyed by id. Returns null when no payload contains the id.
 */
@Nullable
public static EpoxyModel<?> getModelFromPayload(List<Object> payloads, long modelId) {
    for (Object payload : payloads) {
        DiffPayload diffPayload = (DiffPayload) payload;
        EpoxyModel<?> single = diffPayload.singleModel;
        if (single != null) {
            if (single.id() == modelId) {
                return single;
            }
            continue;
        }
        EpoxyModel<?> match = diffPayload.modelsById.get(modelId);
        if (match != null) {
            return match;
        }
    }
    return null;
}
/** Both changed models must be retrievable from the payload list by their ids. */
@Test
public void getMultipleModelsFromPayload() {
    TestModel first = new TestModel();
    TestModel second = new TestModel();
    List<Object> payloads = payloadsWithChangedModels(first, second);

    assertEquals(first, getModelFromPayload(payloads, first.id()));
    assertEquals(second, getModelFromPayload(payloads, second.id()));
}
/**
 * Finds the highest offset already present in remote storage for the partition,
 * together with its leader epoch. Walks the local leader-epoch cache from the
 * latest entry backwards until remote metadata reports an offset for some epoch;
 * the result is capped at the epoch's local end offset when remote storage
 * claims more than the epoch locally covers. Returns
 * (-1, NO_PARTITION_LEADER_EPOCH) when nothing is in remote storage (or there
 * is no epoch cache).
 */
OffsetAndEpoch findHighestRemoteOffset(TopicIdPartition topicIdPartition, UnifiedLog log) throws RemoteStorageException {
    OffsetAndEpoch offsetAndEpoch = null;
    Option<LeaderEpochFileCache> leaderEpochCacheOpt = log.leaderEpochCache();
    if (leaderEpochCacheOpt.isDefined()) {
        LeaderEpochFileCache cache = leaderEpochCacheOpt.get();
        Optional<EpochEntry> maybeEpochEntry = cache.latestEntry();
        // Walk epochs newest-to-oldest until remote metadata knows one of them.
        while (offsetAndEpoch == null && maybeEpochEntry.isPresent()) {
            int epoch = maybeEpochEntry.get().epoch;
            Optional<Long> highestRemoteOffsetOpt =
                remoteLogMetadataManager.highestOffsetForEpoch(topicIdPartition, epoch);
            if (highestRemoteOffsetOpt.isPresent()) {
                Map.Entry<Integer, Long> entry = cache.endOffsetFor(epoch, log.logEndOffset());
                int requestedEpoch = entry.getKey();
                long endOffset = entry.getValue();
                long highestRemoteOffset = highestRemoteOffsetOpt.get();
                if (endOffset <= highestRemoteOffset) {
                    // Remote storage reports at least as much as this epoch locally
                    // covers; cap at the epoch's last offset (endOffset - 1).
                    LOGGER.info("The end-offset for epoch {}: ({}, {}) is less than or equal to the " +
                        "highest-remote-offset: {} for partition: {}", epoch, requestedEpoch,
                        endOffset, highestRemoteOffset, topicIdPartition);
                    offsetAndEpoch = new OffsetAndEpoch(endOffset - 1, requestedEpoch);
                } else {
                    offsetAndEpoch = new OffsetAndEpoch(highestRemoteOffset, epoch);
                }
            }
            maybeEpochEntry = cache.previousEntry(epoch);
        }
    }
    if (offsetAndEpoch == null) {
        offsetAndEpoch = new OffsetAndEpoch(-1L, RecordBatch.NO_PARTITION_LEADER_EPOCH);
    }
    return offsetAndEpoch;
}
// With a populated epoch cache but no data in remote storage, findHighestRemoteOffset
// must fall through every epoch and return the (-1, NO_PARTITION_LEADER_EPOCH) sentinel.
@Test
void testFindHighestRemoteOffsetOnEmptyRemoteStorage() throws RemoteStorageException {
    List<EpochEntry> totalEpochEntries = Arrays.asList(
            new EpochEntry(0, 0),
            new EpochEntry(1, 500)
    );
    checkpoint.write(totalEpochEntries);
    LeaderEpochFileCache cache = new LeaderEpochFileCache(tp, checkpoint, scheduler);
    when(mockLog.leaderEpochCache()).thenReturn(Option.apply(cache));
    TopicIdPartition tpId = new TopicIdPartition(Uuid.randomUuid(), tp);
    OffsetAndEpoch offsetAndEpoch = remoteLogManager.findHighestRemoteOffset(tpId, mockLog);
    assertEquals(new OffsetAndEpoch(-1L, -1), offsetAndEpoch);
}
/**
 * Fetches the list of Azure DevOps projects visible to the given token.
 *
 * @param serverUrl base URL of the Azure DevOps server (trailing slashes are trimmed)
 * @param token personal access token used for authentication
 * @return the deserialized project list
 */
public GsonAzureProjectList getProjects(String serverUrl, String token) {
  String url = getTrimmedUrl(serverUrl) + "/_apis/projects?" + API_VERSION_3;
  // Deserialize the JSON response body straight from the stream.
  return doGet(token, url, response ->
    buildGson().fromJson(response.body().charStream(), GsonAzureProjectList.class));
}
// A 404 from the server must surface as IllegalArgumentException("Invalid Azure URL")
// and log one ERROR entry naming the failed request URL.
@Test
public void get_projects_with_invalid_url() {
  enqueueResponse(404);
  assertThatThrownBy(() -> underTest.getProjects(server.url("").toString(), "invalid-token"))
    .isInstanceOf(IllegalArgumentException.class)
    .hasMessage("Invalid Azure URL");
  assertThat(logTester.logs(Level.ERROR)).hasSize(1);
  assertThat(logTester.logs(Level.ERROR).iterator().next())
    .contains("Unable to contact Azure DevOps server for request [" + server.url("") + "_apis/projects?api-version=3.0]: URL Not Found");
}
/**
 * Builds the dynamic-aware entry for a control-channel URI: collects the endpoint
 * properties and adds the control action (the URI's remainder path) under
 * {@code CONTROL_ACTION_PROPERTY}.
 */
@Override
public DynamicAwareEntry prepare(Exchange exchange, String uri, String originalUri) throws Exception {
    Map<String, Object> endpointProps = endpointProperties(exchange, uri);
    // The remainder path of the normalized URI names the control action (e.g. "subscribe").
    URI normalized = URISupport.normalizeUriAsURI(uri);
    endpointProps.put(CONTROL_ACTION_PROPERTY, URISupport.extractRemainderPath(normalized, false));
    return new DynamicAwareEntry(uri, originalUri, endpointProps, null);
}
// prepare() must keep the original/raw URIs intact and expose both the parsed
// "controlAction" (remainder path) and the query parameter as entry properties.
@Test
void prepare() throws Exception {
    String originalUri = "dynamic-router-control://subscribe?subscriptionId=testSub1";
    String uri = "dynamic-router-control://subscribe?subscriptionId=testSub1";
    try (DynamicRouterControlChannelSendDynamicAware testSubject = new DynamicRouterControlChannelSendDynamicAware()) {
        SendDynamicAware.DynamicAwareEntry entry = testSubject.prepare(exchange, uri, originalUri);
        assertAll(
                () -> assertEquals(entry.getOriginalUri(), originalUri),
                () -> assertEquals(entry.getUri(), uri),
                () -> assertEquals(2, entry.getProperties().size()),
                () -> assertEquals("subscribe", entry.getProperties().get("controlAction")),
                () -> assertEquals("testSub1", entry.getProperties().get("subscriptionId")));
    }
}
/**
 * Incrementally decodes SPDY frames from {@code buffer}, invoking {@code delegate}
 * callbacks for each decoded frame, header block, setting, or frame error.
 * <p>
 * Decoding is resumable: whenever fewer bytes than the current state needs are readable
 * the method returns, and the next call continues from the same state (the fields
 * {@code state}, {@code streamId}, {@code flags}, {@code length} and {@code numSettings}
 * persist across calls). Data frames larger than {@code maxChunkSize} are delivered in
 * multiple chunks.
 */
public void decode(ByteBuf buffer) {
    boolean last;
    int statusCode;
    while (true) {
        switch(state) {
        case READ_COMMON_HEADER:
            // Need the full common header before anything can be decoded.
            if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
                return;
            }
            int frameOffset = buffer.readerIndex();
            int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
            int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
            buffer.skipBytes(SPDY_HEADER_SIZE);
            // High bit of the first byte distinguishes control frames from data frames.
            boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;
            int version;
            int type;
            if (control) {
                // Decode control frame common header
                version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
                type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
                streamId = 0; // Default to session Stream-ID
            } else {
                // Decode data frame common header
                version = spdyVersion; // Default to expected version
                type = SPDY_DATA_FRAME;
                streamId = getUnsignedInt(buffer, frameOffset);
            }
            flags = buffer.getByte(flagsOffset);
            length = getUnsignedMedium(buffer, lengthOffset);
            // Check version first then validity
            if (version != spdyVersion) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SPDY Version");
            } else if (!isValidFrameHeader(streamId, type, flags, length)) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid Frame Error");
            } else {
                state = getNextState(type, length);
            }
            break;
        case READ_DATA_FRAME:
            if (length == 0) {
                // Zero-length data frame: deliver an empty buffer immediately.
                state = State.READ_COMMON_HEADER;
                delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
                break;
            }
            // Generate data frames that do not exceed maxChunkSize
            int dataLength = Math.min(maxChunkSize, length);
            // Wait until entire frame is readable
            if (buffer.readableBytes() < dataLength) {
                return;
            }
            ByteBuf data = buffer.alloc().buffer(dataLength);
            data.writeBytes(buffer, dataLength);
            length -= dataLength;
            if (length == 0) {
                state = State.READ_COMMON_HEADER;
            }
            // FIN is only reported with the final chunk of the frame.
            last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);
            delegate.readDataFrame(streamId, last, data);
            break;
        case READ_SYN_STREAM_FRAME:
            if (buffer.readableBytes() < 10) {
                return;
            }
            int offset = buffer.readerIndex();
            streamId = getUnsignedInt(buffer, offset);
            int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
            // Priority occupies the top 3 bits of the 9th byte.
            byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
            last = hasFlag(flags, SPDY_FLAG_FIN);
            boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
            buffer.skipBytes(10);
            length -= 10;
            if (streamId == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SYN_STREAM Frame");
            } else {
                // Remaining payload bytes form the compressed header block.
                state = State.READ_HEADER_BLOCK;
                delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
            }
            break;
        case READ_SYN_REPLY_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            last = hasFlag(flags, SPDY_FLAG_FIN);
            buffer.skipBytes(4);
            length -= 4;
            if (streamId == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SYN_REPLY Frame");
            } else {
                state = State.READ_HEADER_BLOCK;
                delegate.readSynReplyFrame(streamId, last);
            }
            break;
        case READ_RST_STREAM_FRAME:
            if (buffer.readableBytes() < 8) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
            buffer.skipBytes(8);
            // Both stream id and status code must be non-zero.
            if (streamId == 0 || statusCode == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid RST_STREAM Frame");
            } else {
                state = State.READ_COMMON_HEADER;
                delegate.readRstStreamFrame(streamId, statusCode);
            }
            break;
        case READ_SETTINGS_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);
            numSettings = getUnsignedInt(buffer, buffer.readerIndex());
            buffer.skipBytes(4);
            length -= 4;
            // Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
            if ((length & 0x07) != 0 || length >> 3 != numSettings) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SETTINGS Frame");
            } else {
                state = State.READ_SETTING;
                delegate.readSettingsFrame(clear);
            }
            break;
        case READ_SETTING:
            if (numSettings == 0) {
                // All entries consumed; signal the end of the settings frame.
                state = State.READ_COMMON_HEADER;
                delegate.readSettingsEnd();
                break;
            }
            if (buffer.readableBytes() < 8) {
                return;
            }
            byte settingsFlags = buffer.getByte(buffer.readerIndex());
            int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
            int value = getSignedInt(buffer, buffer.readerIndex() + 4);
            boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
            boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
            buffer.skipBytes(8);
            --numSettings;
            delegate.readSetting(id, value, persistValue, persisted);
            break;
        case READ_PING_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            int pingId = getSignedInt(buffer, buffer.readerIndex());
            buffer.skipBytes(4);
            state = State.READ_COMMON_HEADER;
            delegate.readPingFrame(pingId);
            break;
        case READ_GOAWAY_FRAME:
            if (buffer.readableBytes() < 8) {
                return;
            }
            int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
            statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
            buffer.skipBytes(8);
            state = State.READ_COMMON_HEADER;
            delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
            break;
        case READ_HEADERS_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            last = hasFlag(flags, SPDY_FLAG_FIN);
            buffer.skipBytes(4);
            length -= 4;
            if (streamId == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid HEADERS Frame");
            } else {
                state = State.READ_HEADER_BLOCK;
                delegate.readHeadersFrame(streamId, last);
            }
            break;
        case READ_WINDOW_UPDATE_FRAME:
            if (buffer.readableBytes() < 8) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
            buffer.skipBytes(8);
            if (deltaWindowSize == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
            } else {
                state = State.READ_COMMON_HEADER;
                delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
            }
            break;
        case READ_HEADER_BLOCK:
            if (length == 0) {
                // Header block fully delivered; signal its end.
                state = State.READ_COMMON_HEADER;
                delegate.readHeaderBlockEnd();
                break;
            }
            if (!buffer.isReadable()) {
                return;
            }
            // Deliver whatever compressed header bytes are available so far.
            int compressedBytes = Math.min(buffer.readableBytes(), length);
            ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes);
            headerBlock.writeBytes(buffer, compressedBytes);
            length -= compressedBytes;
            delegate.readHeaderBlock(headerBlock);
            break;
        case DISCARD_FRAME:
            // Skip the remainder of a frame that is being ignored.
            int numBytes = Math.min(buffer.readableBytes(), length);
            buffer.skipBytes(numBytes);
            length -= numBytes;
            if (length == 0) {
                state = State.READ_COMMON_HEADER;
                break;
            }
            return;
        case FRAME_ERROR:
            // After a frame error, drain everything; the connection is unusable.
            buffer.skipBytes(buffer.readableBytes());
            return;
        default:
            throw new Error("Shouldn't reach here.");
        }
    }
}
// Encodes a well-formed PING control frame (type 6, 4-byte id) and verifies the decoder
// reports the id to the delegate and consumes the whole buffer.
@Test
public void testSpdyPingFrame() throws Exception {
    short type = 6;
    byte flags = 0;
    int length = 4;
    int id = RANDOM.nextInt();
    ByteBuf buf = Unpooled.buffer(SPDY_HEADER_SIZE + length);
    encodeControlFrameHeader(buf, type, flags, length);
    buf.writeInt(id);
    decoder.decode(buf);
    verify(delegate).readPingFrame(id);
    assertFalse(buf.isReadable());
    buf.release();
}
/**
 * Appends a human-readable dissection of an archive control request to {@code builder}.
 * <p>
 * Writes the common log header, wraps the SBE message header, then dispatches on the
 * event code to wrap the matching request decoder and append its fields. Unknown codes
 * append ": unknown command". Every case follows the same wrap-then-append pattern;
 * the decoders and append helpers are static fields/methods of the enclosing class.
 *
 * @param eventCode archive event code identifying the request type
 * @param buffer buffer containing the encoded log record
 * @param offset offset of the record within {@code buffer}
 * @param builder destination for the textual dissection
 */
@SuppressWarnings("MethodLength")
static void dissectControlRequest(
    final ArchiveEventCode eventCode,
    final MutableDirectBuffer buffer,
    final int offset,
    final StringBuilder builder)
{
    // Log header first, then the SBE message header; encodedLength tracks the body start.
    int encodedLength = dissectLogHeader(CONTEXT, eventCode, buffer, offset, builder);
    HEADER_DECODER.wrap(buffer, offset + encodedLength);
    encodedLength += MessageHeaderDecoder.ENCODED_LENGTH;
    switch (eventCode)
    {
        case CMD_IN_CONNECT:
            CONNECT_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendConnect(builder);
            break;
        case CMD_IN_CLOSE_SESSION:
            CLOSE_SESSION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendCloseSession(builder);
            break;
        case CMD_IN_START_RECORDING:
            START_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartRecording(builder);
            break;
        case CMD_IN_STOP_RECORDING:
            STOP_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopRecording(builder);
            break;
        case CMD_IN_REPLAY:
            REPLAY_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplay(builder);
            break;
        case CMD_IN_STOP_REPLAY:
            STOP_REPLAY_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopReplay(builder);
            break;
        case CMD_IN_LIST_RECORDINGS:
            LIST_RECORDINGS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecordings(builder);
            break;
        case CMD_IN_LIST_RECORDINGS_FOR_URI:
            LIST_RECORDINGS_FOR_URI_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecordingsForUri(builder);
            break;
        case CMD_IN_LIST_RECORDING:
            LIST_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecording(builder);
            break;
        case CMD_IN_EXTEND_RECORDING:
            EXTEND_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendExtendRecording(builder);
            break;
        case CMD_IN_RECORDING_POSITION:
            RECORDING_POSITION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendRecordingPosition(builder);
            break;
        case CMD_IN_TRUNCATE_RECORDING:
            TRUNCATE_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendTruncateRecording(builder);
            break;
        case CMD_IN_STOP_RECORDING_SUBSCRIPTION:
            STOP_RECORDING_SUBSCRIPTION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopRecordingSubscription(builder);
            break;
        case CMD_IN_STOP_POSITION:
            STOP_POSITION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopPosition(builder);
            break;
        case CMD_IN_FIND_LAST_MATCHING_RECORD:
            FIND_LAST_MATCHING_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendFindLastMatchingRecord(builder);
            break;
        case CMD_IN_LIST_RECORDING_SUBSCRIPTIONS:
            LIST_RECORDING_SUBSCRIPTIONS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecordingSubscriptions(builder);
            break;
        case CMD_IN_START_BOUNDED_REPLAY:
            BOUNDED_REPLAY_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartBoundedReplay(builder);
            break;
        case CMD_IN_STOP_ALL_REPLAYS:
            STOP_ALL_REPLAYS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopAllReplays(builder);
            break;
        case CMD_IN_REPLICATE:
            REPLICATE_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplicate(builder);
            break;
        case CMD_IN_STOP_REPLICATION:
            STOP_REPLICATION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopReplication(builder);
            break;
        case CMD_IN_START_POSITION:
            START_POSITION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartPosition(builder);
            break;
        case CMD_IN_DETACH_SEGMENTS:
            DETACH_SEGMENTS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendDetachSegments(builder);
            break;
        case CMD_IN_DELETE_DETACHED_SEGMENTS:
            DELETE_DETACHED_SEGMENTS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendDeleteDetachedSegments(builder);
            break;
        case CMD_IN_PURGE_SEGMENTS:
            PURGE_SEGMENTS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendPurgeSegments(builder);
            break;
        case CMD_IN_ATTACH_SEGMENTS:
            ATTACH_SEGMENTS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendAttachSegments(builder);
            break;
        case CMD_IN_MIGRATE_SEGMENTS:
            MIGRATE_SEGMENTS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendMigrateSegments(builder);
            break;
        case CMD_IN_AUTH_CONNECT:
            AUTH_CONNECT_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendAuthConnect(builder);
            break;
        case CMD_IN_KEEP_ALIVE:
            KEEP_ALIVE_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendKeepAlive(builder);
            break;
        case CMD_IN_TAGGED_REPLICATE:
            TAGGED_REPLICATE_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendTaggedReplicate(builder);
            break;
        case CMD_IN_START_RECORDING2:
            START_RECORDING_REQUEST2_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartRecording2(builder);
            break;
        case CMD_IN_EXTEND_RECORDING2:
            EXTEND_RECORDING_REQUEST2_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendExtendRecording2(builder);
            break;
        case CMD_IN_STOP_RECORDING_BY_IDENTITY:
            STOP_RECORDING_BY_IDENTITY_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopRecordingByIdentity(builder);
            break;
        case CMD_IN_PURGE_RECORDING:
            PURGE_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendPurgeRecording(builder);
            break;
        case CMD_IN_REPLICATE2:
            REPLICATE_REQUEST2_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplicate2(builder);
            break;
        case CMD_IN_REQUEST_REPLAY_TOKEN:
            REPLAY_TOKEN_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplayToken(builder);
            break;
        default:
            builder.append(": unknown command");
    }
}
// Encodes a BoundedReplayRequest and checks the dissected string renders every field,
// including the log-header timestamp (10.325 s) and the negative limitCounterId.
@Test
void controlRequestStartBoundedReplay() {
    internalEncodeLogHeader(buffer, 0, 90, 90, () -> 10_325_000_000L);
    final BoundedReplayRequestEncoder requestEncoder = new BoundedReplayRequestEncoder();
    requestEncoder.wrapAndApplyHeader(buffer, LOG_HEADER_LENGTH, headerEncoder)
        .controlSessionId(10)
        .correlationId(20)
        .recordingId(30)
        .position(40)
        .length(50)
        .limitCounterId(-123)
        .replayStreamId(14)
        .replayChannel("rep ch");
    dissectControlRequest(CMD_IN_START_BOUNDED_REPLAY, buffer, 0, builder);
    assertEquals("[10.325000000] " + CONTEXT + ": " + CMD_IN_START_BOUNDED_REPLAY.name() + " [90/90]:" +
        " controlSessionId=10" +
        " correlationId=20" +
        " recordingId=30" +
        " position=40" +
        " length=50" +
        " limitCounterId=-123" +
        " replayStreamId=14" +
        " replayChannel=rep ch", builder.toString());
}
/**
 * Deletes the given files, resolving from host preferences whether large-object
 * segments should be removed along with their manifest, then delegating to the
 * full overload.
 */
@Override
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
    final boolean deleteSegments = new HostPreferences(session.getHost())
            .getBoolean("openstack.delete.largeobject.segments");
    this.delete(files, prompt, callback, deleteSegments);
}
// Creates a placeholder directory in Swift, deletes it, and verifies via a transcript
// listener that the expected DELETE request was issued and the object is gone afterwards.
@Test
public void testDeletePlaceholder() throws Exception {
    final AtomicBoolean delete = new AtomicBoolean();
    final String name = "placeholder-" + UUID.randomUUID().toString();
    session.withListener(new TranscriptListener() {
        @Override
        public void log(final Type request, final String message) {
            switch(request) {
                case request:
                    // Flag when the DELETE for our placeholder object is observed.
                    if(("DELETE /v1/MossoCloudFS_59113590-c679-46c3-bf62-9d7c3d5176ee/test.cyberduck.ch/" + name + " HTTP/1.1").equals(message)) {
                        delete.set(true);
                    }
            }
        }
    });
    final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("IAD");
    final Path placeholder = new Path(container, name, EnumSet.of(Path.Type.directory));
    new SwiftDirectoryFeature(session).mkdir(placeholder, new TransferStatus());
    final SwiftFindFeature find = new SwiftFindFeature(session);
    assertTrue(find.find(placeholder));
    new SwiftDeleteFeature(session).delete(Collections.singletonList(placeholder), new DisabledLoginCallback(), new Delete.DisabledCallback());
    assertTrue(delete.get());
    // Allow eventual consistency to catch up before checking absence.
    Thread.sleep(1000L);
    assertFalse(find.find(placeholder));
}
/**
 * Sets the image resampling filter after validating it against the allowed set
 * (case-insensitive). The value is stored exactly as passed by the caller.
 *
 * @param filter one of point, hermite, cubic, box, gaussian, catrom, triangle,
 *               quadratic or mitchell (any letter case)
 * @throws IllegalArgumentException if {@code filter} is null or not an allowed value
 */
public void setFilter(String filter) {
    // BUG FIX: the original used filter.equals(null), which is always false for a
    // non-null receiver and throws NullPointerException for a null argument, so the
    // intended IllegalArgumentException for null was unreachable.
    if (filter == null) {
        throw new IllegalArgumentException(
                "Filter value cannot be null. Valid values are point, hermite, " +
                        "cubic, box, gaussian, catrom, triangle, quadratic and mitchell.");
    }
    String[] allowedFilters = {"Point", "Hermite", "Cubic", "Box", "Gaussian", "Catrom",
            "Triangle", "Quadratic", "Mitchell"};
    for (String allowedFilter : allowedFilters) {
        if (filter.equalsIgnoreCase(allowedFilter)) {
            this.filter = filter;
            // Record that the user explicitly configured this option.
            userConfigured.add("filter");
            return;
        }
    }
    throw new IllegalArgumentException(
            "Invalid filter value. Valid values are point, hermite, " +
                    "cubic, box, gaussian, catrom, triangle, quadratic and mitchell.");
}
// Valid filter names (any case) must be accepted; an unknown name must throw.
// NOTE(review): assertTrue(true, ...) is vacuous — reaching it only proves no exception
// was thrown by the two setFilter calls above.
@Test
public void testValidateFilter() {
    TesseractOCRConfig config = new TesseractOCRConfig();
    config.setFilter("Triangle");
    config.setFilter("box");
    assertTrue(true, "Couldn't set valid values");
    assertThrows(IllegalArgumentException.class, () -> {
        config.setFilter("abc");
    });
}
/**
 * Validates that a filter-predicate column's declared Java type is compatible with the
 * Parquet primitive type found in the file schema.
 * <p>
 * Consults the static {@code classToParquetType} map (Java class -> valid primitive
 * types) and, for error messages, {@code parquetTypeToClass} (primitive type -> valid
 * Java classes).
 *
 * @param foundColumn the column referenced by the filter predicate
 * @param primitiveType the primitive type recorded for that column in file metadata
 * @throws IllegalArgumentException if the column's class is unsupported in predicates,
 *         or its class does not match the file's primitive type
 */
public static <T extends Comparable<T>> void assertTypeValid(
        Column<T> foundColumn, PrimitiveTypeName primitiveType) {
    Class<T> foundColumnType = foundColumn.getColumnType();
    ColumnPath columnPath = foundColumn.getColumnPath();
    Set<PrimitiveTypeName> validTypeDescriptors = classToParquetType.get(foundColumnType);
    if (validTypeDescriptors == null) {
        // The declared Java class is not usable in filter predicates at all.
        StringBuilder message = new StringBuilder();
        message.append("Column ")
                .append(columnPath.toDotString())
                .append(" was declared as type: ")
                .append(foundColumnType.getName())
                .append(" which is not supported in FilterPredicates.");
        Set<Class<?>> supportedTypes = parquetTypeToClass.get(primitiveType);
        if (supportedTypes != null) {
            message.append(" Supported types for this column are: ").append(supportedTypes);
        } else {
            message.append(" There are no supported types for columns of " + primitiveType);
        }
        throw new IllegalArgumentException(message.toString());
    }
    if (!validTypeDescriptors.contains(primitiveType)) {
        // The class is supported, but not for this particular primitive type.
        StringBuilder message = new StringBuilder();
        message.append("FilterPredicate column: ")
                .append(columnPath.toDotString())
                .append("'s declared type (")
                .append(foundColumnType.getName())
                .append(") does not match the schema found in file metadata. Column ")
                .append(columnPath.toDotString())
                .append(" is of type: ")
                .append(primitiveType)
                .append("\nValid types for this column are: ")
                .append(parquetTypeToClass.get(primitiveType));
        throw new IllegalArgumentException(message.toString());
    }
}
// Each supported Java column class must validate against its matching Parquet primitive
// type; binary columns are valid for BINARY, FIXED_LEN_BYTE_ARRAY and INT96.
@Test
public void testValidTypes() {
    assertTypeValid(intColumn, PrimitiveTypeName.INT32);
    assertTypeValid(longColumn, PrimitiveTypeName.INT64);
    assertTypeValid(floatColumn, PrimitiveTypeName.FLOAT);
    assertTypeValid(doubleColumn, PrimitiveTypeName.DOUBLE);
    assertTypeValid(booleanColumn, PrimitiveTypeName.BOOLEAN);
    assertTypeValid(binaryColumn, PrimitiveTypeName.BINARY);
    assertTypeValid(binaryColumn, PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY);
    assertTypeValid(binaryColumn, PrimitiveTypeName.INT96);
}
/**
 * Returns whether the given text is a floating-point number: after the start position
 * determined by {@code findStartPosition} (which handles any leading sign), the text
 * must consist only of digits with at most one decimal point, and must contain at
 * least one digit.
 * <p>
 * BUG FIX: the original returned {@code true} for digit-free inputs such as "."
 * (noted as a TODO in the unit test); a {@code digits} flag now requires at least
 * one digit to be present.
 *
 * @param text the text to check
 * @return true if the text is a valid floating-point number
 */
public static boolean isFloatingNumber(String text) {
    final int startPos = findStartPosition(text);
    if (startPos < 0) {
        return false;
    }
    boolean dots = false;
    boolean digits = false;
    for (int i = startPos; i < text.length(); i++) {
        char ch = text.charAt(i);
        if (Character.isDigit(ch)) {
            digits = true;
        } else if (ch == '.') {
            // Only a single decimal point is permitted.
            if (dots) {
                return false;
            }
            dots = true;
        } else {
            return false;
        }
    }
    // Reject inputs that contain no digit at all (e.g. "." or empty remainder).
    return digits;
}
// Non-numeric strings and bare signs must not be classified as floating-point numbers.
@Test
@DisplayName("Tests that isFloatingNumber returns false for non-numeric chars")
void isFloatingNumberNonNumeric() {
    assertFalse(ObjectHelper.isFloatingNumber("ABC"));
    assertFalse(ObjectHelper.isFloatingNumber("-ABC"));
    assertFalse(ObjectHelper.isFloatingNumber("ABC.0"));
    assertFalse(ObjectHelper.isFloatingNumber("-ABC.0"));
    assertFalse(ObjectHelper.isFloatingNumber("!@#$#$%@#$%"));
    // TODO: fix ... currently it returns true for this
    //assertFalse(ObjectHelper.isFloatingNumber("."));
    assertFalse(ObjectHelper.isFloatingNumber("-"));
}
@Override public Long sendSingleMailToAdmin(String mail, Long userId, String templateCode, Map<String, Object> templateParams) { // 如果 mail 为空,则加载用户编号对应的邮箱 if (StrUtil.isEmpty(mail)) { AdminUserDO user = adminUserService.getUser(userId); if (user != null) { mail = user.getEmail(); } } // 执行发送 return sendSingleMail(mail, userId, UserTypeEnum.ADMIN.getValue(), templateCode, templateParams); }
// With a null mail argument, the recipient must be resolved from the admin user's
// email; the rendered template is logged and handed to the mail producer.
@Test
public void testSendSingleMailToAdmin() {
    // Prepare arguments
    Long userId = randomLongId();
    String templateCode = RandomUtils.randomString();
    Map<String, Object> templateParams = MapUtil.<String, Object>builder().put("code", "1234")
            .put("op", "login").build();
    // Mock adminUserService behavior
    AdminUserDO user = randomPojo(AdminUserDO.class, o -> o.setMobile("15601691300"));
    when(adminUserService.getUser(eq(userId))).thenReturn(user);
    // Mock MailTemplateService behavior
    MailTemplateDO template = randomPojo(MailTemplateDO.class, o -> {
        o.setStatus(CommonStatusEnum.ENABLE.getStatus());
        o.setContent("验证码为{code}, 操作为{op}");
        o.setParams(Lists.newArrayList("code", "op"));
    });
    when(mailTemplateService.getMailTemplateByCodeFromCache(eq(templateCode))).thenReturn(template);
    String title = RandomUtils.randomString();
    when(mailTemplateService.formatMailTemplateContent(eq(template.getTitle()), eq(templateParams)))
            .thenReturn(title);
    String content = RandomUtils.randomString();
    when(mailTemplateService.formatMailTemplateContent(eq(template.getContent()), eq(templateParams)))
            .thenReturn(content);
    // Mock MailAccountService behavior
    MailAccountDO account = randomPojo(MailAccountDO.class);
    when(mailAccountService.getMailAccountFromCache(eq(template.getAccountId()))).thenReturn(account);
    // Mock MailLogService behavior
    Long mailLogId = randomLongId();
    when(mailLogService.createMailLog(eq(userId), eq(UserTypeEnum.ADMIN.getValue()), eq(user.getEmail()),
            eq(account), eq(template), eq(content), eq(templateParams), eq(true))).thenReturn(mailLogId);
    // Invoke
    Long resultMailLogId = mailSendService.sendSingleMailToAdmin(null, userId, templateCode, templateParams);
    // Assert the returned mail log id
    assertEquals(mailLogId, resultMailLogId);
    // Assert the producer was invoked with the resolved recipient and rendered content
    verify(mailProducer).sendMailSendMessage(eq(mailLogId), eq(user.getEmail()),
            eq(account.getId()), eq(template.getNickname()), eq(title), eq(content));
}
/**
 * Returns the scanned classes, augmented with the JAX-RS {@code Application}'s declared
 * classes and the classes of its singletons, excluding any class whose name is ignored.
 */
@Override
public Set<Class<?>> classes() {
    Set<Class<?>> result = super.classes();
    if (application == null) {
        return result;
    }
    Set<Class<?>> appClasses = application.getClasses();
    if (appClasses != null) {
        for (Class<?> candidate : appClasses) {
            if (!isIgnored(candidate.getName())) {
                result.add(candidate);
            }
        }
    }
    Set<Object> singletons = application.getSingletons();
    if (singletons != null) {
        for (Object singleton : singletons) {
            Class<?> singletonClass = singleton.getClass();
            if (!isIgnored(singletonClass.getName())) {
                result.add(singletonClass);
            }
        }
    }
    return result;
}
// The scanner must merge package-scanned resources with those declared on the Application.
@Test(description = "scan classes from all packages and Application")
public void shouldScanClassesFromAllPackagesAndApplication() throws Exception {
    assertTrue(scanner.classes().contains(ResourceInPackageA.class));
    assertTrue(scanner.classes().contains(ResourceInPackageB.class));
    assertTrue(scanner.classes().contains(ResourceInApplication.class));
}
/**
 * Executes a single queued command: deserializes the command and its id from the
 * queued record and delegates to {@code handleStatementWithTerminatedQueries} in
 * EXECUTE mode at the record's offset (restoreInProgress = false).
 *
 * @param queuedCommand the queued record to execute
 * @throws IllegalStateException if this executor has not been configured yet
 *         (via {@code throwIfNotConfigured})
 */
void handleStatement(final QueuedCommand queuedCommand) {
    throwIfNotConfigured();
    handleStatementWithTerminatedQueries(
            queuedCommand.getAndDeserializeCommand(commandDeserializer),
            queuedCommand.getAndDeserializeCommandId(),
            queuedCommand.getStatus(),
            Mode.EXECUTE,
            queuedCommand.getOffset(),
            false
    );
}
// Executing a planned command must start the resulting query.
@Test
public void shouldStartQueryForPlannedCommand() {
    // Given:
    givenMockPlannedQuery();
    // When:
    handleStatement(statementExecutorWithMocks, plannedCommand, COMMAND_ID, Optional.empty(), 0L);
    // Then:
    verify(mockQueryMetadata).start();
}
/**
 * Creates a mutation detector that snapshots {@code value} via {@code coder}.
 * A null value has no state to observe, so a no-op detector is returned for it.
 *
 * @throws CoderException if encoding the value fails
 */
public static <T> MutationDetector forValueWithCoder(T value, Coder<T> coder) throws CoderException {
  return value == null
      ? noopMutationDetector()
      : new CodedValueMutationDetector<>(value, coder);
}
// An untouched LinkedList must pass verification without raising.
@Test
public void testUnmodifiedLinkedList() throws Exception {
    List<Integer> value = Lists.newLinkedList(Arrays.asList(1, 2, 3, 4));
    MutationDetector detector =
            MutationDetectors.forValueWithCoder(value, ListCoder.of(VarIntCoder.of()));
    detector.verifyUnmodified();
}
/**
 * Validates an update of an existing sink configuration and returns the merged result.
 * <p>
 * Immutable identity fields (tenant, namespace, name) and behavioral fields
 * (subscription name, processing guarantees, ordering, autoAck, input topics and their
 * regex flag) must not change; violating any of them throws
 * {@link IllegalArgumentException}. All other non-empty fields of {@code newConfig}
 * overwrite the corresponding fields of a clone of {@code existingConfig}.
 * <p>
 * NOTE(review): {@code newConfig} is mutated in place — its inputs/topicsPattern/
 * topicToSerdeClassName/topicToSchemaType are normalized into its inputSpecs map
 * before validation. {@code existingConfig} itself is never modified (a clone is).
 *
 * @param existingConfig the currently stored sink configuration
 * @param newConfig the requested changes
 * @return a merged configuration based on a clone of {@code existingConfig}
 * @throws IllegalArgumentException if an immutable field was altered
 */
public static SinkConfig validateUpdate(SinkConfig existingConfig, SinkConfig newConfig) {
    SinkConfig mergedConfig = clone(existingConfig);
    if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
        throw new IllegalArgumentException("Tenants differ");
    }
    if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
        throw new IllegalArgumentException("Namespaces differ");
    }
    if (!existingConfig.getName().equals(newConfig.getName())) {
        throw new IllegalArgumentException("Sink Names differ");
    }
    if (!StringUtils.isEmpty(newConfig.getClassName())) {
        mergedConfig.setClassName(newConfig.getClassName());
    }
    if (!StringUtils.isEmpty(newConfig.getSourceSubscriptionName()) && !newConfig.getSourceSubscriptionName()
            .equals(existingConfig.getSourceSubscriptionName())) {
        throw new IllegalArgumentException("Subscription Name cannot be altered");
    }
    // Normalize both input-spec maps so the merge below can assume non-null maps.
    if (newConfig.getInputSpecs() == null) {
        newConfig.setInputSpecs(new HashMap<>());
    }
    if (mergedConfig.getInputSpecs() == null) {
        mergedConfig.setInputSpecs(new HashMap<>());
    }
    if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
        mergedConfig.setLogTopic(newConfig.getLogTopic());
    }
    // Fold the various input declarations (inputs list, topics pattern, serde map,
    // schema map) into newConfig's inputSpecs so they validate uniformly.
    if (newConfig.getInputs() != null) {
        newConfig.getInputs().forEach((topicName -> {
            newConfig.getInputSpecs().putIfAbsent(topicName,
                    ConsumerConfig.builder().isRegexPattern(false).build());
        }));
    }
    if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
        newConfig.getInputSpecs().put(newConfig.getTopicsPattern(), ConsumerConfig.builder()
                .isRegexPattern(true)
                .build());
    }
    if (newConfig.getTopicToSerdeClassName() != null) {
        newConfig.getTopicToSerdeClassName().forEach((topicName, serdeClassName) -> {
            newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder()
                    .serdeClassName(serdeClassName)
                    .isRegexPattern(false)
                    .build());
        });
    }
    if (newConfig.getTopicToSchemaType() != null) {
        newConfig.getTopicToSchemaType().forEach((topicName, schemaClassname) -> {
            newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder()
                    .schemaType(schemaClassname)
                    .isRegexPattern(false)
                    .build());
        });
    }
    if (!newConfig.getInputSpecs().isEmpty()) {
        SinkConfig finalMergedConfig = mergedConfig;
        newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
            // Topics may only be reconfigured, never added; regex-ness is immutable.
            if (!existingConfig.getInputSpecs().containsKey(topicName)) {
                throw new IllegalArgumentException("Input Topics cannot be altered");
            }
            if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
                throw new IllegalArgumentException(
                        "isRegexPattern for input topic " + topicName + " cannot be altered");
            }
            finalMergedConfig.getInputSpecs().put(topicName, consumerConfig);
        });
    }
    if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
            .equals(existingConfig.getProcessingGuarantees())) {
        throw new IllegalArgumentException("Processing Guarantees cannot be altered");
    }
    if (newConfig.getConfigs() != null) {
        mergedConfig.setConfigs(newConfig.getConfigs());
    }
    if (newConfig.getSecrets() != null) {
        mergedConfig.setSecrets(newConfig.getSecrets());
    }
    if (newConfig.getParallelism() != null) {
        mergedConfig.setParallelism(newConfig.getParallelism());
    }
    if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
            .equals(existingConfig.getRetainOrdering())) {
        throw new IllegalArgumentException("Retain Ordering cannot be altered");
    }
    if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
            .equals(existingConfig.getRetainKeyOrdering())) {
        throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
    }
    if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
        throw new IllegalArgumentException("AutoAck cannot be altered");
    }
    if (newConfig.getResources() != null) {
        mergedConfig
                .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
    }
    if (newConfig.getTimeoutMs() != null) {
        mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
    }
    if (newConfig.getCleanupSubscription() != null) {
        mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
    }
    if (!StringUtils.isEmpty(newConfig.getArchive())) {
        mergedConfig.setArchive(newConfig.getArchive());
    }
    if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
        mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
    }
    if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
        mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
    }
    if (newConfig.getTransformFunction() != null) {
        mergedConfig.setTransformFunction(newConfig.getTransformFunction());
    }
    if (newConfig.getTransformFunctionClassName() != null) {
        mergedConfig.setTransformFunctionClassName(newConfig.getTransformFunctionClassName());
    }
    if (newConfig.getTransformFunctionConfig() != null) {
        mergedConfig.setTransformFunctionConfig(newConfig.getTransformFunctionConfig());
    }
    return mergedConfig;
}
// Updating an existing input topic's consumer config (same regex flag) must be merged,
// and the original configuration object must stay untouched.
@Test
public void testMergeDifferentInputSpecWithInputsSet() {
    SinkConfig sinkConfig = createSinkConfig();
    sinkConfig.getInputSpecs().put("test-input",
            ConsumerConfig.builder().isRegexPattern(false).receiverQueueSize(1000).build());
    Map<String, ConsumerConfig> inputSpecs = new HashMap<>();
    ConsumerConfig newConsumerConfig =
            ConsumerConfig.builder().isRegexPattern(false).serdeClassName("test-serde").receiverQueueSize(58).build();
    inputSpecs.put("test-input", newConsumerConfig);
    SinkConfig newSinkConfig = createUpdatedSinkConfig("inputSpecs", inputSpecs);
    newSinkConfig.setInputs(new ArrayList<>());
    newSinkConfig.getInputs().add("test-input");
    SinkConfig mergedConfig = SinkConfigUtils.validateUpdate(sinkConfig, newSinkConfig);
    assertEquals(mergedConfig.getInputSpecs().get("test-input"), newConsumerConfig);
    // make sure original sinkConfig was not modified
    assertEquals(sinkConfig.getInputSpecs().get("test-input").getReceiverQueueSize().intValue(), 1000);
}
/**
 * Dispatches an xDS discovery response to the handler for its resource type.
 *
 * @param response the decoded discovery response to process
 * @throws AssertionError if a new ResourceType constant was added without a
 *         corresponding case in this switch
 */
@VisibleForTesting void handleResponse(DiscoveryResponseData response) {
    ResourceType resourceType = response.getResourceType();
    switch (resourceType) {
        case NODE:
            handleD2NodeResponse(response);
            break;
        case D2_URI_MAP:
            handleD2URIMapResponse(response);
            break;
        case D2_URI:
            handleD2URICollectionResponse(response);
            break;
        default:
            // Defensive guard: fail loudly instead of silently dropping an
            // unrecognized resource type.
            throw new AssertionError("Missing case in enum switch: " + resourceType);
    }
}
// Exercises handleResponse() for NODE resources: the first update ACKs and notifies the
// watcher without tracking server latency; a prior-but-invalid state also skips latency
// tracking; an actual data change notifies with the new data and records latency.
@Test public void testHandleD2NodeResponseWithData() { XdsClientImplFixture fixture = new XdsClientImplFixture(); // subscriber original data is null fixture._nodeSubscriber.setData(null); fixture._xdsClientImpl.handleResponse(DISCOVERY_RESPONSE_NODE_DATA1); fixture.verifyAckSent(1); verify(fixture._resourceWatcher).onChanged(eq(NODE_UPDATE1)); verifyZeroInteractions(fixture._serverMetricsProvider); // initial update should not track latency XdsClient.NodeUpdate actualData = (XdsClient.NodeUpdate) fixture._nodeSubscriber.getData(); // subscriber data should be updated to NODE_UPDATE1 Assert.assertEquals(Objects.requireNonNull(actualData).getNodeData(), NODE_UPDATE1.getNodeData()); // subscriber original data is invalid, xds server latency won't be tracked fixture._nodeSubscriber.setData(new XdsClient.NodeUpdate(null)); fixture._xdsClientImpl.handleResponse(DISCOVERY_RESPONSE_NODE_DATA1); fixture.verifyAckSent(2); verify(fixture._resourceWatcher, times(2)).onChanged(eq(NODE_UPDATE1)); verifyZeroInteractions(fixture._serverMetricsProvider); // subscriber data should be updated to NODE_UPDATE2 fixture._xdsClientImpl.handleResponse(DISCOVERY_RESPONSE_NODE_DATA2); actualData = (XdsClient.NodeUpdate) fixture._nodeSubscriber.getData(); verify(fixture._resourceWatcher).onChanged(eq(NODE_UPDATE2)); verify(fixture._serverMetricsProvider).trackLatency(anyLong()); Assert.assertEquals(actualData.getNodeData(), NODE_UPDATE2.getNodeData()); }
@ScalarFunction("quantile_at_value")
@Description("Given an input x between min/max values of qdigest, find which quantile is represented by that value")
@SqlType(StandardTypes.DOUBLE)
@SqlNullable
public static Double quantileAtValueBigint(@SqlType("qdigest(bigint)") Slice input, @SqlType(StandardTypes.BIGINT) long value)
{
    // Deserialize the digest, then reject queries it cannot answer.
    QuantileDigest digest = new QuantileDigest(input);
    if (digest.getCount() == 0) {
        // Empty digest: no quantile is defined for any value.
        return null;
    }
    if (value > digest.getMax() || value < digest.getMin()) {
        // Value lies outside the observed range; the quantile is undefined.
        return null;
    }
    // A single-boundary histogram at `value` yields the count of items at or below it.
    double itemsAtOrBelow = digest.getHistogram(ImmutableList.of(value)).get(0).getCount();
    return itemsAtOrBelow / digest.getCount();
}
// Covers quantile_at_value over a bigint qdigest of 0..9: values above the max (20) and
// below the min (-1) yield NULL, while an in-range value (6) returns its quantile (0.6).
@Test public void testQuantileAtValueBigint() { QuantileDigest qdigest = new QuantileDigest(1); addAll(qdigest, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9); functionAssertions.assertFunction(format("quantile_at_value(CAST(X'%s' AS qdigest(bigint)), 20)", toHexString(qdigest)), DOUBLE, null); functionAssertions.assertFunction(format("quantile_at_value(CAST(X'%s' AS qdigest(bigint)), 6)", toHexString(qdigest)), DOUBLE, 0.6); functionAssertions.assertFunction(format("quantile_at_value(CAST(X'%s' AS qdigest(bigint)), -1)", toHexString(qdigest)), DOUBLE, null); }
/**
 * Parses a string into a Jackson {@code JsonNode}.
 *
 * @param json the raw text to parse; may be null or empty
 * @return the parsed node, or empty when the input is null/empty or not valid JSON
 * @throws IllegalArgumentException for non-parse I/O failures during reading
 */
public static Optional<JsonNode> convertFromStringToJSONNode(String json) {
    // Blank input cannot be parsed; treat it as "no JSON present".
    if (json == null || json.isEmpty()) {
        return Optional.empty();
    }
    ObjectMapper mapper = new ObjectMapper();
    try {
        return Optional.of(mapper.readTree(json));
    } catch (JsonParseException malformed) {
        // Malformed JSON is an expected, non-exceptional outcome.
        return Optional.empty();
    } catch (IOException unexpected) {
        throw new IllegalArgumentException("Generic error during json parsing: " + json, unexpected);
    }
}
// Table of inputs: malformed strings (unquoted keys, unbalanced braces/brackets) must be
// absent; well-formed JSON (scalars, objects, arrays, nesting, escaped quotes, mixed
// types, whitespace in keys/values) must be present.
@Test public void convertFromStringToJSONNode_manyCases() { assertThat(convertFromStringToJSONNode(null)).isNotPresent(); assertThat(convertFromStringToJSONNode("Not json")).isNotPresent(); assertThat(convertFromStringToJSONNode("\"Not json")).isNotPresent(); assertThat(convertFromStringToJSONNode("key : notJson\"")).isNotPresent(); assertThat(convertFromStringToJSONNode("[key : 100]")).isNotPresent(); assertThat(convertFromStringToJSONNode("{\"key\" : 100{")).isNotPresent(); assertThat(convertFromStringToJSONNode("{key : 100}")).isNotPresent(); assertThat(convertFromStringToJSONNode("\"Json\"")).isPresent(); assertThat(convertFromStringToJSONNode("\"key : Json\"")).isPresent(); assertThat(convertFromStringToJSONNode("{ \"id\": 2, \"username\": \"user\", \"num\": 12, \"name\": \"Mr Yellow\"\n }")).isPresent(); assertThat(convertFromStringToJSONNode("{ \"users\": [\n" + "\t\t{ \"id\": 3, \"username\": \"user45\", \"num\": 24, \"name\": \"Mr White\" },\n" + "\t\t{ \"id\": 4, \"username\": \"user65\", \"num\": 32, \"name\": \"Mr Red\" }\n" + "\t]}")).isPresent(); assertThat(convertFromStringToJSONNode("[{\"name\": \"\\\"John\\\"\"}, " + "{\"name\": \"\\\"John\\\"\", \"names\" : [{\"value\": \"\\\"Anna\\\"\"}, {\"value\": \"\\\"Mario\\\"\"}]}]")).isPresent(); assertThat(convertFromStringToJSONNode("[1,2,3]")).isPresent(); assertThat(convertFromStringToJSONNode("{\"id\": 23, \"num\": 34, \"time\" : 56}")).isPresent(); assertThat(convertFromStringToJSONNode("{\"married\":true, \"num\":34, \"name\": \"john\"}")).as("Combine three data types in object").isPresent(); assertThat(convertFromStringToJSONNode("[{\"married\":true,\"num\":34,\"name\":\"john\"}," + "{\"married\":false,\"num\":43,\"name\":\"jane\"}]")).as("Combine three data types in array").isPresent(); assertThat(convertFromStringToJSONNode("{\"is married\":\"yes, is\"}")).as("Whitespaces").isPresent(); }
/**
 * Drops an Iceberg database, refusing when it still contains tables.
 *
 * NOTE(review): the {@code isForceDrop} flag is never consulted here — the
 * emptiness check applies even when a force drop was requested; confirm this
 * is intentional (the existing test expects the rejection even with force=true).
 *
 * @throws StarRocksConnectorException if the database still contains tables
 */
@Override
public void dropDb(String dbName, boolean isForceDrop) throws MetaNotFoundException {
    if (listTableNames(dbName).size() != 0) {
        throw new StarRocksConnectorException("Database %s not empty", dbName);
    }
    icebergCatalog.dropDb(dbName);
    // Keep the in-memory database cache consistent with the catalog.
    databases.remove(dbName);
}
// Dropping a non-empty database must fail with StarRocksConnectorException even when
// force-drop is requested; the catalog is mocked to report two tables.
@Test public void testDropNotEmptyTable() { IcebergHiveCatalog icebergHiveCatalog = new IcebergHiveCatalog(CATALOG_NAME, new Configuration(), DEFAULT_CONFIG); IcebergMetadata metadata = new IcebergMetadata(CATALOG_NAME, HDFS_ENVIRONMENT, icebergHiveCatalog, Executors.newSingleThreadExecutor(), Executors.newSingleThreadExecutor(), null); List<TableIdentifier> mockTables = new ArrayList<>(); mockTables.add(TableIdentifier.of("table1")); mockTables.add(TableIdentifier.of("table2")); new Expectations(icebergHiveCatalog) { { icebergHiveCatalog.listTables("iceberg_db"); result = mockTables; minTimes = 0; } }; try { metadata.dropDb("iceberg_db", true); Assert.fail(); } catch (Exception e) { Assert.assertTrue(e instanceof StarRocksConnectorException); Assert.assertTrue(e.getMessage().contains("Database iceberg_db not empty")); } }
// Creates the command executor for a PostgreSQL/openGauss frontend packet. Aggregated
// packets that contain batched statements — and no openGauss batch-bind packets — get a
// dedicated batched-statement executor; other aggregated packets are unpacked into one
// executor per inner packet; plain packets are dispatched directly. SQL-bearing packets
// are logged with their SQL at debug level.
public static CommandExecutor newInstance(final CommandPacketType commandPacketType, final PostgreSQLCommandPacket commandPacket, final ConnectionSession connectionSession, final PortalContext portalContext) throws SQLException { if (commandPacket instanceof SQLReceivedPacket) { log.debug("Execute packet type: {}, sql: {}", commandPacketType, ((SQLReceivedPacket) commandPacket).getSQL()); } else { log.debug("Execute packet type: {}", commandPacketType); } if (!(commandPacket instanceof PostgreSQLAggregatedCommandPacket)) { return getCommandExecutor(commandPacketType, commandPacket, connectionSession, portalContext); } PostgreSQLAggregatedCommandPacket aggregatedCommandPacket = (PostgreSQLAggregatedCommandPacket) commandPacket; if (aggregatedCommandPacket.isContainsBatchedStatements() && aggregatedCommandPacket.getPackets().stream().noneMatch(OpenGaussComBatchBindPacket.class::isInstance)) { return new PostgreSQLAggregatedCommandExecutor(getExecutorsOfAggregatedBatchedStatements(aggregatedCommandPacket, connectionSession, portalContext)); } List<CommandExecutor> result = new ArrayList<>(aggregatedCommandPacket.getPackets().size()); for (PostgreSQLCommandPacket each : aggregatedCommandPacket.getPackets()) { result.add(getCommandExecutor((CommandPacketType) each.getIdentifier(), each, connectionSession, portalContext)); } return new PostgreSQLAggregatedCommandExecutor(result); }
// A BATCH_BIND_COMMAND packet must resolve to an OpenGaussComBatchBindExecutor.
@Test void assertNewOpenGaussBatchBindExecutor() throws SQLException { OpenGaussComBatchBindPacket batchBindPacket = mock(OpenGaussComBatchBindPacket.class); CommandExecutor actual = OpenGaussCommandExecutorFactory.newInstance(OpenGaussCommandPacketType.BATCH_BIND_COMMAND, batchBindPacket, connectionSession, portalContext); assertThat(actual, instanceOf(OpenGaussComBatchBindExecutor.class)); }
@Override
public void execute(TestCaseState state) {
    // A hook method may declare either no parameters or exactly one
    // Scenario parameter; build the argument list accordingly.
    final Object[] args = method.getParameterTypes().length == 1
            ? new Object[] { new io.cucumber.java.Scenario(state) }
            : new Object[0];
    invokeMethod(args);
}
// A zero-argument hook method must still be invoked (without a Scenario argument).
@Test void can_create_with_no_argument() throws Throwable { Method method = JavaHookDefinitionTest.class.getMethod("no_arguments"); JavaHookDefinition definition = new JavaHookDefinition(method, "", 0, lookup); definition.execute(state); assertTrue(invoked); }
// Searches B2 for files matching `regex` under `workdir`. From the root, every bucket is
// scanned; otherwise only the workdir's container, using the workdir key as a listing
// prefix. Pages through listFileNames (chunk size from host preferences) until the
// continuation file name is null, mapping each hit to a Path with fetched attributes.
@Override public AttributedList<Path> search(final Path workdir, final Filter<Path> regex, final ListProgressListener listener) throws BackgroundException { try { final AttributedList<Path> list = new AttributedList<>(); String prefix = null; final AttributedList<Path> containers; if(workdir.isRoot()) { containers = new B2BucketListService(session, fileid).list(new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory)), listener); } else { containers = new AttributedList<>(Collections.singletonList(containerService.getContainer(workdir))); if(!containerService.isContainer(workdir)) { prefix = containerService.getKey(workdir) + Path.DELIMITER; } } for(Path container : containers) { String startFilename = prefix; do { final B2ListFilesResponse response = session.getClient().listFileNames( fileid.getVersionId(container), startFilename, new HostPreferences(session.getHost()).getInteger("b2.listing.chunksize"), prefix, null); for(B2FileInfoResponse info : response.getFiles()) { final Path f = new Path(String.format("%s%s%s", container.getAbsolute(), Path.DELIMITER, info.getFileName()), EnumSet.of(Path.Type.file)); if(regex.accept(f)) { list.add(f.withAttributes(new B2AttributesFinderFeature(session, fileid).toAttributes(info))); } } startFilename = response.getNextFileName(); } while(startFilename != null); } return list; } catch(B2ApiException e) { throw new B2ExceptionMappingService(fileid).map(e); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map(e); } }
// End-to-end search against a live B2 bucket: matches by full and partial file name
// within the working directory, yields nothing when searching a sibling subdirectory,
// reports the correct parent for matches found in subdirectories, and cleans up all
// created paths afterwards.
@Test public void testSearchInDirectory() throws Exception { final String name = new AlphanumericRandomStringService().random(); final Path bucket = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); final B2VersionIdProvider fileid = new B2VersionIdProvider(session); final Path workdir = new B2DirectoryFeature(session, fileid).mkdir(new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final Path file = new B2TouchFeature(session, fileid).touch(new Path(workdir, name, EnumSet.of(Path.Type.file)), new TransferStatus()); final B2SearchFeature feature = new B2SearchFeature(session, fileid); assertNotNull(feature.search(workdir, new SearchFilter(name), new DisabledListProgressListener()).find(new SimplePathPredicate(file))); assertNotNull(feature.search(workdir, new SearchFilter(StringUtils.substring(name, 2)), new DisabledListProgressListener()).find(new SimplePathPredicate(file))); { final AttributedList<Path> result = feature.search(workdir, new SearchFilter(StringUtils.substring(name, 0, name.length() - 2)), new DisabledListProgressListener()); assertNotNull(result.find(new SimplePathPredicate(file))); assertEquals(workdir, result.get(result.indexOf(file)).getParent()); } final Path subdir = new B2DirectoryFeature(session, fileid).mkdir(new Path(workdir, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); assertNull(feature.search(subdir, new SearchFilter(name), new DisabledListProgressListener()).find(new SimplePathPredicate(file))); final Path filesubdir = new B2TouchFeature(session, fileid).touch(new Path(subdir, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); { final AttributedList<Path> result = feature.search(workdir, new SearchFilter(filesubdir.getName()), new DisabledListProgressListener()); assertNotNull(result.find(new SimplePathPredicate(filesubdir))); assertEquals(subdir, result.find(new SimplePathPredicate(filesubdir)).getParent()); } new B2DeleteFeature(session, fileid).delete(Arrays.asList(file, filesubdir, subdir), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Suspends the push consumer when any of its subscribed topics is in the
 * prohibited set; otherwise resumes it.
 *
 * @param wrapper the push-consumer wrapper to control
 * @param topics  the set of prohibited topic names
 */
public static void disablePushConsumption(DefaultMqPushConsumerWrapper wrapper, Set<String> topics) {
    for (String subscribed : wrapper.getSubscribedTopics()) {
        if (topics.contains(subscribed)) {
            suspendPushConsumer(wrapper);
            return;
        }
    }
    // No overlap with prohibited topics: make sure consumption is running.
    resumePushConsumer(wrapper);
}
// A consumer subscribed to a prohibited topic must be suspended; issuing the same
// prohibition again while already suspended must not unregister the consumer group.
@Test public void testDisablePullConsumptionWithSubTractTopics() { subscribedTopics = new HashSet<>(); subscribedTopics.add("test-topic-1"); subscribedTopics.add("test-topic-2"); pushConsumerWrapper.setSubscribedTopics(subscribedTopics); pushConsumerWrapper.setProhibition(false); RocketMqPushConsumerController.disablePushConsumption(pushConsumerWrapper, prohibitionTopics); Assert.assertTrue(pushConsumerWrapper.isProhibition()); // after consumption is disabled, issue the prohibition again MQClientInstance clientFactory = pushConsumerWrapper.getClientFactory(); Mockito.reset(clientFactory); RocketMqPushConsumerController.disablePushConsumption(pushConsumerWrapper, prohibitionTopics); Mockito.verify(clientFactory, Mockito.times(0)) .unregisterConsumer("test-group"); }
// Configures SSL for a Kafka consumer from stream-level properties. One-way TLS (client
// validates the server) requires truststore location/password and a server certificate;
// when present, the truststore is (re)built if stale and the security protocol is set.
// Two-way TLS additionally requires keystore location/password, key password and a
// client certificate, triggering keystore initialization when stale. Missing pieces
// cause the corresponding step to be skipped with an informational log.
public static void initSSL(Properties consumerProps) { // Check if one-way SSL is enabled. In this scenario, the client validates the server certificate. String trustStoreLocation = consumerProps.getProperty(SSL_TRUSTSTORE_LOCATION); String trustStorePassword = consumerProps.getProperty(SSL_TRUSTSTORE_PASSWORD); String serverCertificate = consumerProps.getProperty(STREAM_KAFKA_SSL_SERVER_CERTIFICATE); if (StringUtils.isAnyEmpty(trustStoreLocation, trustStorePassword, serverCertificate)) { LOGGER.info("Skipping auto SSL server validation since it's not configured."); return; } if (shouldRenewTrustStore(consumerProps)) { initTrustStore(consumerProps); } // Set the security protocol String securityProtocol = consumerProps.getProperty(SECURITY_PROTOCOL, DEFAULT_SECURITY_PROTOCOL); consumerProps.setProperty(SECURITY_PROTOCOL, securityProtocol); // Check if two-way SSL is enabled. In this scenario, the client validates the server's certificate and the server // validates the client's certificate. String keyStoreLocation = consumerProps.getProperty(SSL_KEYSTORE_LOCATION); String keyStorePassword = consumerProps.getProperty(SSL_KEYSTORE_PASSWORD); String keyPassword = consumerProps.getProperty(SSL_KEY_PASSWORD); String clientCertificate = consumerProps.getProperty(STREAM_KAFKA_SSL_CLIENT_CERTIFICATE); if (StringUtils.isAnyEmpty(keyStoreLocation, keyStorePassword, keyPassword, clientCertificate)) { LOGGER.info("Skipping auto SSL client validation since it's not configured."); return; } if (shouldRenewKeyStore(consumerProps)) { initKeyStore(consumerProps); } }
// Keystore-only configuration (no truststore properties) must make initSSL() skip SSL
// setup entirely; validating the never-created truststore then raises
// FileNotFoundException, which is the expected outcome of this test.
@Test (expectedExceptions = java.io.FileNotFoundException.class) public void testInitSSLKeyStoreOnly() throws CertificateException, NoSuchAlgorithmException, OperatorCreationException, NoSuchProviderException, IOException, KeyStoreException { Properties consumerProps = new Properties(); setKeyStoreProps(consumerProps); // should not throw any exceptions KafkaSSLUtils.initSSL(consumerProps); // Validate that no certificates are installed validateTrustStoreCertificateCount(0); }
/**
 * Rounds a BIGINT value. Rounding an integral value is the identity operation,
 * so the input is returned unchanged (boxed to the UDF's Long return type).
 */
@Udf
public Long round(@UdfParameter final long val) {
    return val;
}
// Exhaustive round(double, decimalPlaces) cases: half-up rounding at 0..3 decimal
// places and negative scales (-1..-5) rounding to tens through hundred-thousands.
@Test public void shouldRoundDoubleWithDecimalPlacesPositive() { assertThat(udf.round(0d, 0), is(0d)); assertThat(udf.round(1.0d, 0), is(1.0d)); assertThat(udf.round(1.1d, 0), is(1.0d)); assertThat(udf.round(1.5d, 0), is(2.0d)); assertThat(udf.round(1.75d, 0), is(2.0d)); assertThat(udf.round(100.1d, 0), is(100.0d)); assertThat(udf.round(100.5d, 0), is(101.0d)); assertThat(udf.round(100.75d, 0), is(101.0d)); assertThat(udf.round(100.10d, 1), is(100.1d)); assertThat(udf.round(100.11d, 1), is(100.1d)); assertThat(udf.round(100.15d, 1), is(100.2d)); assertThat(udf.round(100.17d, 1), is(100.2d)); assertThat(udf.round(100.110d, 2), is(100.11d)); assertThat(udf.round(100.111d, 2), is(100.11d)); assertThat(udf.round(100.115d, 2), is(100.12d)); assertThat(udf.round(100.117d, 2), is(100.12d)); assertThat(udf.round(100.1110d, 3), is(100.111d)); assertThat(udf.round(100.1111d, 3), is(100.111d)); assertThat(udf.round(100.1115d, 3), is(100.112d)); assertThat(udf.round(100.1117d, 3), is(100.112d)); assertThat(udf.round(12345.67d, -1), is(12350d)); assertThat(udf.round(12345.67d, -2), is(12300d)); assertThat(udf.round(12345.67d, -3), is(12000d)); assertThat(udf.round(12345.67d, -4), is(10000d)); assertThat(udf.round(12345.67d, -5), is(0d)); }
// Inspects a jar for license compliance by mounting it as a zip filesystem: requires a
// valid META-INF/NOTICE and META-INF/LICENSE, counts license files found outside
// META-INF, and counts files carrying incompatible licenses. Empty test jars are
// exempt. Returns the total number of severe issues found.
@VisibleForTesting static int checkJar(Path file) throws Exception { final URI uri = file.toUri(); int numSevereIssues = 0; try (final FileSystem fileSystem = FileSystems.newFileSystem( new URI("jar:file", uri.getHost(), uri.getPath(), uri.getFragment()), Collections.emptyMap())) { if (isTestJarAndEmpty(file, fileSystem.getPath("/"))) { return 0; } if (!noticeFileExistsAndIsValid(fileSystem.getPath("META-INF", "NOTICE"), file)) { numSevereIssues++; } if (!licenseFileExistsAndIsValid(fileSystem.getPath("META-INF", "LICENSE"), file)) { numSevereIssues++; } numSevereIssues += getNumLicenseFilesOutsideMetaInfDirectory(file, fileSystem.getPath("/")); numSevereIssues += getFilesWithIncompatibleLicenses(file, fileSystem.getPath("/")); } return numSevereIssues; }
// The LGPL license name split across lines with comment markers and leading whitespace
// must still be detected as exactly one severe issue.
@Test void testForbiddenLGPMultiLineLongTextWithCommentAndLeadingWhitespaceDetected( @TempDir Path tempDir) throws Exception { assertThat( JarFileChecker.checkJar( createJar( tempDir, Entry.fileEntry(VALID_NOTICE_CONTENTS, VALID_NOTICE_PATH), Entry.fileEntry(VALID_LICENSE_CONTENTS, VALID_LICENSE_PATH), Entry.fileEntry( "some GNU Lesser General public \n\t\t//#License text", Collections.singletonList("some_file.txt"))))) .isEqualTo(1); }
@Override public int read() throws IOException { if (mPosition == mLength) { // at end of file return -1; } updateStreamIfNeeded(); int res = mUfsInStream.get().read(); if (res == -1) { return -1; } mPosition++; Metrics.BYTES_READ_FROM_UFS.inc(1); return res; }
// A freshly created single-byte UFS file must yield its first byte (0) from read().
@Test public void singleByteRead() throws IOException, AlluxioException { AlluxioURI ufsPath = getUfsPath(); createFile(ufsPath, 1); try (FileInStream inStream = getStream(ufsPath)) { assertEquals(0, inStream.read()); } }
/**
 * Sets whether the config center availability should be checked on startup.
 *
 * @param check true to enable the startup check
 * @return this builder, for call chaining
 */
public ConfigCenterBuilder check(Boolean check) {
    this.check = check;
    return getThis();
}
// check(true) on the builder must be reflected by isCheck() on the built config.
@Test void check() { ConfigCenterBuilder builder = ConfigCenterBuilder.newBuilder(); builder.check(true); Assertions.assertTrue(builder.build().isCheck()); }
/**
 * Converts a Presto-side Iceberg partition spec into a native Iceberg
 * {@code PartitionSpec} by re-parsing its fields against the converted schema,
 * preserving the original spec id.
 */
public static PartitionSpec toIcebergPartitionSpec(PrestoIcebergPartitionSpec spec)
{
    return parsePartitionFields(
            toIcebergSchema(spec.getSchema()),
            spec.getFields(),
            spec.getSpecId());
}
// Data-driven over all partition transforms: converting the Presto spec must equal the
// Iceberg spec built directly from the same transform/name pair.
@Test(dataProvider = "allTransforms") public void testToIcebergPartitionSpec(String transform, String name) { // Create a test TypeManager TypeManager typeManager = createTestFunctionAndTypeManager(); // Create a mock PartitionSpec PrestoIcebergPartitionSpec prestoPartitionSpec = prestoIcebergPartitionSpec(transform, name, typeManager); PartitionSpec expectedPartitionSpec = partitionSpec(transform, name); // Convert Presto Partition Spec to Iceberg PartitionSpec PartitionSpec partitionSpec = toIcebergPartitionSpec(prestoPartitionSpec); // Check that the result is not null assertNotNull(partitionSpec); assertEquals(partitionSpec, expectedPartitionSpec); }
/**
 * Compares two storage tier names, ordering primarily by tier rank and
 * falling back to lexicographic order when the ranks are equal (e.g. for
 * two unknown tier names sharing the fallback rank).
 *
 * @param a first tier name
 * @param b second tier name
 * @return negative, zero or positive per the {@code Comparator} contract
 */
public static int compareTierNames(String a, String b) {
    int aValue = getTierRankValue(a);
    int bValue = getTierRankValue(b);
    if (aValue == bValue) {
        return a.compareTo(b);
    }
    // Integer.compare avoids the overflow risk of the subtraction idiom
    // (aValue - bValue) and states the intent directly.
    return Integer.compare(aValue, bValue);
}
// Known tiers order MEM < SSD < HDD; unknown names ("DOM", "RAM") sort after known
// tiers and lexicographically among themselves.
@Test public void compareTierNames() { Assert.assertTrue("MEM should be placed before SSD", FileSystemAdminShellUtils.compareTierNames(Constants.MEDIUM_MEM, Constants.MEDIUM_SSD) < 0); Assert.assertTrue("MEM should be placed before HDD", FileSystemAdminShellUtils.compareTierNames(Constants.MEDIUM_MEM, Constants.MEDIUM_HDD) < 0); Assert.assertTrue("HDD should be placed after SSD", FileSystemAdminShellUtils.compareTierNames(Constants.MEDIUM_HDD, Constants.MEDIUM_SSD) > 0); Assert.assertTrue("HDD should be placed before DOM", FileSystemAdminShellUtils.compareTierNames("DOM", Constants.MEDIUM_HDD) > 0); Assert.assertTrue("RAM should be placed after DOM", FileSystemAdminShellUtils.compareTierNames("RAM", "DOM") > 0); }
@Override
public void reviewPolicy(ApplicationId appId) {
    // Only applications known to the admin service can be reviewed.
    Application app = applicationAdminService.getApplication(appId);
    if (app == null) {
        log.warn("Unknown Application");
        return;
    }
    // Promote INSTALLED apps to REVIEWED with an empty permission set;
    // any other state is left untouched.
    states.computeIfPresent(appId, (id, info) ->
            info.getState().equals(INSTALLED)
                    ? new SecurityInfo(ImmutableSet.of(), REVIEWED)
                    : info);
}
// A SECURED app transitions to REVIEWED via the same computeIfPresent update that
// reviewPolicy applies to INSTALLED apps.
@Test public void testReviewPolicy() { assertEquals(SECURED, states.get(appId).getState()); states.computeIfPresent(appId, (applicationId, securityInfo) -> { if (securityInfo.getState().equals(SECURED)) { return new SecurityInfo(ImmutableSet.of(), REVIEWED); } return securityInfo; }); assertEquals(REVIEWED, states.get(appId).getState()); }
/**
 * Adds a state store to the topology and connects it to the given processors.
 * Synchronized like the other topology mutators — presumably to serialize
 * concurrent topology construction; confirm against the class contract.
 *
 * @return this topology, for call chaining
 */
public synchronized Topology addStateStore(final StoreBuilder<?> storeBuilder, final String... processorNames) {
    internalTopologyBuilder.addStateStore(storeBuilder, processorNames);
    return this;
}
// Connecting a state store to a processor name that does not exist must raise
// TopologyException.
@Test public void shouldNotAllowToAddStateStoreToNonExistingProcessor() { mockStoreBuilder(); assertThrows(TopologyException.class, () -> topology.addStateStore(storeBuilder, "no-such-processor")); }
/**
 * Returns the maximum capacity this buffer is allowed to grow to.
 */
@Override
public int maxCapacity() {
    return maxCapacity;
}
// Shrinking capacity below the current value must take effect while maxCapacity stays
// fixed; the buffer is released even if assertions fail.
@Test public void testCapacityDecrease() { ByteBuf buffer = newBuffer(3, 13); assertEquals(13, buffer.maxCapacity()); assertEquals(3, buffer.capacity()); try { buffer.capacity(2); assertEquals(2, buffer.capacity()); assertEquals(13, buffer.maxCapacity()); } finally { buffer.release(); } }
/**
 * For each element of {@code iterable}, derives a key via {@code keyGenerate},
 * looks the key up in {@code map}, and — only when the mapped value is
 * non-null — applies {@code biConsumer} to the element and the value.
 *
 * @param iterable    elements to update
 * @param map         lookup table of values keyed by generated keys
 * @param keyGenerate derives the lookup key from an element
 * @param biConsumer  applies a found value to its element
 */
public static <E, K, V> void setValueByMap(Iterable<E> iterable, Map<K, V> map, Function<E, K> keyGenerate, BiConsumer<E, V> biConsumer) {
    for (E element : iterable) {
        V value = map.get(keyGenerate.apply(element));
        if (value != null) {
            biConsumer.accept(element, value);
        }
    }
}
// Exercises setValueByMap with a value map (null-mapped ids are skipped, so person 1
// keeps its gender) and with an object map applying several setters per match.
@Test public void setValueByMapTest() { // https://gitee.com/dromara/hutool/pulls/482 final List<Person> people = Arrays.asList( new Person("aa", 12, "man", 1), new Person("bb", 13, "woman", 2), new Person("cc", 14, "man", 3), new Person("dd", 15, "woman", 4), new Person("ee", 16, "woman", 5), new Person("ff", 17, "man", 6) ); final Map<Integer, String> genderMap = new HashMap<>(); genderMap.put(1, null); genderMap.put(2, "妇女"); genderMap.put(3, "少女"); genderMap.put(4, "女"); genderMap.put(5, "小孩"); genderMap.put(6, "男"); assertEquals(people.get(1).getGender(), "woman"); CollUtil.setValueByMap(people, genderMap, Person::getId, Person::setGender); assertEquals(people.get(1).getGender(), "妇女"); final Map<Integer, Person> personMap = new HashMap<>(); personMap.put(1, new Person("AA", 21, "男", 1)); personMap.put(2, new Person("BB", 7, "小孩", 2)); personMap.put(3, new Person("CC", 65, "老人", 3)); personMap.put(4, new Person("DD", 35, "女人", 4)); personMap.put(5, new Person("EE", 14, "少女", 5)); personMap.put(6, null); CollUtil.setValueByMap(people, personMap, Person::getId, (x, y) -> { x.setGender(y.getGender()); x.setName(y.getName()); x.setAge(y.getAge()); }); assertEquals(people.get(1).getGender(), "小孩"); }
/**
 * Renders this BGP route entry for logging/diagnostics: prefix, next hop, the
 * peer's BGP id, and the path attributes (origin, AS path, LOCAL_PREF, MED).
 * The exact format is pinned by unit tests, so keep the field order stable.
 */
@Override
public String toString() {
    return MoreObjects.toStringHelper(getClass())
            .add("prefix", prefix())
            .add("nextHop", nextHop())
            .add("bgpId", bgpSession.remoteInfo().bgpId())
            .add("origin", BgpConstants.Update.Origin.typeToString(origin))
            .add("asPath", asPath)
            .add("localPref", localPref)
            .add("multiExitDisc", multiExitDisc)
            .toString();
}
// toString() must produce the documented fixed format for a fully populated entry.
@Test public void testToString() { BgpRouteEntry bgpRouteEntry = generateBgpRouteEntry(); String expectedString = "BgpRouteEntry{prefix=1.2.3.0/24, nextHop=5.6.7.8, " + "bgpId=10.0.0.1, origin=IGP, asPath=AsPath{pathSegments=" + "[PathSegment{type=AS_SEQUENCE, segmentAsNumbers=[1, 2, 3]}, " + "PathSegment{type=AS_SET, segmentAsNumbers=[4, 5, 6]}]}, " + "localPref=100, multiExitDisc=20}"; assertThat(bgpRouteEntry.toString(), is(expectedString)); }
/**
 * JSON creator for {@code WindowInfo}. {@code type} is mandatory; {@code size}
 * (window duration) and {@code emitStrategy} are optional. Validation (e.g.
 * rejecting a missing size for window types that require one) is performed by
 * the constructor.
 */
@JsonCreator
public static WindowInfo of(
        @JsonProperty(value = "type", required = true) final WindowType type,
        @JsonProperty(value = "size") final Optional<Duration> size,
        @JsonProperty(value = "emitStrategy") final Optional<OutputRefinement> emitStrategy) {
    return new WindowInfo(type, size, emitStrategy);
}
// A TUMBLING window without a size must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class) public void shouldThrowIfSizeRequiredButNotProvided() { WindowInfo.of(TUMBLING, Optional.empty(), Optional.empty()); }
// Loads all keys from the backing SQL table, unless key loading is disabled via
// properties (then an empty list is returned). Waits for initialization before
// querying. The returned Iterable's iterator closes the underlying SqlResult when
// iteration finishes, honoring the loadAllKeys() contract for Closeable iterators.
@Override public Iterable<K> loadAllKeys() { // If loadAllKeys property is disabled, don't load anything if (!genericMapStoreProperties.loadAllKeys) { return Collections.emptyList(); } awaitSuccessfulInit(); String sql = queries.loadAllKeys(); SqlResult keysResult = sqlService.execute(sql); // The contract for loadAllKeys says that if iterator implements Closable // then it will be closed when the iteration is over return () -> new MappingClosingIterator<>( keysResult.iterator(), (SqlRow row) -> row.getObject(genericMapStoreProperties.idColumn), keysResult::close ); }
// loadAllKeys() over an empty backing table must yield an empty Iterable.
@Test public void givenNoRows_whenLoadAllKeys_thenEmptyIterable() { objectProvider.createObject(mapName, false); mapLoader = createMapLoader(); Iterable<Integer> ids = mapLoader.loadAllKeys(); assertThat(ids).isEmpty(); }
/**
 * Builds a MongoDB query from a free-text search query and a list of filter
 * expressions, surfacing parser complaints as HTTP 400 instead of a server error.
 *
 * @throws BadRequestException when either parser rejects its input
 */
public Bson createDbQuery(final List<String> filters, final String query) {
    try {
        // Parse the search query first, then the filter expressions, and combine.
        return buildDbQuery(searchQueryParser.parse(query), dbFilterParser.parse(filters, attributes));
    } catch (IllegalArgumentException e) {
        throw new BadRequestException("Invalid argument in search query: " + e.getMessage());
    }
}
// An empty query string with no filters must yield the canonical EMPTY_QUERY instance
// (identity-compared with assertSame).
@Test void returnsEmptyQueryOnEmptySearchQueryAndNoFilterExpressions() { doReturn(new SearchQuery("")).when(searchQueryParser).parse(eq("")); doReturn(List.of()).when(dbFilterParser).parse(List.of(), attributes); final Bson dbQuery = toTest.createDbQuery(List.of(), ""); assertSame(DbQueryCreator.EMPTY_QUERY, dbQuery); }
/**
 * Escapes a single character for use in a regular expression: characters in
 * {@code RE_KEYS} (regex metacharacters) are prefixed with a backslash, any
 * other character is returned as-is.
 */
public static String escape(char c) {
    if (RE_KEYS.contains(c)) {
        return "\\" + c;
    }
    return String.valueOf(c);
}
// The string-level escape overload must backslash-escape regex metacharacters while
// leaving other characters (including CJK text) untouched.
@Test public void escapeTest() { //转义给定字符串,为正则相关的特殊符号转义 final String escape = ReUtil.escape("我有个$符号{}"); assertEquals("我有个\\$符号\\{\\}", escape); }
// Request filter that installs the Shiro security context: unbinds any subject bound to
// the current thread, resolves the effective remote host (honoring trusted proxies),
// and, when a Basic Authorization header is present, decodes user:password credentials
// (rejecting malformed ones with 400 BadRequestException); otherwise an unauthenticated
// context is created. The matched resource classes are passed along for scoping.
@Override public void filter(ContainerRequestContext requestContext) throws IOException { ThreadContext.unbindSubject(); final boolean secure = requestContext.getSecurityContext().isSecure(); final MultivaluedMap<String, String> headers = requestContext.getHeaders(); final Map<String, Cookie> cookies = requestContext.getCookies(); final Request grizzlyRequest = grizzlyRequestProvider.get(); final String host = RestTools.getRemoteAddrFromRequest(grizzlyRequest, trustedProxies); final String authHeader = headers.getFirst(HttpHeaders.AUTHORIZATION); final Set<Class<?>> matchedResources = requestContext.getUriInfo().getMatchedResources().stream() .map(Object::getClass).collect(Collectors.toSet()); final SecurityContext securityContext; if (authHeader != null && authHeader.startsWith("Basic")) { final String base64UserPass = authHeader.substring(authHeader.indexOf(' ') + 1); final String userPass = decodeBase64(base64UserPass); final String[] split = userPass.split(":", 2); if (split.length != 2) { throw new BadRequestException("Invalid credentials in Authorization header"); } securityContext = createSecurityContext(split[0], split[1], secure, SecurityContext.BASIC_AUTH, host, grizzlyRequest.getRemoteAddr(), headers, cookies, matchedResources); } else { securityContext = createSecurityContext(null, null, secure, null, host, grizzlyRequest.getRemoteAddr(), headers, cookies, matchedResources); } requestContext.setSecurityContext(securityContext); }
// A Basic Authorization header whose password part is the literal "token" must yield a
// ShiroSecurityContext using BASIC_AUTH and carrying an AccessTokenAuthToken.
@Test public void filterWithBasicAuthAndTokenShouldCreateShiroSecurityContextWithAccessTokenAuthToken() throws Exception { final MultivaluedHashMap<String, String> headers = new MultivaluedHashMap<>(); final String credentials = Base64.getEncoder().encodeToString("test:token".getBytes(StandardCharsets.US_ASCII)); headers.putSingle(HttpHeaders.AUTHORIZATION, "Basic " + credentials); when(requestContext.getHeaders()).thenReturn(headers); filter.filter(requestContext); final ArgumentCaptor<ShiroSecurityContext> argument = ArgumentCaptor.forClass(ShiroSecurityContext.class); verify(requestContext).setSecurityContext(argument.capture()); final ShiroSecurityContext securityContext = argument.getValue(); assertThat(securityContext).isExactlyInstanceOf(ShiroSecurityContext.class); assertThat(securityContext.getAuthenticationScheme()).isEqualTo(SecurityContext.BASIC_AUTH); assertThat(securityContext.getToken()).isExactlyInstanceOf(AccessTokenAuthToken.class); }
/**
 * Creates a new builder for the given component type.
 */
public static Builder builder(Type type) {
    return new Builder(type);
}
// Passing null to setUuid on a FILE builder must raise NullPointerException.
@Test public void set_key_throws_NPE_if_component_arg_is_Null() { assertThatThrownBy(() -> builder(FILE).setUuid(null)) .isInstanceOf(NullPointerException.class); }
@Override public void stopApplication(ApplicationTerminationContext context) { ApplicationId appId = context.getApplicationId(); JobID jobId = new JobID(Long.toString(appId.getClusterTimestamp()), appId.getId()); try { removeJobShuffleInfo(jobId); } catch (IOException e) { LOG.error("Error during stopApp", e); // TODO add API to AuxiliaryServices to report failures } }
// Shuffle-authorization recovery: a registered app's secret survives a handler restart
// when a recovery path is set, is revoked after stopApplication, and stays revoked
// across a further restart. Cleans up the handler and temp dir in all cases.
@Test public void testRecovery() throws IOException { final File tmpDir = new File(System.getProperty("test.build.data", System.getProperty("java.io.tmpdir")), TestShuffleHandler.class.getName()); ShuffleHandlerMock shuffle = new ShuffleHandlerMock(); Configuration conf = new Configuration(); conf.setInt(ShuffleHandler.MAX_SHUFFLE_CONNECTIONS, 3); // emulate aux services startup with recovery enabled shuffle.setRecoveryPath(new Path(tmpDir.toString())); assertTrue(tmpDir.mkdirs()); try { shuffle.init(conf); shuffle.start(); final String port = shuffle.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY); final SecretKey secretKey = shuffle.addTestApp(TEST_USER); // verify we are authorized to shuffle int rc = getShuffleResponseCode(port, secretKey); assertEquals(HttpURLConnection.HTTP_OK, rc); // emulate shuffle handler restart shuffle.close(); shuffle = new ShuffleHandlerMock(); shuffle.setRecoveryPath(new Path(tmpDir.toString())); shuffle.init(conf); shuffle.start(); // verify we are still authorized to shuffle to the old application rc = getShuffleResponseCode(port, secretKey); assertEquals(HttpURLConnection.HTTP_OK, rc); // shutdown app and verify access is lost shuffle.stopApplication(new ApplicationTerminationContext(TEST_APP_ID)); rc = getShuffleResponseCode(port, secretKey); assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, rc); // emulate shuffle handler restart shuffle.close(); shuffle = new ShuffleHandlerMock(); shuffle.setRecoveryPath(new Path(tmpDir.toString())); shuffle.init(conf); shuffle.start(); // verify we still don't have access rc = getShuffleResponseCode(port, secretKey); assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, rc); } finally { shuffle.close(); FileUtil.fullyDelete(tmpDir); } }
/**
 * Returns the view of entries currently held in this map's local cache.
 */
@Override
public Map<K, V> getCachedMap() {
    return localCacheView.getCachedMap();
}
// Two local cached maps over the same Redis map (String codec, LFU eviction): after both
// caches hold two entries, an update issued from each side must invalidate the stale
// entry in the other's local cache, leaving one entry per cache.
@Test public void testInvalidationOnUpdateNonBinaryCodec() throws InterruptedException { LocalCachedMapOptions<String, String> options = LocalCachedMapOptions.<String, String>name("test") .codec(StringCodec.INSTANCE) .evictionPolicy(EvictionPolicy.LFU).cacheSize(5); RLocalCachedMap<String, String> map1 = redisson.getLocalCachedMap(options); Map<String, String> cache1 = map1.getCachedMap(); RLocalCachedMap<String, String> map2 = redisson.getLocalCachedMap(options); Map<String, String> cache2 = map2.getCachedMap(); map1.put("1", "1"); map1.put("2", "2"); assertThat(map2.get("1")).isEqualTo("1"); assertThat(map2.get("2")).isEqualTo("2"); assertThat(cache1.size()).isEqualTo(2); assertThat(cache2.size()).isEqualTo(2); map1.put("1", "3"); map2.put("2", "4"); Thread.sleep(50); assertThat(cache1.size()).isEqualTo(1); assertThat(cache2.size()).isEqualTo(1); }
/**
 * Builds the email for a "changes on my issues" notification.
 * Returns {@code null} for any other notification type (this formatter does not handle it).
 */
@Override
@CheckForNull
public EmailMessage format(Notification notif) {
    // Only notifications about changes on the current user's issues are handled here.
    if (!(notif instanceof ChangesOnMyIssuesNotification)) {
        return null;
    }
    ChangesOnMyIssuesNotification changesNotification = (ChangesOnMyIssuesNotification) notif;
    boolean triggeredByAnalysis = changesNotification.getChange() instanceof AnalysisChange;
    if (!triggeredByAnalysis) {
        // User-triggered change: issues may span several projects.
        return formatMultiProject(changesNotification);
    }
    // Analysis-triggered change: all issues belong to a single project, so any one issue
    // is enough to identify it.
    checkState(!changesNotification.getChangedIssues().isEmpty(), "changedIssues can't be empty");
    return formatAnalysisNotification(changesNotification.getChangedIssues().keySet().iterator().next(), changesNotification);
}
// Test: verifies the HTML email layout for a user-triggered change touching many issues and
// hotspots across several projects/branches — issues are grouped by rule, chunked into links of
// at most 40, and issue vs. hotspot links use their respective URL paths.
@Test public void formats_returns_html_message_with_multiple_links_by_rule_of_groups_of_up_to_40_issues_and_hotspots_when_user_change() { Project project1 = newProject("1"); Project project2 = newProject("V"); Project project2Branch = newBranch("V", "AB"); Rule rule1 = newRule("1", randomRuleTypeHotspotExcluded()); Rule rule2 = newRule("a", randomRuleTypeHotspotExcluded()); Rule hotspot1 = newSecurityHotspotRule("h1"); Rule hotspot2 = newSecurityHotspotRule("h2"); String status = randomValidStatus(); String host = randomAlphabetic(15); List<ChangedIssue> changedIssues = Stream.of( IntStream.range(0, 39).mapToObj(i -> newChangedIssue("39_" + i, status, project1, rule1)), IntStream.range(0, 40).mapToObj(i -> newChangedIssue("40_" + i, status, project1, rule2)), IntStream.range(0, 81).mapToObj(i -> newChangedIssue("1-40_41-80_1_" + i, status, project2, rule2)), IntStream.range(0, 6).mapToObj(i -> newChangedIssue("6_" + i, status, project2Branch, rule1)), IntStream.range(0, 39).mapToObj(i -> newChangedIssue("39_" + i, STATUS_REVIEWED, project1, hotspot1)), IntStream.range(0, 40).mapToObj(i -> newChangedIssue("40_" + i, STATUS_REVIEWED, project1, hotspot2)), IntStream.range(0, 81).mapToObj(i -> newChangedIssue("1-40_41-80_1_" + i, STATUS_TO_REVIEW, project2, hotspot2)), IntStream.range(0, 6).mapToObj(i -> newChangedIssue("6_" + i, STATUS_TO_REVIEW, project2Branch, hotspot1))) .flatMap(t -> t) .collect(toList()); Collections.shuffle(changedIssues); UserChange userChange = newUserChange(); when(emailSettings.getServerBaseURL()).thenReturn(host); EmailMessage emailMessage = underTest.format(new ChangesOnMyIssuesNotification(userChange, ImmutableSet.copyOf(changedIssues))); HtmlFragmentAssert.assertThat(emailMessage.getMessage()) .hasParagraph().hasParagraph() // skip header .hasParagraph(project1.getProjectName()) .hasList() .withItemTexts( "Rule " + rule1.getName() + " - See all 39 issues", "Rule " + rule2.getName() + " - See all 40 issues") .withLink("See all 39 issues", 
host + "/project/issues?id=" + project1.getKey() + "&issues=" + IntStream.range(0, 39).mapToObj(i -> "39_" + i).sorted().collect(joining("%2C"))) .withLink("See all 40 issues", host + "/project/issues?id=" + project1.getKey() + "&issues=" + IntStream.range(0, 40).mapToObj(i -> "40_" + i).sorted().collect(joining("%2C"))) .hasEmptyParagraph() .hasList() .withItemTexts( "Rule " + hotspot1.getName() + " - See all 39 hotspots", "Rule " + hotspot2.getName() + " - See all 40 hotspots") .withLink("See all 39 hotspots", host + "/security_hotspots?id=" + project1.getKey() + "&hotspots=" + IntStream.range(0, 39).mapToObj(i -> "39_" + i).sorted().collect(joining("%2C"))) .withLink("See all 40 hotspots", host + "/security_hotspots?id=" + project1.getKey() + "&hotspots=" + IntStream.range(0, 40).mapToObj(i -> "40_" + i).sorted().collect(joining("%2C"))) .hasParagraph(project2.getProjectName()) .hasList( "Rule " + rule2.getName() + " - See issues 1-40 41-80 81") .withLink("1-40", host + "/project/issues?id=" + project2.getKey() + "&issues=" + IntStream.range(0, 81).mapToObj(i -> "1-40_41-80_1_" + i).sorted().limit(40).collect(joining("%2C"))) .withLink("41-80", host + "/project/issues?id=" + project2.getKey() + "&issues=" + IntStream.range(0, 81).mapToObj(i -> "1-40_41-80_1_" + i).sorted().skip(40).limit(40).collect(joining("%2C"))) .withLink("81", host + "/project/issues?id=" + project2.getKey() + "&issues=" + "1-40_41-80_1_9" + "&open=" + "1-40_41-80_1_9") .hasEmptyParagraph() .hasList("Rule " + hotspot2.getName() + " - See hotspots 1-40 41-80 81") .withLink("1-40", host + "/security_hotspots?id=" + project2.getKey() + "&hotspots=" + IntStream.range(0, 81).mapToObj(i -> "1-40_41-80_1_" + i).sorted().limit(40).collect(joining("%2C"))) .withLink("41-80", host + "/security_hotspots?id=" + project2.getKey() + "&hotspots=" + IntStream.range(0, 81).mapToObj(i -> "1-40_41-80_1_" + i).sorted().skip(40).limit(40).collect(joining("%2C"))) .withLink("81", host + "/security_hotspots?id=" 
+ project2.getKey() + "&hotspots=" + "1-40_41-80_1_9") .hasParagraph(project2Branch.getProjectName() + ", " + project2Branch.getBranchName().get()) .hasList( "Rule " + rule1.getName() + " - See all 6 issues") .withLink("See all 6 issues", host + "/project/issues?id=" + project2Branch.getKey() + "&branch=" + project2Branch.getBranchName().get() + "&issues=" + IntStream.range(0, 6).mapToObj(i -> "6_" + i).sorted().collect(joining("%2C"))) .hasEmptyParagraph() .hasList("Rule " + hotspot1.getName() + " - See all 6 hotspots") .withLink("See all 6 hotspots", host + "/security_hotspots?id=" + project2Branch.getKey() + "&branch=" + project2Branch.getBranchName().get() + "&hotspots=" + IntStream.range(0, 6).mapToObj(i -> "6_" + i).sorted().collect(joining("%2C"))) .hasParagraph().hasParagraph() // skip footer .noMoreBlock(); }
// Prepares and executes each parsed KSQL statement in order, accumulating the resulting
// entities. Session variables are passed to prepare() only when variable substitution is
// enabled for this session; otherwise an empty map is used.
public KsqlEntityList execute( final KsqlSecurityContext securityContext, final List<ParsedStatement> statements, final SessionProperties sessionProperties ) { final KsqlEntityList entities = new KsqlEntityList(); for (final ParsedStatement parsed : statements) { final PreparedStatement<?> prepared = ksqlEngine.prepare( parsed, (isVariableSubstitutionEnabled(sessionProperties) ? sessionProperties.getSessionVariables() : Collections.emptyMap()) ); executeStatement( securityContext, prepared, sessionProperties, entities ).ifPresent(entities::add); } return entities; }
// Test: when variable substitution is enabled, execute() must forward the session variables
// to ksqlEngine.prepare().
@SuppressFBWarnings("RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT") @Test public void shouldCallPrepareStatementWithSessionVariables() { // Given final StatementExecutor<CreateStream> customExecutor = givenReturningExecutor(CreateStream.class, mock(KsqlEntity.class)); givenRequestHandler(ImmutableMap.of(CreateStream.class, customExecutor)); final Map<String, String> sessionVariables = ImmutableMap.of("a", "1"); when(sessionProperties.getSessionVariables()).thenReturn(sessionVariables); when(ksqlConfig.getBoolean(KsqlConfig.KSQL_VARIABLE_SUBSTITUTION_ENABLE)).thenReturn(true); // When final List<ParsedStatement> statements = KSQL_PARSER.parse(SOME_STREAM_SQL); handler.execute(securityContext, statements, sessionProperties); // Then verify(ksqlEngine).prepare(statements.get(0), sessionVariables); verify(sessionProperties).getSessionVariables(); }
// Convenience overload: resolves the protocol type and storage types from the database
// configuration and properties, then delegates to the full create() factory method.
public static ShardingSphereDatabase create(final String databaseName, final DatabaseConfiguration databaseConfig, final ConfigurationProperties props, final ComputeNodeInstanceContext computeNodeInstanceContext) throws SQLException { return ShardingSphereDatabase.create(databaseName, DatabaseTypeEngine.getProtocolType(databaseConfig, props), DatabaseTypeEngine.getStorageTypes(databaseConfig), databaseConfig, props, computeNodeInstanceContext); }
// Test: database names configured in uppercase are normalized to lowercase keys in the
// resulting database map.
@Test void assertCreateDatabaseMapWhenConfigUppercaseDatabaseName() throws SQLException { DatabaseConfiguration databaseConfig = new DataSourceProvidedDatabaseConfiguration(Collections.emptyMap(), Collections.emptyList()); Map<String, ShardingSphereDatabase> actual = ExternalMetaDataFactory.create( Collections.singletonMap("FOO_DB", databaseConfig), new ConfigurationProperties(new Properties()), mock(ComputeNodeInstanceContext.class)); assertTrue(actual.containsKey("foo_db")); assertTrue(actual.get("foo_db").getResourceMetaData().getStorageUnits().isEmpty()); }
/**
 * Deletes the given collector from the backing collector service, identified by its id.
 */
@Override
public void delete(Collector nativeEntity) {
    collectorService.delete(nativeEntity.id());
}
// Test: deleting a collector via the facade removes exactly one document from the service
// (fixture starts with 3 collectors).
@Test @MongoDBFixtures("SidecarCollectorFacadeTest.json") public void delete() { final Collector collector = collectorService.find("5b4c920b4b900a0024af0001"); assertThat(collectorService.count()).isEqualTo(3L); facade.delete(collector); assertThat(collectorService.count()).isEqualTo(2L); }
/**
 * Returns only the plugin-backed artifact configs from this collection,
 * preserving their iteration order.
 */
public List<PluggableArtifactConfig> getPluggableArtifactConfigs() {
    final List<PluggableArtifactConfig> pluggableConfigs = new ArrayList<>();
    for (final ArtifactTypeConfig typeConfig : this) {
        if (typeConfig instanceof PluggableArtifactConfig) {
            pluggableConfigs.add((PluggableArtifactConfig) typeConfig);
        }
    }
    return pluggableConfigs;
}
// Test: only the pluggable artifact configs are returned; build artifact configs are filtered out.
@Test public void getPluggableArtifactConfigs_shouldReturnPluggableArtifactConfigs() { ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs(); allConfigs.add(new BuildArtifactConfig("src", "dest")); allConfigs.add(new BuildArtifactConfig("java", null)); allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3")); allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker")); final List<PluggableArtifactConfig> artifactConfigs = allConfigs.getPluggableArtifactConfigs(); assertThat(artifactConfigs, hasSize(2)); assertThat(artifactConfigs, containsInAnyOrder( new PluggableArtifactConfig("s3", "cd.go.s3"), new PluggableArtifactConfig("docker", "cd.go.docker") )); }
// Converts text/plain content between charsets and between byte[]/String representations.
// Rejects a null or non-text/plain source media type; when source and destination charsets
// match, only the representation (byte[] vs String) is adjusted; otherwise the bytes are
// re-encoded from the source charset to the destination charset first.
public static Object convertTextToText(Object source, MediaType sourceType, MediaType destinationType) { if (source == null) return null; if (sourceType == null) throw new NullPointerException("MediaType cannot be null!"); if (!sourceType.match(MediaType.TEXT_PLAIN)) throw CONTAINER.invalidMediaType(TEXT_PLAIN_TYPE, sourceType.toString()); boolean asString = destinationType.hasStringType(); Charset sourceCharset = sourceType.getCharset(); Charset destinationCharset = destinationType.getCharset(); if (sourceCharset.equals(destinationCharset)) return convertTextClass(source, destinationType, asString); byte[] byteContent = source instanceof byte[] ? (byte[]) source : source.toString().getBytes(sourceCharset); return convertTextClass(convertCharset(byteContent, sourceCharset, destinationCharset), destinationType, asString); }
// Test: converting ISO-8859-1 text bytes to US-ASCII yields the same content re-encoded
// in the destination charset.
@Test public void textToTextConversion() { String source = "All those moments will be lost in time, like tears in rain."; byte[] sourceAs8859 = source.getBytes(ISO_8859_1); byte[] sourceAsASCII = source.getBytes(US_ASCII); Object result = StandardConversions.convertTextToText(sourceAs8859, TEXT_PLAIN.withCharset(ISO_8859_1), TEXT_PLAIN.withCharset(US_ASCII)); assertArrayEquals(sourceAsASCII, (byte[]) result); }
/**
 * Estimates differential entropy (in bits) from a set of samples using the Vasicek
 * m-spacing estimator, with spacing parameter m ~ sqrt(n) (at least 2).
 *
 * @param samples the observed samples; not modified by this call
 * @return the entropy estimate in bits, or {@code Double.NaN} for an empty input
 */
public static double calculateFromSamplesUsingVasicek(double[] samples) {
    if (samples.length == 0) {
        return Double.NaN;
    }
    // Sort a copy: the original implementation sorted the caller's array in place,
    // a surprising side effect for an estimator that should be read-only.
    double[] sorted = samples.clone();
    Arrays.sort(sorted);
    int n = sorted.length;
    int m = Math.toIntExact(Math.max(Math.round(Math.sqrt(n)), 2));
    double entropy = 0;
    for (int i = 0; i < n; i++) {
        // Clamp the m-spacing neighbors at the array boundaries.
        double sIPlusM = i + m < n ? sorted[i + m] : sorted[n - 1];
        double sIMinusM = i - m > 0 ? sorted[i - m] : sorted[0];
        // aI = 2 only for interior points where both neighbors are real m-spacings.
        double aI = i + m < n && i - m > 0 ? 2 : 1;
        entropy += Math.log(n / (aI * m) * (sIPlusM - sIMinusM));
    }
    // Convert the accumulated natural-log estimate from nats to bits.
    return entropy / n / Math.log(2);
}
// Test: the entropy estimate of 10M uniform(0,1) samples should be close to 0 bits
// (the true differential entropy of U(0,1)), within a 0.02 tolerance.
@Test public void testUniformDistribution() { Random random = new Random(13); double[] samples = new double[10000000]; for (int i = 0; i < samples.length; i++) { samples[i] = random.nextDouble(); } assertEquals(calculateFromSamplesUsingVasicek(samples), 0, 0.02); }
/**
 * Returns the actual (physical) data source names backing this rule.
 */
public Collection<String> getActualDataSourceNames() {
    return actualDataSourceNames;
}
// Test: the actual data source names are derived from the actual-data-nodes expression
// and returned in insertion order.
@Test void assertGetActualDataSourceNames() { ShardingTable actual = new ShardingTable(new ShardingTableRuleConfiguration("LOGIC_TABLE", "ds${0..1}.table_${0..2}"), Arrays.asList("ds0", "ds1"), null); assertThat(actual.getActualDataSourceNames(), is(new LinkedHashSet<>(Arrays.asList("ds0", "ds1")))); }
// Builds a table-sink execution step writing this table into the given topic. The topic's
// key windowing must match this table's key format — changing windowing on write is rejected.
@Override public SchemaKTable<K> into( final KsqlTopic topic, final QueryContext.Stacker contextStacker, final Optional<TimestampColumn> timestampColumn ) { if (!keyFormat.getWindowInfo().equals(topic.getKeyFormat().getWindowInfo())) { throw new IllegalArgumentException("Can't change windowing"); } final TableSink<K> step = ExecutionStepFactory.tableSink( contextStacker, sourceTableStep, Formats.from(topic), topic.getKafkaTopicName(), timestampColumn ); return new SchemaKTable<>( step, resolveSchema(step), keyFormat, ksqlConfig, functionRegistry ); }
// Test: into() must reject a sink topic whose key-format window info differs from the table's.
@Test public void shouldThrowOnIntoIfKeyFormatWindowInfoIsDifferent() { // Given: final SchemaKTable<?> table = buildSchemaKTable(ksqlTable, mockKTable); when(topic.getKeyFormat()).thenReturn(KeyFormat.windowed( keyFormat.getFormatInfo(), SerdeFeatures.of(), WindowInfo.of(WindowType.SESSION, Optional.empty(), Optional.empty()) )); // When: assertThrows( IllegalArgumentException.class, () -> table.into(topic, childContextStacker, Optional.empty()) ); }
// Parses a glue path into a classpath URI. Empty paths and the bare "classpath:" prefix
// (legacy Cucumber Eclipse plugin behavior) map to the root package; dot- or
// backslash-separated package notation is normalized to a resource path first.
public static URI parse(String gluePath) { requireNonNull(gluePath, "gluePath may not be null"); if (gluePath.isEmpty()) { return rootPackageUri(); } // Legacy from the Cucumber Eclipse plugin // Older versions of Cucumber allowed it. if (CLASSPATH_SCHEME_PREFIX.equals(gluePath)) { return rootPackageUri(); } if (nonStandardPathSeparatorInUse(gluePath)) { String standardized = replaceNonStandardPathSeparator(gluePath); return parseAssumeClasspathScheme(standardized); } if (isProbablyPackage(gluePath)) { String path = resourceNameOfPackageName(gluePath); return parseAssumeClasspathScheme(path); } return parseAssumeClasspathScheme(gluePath); }
// Test: a glue path with a non-classpath scheme (e.g. "file:") is rejected with a clear message.
@Test void glue_path_must_have_class_path_scheme() { Executable testMethod = () -> GluePath.parse("file:com/example/app"); IllegalArgumentException actualThrown = assertThrows(IllegalArgumentException.class, testMethod); assertThat("Unexpected exception message", actualThrown.getMessage(), is(equalTo( "The glue path must have a classpath scheme file:com/example/app"))); }
/**
 * Adds the given delta to this value, marks the value as set, and returns
 * {@code this} for call chaining.
 */
public LongValue increment(long increment) {
    this.value += increment;
    this.set = true;
    return this;
}
// Test: chained increments accumulate (35 + 10 = 45) and leave the value marked as set.
@Test public void multiple_calls_to_increment_LongVariationValue_increments_by_the_value_of_the_arg() { LongValue target = new LongValue() .increment(new LongValue().increment(35L)) .increment(new LongValue().increment(10L)); verifySetVariationValue(target, 45L); }
// Percent-encodes each path segment of the input (split on '/') for use in a URI, preserving
// leading/trailing delimiters. URLEncoder produces application/x-www-form-urlencoded output,
// so '+', '*', '%7E' and '%40' are post-fixed to proper URI percent-encoding. Falls back to
// returning the input unchanged if the charset is unsupported (should not happen for UTF-8).
public static String encode(final String input) { try { final StringBuilder b = new StringBuilder(); final StringTokenizer t = new StringTokenizer(input, "/"); if(!t.hasMoreTokens()) { return input; } if(StringUtils.startsWith(input, String.valueOf(Path.DELIMITER))) { b.append(Path.DELIMITER); } while(t.hasMoreTokens()) { b.append(URLEncoder.encode(t.nextToken(), StandardCharsets.UTF_8.name())); if(t.hasMoreTokens()) { b.append(Path.DELIMITER); } } if(StringUtils.endsWith(input, String.valueOf(Path.DELIMITER))) { b.append(Path.DELIMITER); } // Because URLEncoder uses <code>application/x-www-form-urlencoded</code> we have to replace these // for proper URI percented encoding. return StringUtils.replaceEach(b.toString(), new String[]{"+", "*", "%7E", "%40"}, new String[]{"%20", "%2A", "~", "@"}); } catch(UnsupportedEncodingException e) { log.warn(String.format("Failure %s encoding input %s", e, input)); return input; } }
// Test: plain segments pass through; spaces are encoded as %20 (not '+').
@Test public void testEncode() { assertEquals("/p", URIEncoder.encode("/p")); assertEquals("/p%20d", URIEncoder.encode("/p d")); }
// Materializes this scan as a Spark Batch over the planned task groups; the hash code is
// passed along as a scan identifier.
@Override public Batch toBatch() { return new SparkBatch( sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode()); }
// Test: on an unpartitioned table, an IS_NULL (and its negation) predicate over a truncate UDF
// cannot prune anything — all 10 input partitions are still planned.
@Test public void testUnpartitionedIsNull() throws Exception { createUnpartitionedTable(spark, tableName); SparkScanBuilder builder = scanBuilder(); TruncateFunction.TruncateString function = new TruncateFunction.TruncateString(); UserDefinedScalarFunc udf = toUDF(function, expressions(intLit(4), fieldRef("data"))); Predicate predicate = new Predicate("IS_NULL", expressions(udf)); pushFilters(builder, predicate); Batch scan = builder.build().toBatch(); assertThat(scan.planInputPartitions().length).isEqualTo(10); // NOT IsNull builder = scanBuilder(); predicate = new Not(predicate); pushFilters(builder, predicate); scan = builder.build().toBatch(); assertThat(scan.planInputPartitions().length).isEqualTo(10); }
/**
 * Not supported by this adapter; always throws {@link MethodNotAvailableException}.
 */
@Override
@MethodNotAvailable
public void loadAll(boolean replaceExistingValues) {
    throw new MethodNotAvailableException();
}
// Test: the keyed loadAll overload is unavailable on this adapter and must throw.
@Test(expected = MethodNotAvailableException.class) public void testLoadAllWithKeys() { adapterWithLoader.loadAll(Collections.emptySet(), true); }
// Constructs a Beta(alpha, beta) distribution; both shape parameters must be positive.
// Mean, variance and entropy are precomputed from the standard closed-form expressions.
public BetaDistribution(double alpha, double beta) { if (alpha <= 0) { throw new IllegalArgumentException("Invalid alpha: " + alpha); } if (beta <= 0) { throw new IllegalArgumentException("Invalid beta: " + beta); } this.alpha = alpha; this.beta = beta; mean = alpha / (alpha + beta); variance = alpha * beta / ((alpha + beta) * (alpha + beta) * (alpha + beta + 1)); entropy = Math.log(Beta.beta(alpha, beta)) - (alpha - 1) * Gamma.digamma(alpha) - (beta - 1) * Gamma.digamma(beta) + (alpha + beta - 2) * Gamma.digamma(alpha + beta); }
// Test: fitting a Beta distribution to 1000 samples drawn from Beta(3, 2.1) recovers
// parameters close to the estimates expected for the fixed seed.
@Test public void testBetaDistribution() { System.out.println("BetaDistribution"); MathEx.setSeed(19650218); // to get repeatable results. BetaDistribution instance = new BetaDistribution(3, 2.1); double[] data = instance.rand(1000); BetaDistribution est = BetaDistribution.fit(data); assertEquals(3.31, est.alpha(), 1E-2); assertEquals(2.31, est.beta(), 1E-2); }
// Builds the Elasticsearch field-mapping properties for an index: fixed mappings for the
// built-in message fields, then any custom field mappings (except those for fields that may
// not be overridden), and finally FIELD_FULL_MESSAGE unless a custom mapping already covers it.
protected Map<String, Map<String, Object>> fieldProperties(final String analyzer, final CustomFieldMappings customFieldMappings) { final ImmutableMap.Builder<String, Map<String, Object>> builder = ImmutableMap.<String, Map<String, Object>>builder() .put(Message.FIELD_MESSAGE, analyzedString(analyzer, false)) // http://joda-time.sourceforge.net/api-release/org/joda/time/format/DateTimeFormat.html // http://www.elasticsearch.org/guide/reference/mapping/date-format.html .put(Message.FIELD_TIMESTAMP, typeTimeWithMillis()) .put(Message.FIELD_GL2_ACCOUNTED_MESSAGE_SIZE, typeLong()) .put(Message.FIELD_GL2_RECEIVE_TIMESTAMP, typeTimeWithMillis()) .put(Message.FIELD_GL2_PROCESSING_TIMESTAMP, typeTimeWithMillis()) .put(Message.FIELD_GL2_PROCESSING_DURATION_MS, typeInteger()) .put(FIELD_GL2_MESSAGE_ID, notAnalyzedString()) .put(Message.GL2_SECOND_SORT_FIELD, aliasTo(FIELD_GL2_MESSAGE_ID)) .put(Message.FIELD_STREAMS, notAnalyzedString()) // to support wildcard searches in source we need to lowercase the content (wildcard search lowercases search term) .put(Message.FIELD_SOURCE, analyzedString("analyzer_keyword", true)); if (customFieldMappings != null) { customFieldMappings.stream() .filter(customMapping -> !FIELDS_UNCHANGEABLE_BY_CUSTOM_MAPPINGS.contains(customMapping.fieldName())) //someone might have hardcoded reserved field mapping on MongoDB level, bypassing checks .forEach(customMapping -> builder.put(customMapping.fieldName(), type(customMapping.toPhysicalType()))); } //those FIELD_FULL_MESSAGE field have not been yet made reserved, so it can be added to ImmutableMap only if they do not exist in Custom Mapping if (customFieldMappings == null || !customFieldMappings.containsCustomMappingForField(Message.FIELD_FULL_MESSAGE)) { builder.put(Message.FIELD_FULL_MESSAGE, analyzedString(analyzer, false)); } return builder.build(); }
// Test: a custom mapping for a non-reserved field ("sampleField" -> geo-point) is honored
// in the generated index mapping.
@Test void allowsOverridingNonBlacklistedFieldsWithCustomMapping() { IndexMapping indexMapping = new IndexMapping7(); final Map<String, Map<String, Object>> fieldProperties = indexMapping.fieldProperties("english", new CustomFieldMappings(List.of(new CustomFieldMapping("sampleField", "geo-point")))); final Map<String, Object> forSampleField = fieldProperties.get("sampleField"); assertEquals("geo_point", forSampleField.get("type")); }
/**
 * Updates the creation and last-modification timestamps of the remote file via the EUE
 * resource PATCH API. API failures are mapped to a {@link BackgroundException}.
 */
@Override
public void setTimestamp(final Path file, final TransferStatus status) throws BackgroundException {
    try {
        final String resourceId = fileid.getFileId(file);
        final ResourceUpdateModel resourceUpdateModel = new ResourceUpdateModel();
        final ResourceUpdateModelUpdate resourceUpdateModelUpdate = new ResourceUpdateModelUpdate();
        final UiWin32 uiWin32 = new UiWin32();
        // The previous `null != x ? x : null` ternaries were no-ops: both branches yield the
        // same value, so pass the (possibly null) timestamps through directly.
        uiWin32.setCreationMillis(status.getCreated());
        uiWin32.setLastModificationMillis(status.getModified());
        resourceUpdateModelUpdate.setUiwin32(uiWin32);
        resourceUpdateModel.setUpdate(resourceUpdateModelUpdate);
        new UpdateResourceApi(new EueApiClient(session)).resourceResourceIdPatch(resourceId, resourceUpdateModel, null, null, null);
    }
    catch(ApiException e) {
        throw new EueExceptionMappingService().map("Failure to write attributes of {0}", e, file);
    }
}
// Test: setting a file's timestamp to a past date is reflected by the attributes finders,
// changes the ETag, and leaves the checksum untouched; cleans up the test container afterwards.
@Test public void testSetTimestampFile() throws Exception { final EueResourceIdProvider fileid = new EueResourceIdProvider(session); final Path container = new EueDirectoryFeature(session, fileid).mkdir(new Path( new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final Path file = new EueTouchFeature(session, fileid) .touch(new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus().withLength(0L)); final PathAttributes attr = new EueAttributesFinderFeature(session, fileid).find(file); assertNotEquals(PathAttributes.EMPTY, attr); assertNotNull(attr.getETag()); final long modified = Instant.now().minusSeconds(5 * 24 * 60 * 60).getEpochSecond() * 1000; new EueTimestampFeature(session, fileid).setTimestamp(file, modified); final PathAttributes updated = new EueAttributesFinderFeature(session, fileid).find(file); assertEquals(modified, updated.getModificationDate()); assertNotEquals(attr.getETag(), updated.getETag()); assertEquals(attr.getChecksum(), updated.getChecksum()); assertEquals(modified, new DefaultAttributesFinderFeature(session).find(file).getModificationDate()); new EueDeleteFeature(session, fileid).delete(Collections.singletonList(container), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Maps a TPCH schema name to its scale factor: the "tiny" schema has a fixed factor,
 * while other schemas are expected to look like "sf&lt;number&gt;" (e.g. "sf1", "sf0.01").
 *
 * @return the scale factor, or -1 when the name is not a recognized schema
 */
public static double schemaNameToScaleFactor(String schemaName) {
    if (TINY_SCHEMA_NAME.equals(schemaName)) {
        return TINY_SCALE_FACTOR;
    }
    if (!schemaName.startsWith("sf")) {
        return -1;
    }
    try {
        return Double.parseDouble(schemaName.substring(2));
    }
    // Narrowed from the original broad `catch (Exception)`: parseDouble on a non-null
    // string can only fail with NumberFormatException, and a broad catch could hide
    // unrelated programming errors.
    catch (NumberFormatException ignored) {
        return -1;
    }
}
// Test: ORDERS row-count estimates under various ORDER_STATUS constraints scale linearly
// with each schema's scale factor; impossible constraints yield zero rows.
@Test public void testTableStatsWithConstraints() { SUPPORTED_SCHEMAS.forEach(schema -> { double scaleFactor = TpchMetadata.schemaNameToScaleFactor(schema); testTableStats(schema, ORDERS, alwaysFalse(), 0); testTableStats(schema, ORDERS, constraint(ORDER_STATUS, "NO SUCH STATUS"), 0); testTableStats(schema, ORDERS, constraint(ORDER_STATUS, "F"), 730_400 * scaleFactor); testTableStats(schema, ORDERS, constraint(ORDER_STATUS, "O"), 733_300 * scaleFactor); testTableStats(schema, ORDERS, constraint(ORDER_STATUS, "F", "NO SUCH STATUS"), 730_400 * scaleFactor); testTableStats(schema, ORDERS, constraint(ORDER_STATUS, "F", "O", "P"), 1_500_000 * scaleFactor); }); }
// Instantiates an object from a configuration value: a String is treated as a class name
// with a no-arg constructor; a Map is {className -> params}, where Map params use setter
// injection (constructByNamedParams) and List params select a parameterized constructor.
// NOTE(review): returns null for unrecognized shapes (e.g. empty map, non-Map/non-List
// param values) rather than throwing — callers appear to rely on this; confirm before changing.
public static Object construct(Object something) throws Exception { if (something instanceof String) { return Class.forName((String)something).getConstructor().newInstance(); } else if (something instanceof Map) { // keys are the class name, values are the parameters. for (Map.Entry<String, Object> entry : ((Map<String, Object>) something).entrySet()) { if (entry.getValue() instanceof Map) { return constructByNamedParams(Class.forName(entry.getKey()), (Map)entry.getValue()); } else if (entry.getValue() instanceof List) { return constructByParameterizedConstructor(Class.forName(entry.getKey()), (List)entry.getValue()); } } } return null; }
// Test: constructing via a {className -> namedParams} map injects the named parameter
// ("name") into the created instance.
@Test public void classWithNamedParams_constructed_setsParams() throws Exception { Map<String, Object> params = new HashMap<>(); String testName = "Nick"; params.put("name", testName); Map<String, Object> constructMap = new HashMap<>(); constructMap.put("com.networknt.service.GImpl", params); GImpl object = (GImpl)ServiceUtil.construct(constructMap); Assert.assertEquals(testName, object.getName()); }
/**
 * Decides whether a metric passes the URN allow-list filter.
 * A {@code null} allow-list means "no filtering" — everything matches. Only URN-based
 * (MonitoringInfo) metric names can match an explicit allow-list; plain user metrics never do.
 */
@VisibleForTesting
static boolean matchMetric(MetricName metricName, @Nullable Set<String> allowedMetricUrns) {
    if (allowedMetricUrns == null) {
        return true;
    }
    if (!(metricName instanceof MonitoringInfoMetricName)) {
        return false;
    }
    String urn = ((MonitoringInfoMetricName) metricName).getUrn();
    return allowedMetricUrns.contains(urn);
}
// Test: allow-listed MonitoringInfo URNs match regardless of labels; user metrics and
// non-listed URNs (element count) do not match.
@Test public void testMatchMetric() { String urn = MonitoringInfoConstants.Urns.API_REQUEST_COUNT; Map<String, String> labels = new HashMap<String, String>(); labels.put(MonitoringInfoConstants.Labels.PTRANSFORM, "MyPtransform"); labels.put(MonitoringInfoConstants.Labels.SERVICE, "BigQuery"); labels.put(MonitoringInfoConstants.Labels.METHOD, "BigQueryBatchWrite"); labels.put(MonitoringInfoConstants.Labels.RESOURCE, "Resource"); labels.put(MonitoringInfoConstants.Labels.BIGQUERY_PROJECT_ID, "MyProject"); labels.put(MonitoringInfoConstants.Labels.BIGQUERY_DATASET, "MyDataset"); labels.put(MonitoringInfoConstants.Labels.BIGQUERY_TABLE, "MyTable"); // MonitoringInfoMetricName will copy labels. So its safe to reuse this reference. labels.put(MonitoringInfoConstants.Labels.STATUS, "ok"); MonitoringInfoMetricName okName = MonitoringInfoMetricName.named(urn, labels); labels.put(MonitoringInfoConstants.Labels.STATUS, "not_found"); MonitoringInfoMetricName notFoundName = MonitoringInfoMetricName.named(urn, labels); Set<String> allowedMetricUrns = new HashSet<String>(); allowedMetricUrns.add(MonitoringInfoConstants.Urns.API_REQUEST_COUNT); assertTrue(MetricsContainerImpl.matchMetric(okName, allowedMetricUrns)); assertTrue(MetricsContainerImpl.matchMetric(notFoundName, allowedMetricUrns)); MetricName userMetricName = MetricName.named("namespace", "name"); assertFalse(MetricsContainerImpl.matchMetric(userMetricName, allowedMetricUrns)); MetricName elementCountName = MonitoringInfoMetricName.named( MonitoringInfoConstants.Urns.ELEMENT_COUNT, Collections.singletonMap("name", "counter")); assertFalse(MetricsContainerImpl.matchMetric(elementCountName, allowedMetricUrns)); }
/**
 * Returns the total number of backups: synchronous plus asynchronous.
 */
public int getTotalBackupCount() {
    return backupCount + asyncBackupCount;
}
// Test: total backup count is the sum of sync (3) and async (2) backup counts.
@Test public void getTotalBackupCount() { RingbufferConfig config = new RingbufferConfig(NAME); config.setAsyncBackupCount(2); config.setBackupCount(3); int result = config.getTotalBackupCount(); assertEquals(5, result); }
// Builds the evaluation function for a numeric predictor: when the exponent is 1 the cheaper
// exponent-free evaluation is used, otherwise the full coefficient*input^exponent form.
static SerializableFunction<Double, Double> getNumericPredictorEntry(final NumericPredictor numericPredictor) { boolean withExponent = !Objects.equals(1, numericPredictor.getExponent()); if (withExponent) { return input -> KiePMMLRegressionTable.evaluateNumericWithExponent(input, numericPredictor.getCoefficient().doubleValue(), numericPredictor.getExponent().doubleValue()); } else { return input -> KiePMMLRegressionTable.evaluateNumericWithoutExponent(input, numericPredictor.getCoefficient().doubleValue()); } }
// Test: a predictor with exponent 1 still yields a non-null evaluation function
// (exponent-free code path).
@Test void getNumericPredictorEntryWithoutExponent() { String predictorName = "predictorName"; int exponent = 1; double coefficient = 1.23; NumericPredictor numericPredictor = PMMLModelTestUtils.getNumericPredictor(predictorName, exponent, coefficient); SerializableFunction<Double, Double> retrieved = KiePMMLRegressionTableFactory.getNumericPredictorEntry(numericPredictor); assertThat(retrieved).isNotNull(); }
// Creates the DistCp copy filter: when a filter class is configured, it is loaded
// reflectively via its (Configuration) constructor — any instantiation failure is logged
// and rethrown as a RuntimeException; otherwise the default filter is returned.
public static CopyFilter getCopyFilter(Configuration conf) { String filtersClassName = conf .get(DistCpConstants.CONF_LABEL_FILTERS_CLASS); if (filtersClassName != null) { try { Class<? extends CopyFilter> filtersClass = conf .getClassByName(filtersClassName) .asSubclass(CopyFilter.class); filtersClassName = filtersClass.getName(); Constructor<? extends CopyFilter> constructor = filtersClass .getDeclaredConstructor(Configuration.class); return constructor.newInstance(conf); } catch (Exception e) { LOG.error(DistCpConstants.CLASS_INSTANTIATION_ERROR_MSG + filtersClassName, e); throw new RuntimeException( DistCpConstants.CLASS_INSTANTIATION_ERROR_MSG + filtersClassName, e); } } else { return getDefaultCopyFilter(conf); } }
// Test: configuring a filter class name yields an instance of that class.
@Test public void testGetCopyFilterRegexpInConfigurationFilter() { final String filterName = "org.apache.hadoop.tools.RegexpInConfigurationFilter"; Configuration configuration = new Configuration(false); configuration.set(DistCpConstants.CONF_LABEL_FILTERS_CLASS, filterName); CopyFilter copyFilter = CopyFilter.getCopyFilter(configuration); assertTrue("copyFilter should be instance of RegexpInConfigurationFilter", copyFilter instanceof RegexpInConfigurationFilter); }
/**
 * SQL INDETERMINATE operator for UUID: a UUID value is indeterminate exactly when it is NULL.
 */
@ScalarOperator(INDETERMINATE)
@SqlType(StandardTypes.BOOLEAN)
public static boolean indeterminate(@SqlType(StandardTypes.UUID) Slice value, @IsNull boolean isNull) {
    return isNull;
}
// Test: NULL UUID is indeterminate; a concrete UUID literal is not.
@Test public void testIndeterminate() { assertOperator(INDETERMINATE, "CAST(null AS UUID)", BOOLEAN, true); assertOperator(INDETERMINATE, "UUID '12151fd2-7586-11e9-8f9e-2a86e4085a59'", BOOLEAN, false); }
// Unregisters this client's monitor (if registered) and shuts down the current delegate
// HTTP client, clearing the reference so no further requests reuse it.
@Override public void shutdown() { if(Monitors.isObjectRegistered(name, this)) { Monitors.unregisterObject(name, this); } TransportUtils.shutdown(eurekaHttpClientRef.getAndSet(null)); }
// Test: with a 1-second session duration, the sessioned client swaps to a newly created
// delegate after the interval elapses, so the second request hits the second client.
@Test public void testReconnectIsEnforcedAtConfiguredInterval() throws Exception { final AtomicReference<EurekaHttpClient> clientRef = new AtomicReference<>(firstClient); when(factory.newClient()).thenAnswer(new Answer<EurekaHttpClient>() { @Override public EurekaHttpClient answer(InvocationOnMock invocation) throws Throwable { return clientRef.get(); } }); SessionedEurekaHttpClient httpClient = null; try { httpClient = new SessionedEurekaHttpClient("test", factory, 1); httpClient.getApplications(); verify(firstClient, times(1)).getApplications(); clientRef.set(secondClient); Thread.sleep(2); httpClient.getApplications(); verify(secondClient, times(1)).getApplications(); } finally { if (httpClient != null) { httpClient.shutdown(); } } }
// Assigns each incoming row to its time windows and emits it. In event time, rows with a
// null rowtime are dropped (and counted); in processing time the current processing time is
// used. Timestamps are shifted to UTC milliseconds before window assignment.
@Override public void processElement(StreamRecord<RowData> element) throws Exception { RowData inputRow = element.getValue(); long timestamp; if (windowAssigner.isEventTime()) { if (inputRow.isNullAt(rowtimeIndex)) { // null timestamp would be dropped numNullRowTimeRecordsDropped.inc(); return; } timestamp = inputRow.getTimestamp(rowtimeIndex, 3).getMillisecond(); } else { timestamp = getProcessingTimeService().getCurrentProcessingTime(); } timestamp = toUtcTimestampMills(timestamp, shiftTimeZone); Collection<TimeWindow> elementWindows = windowAssigner.assignWindows(inputRow, timestamp); collect(inputRow, elementWindows); }
// Test: with 3s-size/1s-slide hopping windows in processing time, each element is emitted
// once per overlapping window with the correct window_start/window_end/window_time columns;
// the element's own (event) timestamp is ignored.
@Test public void testProcessingTimeHopWindows() throws Exception { final SlidingWindowAssigner assigner = SlidingWindowAssigner.of(Duration.ofSeconds(3), Duration.ofSeconds(1)) .withProcessingTime(); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(assigner, shiftTimeZone); testHarness.setup(OUT_SERIALIZER); testHarness.open(); // process elements ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>(); // timestamp is ignored in processing time testHarness.setProcessingTime(20L); testHarness.processElement(insertRecord("key1", 1, Long.MAX_VALUE)); testHarness.setProcessingTime(3999L); testHarness.processElement(insertRecord("key2", 1, Long.MAX_VALUE)); // append 3 fields: window_start, window_end, window_time expectedOutput.add( insertRecord( "key1", 1, Long.MAX_VALUE, localMills(-2000L), localMills(1000L), 999L)); expectedOutput.add( insertRecord( "key1", 1, Long.MAX_VALUE, localMills(-1000L), localMills(2000L), 1999L)); expectedOutput.add( insertRecord("key1", 1, Long.MAX_VALUE, localMills(0L), localMills(3000L), 2999L)); expectedOutput.add( insertRecord( "key2", 1, Long.MAX_VALUE, localMills(1000L), localMills(4000L), 3999L)); expectedOutput.add( insertRecord( "key2", 1, Long.MAX_VALUE, localMills(2000L), localMills(5000L), 4999L)); expectedOutput.add( insertRecord( "key2", 1, Long.MAX_VALUE, localMills(3000L), localMills(6000L), 5999L)); ASSERTER.assertOutputEqualsSorted( "Output was not correct.", expectedOutput, testHarness.getOutput()); testHarness.close(); }
// Renders an AST back into SQL text via the Formatter visitor, stripping any trailing
// newlines from the result.
public static String formatSql(final AstNode root) { final StringBuilder builder = new StringBuilder(); new Formatter(builder).process(root, 0); return StringUtils.stripEnd(builder.toString(), "\n"); }
// Test: a DropStream with ifExists=true formats as "DROP STREAM IF EXISTS <name>".
@Test public void shouldFormatDropStreamStatementIfExists() { // Given: final DropStream dropStream = new DropStream(SOMETHING, true, false); // When: final String formatted = SqlFormatter.formatSql(dropStream); // Then: assertThat(formatted, is("DROP STREAM IF EXISTS SOMETHING")); }
/**
 * Formats a numeric amount as money: grouping separators and exactly two
 * fraction digits (pattern {@code ",##0.00"}), e.g. {@code 299,792,400.54}.
 *
 * @param value the amount to format
 * @return the formatted amount
 */
public static String decimalFormatMoney(double value) {
    // Delegate to the generic pattern-based formatter with a fixed money pattern.
    return decimalFormat(",##0.00", value);
}
@Test
public void decimalFormatMoneyTest() {
    // Large values get grouping separators and are cut to two fraction digits.
    final double large = 299792400.543534534;
    assertEquals("299,792,400.54", NumberUtil.decimalFormatMoney(large));

    // Small values are padded out to two fraction digits.
    final double half = 0.5;
    assertEquals("0.50", NumberUtil.decimalFormatMoney(half));
}
/**
 * Builds a map from alternating key/value elements of the varargs array:
 * element 0 maps to element 1, element 2 to element 3, and so on.
 * Insertion order is preserved ({@link LinkedHashMap}).
 *
 * @param array alternating keys and values
 * @return a mutable map of the consecutive pairs
 */
@SafeVarargs
public static <A> Map<A, A> pairingArrayMap(A... array) {
    // The generic pairing walk does the iteration; we only supply the map
    // factory and the accumulator operation.
    return pairingArray(array, LinkedHashMap::new, Map::put);
}
@Test
public void test() {
    // Consecutive elements pair up as key/value; the test expects the trailing
    // odd element (5) to contribute no entry, leaving exactly two pairs.
    Map<Integer, Integer> pairs = CollectionUtils.pairingArrayMap(1, 2, 3, 4, 5);
    assertEquals(2, pairs.size());
    assertEquals(Integer.valueOf(2), pairs.get(1));
    assertEquals(Integer.valueOf(4), pairs.get(3));
}
/**
 * Builds a human-readable description of an ES search request, appending the
 * target indices when any are set.
 *
 * @param searchRequest the request to describe
 * @return the description text
 */
static String computeDetailsAsString(SearchRequest searchRequest) {
    StringBuilder details = new StringBuilder();
    details.append(String.format("ES search request '%s'", searchRequest));
    String[] indices = searchRequest.indices();
    if (indices.length > 0) {
        details.append(String.format(ON_INDICES_MESSAGE, Arrays.toString(indices)));
    }
    return details.toString();
}
@Test
public void should_format_ClusterHealthRequest() {
    ClusterHealthRequest healthRequest = new ClusterHealthRequest("index-1");

    // The formatted message names the request type and the target index.
    assertThat(EsRequestDetails.computeDetailsAsString(healthRequest))
            .isEqualTo("ES cluster health request on indices 'index-1'");
}
/**
 * Returns the given reader as a {@link BufferedReader}, wrapping it only when
 * it is not already buffered.
 *
 * @param reader the source reader
 * @return the reader itself if already buffered, otherwise a buffering wrapper
 */
static private BufferedReader toBufferedReader(Reader reader) {
    if (reader instanceof BufferedReader) {
        return (BufferedReader) reader; // avoid double-buffering
    }
    return new BufferedReader(reader);
}
@Test
public void testToBufferedReader() throws Exception {
    // Build a multi-line input string.
    StringBuilder content = new StringBuilder();
    for (int i = 0; i < 10; i++) {
        content.append("testToBufferedReader").append("\n");
    }
    StringReader source = new StringReader(content.toString());

    // toBufferedReader is private, so invoke it reflectively.
    Method toBufferedReader =
            IOTinyUtils.class.getDeclaredMethod("toBufferedReader", new Class[]{Reader.class});
    toBufferedReader.setAccessible(true);
    Object result = toBufferedReader.invoke(IOTinyUtils.class, source);

    assertTrue(result instanceof BufferedReader);
}
/**
 * Compatibility bridge for the modern build-step API. Dispatch order:
 * <ol>
 *   <li>If this step implements {@code SimpleBuildStep}, delegate to its
 *       workspace-aware or workspace-free {@code perform} overload.</li>
 *   <li>Otherwise, if the build is a legacy {@code Build}, delegate to the
 *       signature deprecated in 1.312.</li>
 *   <li>Otherwise, do nothing and report success.</li>
 * </ol>
 *
 * @return {@code true} on success (delegation result for the legacy path)
 * @throws AbortException if a workspace is required but absent
 */
@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener)
        throws InterruptedException, IOException {
    if (this instanceof SimpleBuildStep) {
        // delegate to the overloaded version defined in SimpleBuildStep
        final SimpleBuildStep step = (SimpleBuildStep) this;
        final FilePath workspace = build.getWorkspace();
        if (step.requiresWorkspace() && workspace == null) {
            throw new AbortException("no workspace for " + build);
        }
        if (workspace != null) {
            // if we have one, provide it regardless of whether it's _required_
            step.perform(build, workspace, build.getEnvironment(listener), launcher, listener);
        } else {
            step.perform(build, build.getEnvironment(listener), listener);
        }
        return true;
    } else if (build instanceof Build) {
        // delegate to the legacy signature deprecated in 1.312
        return perform((Build) build, launcher, listener);
    } else {
        // neither API applies; treat as a successful no-op
        return true;
    }
}
@Issue("JENKINS-18734") @Test @SuppressWarnings("deprecation") public void testPerform() throws InterruptedException, IOException { FreeStyleBuild mock = Mockito.mock(FreeStyleBuild.class, Mockito.CALLS_REAL_METHODS); BuildStepCompatibilityLayer bscl = new BuildStepCompatibilityLayer() { @Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) { return true; } }; assertTrue(bscl.perform(mock, null, null)); }
/**
 * Returns the Hikari connection pool held by this instance.
 * NOTE(review): may be {@code null} before initialization — confirm the
 * lifecycle contract with the enclosing class.
 *
 * @return the underlying {@link HikariDataSource}
 */
public HikariDataSource getDataSource() {
    return this.ds;
}
@Test
@Ignore
public void testGetMariaDataSource() {
    DataSource dataSource =
            SingletonServiceFactory.getBean(MariaDataSource.class).getDataSource();
    assertNotNull(dataSource);

    // Verify a live connection can actually be obtained from the pool;
    // connection problems are logged but do not fail the (ignored) test.
    try (Connection connection = dataSource.getConnection()) {
        assertNotNull(connection);
    } catch (SQLException e) {
        e.printStackTrace();
    }
}
/**
 * Parses the textual form of a range, e.g. {@code "[1,3)"} or {@code "(,5]"}.
 * {@code '['} / {@code ']'} mark inclusive bounds, {@code '('} / {@code ')'}
 * exclusive ones. A bound that is empty or ends with the infinity token is
 * treated as unbounded on that side and is not passed to the converter.
 *
 * @param str       the range literal to parse
 * @param converter converts a bound's text into its typed value
 * @param clazz     the bound type
 * @return the parsed range
 * @throws IllegalArgumentException if the literal contains no comma separator
 */
public static <T extends Comparable<? super T>> Range<T> ofString(String str, Function<String, T> converter, Class<T> clazz) {
    if (str.equals(EMPTY)) {
        return emptyRange(clazz);
    }
    // Bracket style on each end decides inclusive vs. exclusive.
    int flags = str.charAt(0) == '[' ? LOWER_INCLUSIVE : LOWER_EXCLUSIVE;
    flags |= str.charAt(str.length() - 1) == ']' ? UPPER_INCLUSIVE : UPPER_EXCLUSIVE;

    int comma = str.indexOf(',');
    if (comma == -1) {
        throw new IllegalArgumentException("Cannot find comma character");
    }

    // Strip the surrounding brackets and split on the separator.
    String lowerText = str.substring(1, comma);
    String upperText = str.substring(comma + 1, str.length() - 1);

    if (lowerText.isEmpty() || lowerText.endsWith(INFINITY)) {
        flags |= LOWER_INFINITE;
    }
    if (upperText.isEmpty() || upperText.endsWith(INFINITY)) {
        flags |= UPPER_INFINITE;
    }

    // Only convert bounds that are actually finite.
    T lowerBound = null;
    T upperBound = null;
    if ((flags & LOWER_INFINITE) != LOWER_INFINITE) {
        lowerBound = converter.apply(lowerText);
    }
    if ((flags & UPPER_INFINITE) != UPPER_INFINITE) {
        upperBound = converter.apply(upperText);
    }
    return new Range<>(lowerBound, upperBound, flags, clazz);
}
@Test
public void ofStringTest() {
    // Fully closed range: both bounds present and inclusive.
    assertThat(integerRange("[1,3]").lower(), is(1));
    assertThat(integerRange("[1,3]").upper(), is(3));
    assertThat(integerRange("[1,3]").isUpperBoundClosed(), is(true));
    assertThat(integerRange("[1,3]").isLowerBoundClosed(), is(true));

    // Missing lower bound: unbounded below, closed above.
    assertThat(integerRange("[,3]").lower(), is(nullValue()));
    assertThat(integerRange("[,3]").upper(), is(3));
    assertThat(integerRange("[,3]").hasLowerBound(), is(false));
    assertThat(integerRange("[,3]").hasUpperBound(), is(true));
    assertThat(integerRange("[,3]").isUpperBoundClosed(), is(true));
    assertThat(integerRange("[,3]").isLowerBoundClosed(), is(false));

    // Both bounds missing: unbounded on both sides, neither closed.
    assertThat(integerRange("[,]").lower(), is(nullValue()));
    assertThat(integerRange("[,]").upper(), is(nullValue()));
    assertThat(integerRange("[,]").hasLowerBound(), is(false));
    assertThat(integerRange("[,]").hasUpperBound(), is(false));
    assertThat(integerRange("[,]").isUpperBoundClosed(), is(false));
    assertThat(integerRange("[,]").isLowerBoundClosed(), is(false));

    // Mixed bracket styles.
    assertThat(integerRange("(-5,5]").isUpperBoundClosed(), is(true));
    assertThat(integerRange("(-5,5]").isLowerBoundClosed(), is(false));

    // Containment with the empty range is one-directional.
    assertThat(integerRange("(,)").contains(integerRange("empty")), is(true));
    assertThat(integerRange("empty").contains(integerRange("(,)")), is(false));
}
/**
 * Returns the internal collection of group administrators. The field is
 * returned directly, so changes made to the returned collection are changes
 * to the group's own state.
 *
 * @return the administrators of this group
 */
public Collection<JID> getAdmins() {
    return this.administrators;
}
@Test public void testAddFullJid() throws Exception { // Setup test fixture. final String groupName = "unit-test-group-i"; final Group group = groupManager.createGroup(groupName); final JID fullJid = new JID("unit-test-user-i", "example.org", "unit-test-resource-i"); final JID bareJid = fullJid.asBareJID(); // Execute system under test. final boolean result = group.getAdmins().add(fullJid); // Verify results. assertTrue(result); assertTrue(group.getAdmins().contains(fullJid)); assertTrue(group.getAdmins().contains(bareJid)); }
/**
 * Creates a MongoDB-backed Beam SQL table from the given table definition.
 * NOTE(review): location validation appears to happen inside the
 * {@code MongoDbTable} constructor — confirm against that class.
 */
@Override
public BeamSqlTable buildBeamSqlTable(Table table) {
    return new MongoDbTable(table);
}
@Test
public void testBuildBeamSqlTable_withBadLocation_throwsException() {
    // Malformed MongoDB locations: trailing slash, missing collection, missing
    // scheme, missing host, missing database, and missing port variants.
    ImmutableList<String> badLocations =
        ImmutableList.of(
            "mongodb://localhost:27017/database/",
            "mongodb://localhost:27017/database",
            "localhost:27017/database/collection",
            "mongodb://:27017/database/collection",
            "mongodb://localhost:27017//collection",
            "mongodb://localhost/database/collection",
            "mongodb://localhost:/database/collection");

    for (String location : badLocations) {
        Table table = fakeTable("TEST", location);
        assertThrows(IllegalArgumentException.class, () -> provider.buildBeamSqlTable(table));
    }
}