focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Copies each source S3 path to the destination at the same list position.
 * Builds one copy task per (source, destination) pair and hands the whole
 * batch to callTasks(...) for execution.
 *
 * @throws IOException if any underlying single-object copy fails
 * @throws IllegalArgumentException if the two lists differ in size
 */
@Override
protected void copy(List<S3ResourceId> sourcePaths, List<S3ResourceId> destinationPaths)
    throws IOException {
  checkArgument(
      sourcePaths.size() == destinationPaths.size(),
      "sizes of sourcePaths and destinationPaths do not match");
  List<Callable<Void>> tasks = new ArrayList<>(sourcePaths.size());
  // Pair elements by index; sizes were verified above.
  for (int i = 0; i < sourcePaths.size(); i++) {
    final S3ResourceId sourcePath = sourcePaths.get(i);
    final S3ResourceId destinationPath = destinationPaths.get(i);
    tasks.add(
        () -> {
          copy(sourcePath, destinationPath);
          return null;
        });
  }
  callTasks(tasks);
}
/**
 * Exercises copy() under four client configurations: the default "s3" scheme,
 * an alternate scheme, and both again with an SSE customer key. The actual
 * per-configuration assertions live in the testCopy(config) overload, which is
 * not visible in this chunk.
 */
@Test
public void testCopy() throws IOException {
  testCopy(s3Config("s3"));
  testCopy(s3Config("other"));
  testCopy(s3ConfigWithSSECustomerKey("s3"));
  testCopy(s3ConfigWithSSECustomerKey("other"));
}
/**
 * Groups partitions into PartitionSpec objects keyed by a shared StorageDescriptor.
 *
 * Partitions with no SD, no SD location, or a location inside the table directory are
 * collapsed into shared-SD specs (storing only a path relative to the table location);
 * partitions located outside the table directory are lumped, unmodified, into a single
 * PartitionListComposingSpec for backward compatibility with older HMS APIs.
 *
 * NOTE(review): assumes every partition either has a null SD or a non-null SD whose
 * bucket count can be compared against the first partition's — confirm callers
 * guarantee a non-empty, consistently-projected partition collection.
 *
 * @param table the owning table; its SD location defines the "inside table" prefix
 * @param partitions partitions to classify (may have projected/absent SD fields)
 * @return one PartitionSpec per SD grouping, plus at most one list-composing spec
 */
public static List<PartitionSpec> getPartitionspecsGroupedByStorageDescriptor(Table table,
    Collection<Partition> partitions) {
  final String tablePath = table.getSd().getLocation();
  ImmutableListMultimap<StorageDescriptorKey, Partition> partitionsWithinTableDirectory =
      Multimaps.index(partitions, input -> {
        // if sd is not in the list of projected fields, all the partitions
        // can be just grouped in PartitionSpec object
        if (input.getSd() == null) {
          return StorageDescriptorKey.UNSET_KEY;
        }
        // if sd has skewed columns we better not group partition, since different partitions
        // could have different skewed info like skewed location
        if (input.getSd().getSkewedInfo() != null
            && input.getSd().getSkewedInfo().getSkewedColNames() != null
            && !input.getSd().getSkewedInfo().getSkewedColNames().isEmpty()) {
          return new StorageDescriptorKey(input.getSd());
        }
        // if partitions don't have the same number of buckets we can not group their SD,
        // this could lead to incorrect number of buckets
        if (input.getSd().getNumBuckets() != partitions.iterator().next().getSd().getNumBuckets()) {
          return new StorageDescriptorKey(input.getSd());
        }
        // if the partition is within table, use the tableSDKey to group it with other partitions
        // within the table directory
        if (input.getSd().getLocation() != null && input.getSd().getLocation()
            .startsWith(tablePath)) {
          return new StorageDescriptorKey(tablePath, input.getSd());
        }
        // if partitions are located outside table location we treat them as non-standard
        // and do not perform any grouping
        // if the location is not set partitions are grouped according to the rest of the SD fields
        return new StorageDescriptorKey(input.getSd());
      });
  List<PartitionSpec> partSpecs = new ArrayList<>();
  // Classify partitions based on shared SD properties.
  Map<StorageDescriptorKey, List<PartitionWithoutSD>> sdToPartList = new HashMap<>();
  // we don't expect partitions to exist outside directory in most cases
  List<Partition> partitionsOutsideTableDir = new ArrayList<>(0);
  for (StorageDescriptorKey key : partitionsWithinTableDirectory.keySet()) {
    boolean isUnsetKey = key.equals(StorageDescriptorKey.UNSET_KEY);
    // group the partitions together when
    // case I : sd is not set because it was not in the requested fields
    // case II : when sd.location is not set because it was not in the requested fields
    // case III : when sd.location is set and it is located within table directory
    if (isUnsetKey || key.baseLocation == null || key.baseLocation.equals(tablePath)) {
      for (Partition partition : partitionsWithinTableDirectory.get(key)) {
        // Strip the SD: only values, times, parameters and the table-relative path survive.
        PartitionWithoutSD partitionWithoutSD = new PartitionWithoutSD();
        partitionWithoutSD.setValues(partition.getValues());
        partitionWithoutSD.setCreateTime(partition.getCreateTime());
        partitionWithoutSD.setLastAccessTime(partition.getLastAccessTime());
        partitionWithoutSD.setRelativePath(
            (isUnsetKey || !partition.getSd().isSetLocation()) ? null : partition.getSd()
                .getLocation().substring(tablePath.length()));
        partitionWithoutSD.setParameters(partition.getParameters());
        if (!sdToPartList.containsKey(key)) {
          sdToPartList.put(key, new ArrayList<>());
        }
        sdToPartList.get(key).add(partitionWithoutSD);
      }
    } else {
      // Lump all partitions outside the tablePath into one PartSpec.
      // if non-standard partitions need not be deDuped create PartitionListComposingSpec
      // this will be used mostly for keeping backwards compatibility with some HMS APIs which use
      // PartitionListComposingSpec for non-standard partitions located outside table
      partitionsOutsideTableDir.addAll(partitionsWithinTableDirectory.get(key));
    }
  }
  // create sharedSDPartSpec for all the groupings
  for (Map.Entry<StorageDescriptorKey, List<PartitionWithoutSD>> entry : sdToPartList
      .entrySet()) {
    partSpecs.add(getSharedSDPartSpec(table, entry.getKey(), entry.getValue()));
  }
  if (!partitionsOutsideTableDir.isEmpty()) {
    PartitionSpec partListSpec = new PartitionSpec();
    partListSpec.setCatName(table.getCatName());
    partListSpec.setDbName(table.getDbName());
    partListSpec.setTableName(table.getTableName());
    partListSpec.setPartitionList(new PartitionListComposingSpec(partitionsOutsideTableDir));
    partSpecs.add(partListSpec);
  }
  return partSpecs;
}
/**
 * Mixed grouping case: p3 has its SD unset (grouped under UNSET_KEY), p1 and p4 share
 * the table directory "/foo" so they collapse into one shared-SD spec, and p2 lives
 * outside the table path so it ends up in a PartitionListComposingSpec — three specs total.
 */
@Test
public void testGetPartitionspecsGroupedBySDonePartitionCombined() throws MetaException {
  // Create database and table
  String sharedInputFormat = "foo1";
  Table tbl = new TableBuilder()
      .setDbName(DB_NAME)
      .setTableName(TABLE_NAME)
      .addCol("id", "int")
      .setLocation("/foo")
      .build(null);
  Partition p1 = new PartitionBuilder()
      .setDbName("DB_NAME")
      .setTableName(TABLE_NAME)
      .setLocation("/foo/bar")
      .addCol("a1", "int")
      .addValue("val1")
      .setInputFormat(sharedInputFormat)
      .build(null);
  Partition p2 = new PartitionBuilder()
      .setDbName("DB_NAME")
      .setTableName(TABLE_NAME)
      .setLocation("/a/b")
      .addCol("a2", "int")
      .addValue("val2")
      .setInputFormat("foo2")
      .build(null);
  Partition p3 = new PartitionBuilder()
      .setDbName("DB_NAME")
      .setTableName(TABLE_NAME)
      .addCol("a3", "int")
      .addValue("val3")
      .setInputFormat("foo3")
      .build(null);
  Partition p4 = new PartitionBuilder()
      .setDbName("DB_NAME")
      .setTableName("TABLE_NAME")
      .setLocation("/foo/baz")
      .addCol("a1", "int")
      .addValue("val4")
      .setInputFormat(sharedInputFormat)
      .build(null);
  p3.unsetSd();
  List<PartitionSpec> result =
      MetaStoreServerUtils.getPartitionspecsGroupedByStorageDescriptor(tbl,
          Arrays.asList(p1, p2, p3, p4));
  assertThat(result.size(), is(3));
  // Spec 1: the SD-less partition p3, with no root path and a null relative path.
  PartitionSpec ps1 = result.get(0);
  assertThat(ps1.getRootPath(), is((String)null));
  assertThat(ps1.getPartitionList(), is((List<Partition>)null));
  PartitionSpecWithSharedSD partSpec = ps1.getSharedSDPartitionSpec();
  List<PartitionWithoutSD> partitions1 = partSpec.getPartitions();
  assertThat(partitions1.size(), is(1));
  PartitionWithoutSD partition1 = partitions1.get(0);
  assertThat(partition1.getRelativePath(), is((String)null));
  assertThat(partition1.getValues(), is(Collections.singletonList("val3")));
  // Spec 2: p1 and p4 grouped under the table location; order within the group is
  // unspecified, hence the conditional swap below.
  PartitionSpec ps2 = result.get(1);
  assertThat(ps2.getRootPath(), is(tbl.getSd().getLocation()));
  assertThat(ps2.getPartitionList(), is((List<Partition>)null));
  List<PartitionWithoutSD> partitions2 = ps2.getSharedSDPartitionSpec().getPartitions();
  assertThat(partitions2.size(), is(2));
  PartitionWithoutSD partition2_1 = partitions2.get(0);
  PartitionWithoutSD partition2_2 = partitions2.get(1);
  if (partition2_1.getRelativePath().equals("baz")) {
    // Swap p2_1 and p2_2
    PartitionWithoutSD tmp = partition2_1;
    partition2_1 = partition2_2;
    partition2_2 = tmp;
  }
  assertThat(partition2_1.getRelativePath(), is("/bar"));
  assertThat(partition2_1.getValues(), is(Collections.singletonList("val1")));
  assertThat(partition2_2.getRelativePath(), is("/baz"));
  assertThat(partition2_2.getValues(), is(Collections.singletonList("val4")));
  // Spec 3: the out-of-table partition p2, delivered via a PartitionListComposingSpec.
  PartitionSpec ps4 = result.get(2);
  assertThat(ps4.getRootPath(), is((String)null));
  assertThat(ps4.getSharedSDPartitionSpec(), is((PartitionSpecWithSharedSD)null));
  List<Partition>partitions = ps4.getPartitionList().getPartitions();
  assertThat(partitions.size(), is(1));
  Partition partition = partitions.get(0);
  assertThat(partition.getSd().getLocation(), is("/a/b"));
  assertThat(partition.getValues(), is(Collections.singletonList("val2")));
}
/**
 * Delegates rotation to the injected indexRotator, supplying this strategy's
 * shouldRotate method as the decision callback for the given index set.
 */
@Override
public void rotate(IndexSet indexSet) {
  indexRotator.rotate(indexSet, this::shouldRotate);
}
/**
 * Store size (1000 bytes) exceeds the configured size threshold (100 bytes),
 * so a single rotation — one cycle() call on the index set — is expected.
 */
@Test
public void testRotate() {
  when(indices.getStoreSizeInBytes("name")).thenReturn(Optional.of(1000L));
  when(indexSet.getNewestIndex()).thenReturn("name");
  when(indexSet.getConfig()).thenReturn(indexSetConfig);
  when(indexSetConfig.rotationStrategyConfig()).thenReturn(SizeBasedRotationStrategyConfig.create(100L));
  final SizeBasedRotationStrategy strategy = createStrategy();
  strategy.rotate(indexSet);
  verify(indexSet, times(1)).cycle();
  reset(indexSet);
}
/**
 * Asserts that the subject is a present Optional whose value equals {@code expected}.
 *
 * A null expectation is rejected up front as a caller bug (Optionals can never
 * hold null), a null subject or an absent Optional reports an assertion failure,
 * and otherwise the contained value is compared against {@code expected}.
 */
public void hasValue(@Nullable Object expected) {
  if (expected == null) {
    throw new NullPointerException("Optional cannot have a null value.");
  }
  if (actual == null) {
    failWithActual("expected an optional with value", expected);
    return;
  }
  if (!actual.isPresent()) {
    failWithoutActual(fact("expected to have value", expected), simpleFact("but was absent"));
    return;
  }
  checkNoNeedToDisplayBothValues("get()").that(actual.get()).isEqualTo(expected);
}
/**
 * hasValue(null) must throw NullPointerException (an Optional can never hold
 * null), and the message should mention "Optional" to point at the misuse.
 */
@Test
public void hasValue_npeWithNullParameter() {
  try {
    assertThat(Optional.of("foo")).hasValue(null);
    fail("Expected NPE");
  } catch (NullPointerException expected) {
    assertThat(expected).hasMessageThat().contains("Optional");
  }
}
/**
 * Emits the effective power-saving state as a de-duplicated boolean stream,
 * combining five sources: the power-save-mode preference string, an optional
 * feature-enable boolean pref (treated as always-true when enablePrefResId is 0),
 * battery-level broadcasts (seeded with BATTERY_OKAY), charger broadcasts
 * (seeded with POWER_DISCONNECTED), and the OS power-saving state.
 *
 * Decision: disabled pref => false; pref "never" => false; pref "always" => true;
 * otherwise true when the OS reports power saving, or when the battery is low
 * while the charger is disconnected.
 *
 * NOTE(review): assumes the preference strings are exactly "never"/"always"
 * plus a default mode — confirm against the settings resources.
 */
@CheckReturnValue
@NonNull
public static Observable<Boolean> observePowerSavingState(
    @NonNull Context context, @StringRes int enablePrefResId, @BoolRes int defaultValueResId) {
  final RxSharedPrefs prefs = AnyApplication.prefs(context);
  return Observable.combineLatest(
      prefs
          .getString(
              R.string.settings_key_power_save_mode,
              R.string.settings_default_power_save_mode_value)
          .asObservable(),
      enablePrefResId == 0
          ? Observable.just(true)
          : prefs.getBoolean(enablePrefResId, defaultValueResId).asObservable(),
      RxBroadcastReceivers.fromIntentFilter(
              context.getApplicationContext(), getBatteryStateIntentFilter())
          .startWith(new Intent(Intent.ACTION_BATTERY_OKAY)),
      RxBroadcastReceivers.fromIntentFilter(
              context.getApplicationContext(), getChargerStateIntentFilter())
          .startWith(new Intent(Intent.ACTION_POWER_DISCONNECTED)),
      getOsPowerSavingStateObservable(context),
      (powerSavingPref, enabledPref, batteryIntent, chargerIntent, osPowerSavingState) -> {
        if (!enabledPref) return false;
        switch (powerSavingPref) {
          case "never":
            return false;
          case "always":
            return true;
          default:
            return osPowerSavingState
                || (Intent.ACTION_BATTERY_LOW.equals(batteryIntent.getAction())
                    && Intent.ACTION_POWER_DISCONNECTED.equals(chargerIntent.getAction()));
        }
      })
      .distinctUntilChanged();
}
/**
 * Simulates battery-low and charger broadcasts and checks the emitted state:
 * battery-low with charger disconnected => true, charger connected => false.
 * After dispose() the captured state must stop changing. Note the call goes
 * through a two-argument observePowerSavingState overload not visible here.
 */
@Test
public void testWhenLowPowerSavingMode() {
  AtomicReference<Boolean> state = new AtomicReference<>(null);
  final Observable<Boolean> powerSavingState =
      PowerSaving.observePowerSavingState(getApplicationContext(), 0);
  Assert.assertNull(state.get());
  final Disposable disposable = powerSavingState.subscribe(state::set);
  // starts as false
  Assert.assertEquals(Boolean.FALSE, state.get());
  sendBatteryState(false);
  Assert.assertEquals(Boolean.FALSE, state.get());
  sendBatteryState(true);
  Assert.assertEquals(Boolean.TRUE, state.get());
  sendBatteryState(false);
  Assert.assertEquals(Boolean.FALSE, state.get());
  sendBatteryState(true);
  Assert.assertEquals(Boolean.TRUE, state.get());
  sendChargingState(false);
  Assert.assertEquals(Boolean.TRUE, state.get());
  sendBatteryState(true);
  Assert.assertEquals(Boolean.TRUE, state.get());
  sendChargingState(true);
  Assert.assertEquals(Boolean.FALSE, state.get());
  sendChargingState(false);
  Assert.assertEquals(Boolean.TRUE, state.get());
  disposable.dispose();
  // Once disposed, further broadcasts must not update the captured state.
  sendBatteryState(true);
  Assert.assertEquals(Boolean.TRUE, state.get());
  sendBatteryState(false);
  Assert.assertEquals(Boolean.TRUE, state.get());
  sendChargingState(true);
  Assert.assertEquals(Boolean.TRUE, state.get());
  sendChargingState(false);
  Assert.assertEquals(Boolean.TRUE, state.get());
}
/**
 * Returns a stage that completes with the original result, or — if the given
 * stage completes exceptionally — with the value produced by applying
 * {@code exceptionHandler} to the failure.
 *
 * @param completionStage the stage to guard
 * @param exceptionHandler maps the failure to a fallback result
 * @return a stage that never propagates the original exception (unless the
 *         handler itself throws)
 */
public static <T> CompletionStage<T> recover(CompletionStage<T> completionStage,
    Function<Throwable, T> exceptionHandler) {
  // exceptionally only invokes the handler on exceptional completion;
  // successful results pass through untouched.
  return completionStage.exceptionally(throwable -> exceptionHandler.apply(throwable));
}
/**
 * A successfully completed future must pass its value through recover()
 * unchanged — the fallback handler must not be invoked.
 */
@Test
public void shouldReturnResult() throws Exception {
  CompletableFuture<String> future = CompletableFuture.completedFuture("result");
  String result = recover(future, (e) -> "fallback").toCompletableFuture()
      .get(1, TimeUnit.SECONDS);
  assertThat(result).isEqualTo("result");
}
/**
 * Creates a member tag from the request VO and returns the new tag's id.
 *
 * @param createReqVO creation request carrying at least the tag name
 * @return the database-generated id of the inserted tag
 */
@Override
public Long createTag(MemberTagCreateReqVO createReqVO) {
  // Validate the tag name is unique (null id: this is a create, not an update)
  validateTagNameUnique(null, createReqVO.getName());
  // Convert the request VO into the persistent DO and insert it
  MemberTagDO tag = MemberTagConvert.INSTANCE.convert(createReqVO);
  memberTagMapper.insert(tag);
  // Return the generated primary key
  return tag.getId();
}
/**
 * Happy path for createTag: a random request must yield a non-null id, and the
 * persisted row must carry the request's properties.
 */
@Test
public void testCreateTag_success() {
  // Prepare a random creation request
  MemberTagCreateReqVO reqVO = randomPojo(MemberTagCreateReqVO.class);
  // Invoke the service under test
  Long tagId = tagService.createTag(reqVO);
  // The generated id must be present
  assertNotNull(tagId);
  // The stored record must match the request field-by-field
  MemberTagDO tag = tagMapper.selectById(tagId);
  assertPojoEquals(reqVO, tag);
}
/**
 * Boxed-Integer adapter for the primitive put(K, int) overload: unboxes the
 * value, delegates, and converts the primitive "missing" sentinel back to null
 * via valueOrNull. Note: a null {@code value} triggers a NullPointerException
 * at the unboxing cast — this map is null-value-hostile.
 */
public Integer put(final K key, final Integer value) {
  return valueOrNull(put(key, (int)value));
}
/**
 * Copy-constructing from a populated map (including a duplicate key, 7) must
 * produce a map equal to the original.
 */
@Test
void shouldCopyConstructAndBeEqual() {
  final int[] testEntries = { 3, 1, 19, 7, 11, 12, 7 };
  for (final int testEntry : testEntries) {
    objectToIntMap.put(String.valueOf(testEntry), testEntry);
  }
  final Object2IntHashMap<String> mapCopy = new Object2IntHashMap<>(objectToIntMap);
  assertThat(mapCopy, is(objectToIntMap));
}
/**
 * Builds a circuit-breaker configuration holder from a Commons Configuration
 * source, copying the "configs" and "instances" subsets into the new object.
 *
 * @param configuration source configuration to read prefixed subsets from
 * @return the populated configuration holder
 * @throws ConfigParseException wrapping any failure during subset extraction
 */
public static CommonsConfigurationCircuitBreakerConfiguration of(final Configuration configuration)
    throws ConfigParseException {
  CommonsConfigurationCircuitBreakerConfiguration obj = new CommonsConfigurationCircuitBreakerConfiguration();
  try {
    obj.getConfigs().putAll(obj.getProperties(configuration.subset(CIRCUITBREAKER_CONFIGS_PREFIX)));
    obj.getInstances().putAll(obj.getProperties(configuration.subset(CIRCUITBREAKER_INSTANCES_PREFIX)));
    return obj;
  } catch (Exception ex) {
    // Surface any parsing/extraction failure as a domain-specific exception, preserving the cause.
    throw new ConfigParseException("Error creating circuitbreaker configuration", ex);
  }
}
/**
 * Loads the resilience properties file and verifies both the "configs" and
 * "instances" sections are parsed (assertions live in shared helpers).
 */
@Test
public void testFromPropertiesFile() throws ConfigurationException {
  Configuration config = CommonsConfigurationUtil.getConfiguration(PropertiesConfiguration.class,
      TestConstants.RESILIENCE_CONFIG_PROPERTIES_FILE_NAME);
  CommonsConfigurationCircuitBreakerConfiguration commonsConfigurationCircuitBreakerConfiguration =
      CommonsConfigurationCircuitBreakerConfiguration.of(config);
  assertConfigs(commonsConfigurationCircuitBreakerConfiguration.getConfigs());
  assertInstances(commonsConfigurationCircuitBreakerConfiguration.getInstances());
}
/**
 * Builds a list sharding-condition value from an IN expression.
 *
 * NOT IN yields empty. For each item: explicit NULLs are recorded as null,
 * resolvable values are collected, and now()-style expressions use the
 * timestamp service. Items that match none of those (e.g. unresolvable
 * parameter markers) are skipped entirely, which can make the result empty.
 *
 * @return a ListShardingConditionValue over the collected values, or empty
 *         when nothing usable was found
 */
@Override
public Optional<ShardingConditionValue> generate(final InExpression predicate, final Column column,
    final List<Object> params, final TimestampServiceRule timestampServiceRule) {
  if (predicate.isNot()) {
    return Optional.empty();
  }
  Collection<ExpressionSegment> expressionSegments = predicate.getExpressionList();
  List<Integer> parameterMarkerIndexes = new ArrayList<>(expressionSegments.size());
  List<Comparable<?>> shardingConditionValues = new LinkedList<>();
  for (ExpressionSegment each : expressionSegments) {
    ConditionValue conditionValue = new ConditionValue(each, params);
    Optional<Comparable<?>> value = conditionValue.getValue();
    // NULL check must precede the value check: a null condition has no value
    // but still contributes a (null) entry and its parameter-marker index.
    if (conditionValue.isNull()) {
      shardingConditionValues.add(null);
      conditionValue.getParameterMarkerIndex().ifPresent(parameterMarkerIndexes::add);
      continue;
    }
    if (value.isPresent()) {
      shardingConditionValues.add(value.get());
      conditionValue.getParameterMarkerIndex().ifPresent(parameterMarkerIndexes::add);
      continue;
    }
    if (ExpressionConditionUtils.isNowExpression(each)) {
      shardingConditionValues.add(timestampServiceRule.getTimestamp());
    }
  }
  return shardingConditionValues.isEmpty() ? Optional.empty()
      : Optional.of(new ListShardingConditionValue<>(column.getName(), column.getTableName(),
          shardingConditionValues, parameterMarkerIndexes));
}
/**
 * A parameter marker with no bound parameter resolves to no value and is
 * skipped, so the generator must return Optional.empty().
 */
@Test
void assertGenerateConditionValueWithoutParameter() {
  ColumnSegment left = new ColumnSegment(0, 0, new IdentifierValue("order_id"));
  ListExpression right = new ListExpression(0, 0);
  right.getItems().add(new ParameterMarkerExpressionSegment(0, 0, 0));
  InExpression predicate = new InExpression(0, 0, left, right, false);
  Optional<ShardingConditionValue> actual =
      generator.generate(predicate, column, new LinkedList<>(), timestampServiceRule);
  assertFalse(actual.isPresent());
}
/**
 * Replaces a concrete timestamp (microseconds) with a coarse, privacy-friendly
 * description relative to {@code now}: "about now" within five minutes, then an
 * hour count up to three days, a day count up to ninety days, and a bare
 * "(timestamp)" beyond that.
 *
 * @param micros the timestamp to sanitize, in microseconds
 * @param now    the reference time, in microseconds
 * @return a descriptive placeholder string
 */
private static String sanitizeTimestamp(long micros, long now) {
  String direction = now > micros ? "ago" : "from-now";
  long deltaMicros = Math.abs(now - micros);
  if (deltaMicros < FIVE_MINUTES_IN_MICROS) {
    return "(timestamp-about-now)";
  }
  long deltaHours = TimeUnit.MICROSECONDS.toHours(deltaMicros);
  if (deltaHours <= THREE_DAYS_IN_HOURS) {
    return "(timestamp-" + deltaHours + "-hours-" + direction + ")";
  }
  if (deltaHours < NINETY_DAYS_IN_HOURS) {
    return "(timestamp-" + (deltaHours / 24) + "-days-" + direction + ")";
  }
  return "(timestamp)";
}
/**
 * Every supported timestamp literal shape (with/without fraction, with/without
 * offset) must sanitize to the generic "(timestamp)" placeholder in both the
 * untyped and schema-bound forms, and in the string rendering.
 */
@Test
public void testSanitizeTimestamp() {
  for (String timestamp : Lists.newArrayList(
      "2022-04-29T23:49:51",
      "2022-04-29T23:49:51.123456",
      "2022-04-29T23:49:51-07:00",
      "2022-04-29T23:49:51.123456+01:00")) {
    assertEquals(
        Expressions.equal("test", "(timestamp)"),
        ExpressionUtil.sanitize(Expressions.equal("test", timestamp)));
    assertEquals(
        Expressions.equal("ts", "(timestamp)"),
        ExpressionUtil.sanitize(STRUCT, Expressions.equal("ts", timestamp), true));
    assertThat(ExpressionUtil.toSanitizedString(Expressions.equal("test", timestamp)))
        .as("Sanitized string should be identical except for descriptive literal")
        .isEqualTo("test = (timestamp)");
    assertThat(ExpressionUtil.toSanitizedString(STRUCT, Expressions.equal("ts", timestamp), true))
        .as("Sanitized string should be identical except for descriptive literal")
        .isEqualTo("ts = (timestamp)");
  }
}
/**
 * Returns the shared singleton coder instance; the coder is stateless, so one
 * instance serves all callers.
 */
public static MetadataCoderV2 of() {
  return INSTANCE;
}
/**
 * Round-trips Metadata built without an explicit lastModifiedMillis through
 * MetadataCoderV2, verifying encode/decode equality via CoderProperties.
 */
@Test
public void testEncodeDecodeWithDefaultLastModifiedMills() throws Exception {
  Path filePath = tmpFolder.newFile("somefile").toPath();
  Metadata metadata =
      Metadata.builder()
          .setResourceId(
              FileSystems.matchNewResource(filePath.toString(), false /* isDirectory */))
          .setIsReadSeekEfficient(true)
          .setSizeBytes(1024)
          .build();
  CoderProperties.coderDecodeEncodeEqual(MetadataCoderV2.of(), metadata);
}
/**
 * Resolves the stored node reference equal to the given one, refreshing the
 * cache first.
 *
 * @param nodeRef reference to look up (matched via its equals contract)
 * @return the first cached node reference equal to {@code nodeRef}
 * @throws NoSuchElementException if no matching node reference exists
 */
@Override
public TapiNodeRef getNodeRef(TapiNodeRef nodeRef) throws NoSuchElementException {
  updateCache();
  // Idiom fix: use orElseThrow instead of calling Optional.get() and catching
  // its NoSuchElementException — exceptions should not drive normal control flow.
  return tapiNodeRefList.stream()
      .filter(nodeRef::equals)
      .findFirst()
      .orElseThrow(() -> {
        log.error("Node not found of {}", nodeRef);
        return new NoSuchElementException("Node not found of " + nodeRef);
      });
}
/**
 * After registering a node reference, looking it up (here via the deviceId
 * overload) must return an equal reference.
 */
@Test
public void testGetNodeRef() {
  tapiResolver.addNodeRef(nodeRef);
  assertThat(nodeRef, is(tapiResolver.getNodeRef(deviceId)));
}
/**
 * Serializes a Connect header value exactly like record data: the header key
 * is ignored and the call delegates to fromConnectData.
 */
@Override
public byte[] fromConnectHeader(String topic, String headerKey, Schema schema, Object value) {
  return fromConnectData(topic, schema, value);
}
/**
 * A string header value must serialize to its UTF-8 bytes, identical to
 * regular record-value conversion.
 */
@Test
public void testStringHeaderValueToBytes() {
  assertArrayEquals(Utils.utf8(SAMPLE_STRING),
      converter.fromConnectHeader(TOPIC, "hdr", Schema.STRING_SCHEMA, SAMPLE_STRING));
}
@VisibleForTesting static DateRangeBucket buildDateRangeBuckets(TimeRange timeRange, long searchWithinMs, long executeEveryMs) { final ImmutableList.Builder<DateRange> ranges = ImmutableList.builder(); DateTime from = timeRange.getFrom(); DateTime to; do { // The smallest configurable unit is 1 sec. // By dividing it before casting we avoid a potential int overflow to = from.plusSeconds((int) (searchWithinMs / 1000)); ranges.add(DateRange.builder().from(from).to(to).build()); from = from.plusSeconds((int) executeEveryMs / 1000); } while (to.isBefore(timeRange.getTo())); return DateRangeBucket.builder().field("timestamp").ranges(ranges.build()).build(); }
/**
 * Catch-up scenario: the range is three full tumbling windows long, so exactly
 * three contiguous one-window buckets must be produced.
 */
@Test
public void testDateRangeBucketWithCatchUpTumblingWindows() {
  final long processingWindowSize = Duration.standardSeconds(60).getMillis();
  final long processingHopSize = Duration.standardSeconds(60).getMillis();
  final DateTime now = DateTime.now(DateTimeZone.UTC);
  final DateTime from = now;
  // We are 3 full processingWindows behind
  final DateTime to = now.plusMillis((int) processingWindowSize * 3);
  TimeRange timeRange = AbsoluteRange.create(from, to);
  final DateRangeBucket rangeBucket =
      PivotAggregationSearch.buildDateRangeBuckets(timeRange, processingWindowSize, processingHopSize);
  assertThat(rangeBucket.ranges()).containsExactly(
      DateRange.create(from.plusMillis((int) (processingWindowSize * 0)), from.plusMillis((int) (processingWindowSize * 1))),
      DateRange.create(from.plusMillis((int) (processingWindowSize * 1)), from.plusMillis((int) (processingWindowSize * 2))),
      DateRange.create(from.plusMillis((int) (processingWindowSize * 2)), from.plusMillis((int) (processingWindowSize * 3)))
  );
}
/**
 * Splits a slash-separated path into its non-empty components, so leading,
 * trailing and doubled slashes are ignored ("//a/b/" yields ["a", "b"]).
 *
 * @param path the path to split (must be non-null)
 * @return a mutable list of the path's non-empty segments, in order
 */
public static List<String> splitPath(String path) {
  return Arrays.stream(path.split("/"))
      .filter(component -> !component.isEmpty())
      // Collect into an ArrayList to keep the returned list mutable.
      .collect(Collectors.toCollection(ArrayList::new));
}
/**
 * splitPath must drop empty segments: leading, trailing and doubled slashes
 * all disappear from the result.
 */
@Test
public void testSplitPath() {
  assertEquals(Arrays.asList("alpha", "beta"), CommandUtils.splitPath("/alpha/beta"));
  assertEquals(Arrays.asList("alpha", "beta"), CommandUtils.splitPath("//alpha/beta/"));
}
/**
 * REST endpoint returning install instructions for an edge, for the requested
 * installation method (docker/ubuntu/centos). Requires TENANT_ADMIN authority
 * and that edge support plus the install service are available; otherwise a
 * "Edges support disabled" ThingsboardException is thrown.
 */
@ApiOperation(value = "Get Edge Install Instructions (getEdgeInstallInstructions)",
    notes = "Get an install instructions for provided edge id." + TENANT_AUTHORITY_PARAGRAPH)
@PreAuthorize("hasAnyAuthority('TENANT_ADMIN')")
@GetMapping(value = "/edge/instructions/install/{edgeId}/{method}")
public EdgeInstructions getEdgeInstallInstructions(
    @Parameter(description = EDGE_ID_PARAM_DESCRIPTION, required = true)
    @PathVariable("edgeId") String strEdgeId,
    @Parameter(description = "Installation method ('docker', 'ubuntu' or 'centos')",
        schema = @Schema(allowableValues = {"docker", "ubuntu", "centos"}))
    @PathVariable("method") String installationMethod,
    HttpServletRequest request) throws ThingsboardException {
  if (isEdgesEnabled() && edgeInstallServiceOpt.isPresent()) {
    EdgeId edgeId = new EdgeId(toUUID(strEdgeId));
    edgeId = checkNotNull(edgeId);
    // checkEdgeId also enforces READ permission on the edge entity.
    Edge edge = checkEdgeId(edgeId, Operation.READ);
    return checkNotNull(edgeInstallServiceOpt.get().getInstallInstructions(edge, installationMethod, request));
  } else {
    throw new ThingsboardException("Edges support disabled", ThingsboardErrorCode.GENERAL);
  }
}
/**
 * The docker install instructions for a saved edge must embed that edge's
 * routing key and secret.
 */
@Test
public void testGetEdgeInstallInstructions() throws Exception {
  Edge edge = constructEdge(tenantId, "Edge for Test Docker Install Instructions", "default",
      "7390c3a6-69b0-9910-d155-b90aca4b772e", "l7q4zsjplzwhk16geqxy");
  Edge savedEdge = doPost("/api/edge", edge, Edge.class);
  String installInstructions =
      doGet("/api/edge/instructions/install/" + savedEdge.getId().getId().toString() + "/docker", String.class);
  Assert.assertTrue(installInstructions.contains("l7q4zsjplzwhk16geqxy"));
  Assert.assertTrue(installInstructions.contains("7390c3a6-69b0-9910-d155-b90aca4b772e"));
}
/**
 * Returns the UUIDs of all workers currently known to the factory, in sorted
 * order, as an unmodifiable list.
 */
@Override
public List<String> getWorkerUuids() {
  Set<CeWorker> registeredWorkers = ceWorkerFactory.getWorkers();
  return registeredWorkers.stream()
      .map(worker -> worker.getUUID())
      .sorted()
      .toList();
}
/**
 * The returned list must equal the sorted worker UUIDs and must be a List
 * (serializable), not the factory's ImmutableSet.
 */
@Test
public void getWorkerUuids_returns_ordered_list_of_uuids_of_worker_from_CeWorkerFactory_instance() {
  List<String> workerUuids = underTest.getWorkerUuids();
  assertThat(workerUuids).
    isEqualTo(WORKERS.stream().map(CeWorker::getUUID).sorted().toList())
    // ImmutableSet can not be serialized
    .isNotInstanceOf(ImmutableSet.class);
}
/**
 * Extends {@code baseIdSegment} by exactly one more path level taken from
 * {@code id} (which must start with the base). An empty base yields the first
 * segment of {@code id}; when the base already equals the full id, the id is
 * returned unchanged.
 */
String appendOneLevelOfId(String baseIdSegment, String id) {
  if ("".equals(baseIdSegment)) {
    int firstSlash = id.indexOf('/');
    return firstSlash < 0 ? id : id.substring(0, firstSlash);
  }
  String remainder = id.substring(baseIdSegment.length());
  if ("".equals(remainder)) {
    // Base already covers the whole id — nothing left to append.
    return id;
  }
  remainder = remainder.replaceFirst("/", "");
  int nextSlash = remainder.indexOf('/');
  String nextSegment = nextSlash < 0 ? remainder : remainder.substring(0, nextSlash);
  return baseIdSegment + "/" + nextSegment;
}
/**
 * Covers the four appendOneLevelOfId cases: appending one level to a deep base,
 * to a shallow base, base equal to the full id (identity), and an empty base
 * (first segment only).
 */
@Test
public void testAppendIdsInNonRecursiveListing() {
  TenantApplications applications = createTenantApplications(tenantName, curator, configserverConfig,
      new MockConfigActivationListener(), new InMemoryFlagSource());
  assertEquals(applications.appendOneLevelOfId("search/music", "search/music/container/default/qr.0"), "search/music/container");
  assertEquals(applications.appendOneLevelOfId("search", "search/music/container/default/qr.0"), "search/music");
  assertEquals(applications.appendOneLevelOfId("search/music/container/default/qr.0", "search/music/container/default/qr.0"), "search/music/container/default/qr.0");
  assertEquals(applications.appendOneLevelOfId("", "search/music/container/default/qr.0"), "search");
}
/**
 * Creates a provider backed by the given function/type manager.
 * NOTE(review): the argument is stored without a null check — confirm whether
 * callers guarantee non-null, or add Objects.requireNonNull at the call sites.
 */
public FunctionInvokerProvider(FunctionAndTypeManager functionAndTypeManager) {
  this.functionAndTypeManager = functionAndTypeManager;
}
/**
 * Verifies checkChoice accepts argument-property lists whose null conventions
 * match the invocation convention (boxed/boxed, never-null+block-position,
 * null-flag mixes) and rejects mismatched conventions, arity mismatches, and
 * wrong return conventions.
 */
@Test
public void testFunctionInvokerProvider() {
  assertTrue(checkChoice(
      ImmutableList.of(
          new ArgumentProperty(VALUE_TYPE, Optional.of(USE_BOXED_TYPE), Optional.empty()),
          new ArgumentProperty(VALUE_TYPE, Optional.of(USE_BOXED_TYPE), Optional.empty())),
      true,
      false,
      Optional.of(new InvocationConvention(ImmutableList.of(BOXED_NULLABLE, BOXED_NULLABLE), InvocationReturnConvention.NULLABLE_RETURN, false))));
  assertTrue(checkChoice(
      ImmutableList.of(
          new ArgumentProperty(VALUE_TYPE, Optional.of(RETURN_NULL_ON_NULL), Optional.empty()),
          new ArgumentProperty(VALUE_TYPE, Optional.of(BLOCK_AND_POSITION), Optional.empty()),
          new ArgumentProperty(VALUE_TYPE, Optional.of(BLOCK_AND_POSITION), Optional.empty())),
      false,
      false,
      Optional.of(new InvocationConvention(ImmutableList.of(NEVER_NULL, BLOCK_POSITION, BLOCK_POSITION), InvocationReturnConvention.FAIL_ON_NULL, false))));
  assertTrue(checkChoice(
      ImmutableList.of(
          new ArgumentProperty(VALUE_TYPE, Optional.of(BLOCK_AND_POSITION), Optional.empty()),
          new ArgumentProperty(VALUE_TYPE, Optional.of(USE_NULL_FLAG), Optional.empty()),
          new ArgumentProperty(VALUE_TYPE, Optional.of(BLOCK_AND_POSITION), Optional.empty())),
      false,
      false,
      Optional.of(new InvocationConvention(ImmutableList.of(BLOCK_POSITION, NULL_FLAG, BLOCK_POSITION), InvocationReturnConvention.FAIL_ON_NULL, false))));
  assertFalse(checkChoice(
      ImmutableList.of(
          new ArgumentProperty(VALUE_TYPE, Optional.of(BLOCK_AND_POSITION), Optional.empty()),
          new ArgumentProperty(VALUE_TYPE, Optional.of(USE_BOXED_TYPE), Optional.empty())),
      false,
      false,
      Optional.of(new InvocationConvention(ImmutableList.of(BLOCK_POSITION, BOXED_NULLABLE), InvocationReturnConvention.NULLABLE_RETURN, false))));
  assertFalse(checkChoice(
      ImmutableList.of(
          new ArgumentProperty(VALUE_TYPE, Optional.of(BLOCK_AND_POSITION), Optional.empty()),
          new ArgumentProperty(VALUE_TYPE, Optional.of(BLOCK_AND_POSITION), Optional.empty())),
      false,
      false,
      Optional.of(new InvocationConvention(ImmutableList.of(BLOCK_POSITION, NULL_FLAG), InvocationReturnConvention.NULLABLE_RETURN, false))));
  assertFalse(checkChoice(
      ImmutableList.of(
          new ArgumentProperty(VALUE_TYPE, Optional.of(USE_NULL_FLAG), Optional.empty()),
          new ArgumentProperty(VALUE_TYPE, Optional.of(USE_BOXED_TYPE), Optional.empty())),
      true,
      false,
      Optional.of(new InvocationConvention(ImmutableList.of(BLOCK_POSITION, BOXED_NULLABLE), InvocationReturnConvention.FAIL_ON_NULL, false))));
}
/**
 * Resolves a location string to a URL. Branch order matters:
 * scheme-less locations are looked up on the classpath; "classpath:" locations
 * have the scheme (and any leading slash) stripped, then are looked up on the
 * classpath; anything else is parsed as a regular URL.
 *
 * @param location the resource location; must be non-null
 * @return the resolved URL (never null)
 * @throws MalformedURLException if a "classpath:" location has an empty path,
 *         or the location is an invalid URL
 * @throws FileNotFoundException if a classpath lookup finds nothing
 */
public static URL urlForResource(String location)
    throws MalformedURLException, FileNotFoundException {
  if (location == null) {
    throw new NullPointerException("location is required");
  }
  URL url = null;
  if (!location.matches(SCHEME_PATTERN)) {
    // No scheme at all: treat the whole string as a classpath resource.
    url = Loader.getResourceBySelfClassLoader(location);
  } else if (location.startsWith(CLASSPATH_SCHEME)) {
    String path = location.substring(CLASSPATH_SCHEME.length());
    if (path.startsWith("/")) {
      path = path.substring(1);
    }
    if (path.length() == 0) {
      throw new MalformedURLException("path is required");
    }
    url = Loader.getResourceBySelfClassLoader(path);
  } else {
    url = new URL(location);
  }
  if (url == null) {
    // Classpath lookups return null when the resource is missing.
    throw new FileNotFoundException(location);
  }
  return url;
}
/**
 * A scheme-less location must be resolved implicitly from the classpath;
 * content checks are delegated to validateResource.
 */
@Test
public void testImplicitClasspathUrl() throws Exception {
  URL url = LocationUtil.urlForResource(TEST_CLASSPATH_RESOURCE);
  validateResource(url);
}
/**
 * Looks up a recycled table by database id and table id.
 *
 * @return the recycled table, or null when no such entry exists
 */
@Nullable
public synchronized Table getTable(long dbId, long tableId) {
  RecycleTableInfo recycled = idToTableInfo.row(dbId).get(tableId);
  if (recycled == null) {
    return null;
  }
  return recycled.table;
}
/**
 * Recycling two same-named tables under one db: both remain retrievable by id,
 * but only the most recently recycled one (id 2) stays recoverable.
 */
@Test
public void testGetTable() {
  CatalogRecycleBin bin = new CatalogRecycleBin();
  Table table = new Table(1L, "tbl", Table.TableType.HIVE, Lists.newArrayList());
  bin.recycleTable(11L, table, true);
  Table table2 = new Table(2L, "tbl", Table.TableType.HIVE, Lists.newArrayList());
  bin.recycleTable(11L, table2, true);
  Assert.assertFalse(bin.isTableRecoverable(11L, 1L));
  Assert.assertNotNull(bin.getTable(11L, 1L));
  Assert.assertTrue(bin.isTableRecoverable(11L, 2L));
  Assert.assertNotNull(bin.getTable(11L, 2L));
  List<Table> tables = bin.getTables(11L);
  Assert.assertEquals(2, tables.size());
}
/**
 * Extracts a docker-save/jib tarball into a temp directory, validates its
 * manifest against the container configuration, caches the config, and kicks
 * off parallel layer compression/caching.
 *
 * @return a LocalImage holding futures for the prepared layers plus the config
 * @throws LayerCountMismatchException when manifest and config disagree on layer count
 * @throws IOException on extraction or JSON-read failure
 */
@VisibleForTesting
static LocalImage cacheDockerImageTar(
    BuildContext buildContext,
    Path tarPath,
    ProgressEventDispatcher.Factory progressEventDispatcherFactory,
    TempDirectoryProvider tempDirectoryProvider)
    throws IOException, LayerCountMismatchException {
  ExecutorService executorService = buildContext.getExecutorService();
  Path destination = tempDirectoryProvider.newDirectory();
  try (TimerEventDispatcher ignored =
      new TimerEventDispatcher(
          buildContext.getEventHandlers(), "Extracting tar " + tarPath + " into " + destination)) {
    TarExtractor.extract(tarPath, destination);
    // Only the first manifest entry is used; multi-image tars are not expected here.
    DockerManifestEntryTemplate loadManifest;
    try (InputStream manifestStream = Files.newInputStream(destination.resolve("manifest.json"))) {
      loadManifest =
          JsonMapper.builder()
              .configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true)
              .build()
              .readValue(manifestStream, DockerManifestEntryTemplate[].class)[0];
    }
    Path configPath = destination.resolve(loadManifest.getConfig());
    ContainerConfigurationTemplate configurationTemplate =
        JsonTemplateMapper.readJsonFromFile(configPath, ContainerConfigurationTemplate.class);
    // Don't compute the digest of the loaded Java JSON instance.
    BlobDescriptor originalConfigDescriptor =
        Blobs.from(configPath).writeTo(ByteStreams.nullOutputStream());
    List<String> layerFiles = loadManifest.getLayerFiles();
    if (configurationTemplate.getLayerCount() != layerFiles.size()) {
      throw new LayerCountMismatchException(
          "Invalid base image format: manifest contains "
              + layerFiles.size()
              + " layers, but container configuration contains "
              + configurationTemplate.getLayerCount()
              + " layers");
    }
    buildContext
        .getBaseImageLayersCache()
        .writeLocalConfig(originalConfigDescriptor.getDigest(), configurationTemplate);
    // Check the first layer to see if the layers are compressed already. 'docker save' output
    // is uncompressed, but a jib-built tar has compressed layers.
    boolean layersAreCompressed =
        !layerFiles.isEmpty() && isGzipped(destination.resolve(layerFiles.get(0)));
    // Process layer blobs
    try (ProgressEventDispatcher progressEventDispatcher =
        progressEventDispatcherFactory.create("processing base image layers", layerFiles.size())) {
      // Start compressing layers in parallel
      List<Future<PreparedLayer>> preparedLayers = new ArrayList<>();
      for (int index = 0; index < layerFiles.size(); index++) {
        Path layerFile = destination.resolve(layerFiles.get(index));
        DescriptorDigest diffId = configurationTemplate.getLayerDiffId(index);
        ProgressEventDispatcher.Factory layerProgressDispatcherFactory =
            progressEventDispatcher.newChildProducer();
        preparedLayers.add(
            executorService.submit(
                () ->
                    compressAndCacheTarLayer(
                        buildContext.getBaseImageLayersCache(),
                        diffId,
                        layerFile,
                        layersAreCompressed,
                        layerProgressDispatcherFactory)));
      }
      return new LocalImage(preparedLayers, configurationTemplate);
    }
  }
}
// Runs cacheDockerImageTar on a real 'docker save' fixture and verifies both layers'
// diff IDs and blob digests, the per-layer progress producers, and the layer count.
@Test
public void testCacheDockerImageTar_validDocker() throws Exception {
  Path dockerBuild = getResource("core/extraction/docker-save.tar");

  LocalImage result =
      LocalBaseImageSteps.cacheDockerImageTar(
          buildContext, dockerBuild, progressEventDispatcherFactory, tempDirectoryProvider);

  // One child progress producer per layer.
  Mockito.verify(progressEventDispatcher, Mockito.times(2)).newChildProducer();

  Assert.assertEquals(2, result.layers.size());
  Assert.assertEquals(
      "5e701122d3347fae0758cd5b7f0692c686fcd07b0e7fd9c4a125fbdbbedc04dd",
      result.layers.get(0).get().getDiffId().getHash());
  Assert.assertEquals(
      "0011328ac5dfe3dde40c7c5e0e00c98d1833a3aeae2bfb668cf9eb965c229c7f",
      result.layers.get(0).get().getBlobDescriptor().getDigest().getHash());
  Assert.assertEquals(
      "f1ac3015bcbf0ada4750d728626eb10f0f585199e2b667dcd79e49f0e926178e",
      result.layers.get(1).get().getDiffId().getHash());
  Assert.assertEquals(
      "c10ef24a5cef5092bbcb5a5666721cff7b86ce978c203a958d1fc86ee6c19f94",
      result.layers.get(1).get().getBlobDescriptor().getDigest().getHash());
  Assert.assertEquals(2, result.configurationTemplate.getLayerCount());
}
/**
 * Evaluates the extractor parameters against {@code row} and invokes the wrapped
 * table function. A {@code null} result or a thrown exception is reported through
 * {@code processingLogger} and mapped to an empty list so processing can continue.
 */
List<?> apply(
    final GenericRow row,
    final ProcessingLogger processingLogger
) {
  final int paramCount = parameterExtractors.size();
  final Object[] args = new Object[paramCount];
  for (int i = 0; i < paramCount; i++) {
    args[i] = evalParam(row, processingLogger, i);
  }

  try {
    final List<?> results = tableFunction.apply(args);
    if (results != null) {
      return results;
    }
    // A UDTF returning null is treated as a processing error, not a crash.
    processingLogger.error(RecordProcessingError.recordProcessingError(nullMsg, row));
  } catch (final Exception e) {
    processingLogger.error(RecordProcessingError.recordProcessingError(exceptionMsg, e, row));
  }
  return ImmutableList.of();
}
// Verifies that an exception thrown by the UDTF is routed to the processing logger
// with the function name and the offending row.
@Test
public void shouldLogProcessingErrorIfUdtfThrows() {
  // Given:
  final RuntimeException e = new RuntimeException("Boom");
  when(tableFunction.apply(any())).thenThrow(e);

  // When:
  applier.apply(VALUE, processingLogger);

  // Then:
  verify(processingLogger).error(RecordProcessingError.recordProcessingError(
      "Table function SOME_FUNC threw an exception",
      e,
      VALUE
  ));
}
// Convenience overload: validates that this config's own JSON node has a properly
// formatted MAC address at {@code field}, subject to the given presence requirement.
// Delegates to the static isMacAddress(node, field, presence) variant.
protected boolean isMacAddress(String field, FieldPresence presence) {
  return isMacAddress(object, field, presence);
}
// Checks MAC validation for: valid mandatory/optional fields, a missing optional
// field (allowed), a missing mandatory field (invalid), and a malformed MAC.
@Test
public void isMacAddress() {
  assertTrue("is not proper mac", cfg.isMacAddress(MAC, MANDATORY));
  assertTrue("is not proper mac", cfg.isMacAddress(MAC, OPTIONAL));
  assertTrue("is not proper mac", cfg.isMacAddress("none", OPTIONAL));
  assertTrue("did not detect missing field",
      expectInvalidField(() -> cfg.isMacAddress("none", MANDATORY)));
  assertTrue("did not detect bad ip",
      expectInvalidField(() -> cfg.isMacAddress(BAD_MAC, MANDATORY)));
}
/**
 * Writes the 32-bit value to the stream as four bytes in little-endian order
 * (least-significant byte first). Only the low 8 bits of each shifted value are
 * used by {@link OutputStream#write(int)}.
 *
 * @param out   destination stream
 * @param value value to write, interpreted as unsigned
 * @throws IOException if the underlying stream fails
 */
public static void writeUnsignedIntLE(OutputStream out, int value) throws IOException {
  for (int shift = 0; shift < 32; shift += 8) {
    out.write(value >>> shift);
  }
}
// Verifies little-endian byte order for a positive value (written twice, to check the
// stream is appended, not reset) and for a value with the sign bit set.
@Test
public void testWriteUnsignedIntLEToOutputStream() throws IOException {
  int value1 = 0x04030201;
  ByteArrayOutputStream os1 = new ByteArrayOutputStream();
  ByteUtils.writeUnsignedIntLE(os1, value1);
  ByteUtils.writeUnsignedIntLE(os1, value1);
  assertArrayEquals(new byte[] {0x01, 0x02, 0x03, 0x04, 0x01, 0x02, 0x03, 0x04},
      os1.toByteArray());

  int value2 = 0xf4f3f2f1;
  ByteArrayOutputStream os2 = new ByteArrayOutputStream();
  ByteUtils.writeUnsignedIntLE(os2, value2);
  assertArrayEquals(new byte[] {(byte) 0xf1, (byte) 0xf2, (byte) 0xf3, (byte) 0xf4},
      os2.toByteArray());
}
/**
 * Computes graph density: edges divided by the number of possible ordered node pairs
 * (n * (n - 1)). For undirected graphs each edge covers two ordered pairs, so the
 * edge count is doubled. Self-loops, if present, can push the result above 1.
 */
public double calculateDensity(Graph graph, boolean isGraphDirected) {
  double edges = graph.getEdgeCount();
  double nodes = graph.getNodeCount();
  double orderedPairs = nodes * nodes - nodes;
  if (isGraphDirected) {
    return edges / orderedPairs;
  }
  return (2 * edges) / orderedPairs;
}
// A complete directed graph on 3 nodes has density 1; adding one self-loop per node
// raises the edge count to 9 against 6 possible ordered pairs, giving density 1.5.
@Test
public void testDirectedCompleteGraphWithSelfLoopsDensity() {
  GraphModel graphModel = GraphGenerator.generateCompleteDirectedGraph(3);
  DirectedGraph directedGraph = graphModel.getDirectedGraph();
  Node n1 = directedGraph.getNode("0");
  Node n2 = directedGraph.getNode("1");
  Node n3 = directedGraph.getNode("2");
  Edge currentEdge = graphModel.factory().newEdge(n1, n1);
  directedGraph.addEdge(currentEdge);
  currentEdge = graphModel.factory().newEdge(n2, n2);
  directedGraph.addEdge(currentEdge);
  currentEdge = graphModel.factory().newEdge(n3, n3);
  directedGraph.addEdge(currentEdge);
  DirectedGraph graph = graphModel.getDirectedGraph();
  GraphDensity d = new GraphDensity();
  double density = d.calculateDensity(graph, true);
  assertEquals(density, 1.5);
}
/**
 * Imports a media container by splitting it into its photo and video halves,
 * delegating each half to the corresponding importer, and merging the two results.
 */
@Override
public ImportResult importItem(UUID jobId, IdempotentImportExecutor idempotentExecutor,
    AD authData, MediaContainerResource data) throws Exception {
  final ImportResult photosOutcome =
      photosImporter.importItem(
          jobId, idempotentExecutor, authData, MediaContainerResource.mediaToPhoto(data));
  final ImportResult videosOutcome =
      videosImporter.importItem(
          jobId, idempotentExecutor, authData, MediaContainerResource.mediaToVideo(data));
  return ImportResult.merge(photosOutcome, videosOutcome);
}
// Verifies that a failing video importer makes the merged result an error result,
// even when the photo half succeeds.
@Test
public void shouldHandleErrorInVideos() throws Exception {
  Exception throwable = new Exception();
  mediaImporter =
      new MediaImporterDecorator<>(photosImporter,
          (id, ex, ad, data) -> new ImportResult(throwable));

  MediaContainerResource mcr = new MediaContainerResource(albums, photos, videos);
  ImportResult res = mediaImporter.importItem(null, null, null, mcr);

  assertEquals(new ImportResult(throwable), res);
}
// Builds a KeyStore from a PEM certificate chain and a (possibly encrypted) private
// key file. Any of the many parsing/crypto failures is normalized to a single
// GeneralSecurityException wrapping the original cause, so callers catch one type.
public static KeyStore buildKeyStore(Path certChainFile, Path keyFile,
    char[] keyPasswordChars) throws GeneralSecurityException {
  try {
    return doBuildKeyStore(certChainFile, keyFile, keyPasswordChars);
  } catch (KeyStoreException | NoSuchAlgorithmException | InvalidKeySpecException |
      CertificateException | KeyException | IOException | PKCSException |
      OperatorCreationException e) {
    throw new GeneralSecurityException(e);
  }
}
// Verifies a PBES2/SHA-256-encrypted PKCS#8 key plus a two-certificate chain load
// into the key store, and the decrypted key is retrievable in PKCS#8 form.
@Test
public void testBuildKeyStoreWithPBES2EncryptedPrivateKey() throws Exception {
  final Path certChainFile =
      Paths.get(Resources.getResource("org/graylog2/shared/security/tls/chain.crt").toURI());
  final Path keyFile =
      Paths.get(Resources.getResource(
          "org/graylog2/shared/security/tls/key-enc-pbe2-sha256.p8").toURI());

  final KeyStore keyStore =
      PemKeyStore.buildKeyStore(certChainFile, keyFile, "password".toCharArray());

  final Certificate[] keys = keyStore.getCertificateChain("key");
  assertThat(keys).hasSize(2);

  final Key key = keyStore.getKey("key", "password".toCharArray());
  assertThat(key.getFormat()).isEqualTo("PKCS#8");
  assertThat(key.getEncoded()).isNotEmpty();
}
/**
 * Converts per-worker counters and histograms into {@link PerStepNamespaceMetrics},
 * grouping converted values by metric namespace for the given step. Metrics that fail
 * conversion (empty {@link Optional}) are silently skipped.
 *
 * <p>Refactor: the counter and histogram loops previously duplicated the
 * get-or-create-namespace logic verbatim; both now share {@link #addToNamespace}.
 *
 * @param stepName original step name stamped onto every namespace group
 * @param counters counter metrics keyed by metric name
 * @param histograms histogram snapshots keyed by metric name
 * @param parsedPerWorkerMetricsCache cache of parsed metric names, populated as a side effect
 * @return one {@link PerStepNamespaceMetrics} per distinct namespace
 */
public static Collection<PerStepNamespaceMetrics> convert(
    String stepName,
    Map<MetricName, Long> counters,
    Map<MetricName, LockFreeHistogram.Snapshot> histograms,
    Map<MetricName, LabeledMetricNameUtils.ParsedMetricName> parsedPerWorkerMetricsCache) {

  Map<String, PerStepNamespaceMetrics> metricsByNamespace = new HashMap<>();

  for (Entry<MetricName, Long> entry : counters.entrySet()) {
    MetricName metricName = entry.getKey();
    Optional<MetricValue> metricValue =
        convertCounterToMetricValue(metricName, entry.getValue(), parsedPerWorkerMetricsCache);
    if (!metricValue.isPresent()) {
      continue;
    }
    addToNamespace(stepName, metricsByNamespace, metricName.getNamespace(), metricValue.get());
  }

  for (Entry<MetricName, LockFreeHistogram.Snapshot> entry : histograms.entrySet()) {
    MetricName metricName = entry.getKey();
    Optional<MetricValue> metricValue =
        convertHistogramToMetricValue(metricName, entry.getValue(), parsedPerWorkerMetricsCache);
    if (!metricValue.isPresent()) {
      continue;
    }
    addToNamespace(stepName, metricsByNamespace, metricName.getNamespace(), metricValue.get());
  }

  return metricsByNamespace.values();
}

/**
 * Appends {@code metricValue} to the {@link PerStepNamespaceMetrics} for
 * {@code namespace}, creating the group on first use.
 */
private static void addToNamespace(
    String stepName,
    Map<String, PerStepNamespaceMetrics> metricsByNamespace,
    String namespace,
    MetricValue metricValue) {
  PerStepNamespaceMetrics stepNamespaceMetrics =
      metricsByNamespace.computeIfAbsent(
          namespace,
          ns ->
              new PerStepNamespaceMetrics()
                  .setMetricValues(new ArrayList<>())
                  .setOriginalStep(stepName)
                  .setMetricsNamespace(ns));
  stepNamespaceMetrics.getMetricValues().add(metricValue);
}
// Converts one labeled counter and one labeled linear-bucket histogram from the same
// namespace, and verifies a single namespace group containing both MetricValues plus
// population of the parsed-name cache.
@Test
public void testConvert_convertCountersAndHistograms() {
  String step = "testStep";
  Map<MetricName, Long> counters = new HashMap<>();
  Map<MetricName, LockFreeHistogram.Snapshot> histograms = new HashMap<>();
  Map<MetricName, LabeledMetricNameUtils.ParsedMetricName> parsedMetricNames = new HashMap<>();
  MetricName counterMetricName = MetricName.named("BigQuerySink", "counter*label1:val1;");
  counters.put(counterMetricName, 3L);

  MetricName histogramMetricName = MetricName.named("BigQuerySink", "histogram*label2:val2;");
  LockFreeHistogram linearHistogram = new LockFreeHistogram(histogramMetricName, lienarBuckets);
  linearHistogram.update(5.0);
  histograms.put(histogramMetricName, linearHistogram.getSnapshotAndReset().get());
  Collection<PerStepNamespaceMetrics> conversionResult =
      MetricsToPerStepNamespaceMetricsConverter.convert(
          step, counters, histograms, parsedMetricNames);

  // Expected counter MetricValue
  Map<String, String> counterLabelMap = new HashMap<>();
  counterLabelMap.put("label1", "val1");
  MetricValue expectedCounter =
      new MetricValue().setMetric("counter").setValueInt64(3L).setMetricLabels(counterLabelMap);

  // Expected histogram MetricValue
  List<Long> bucketCounts1 = ImmutableList.of(1L);

  Linear linearOptions1 = new Linear().setNumberOfBuckets(10).setWidth(10.0).setStart(0.0);
  BucketOptions bucketOptions1 = new BucketOptions().setLinear(linearOptions1);

  DataflowHistogramValue linearHistogram1 =
      new DataflowHistogramValue()
          .setCount(1L)
          .setBucketOptions(bucketOptions1)
          .setBucketCounts(bucketCounts1);

  Map<String, String> histogramLabelMap = new HashMap<>();
  histogramLabelMap.put("label2", "val2");

  MetricValue expectedHistogram =
      new MetricValue()
          .setMetric("histogram")
          .setMetricLabels(histogramLabelMap)
          .setValueHistogram(linearHistogram1);

  assertThat(conversionResult.size(), equalTo(1));
  PerStepNamespaceMetrics perStepNamespaceMetrics = conversionResult.iterator().next();

  assertThat(perStepNamespaceMetrics.getOriginalStep(), equalTo(step));
  assertThat(perStepNamespaceMetrics.getMetricsNamespace(), equalTo("BigQuerySink"));
  assertThat(perStepNamespaceMetrics.getMetricValues().size(), equalTo(2));
  assertThat(
      perStepNamespaceMetrics.getMetricValues(),
      containsInAnyOrder(expectedCounter, expectedHistogram));

  // Verify that parsedMetricNames have been cached.
  LabeledMetricNameUtils.ParsedMetricName parsedCounterMetricName =
      LabeledMetricNameUtils.parseMetricName(counterMetricName.getName()).get();
  LabeledMetricNameUtils.ParsedMetricName parsedHistogramMetricName =
      LabeledMetricNameUtils.parseMetricName(histogramMetricName.getName()).get();
  assertThat(parsedMetricNames.size(), equalTo(2));
  assertThat(
      parsedMetricNames, IsMapContaining.hasEntry(counterMetricName, parsedCounterMetricName));
  assertThat(
      parsedMetricNames, IsMapContaining.hasEntry(histogramMetricName, parsedHistogramMetricName));
}
// Expands a (possibly quoted) glob pattern into sorted PathData items. When the glob
// matches nothing, the de-quoted pattern itself is returned with a null stat. Matched
// paths are rewritten to the same "shape" as the input pattern (scheme-qualified,
// absolute, or relative to the filesystem's working directory) to preserve relativity.
public static PathData[] expandAsGlob(String pattern, Configuration conf)
    throws IOException {
  Path globPath = new Path(pattern);
  FileSystem fs = globPath.getFileSystem(conf);
  FileStatus[] stats = fs.globStatus(globPath);
  PathData[] items = null;

  if (stats == null) {
    // remove any quoting in the glob pattern
    pattern = pattern.replaceAll("\\\\(.)", "$1");
    // not a glob & file not found, so add the path with a null stat
    items = new PathData[]{ new PathData(fs, pattern, null) };
  } else {
    // figure out what type of glob path was given, will convert globbed
    // paths to match the type to preserve relativity
    PathType globType;
    URI globUri = globPath.toUri();
    if (globUri.getScheme() != null) {
      globType = PathType.HAS_SCHEME;
    } else if (!globUri.getPath().isEmpty() &&
               new Path(globUri.getPath()).isAbsolute()) {
      globType = PathType.SCHEMELESS_ABSOLUTE;
    } else {
      globType = PathType.RELATIVE;
    }

    // convert stats to PathData
    items = new PathData[stats.length];
    int i=0;
    for (FileStatus stat : stats) {
      URI matchUri = stat.getPath().toUri();
      String globMatch = null;
      switch (globType) {
        case HAS_SCHEME: // use as-is, but remove authority if necessary
          if (globUri.getAuthority() == null) {
            matchUri = removeAuthority(matchUri);
          }
          globMatch = uriToString(matchUri, false);
          break;
        case SCHEMELESS_ABSOLUTE: // take just the uri's path
          globMatch = matchUri.getPath();
          break;
        case RELATIVE: // make it relative to the current working dir
          URI cwdUri = fs.getWorkingDirectory().toUri();
          globMatch = relativize(cwdUri, matchUri, stat.isDirectory());
          break;
      }
      items[i++] = new PathData(fs, globMatch, stat);
    }
  }
  // stable, sorted output regardless of globStatus ordering
  Arrays.sort(items);
  return items;
}
// Verifies that a relative glob reaching "back" through the parent directory keeps
// its relative form ("../d2/f3") instead of being absolutized.
@Test (timeout = 30000)
public void testRelativeGlobBack() throws Exception {
  fs.setWorkingDirectory(new Path("d1"));
  PathData[] items = PathData.expandAsGlob("../d2/*", conf);
  assertEquals(
      sortedString("../d2/f3"),
      sortedString(items)
  );
}
/**
 * Returns {@code true} when the array is {@code null} or has no elements.
 *
 * @param bytes array to check, may be {@code null}
 */
public static boolean isEmpty(final byte[] bytes) {
  if (bytes == null) {
    return true;
  }
  return bytes.length == 0;
}
// Checks both the null case and a populated array; the suppression silences the
// IDE's "constant null argument" warning for the intentional null call.
@SuppressWarnings("ConstantConditions")
@Test
public void testIsEmpty() {
  Assert.assertTrue(BytesUtil.isEmpty(null));
  Assert.assertFalse(BytesUtil.isEmpty(new byte[] { 1, 2 }));
}
// Tokenizes English text. Optionally expands contractions (won't -> will not, etc.)
// before splitting on delimiter patterns; the regex passes are order-sensitive, so
// the sequence of replaceAll calls must not be reordered. A trailing "." is merged
// back into the preceding token when that token is a known abbreviation (e.g. "Mr.").
@Override
public String[] split(String text) {
  if (splitContraction) {
    // Irregular contractions first, then the generic n't / 'll / 'd-style patterns.
    text = WONT_CONTRACTION.matcher(text).replaceAll("$1ill not");
    text = SHANT_CONTRACTION.matcher(text).replaceAll("$1ll not");
    text = AINT_CONTRACTION.matcher(text).replaceAll("$1m not");
    for (Pattern regexp : NOT_CONTRACTIONS) {
      text = regexp.matcher(text).replaceAll("$1 not");
    }
    for (Pattern regexp : CONTRACTIONS2) {
      text = regexp.matcher(text).replaceAll("$1 $2");
    }
    for (Pattern regexp : CONTRACTIONS3) {
      text = regexp.matcher(text).replaceAll("$1 $2 $3");
    }
  }

  // Surround delimiters with spaces so the whitespace split isolates them.
  text = DELIMITERS[0].matcher(text).replaceAll(" $1 ");
  text = DELIMITERS[1].matcher(text).replaceAll(" $1");
  text = DELIMITERS[2].matcher(text).replaceAll(" $1");
  text = DELIMITERS[3].matcher(text).replaceAll(" . ");
  text = DELIMITERS[4].matcher(text).replaceAll(" $1 ");

  String[] words = WHITESPACE.split(text);
  // Re-attach a sentence-final period to a preceding abbreviation token.
  if (words.length > 1 && words[words.length-1].equals(".")) {
    if (EnglishAbbreviations.contains(words[words.length-2])) {
      words[words.length-2] = words[words.length-2] + ".";
    }
  }

  // Drop empty tokens produced by the split.
  ArrayList<String> result = new ArrayList<>();
  for (String token : words) {
    if (!token.isEmpty()) {
      result.add(token);
    }
  }

  return result.toArray(new String[0]);
}
// Table-of-contents style line: the long dot leader must come out as a single token,
// separated from the section number, title, and page number.
@Test
public void testTokenizeToC() {
  System.out.println("tokenize 1.2 Interpretation.....................................................................................................................3");
  // No-break space and em-space
  String text = "1.2 Interpretation.....................................................................................................................3";
  String[] expResult = {"1.2", "Interpretation", ".....................................................................................................................", "3"};
  SimpleTokenizer instance = new SimpleTokenizer();
  String[] result = instance.split(text);
  assertEquals(expResult.length, result.length);
  for (int i = 0; i < result.length; i++) {
    assertEquals(expResult[i], result[i]);
  }
}
// Describes the given topics, retrying with backoff until all are described or the
// retry timeout elapses, in which case a TimeoutException is thrown.
// NOTE(review): the timeout message says "Could not create topics" although this
// method describes topics — presumably copied from the create path; the unit test
// pins this exact wording, so confirm before changing it.
public Map<String, List<TopicPartitionInfo>> getTopicPartitionInfo(final Set<String> topics) {
  log.debug("Starting to describe topics {} in partition assignor.", topics);

  long currentWallClockMs = time.milliseconds();
  final long deadlineMs = currentWallClockMs + retryTimeoutMs;

  final Set<String> topicsToDescribe = new HashSet<>(topics);
  final Map<String, List<TopicPartitionInfo>> topicPartitionInfo = new HashMap<>();

  while (!topicsToDescribe.isEmpty()) {
    // Partial results are accumulated; only still-missing topics are retried.
    final Map<String, List<TopicPartitionInfo>> existed =
        getTopicPartitionInfo(topicsToDescribe, null);
    topicPartitionInfo.putAll(existed);
    topicsToDescribe.removeAll(topicPartitionInfo.keySet());
    if (!topicsToDescribe.isEmpty()) {
      currentWallClockMs = time.milliseconds();

      if (currentWallClockMs >= deadlineMs) {
        final String timeoutError = String.format(
            "Could not create topics within %d milliseconds. " +
                "This can happen if the Kafka cluster is temporarily not available.",
            retryTimeoutMs);
        log.error(timeoutError);
        throw new TimeoutException(timeoutError);
      }
      log.info(
          "Topics {} could not be describe fully. Will retry in {} milliseconds. Remaining time in milliseconds: {}",
          topics,
          retryBackOffMs,
          deadlineMs - currentWallClockMs
      );
      Utils.sleep(retryBackOffMs);
    }
  }
  log.debug("Completed describing topics");
  return topicPartitionInfo;
}
// Forces every admin request to time out and verifies getTopicPartitionInfo gives up
// after the configured retry timeout with the documented message.
@Test
public void shouldThrowTimeoutExceptionInGetPartitionInfo() {
  setupTopicInMockAdminClient(topic1, Collections.emptyMap());
  final MockTime time = new MockTime(5);
  mockAdminClient.timeoutNextRequest(Integer.MAX_VALUE);
  final InternalTopicManager internalTopicManager =
      new InternalTopicManager(time, mockAdminClient, new StreamsConfig(config));

  final TimeoutException exception = assertThrows(
      TimeoutException.class,
      () -> internalTopicManager.getTopicPartitionInfo(Collections.singleton(topic1))
  );

  assertThat(
      exception.getMessage(),
      is("Could not create topics within 50 milliseconds. This can happen if the Kafka cluster is temporarily not available.")
  );
}
/**
 * Formats the logging event's timestamp using the shared caching date formatter.
 */
public String convert(ILoggingEvent le) {
  return cachingDateFormatter.format(le.getTimeStamp());
}
// Verifies the converter honors a time-zone ID ("CST") passed via the pattern options.
@Test
public void convertsDateInSpecifiedTimeZoneAsTzid() {
  assertEquals(formatDate("CST"), convert(_timestamp, DATETIME_PATTERN, "CST"));
}
// Transitions the reactor from NEW to RUNNING exactly once via CAS; a second start
// (or a start after shutdown) fails the CAS and throws. Releasing startLatch lets
// the reactor thread proceed. Returns this for fluent chaining.
public Reactor start() {
  if (!STATE.compareAndSet(Reactor.this, NEW, RUNNING)) {
    throw new IllegalStateException("Can't start reactor, invalid state:" + state);
  }

  startLatch.countDown();
  return this;
}
// A second start() on an already-running reactor must fail the NEW->RUNNING CAS.
@Test(expected = IllegalStateException.class)
public void test_start_whenAlreadyStarted() {
  Reactor reactor = newReactor();
  reactor.start();

  reactor.start();
}
/**
 * Converts per-worker counters and histogram snapshots into a collection of
 * {@link PerStepNamespaceMetrics}, one per distinct metric namespace, all stamped
 * with the given step name. Metrics whose conversion yields an empty
 * {@link Optional} are skipped. The parsed-name cache is populated as a side effect
 * of the conversion helpers.
 */
public static Collection<PerStepNamespaceMetrics> convert(
    String stepName,
    Map<MetricName, Long> counters,
    Map<MetricName, LockFreeHistogram.Snapshot> histograms,
    Map<MetricName, LabeledMetricNameUtils.ParsedMetricName> parsedPerWorkerMetricsCache) {

  Map<String, PerStepNamespaceMetrics> metricsByNamespace = new HashMap<>();

  for (Entry<MetricName, Long> counter : counters.entrySet()) {
    MetricName metricName = counter.getKey();
    Optional<MetricValue> converted =
        convertCounterToMetricValue(metricName, counter.getValue(), parsedPerWorkerMetricsCache);
    if (converted.isPresent()) {
      metricsByNamespace
          .computeIfAbsent(
              metricName.getNamespace(),
              namespace ->
                  new PerStepNamespaceMetrics()
                      .setMetricValues(new ArrayList<>())
                      .setOriginalStep(stepName)
                      .setMetricsNamespace(namespace))
          .getMetricValues()
          .add(converted.get());
    }
  }

  for (Entry<MetricName, LockFreeHistogram.Snapshot> histogram : histograms.entrySet()) {
    MetricName metricName = histogram.getKey();
    Optional<MetricValue> converted =
        convertHistogramToMetricValue(
            metricName, histogram.getValue(), parsedPerWorkerMetricsCache);
    if (converted.isPresent()) {
      metricsByNamespace
          .computeIfAbsent(
              metricName.getNamespace(),
              namespace ->
                  new PerStepNamespaceMetrics()
                      .setMetricValues(new ArrayList<>())
                      .setOriginalStep(stepName)
                      .setMetricsNamespace(namespace))
          .getMetricValues()
          .add(converted.get());
    }
  }

  return metricsByNamespace.values();
}
// Converts three counters from one namespace (one unlabeled, one labeled, one
// zero-valued); verifies the zero-valued counter is dropped, both surviving values
// land in a single namespace group, and the parsed-name cache holds both names.
@Test
public void testConvert_successfulyConvertCounters() {
  String step = "testStepName";
  Map<MetricName, LockFreeHistogram.Snapshot> emptyHistograms = new HashMap<>();
  Map<MetricName, Long> counters = new HashMap<MetricName, Long>();
  Map<MetricName, LabeledMetricNameUtils.ParsedMetricName> parsedMetricNames = new HashMap<>();

  MetricName bigQueryMetric1 = MetricName.named("BigQuerySink", "metric1");
  MetricName bigQueryMetric2 =
      MetricName.named("BigQuerySink", "metric2*label1:val1;label2:val2;");
  MetricName bigQueryMetric3 = MetricName.named("BigQuerySink", "zeroValue");

  counters.put(bigQueryMetric1, 5L);
  counters.put(bigQueryMetric2, 10L);
  counters.put(bigQueryMetric3, 0L);

  Collection<PerStepNamespaceMetrics> conversionResult =
      MetricsToPerStepNamespaceMetricsConverter.convert(
          step, counters, emptyHistograms, parsedMetricNames);

  MetricValue expectedVal1 =
      new MetricValue().setMetric("metric1").setValueInt64(5L).setMetricLabels(new HashMap<>());
  Map<String, String> val2LabelMap = new HashMap<>();
  val2LabelMap.put("label1", "val1");
  val2LabelMap.put("label2", "val2");
  MetricValue expectedVal2 =
      new MetricValue().setMetric("metric2").setValueInt64(10L).setMetricLabels(val2LabelMap);

  assertThat(conversionResult.size(), equalTo(1));
  PerStepNamespaceMetrics perStepNamespaceMetrics = conversionResult.iterator().next();

  assertThat(perStepNamespaceMetrics.getOriginalStep(), equalTo(step));
  assertThat(perStepNamespaceMetrics.getMetricsNamespace(), equalTo("BigQuerySink"));
  assertThat(perStepNamespaceMetrics.getMetricValues().size(), equalTo(2));
  assertThat(
      perStepNamespaceMetrics.getMetricValues(),
      containsInAnyOrder(expectedVal1, expectedVal2));

  LabeledMetricNameUtils.ParsedMetricName parsedBigQueryMetric1 =
      LabeledMetricNameUtils.parseMetricName(bigQueryMetric1.getName()).get();
  LabeledMetricNameUtils.ParsedMetricName parsedBigQueryMetric2 =
      LabeledMetricNameUtils.parseMetricName(bigQueryMetric2.getName()).get();
  assertThat(parsedMetricNames.size(), equalTo(2));
  assertThat(parsedMetricNames,
      IsMapContaining.hasEntry(bigQueryMetric1, parsedBigQueryMetric1));
  assertThat(parsedMetricNames,
      IsMapContaining.hasEntry(bigQueryMetric2, parsedBigQueryMetric2));
}
/**
 * Validates that {@code value} is non-negative.
 *
 * @param value     value to check
 * @param paramName name used in the error message
 * @return the validated value, for fluent use
 * @throws IllegalArgumentException if {@code value} is negative
 */
public static int checkNotNegative(int value, String paramName) {
  if (value >= 0) {
    return value;
  }
  throw new IllegalArgumentException(paramName + " is " + value + " but must be >= 0");
}
// A positive value must pass validation without throwing.
@Test
public void test_checkNotNegative_whenPositive() {
  checkNotNegative(1, "foo");
}
// Convenience overload: aggregates with default (unspecified) key/value serdes by
// delegating to the Materialized-accepting variant.
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer) {
  return aggregate(initializer, Materialized.with(null, null));
}
// A null initializer must be rejected even when Named and Materialized are supplied.
@Test
public void shouldNotHaveNullInitializerThreeOptionOnAggregate() {
  assertThrows(
      NullPointerException.class,
      () -> windowedCogroupedStream.aggregate(null, Named.as("test"), Materialized.as("test")));
}
/**
 * Extracts the database name from a full statistics config-node path. The name is the
 * final path segment (word characters and hyphens) directly under the data-node root;
 * matching is case-insensitive. Returns {@link Optional#empty()} when the path does
 * not match.
 */
public static Optional<String> getDatabaseName(final String configNodeFullPath) {
  Matcher matcher = Pattern
      .compile(getShardingSphereDataNodePath() + "/([\\w\\-]+)$", Pattern.CASE_INSENSITIVE)
      .matcher(configNodeFullPath);
  if (matcher.find()) {
    return Optional.of(matcher.group(1));
  }
  return Optional.empty();
}
// Happy path: the trailing segment of a well-formed statistics path is the db name.
@Test
void assertGetDatabaseNameHappyPath() {
  assertThat(ShardingSphereDataNode.getDatabaseName("/statistics/databases/db_name"),
      is(Optional.of("db_name")));
}
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException { // Check for the magic header signature byte[] signature = new byte[4]; IOUtils.readFully(stream, signature); if (signature[0] == (byte) '8' && signature[1] == (byte) 'B' && signature[2] == (byte) 'P' && signature[3] == (byte) 'S') { // Good, signature found } else { throw new TikaException("PSD/PSB magic signature invalid"); } // Check the version int version = EndianUtils.readUShortBE(stream); if (version == 1 || version == 2) { // Good, we support these two } else { throw new TikaException("Invalid PSD/PSB version " + version); } // Skip the reserved block IOUtils.readFully(stream, new byte[6]); // Number of channels in the image int numChannels = EndianUtils.readUShortBE(stream); // TODO Identify a suitable metadata key for this // Width and Height int height = EndianUtils.readIntBE(stream); int width = EndianUtils.readIntBE(stream); metadata.set(TIFF.IMAGE_LENGTH, height); metadata.set(TIFF.IMAGE_WIDTH, width); // Depth (bits per channel) int depth = EndianUtils.readUShortBE(stream); metadata.set(TIFF.BITS_PER_SAMPLE, Integer.toString(depth)); // Colour mode, eg Bitmap or RGB int colorMode = EndianUtils.readUShortBE(stream); if (colorMode < Photoshop._COLOR_MODE_CHOICES_INDEXED.length) { metadata.set(Photoshop.COLOR_MODE, Photoshop._COLOR_MODE_CHOICES_INDEXED[colorMode]); } // Next is the Color Mode section // We don't care about this bit long colorModeSectionSize = EndianUtils.readIntBE(stream); IOUtils.skipFully(stream, colorModeSectionSize); // Next is the Image Resources section // Check for certain interesting keys here long imageResourcesSectionSize = EndianUtils.readIntBE(stream); long read = 0; //if something is corrupt about this number, prevent an //infinite loop by only reading 10000 blocks int blocks = 0; while (read < imageResourcesSectionSize && blocks < MAX_BLOCKS) { ResourceBlock rb = new 
ResourceBlock(stream, maxDataLengthBytes); if (rb.totalLength <= 0) { //break; } read += rb.totalLength; // Is it one we can do something useful with? if (rb.id == ResourceBlock.ID_CAPTION) { metadata.add(TikaCoreProperties.DESCRIPTION, rb.getDataAsString()); } else if (rb.id == ResourceBlock.ID_EXIF_1) { // TODO Parse the EXIF info via ImageMetadataExtractor } else if (rb.id == ResourceBlock.ID_EXIF_3) { // TODO Parse the EXIF info via ImageMetadataExtractor } else if (rb.id == ResourceBlock.ID_XMP) { //if there are multiple xmps in a file, this will //overwrite the data from the earlier xmp JempboxExtractor ex = new JempboxExtractor(metadata); ex.parse(UnsynchronizedByteArrayInputStream.builder().setByteArray(rb.data).get()); } blocks++; } // Next is the Layer and Mask Info // Finally we have Image Data // We can't do anything with these parts // We don't have any helpful text, sorry... XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata); xhtml.startDocument(); xhtml.endDocument(); }
// Parses a known PSD fixture and checks extracted width, height and bit depth.
@Test
public void testPSD() throws Exception {
  Metadata metadata = new Metadata();
  metadata.set(Metadata.CONTENT_TYPE, "image/x-psd");
  try (InputStream stream = getResourceAsStream("/test-documents/testPSD.psd")) {
    parser.parse(stream, new DefaultHandler(), metadata, new ParseContext());
  }

  assertEquals("537", metadata.get(Metadata.IMAGE_WIDTH));
  assertEquals("51", metadata.get(Metadata.IMAGE_LENGTH));
  assertEquals("8", metadata.get(Metadata.BITS_PER_SAMPLE));
}
// This dialect supports UPSERT (e.g. INSERT ... ON CONFLICT style statements).
@Override
public boolean supportsUpsert() {
  return true;
}
// The PostgreSQL dialect must report UPSERT support.
@Test
void postgres_does_supportUpsert() {
  assertThat(underTest.supportsUpsert()).isTrue();
}
// Accessor for the reporter's telemetry sender (also used by tests to drive state).
public ClientTelemetrySender telemetrySender() {
  return clientTelemetrySender;
}
// Concurrency test: one thread blocks in clientInstanceId() while another delivers
// the subscription result; the blocked call must wake up with the expected UUID.
@Test
public void testClientInstanceId() throws InterruptedException {
  ClientTelemetryReporter.DefaultClientTelemetrySender telemetrySender =
      (ClientTelemetryReporter.DefaultClientTelemetrySender)
          clientTelemetryReporter.telemetrySender();
  assertTrue(telemetrySender.maybeSetState(ClientTelemetryState.SUBSCRIPTION_IN_PROGRESS));

  CountDownLatch lock = new CountDownLatch(2);
  AtomicReference<Optional<Uuid>> clientInstanceId = new AtomicReference<>();

  new Thread(() -> {
    try {
      // Blocks until the subscription result arrives (or the 10s timeout).
      clientInstanceId.set(telemetrySender.clientInstanceId(Duration.ofMillis(10000)));
    } finally {
      lock.countDown();
    }
  }).start();

  new Thread(() -> {
    try {
      telemetrySender.updateSubscriptionResult(subscription, time.milliseconds());
    } finally {
      lock.countDown();
    }
  }).start();

  assertTrue(lock.await(2000, TimeUnit.MILLISECONDS));
  assertNotNull(clientInstanceId.get());
  assertTrue(clientInstanceId.get().isPresent());
  assertEquals(uuid, clientInstanceId.get().get());
}
// Validates a client-quota (key, value) pair against the set of allowed quota config
// keys. Rejects unknown keys and non-positive values; for integral-typed quotas
// (SHORT/INT/LONG) also rejects values exceeding the type's range and delegates the
// fractional-value check to getErrorForIntegralQuotaValue. DOUBLE quotas accept any
// positive value. Returns ApiError.NONE on success.
static ApiError validateQuotaKeyValue(
    Map<String, ConfigDef.ConfigKey> validKeys,
    String key,
    double value
) {
  // Ensure we have an allowed quota key
  ConfigDef.ConfigKey configKey = validKeys.get(key);
  if (configKey == null) {
    return new ApiError(Errors.INVALID_REQUEST, "Invalid configuration key " + key);
  }
  if (value <= 0.0) {
    return new ApiError(Errors.INVALID_REQUEST, "Quota " + key + " must be greater than 0");
  }

  // Ensure the quota value is valid for the key's declared type.
  switch (configKey.type()) {
    case DOUBLE:
      return ApiError.NONE;
    case SHORT:
      if (value > Short.MAX_VALUE) {
        return new ApiError(Errors.INVALID_REQUEST,
            "Proposed value for " + key + " is too large for a SHORT.");
      }
      return getErrorForIntegralQuotaValue(value, key);
    case INT:
      if (value > Integer.MAX_VALUE) {
        return new ApiError(Errors.INVALID_REQUEST,
            "Proposed value for " + key + " is too large for an INT.");
      }
      return getErrorForIntegralQuotaValue(value, key);
    case LONG: {
      // NOTE(review): (double) Long.MAX_VALUE rounds to 2^63, so a double exactly
      // equal to 2^63 passes this check despite not fitting in a long — presumably
      // acceptable in practice; confirm before tightening.
      if (value > Long.MAX_VALUE) {
        return new ApiError(Errors.INVALID_REQUEST,
            "Proposed value for " + key + " is too large for a LONG.");
      }
      return getErrorForIntegralQuotaValue(value, key);
    }
    default:
      return new ApiError(Errors.UNKNOWN_SERVER_ERROR,
          "Unexpected config type " + configKey.type() + " should be Long or Double");
  }
}
// A fractional value for the integral consumer_byte_rate quota must be rejected.
@Test
public void testValidateQuotaKeyValueForFractionalConsumerByteRate() {
  assertEquals(new ApiError(Errors.INVALID_REQUEST,
          "consumer_byte_rate cannot be a fractional value."),
      ClientQuotaControlManager.validateQuotaKeyValue(
          VALID_CLIENT_ID_QUOTA_KEYS, "consumer_byte_rate", 2.245));
}
static int initAllDumpPageSize() { long memLimitMB = getMemLimitMB(); //512MB->50 Page Size int pageSize = (int) ((float) memLimitMB / PAGE_MEMORY_DIVIDE_MB) * MIN_DUMP_PAGE; pageSize = Math.max(pageSize, MIN_DUMP_PAGE); pageSize = Math.min(pageSize, MAX_DUMP_PAGE); LOGGER.info("All dump page size is set to {} according to mem limit {} MB", pageSize, memLimitMB); return pageSize; }
// With no mock memory file present the page size falls back to the JVM max heap;
// the expected value is recomputed here and checked against the same clamping rules.
@Test
void testGetAllDumpPageSizeWithJvmArgs() throws Exception {
  File file = new File(mockMem);
  if (file.exists()) {
    file.delete();
  }
  int allDumpPageSizeUnderMin = PropertyUtil.initAllDumpPageSize();
  long maxMem = Runtime.getRuntime().maxMemory();
  long pageSize = maxMem / 1024 / 1024 / 512 * 50;
  if (pageSize < 50) {
    assertEquals(50, allDumpPageSizeUnderMin);
  } else if (pageSize > 1000) {
    assertEquals(1000, allDumpPageSizeUnderMin);
  } else {
    assertEquals(pageSize, allDumpPageSizeUnderMin);
  }
}
/**
 * Opens a connection from the data source just long enough to read the database
 * product name (e.g. "MySQL", "PostgreSQL") from its metadata. The connection is
 * closed by try-with-resources.
 *
 * @throws SQLException if the connection or metadata lookup fails
 */
public static String getDbTypeFromDataSource(DataSource dataSource) throws SQLException {
  try (Connection connection = dataSource.getConnection()) {
    return connection.getMetaData().getDatabaseProductName();
  }
}
// Mocks the JDBC metadata chain and verifies the product name is passed through.
@Test
public void testGetDbTypeFromDataSource() throws SQLException {
  Connection connection = Mockito.mock(Connection.class);
  DatabaseMetaData databaseMetaData = Mockito.mock(DatabaseMetaData.class);
  when(connection.getMetaData()).thenReturn(databaseMetaData);
  when(databaseMetaData.getDatabaseProductName()).thenReturn("test");

  MockDataSource mockDataSource = new MockDataSource();
  mockDataSource.setConnection(connection);

  Assertions.assertEquals(DbStateMachineConfig.getDbTypeFromDataSource(mockDataSource), "test");
}
// Registers a fetcher and its config under the fetcher's name, then immediately
// records an access (getFetcherAndLogAccess) so the expiry clock starts now.
public <T extends AbstractFetcher, C extends AbstractConfig> void createFetcher(T fetcher,
                                                                                C config) {
  fetchers.put(fetcher.getName(), fetcher);
  fetcherConfigs.put(fetcher.getName(), config);
  getFetcherAndLogAccess(fetcher.getName());
}
// Registers a fetcher in a short-expiry store and waits for the expiry sweep to
// remove both the fetcher and its config.
@Test
void createFetcher() {
  try (ExpiringFetcherStore expiringFetcherStore = new ExpiringFetcherStore(1, 5)) {
    AbstractFetcher fetcher = new AbstractFetcher() {
      @Override
      public InputStream fetch(String fetchKey, Metadata metadata, ParseContext parseContext) {
        return null;
      }
    };
    fetcher.setName("nick");
    AbstractConfig config = new AbstractConfig() {
    };
    expiringFetcherStore.createFetcher(fetcher, config);

    Assertions.assertNotNull(expiringFetcherStore
        .getFetchers()
        .get(fetcher.getName()));

    Awaitility
        .await()
        .atMost(Duration.ofSeconds(60))
        .until(() -> expiringFetcherStore
            .getFetchers()
            .get(fetcher.getName()) == null);
    assertNull(expiringFetcherStore
        .getFetcherConfigs()
        .get(fetcher.getName()));
  }
}
// Asserts the subject equals none of the given values; the varargs are collected
// via accumulate(...) and delegated to isNotIn. first/second are required so at
// least two candidates must be supplied.
public void isNoneOf(
    @Nullable Object first, @Nullable Object second, @Nullable Object @Nullable ... rest) {
  isNotIn(accumulate(first, second, rest));
}
@Test public void isNoneOfFailure() { expectFailure.whenTesting().that("b").isNoneOf("a", "b", "c"); assertFailureKeys("expected not to be any of", "but was"); assertFailureValue("expected not to be any of", "[a, b, c]"); }
MessageDigest getMessageDigest() { try { return (MessageDigest) messageDigest.clone(); } catch (CloneNotSupportedException e) { throw new RuntimeException("Unsupported clone for MessageDigest.", e); } }
@Test void messageDigestReturnsDistinctInstances() { final MessageDigest md1 = testee.getMessageDigest(); final MessageDigest md2 = testee.getMessageDigest(); Assertions.assertThat(md1 != md2).isTrue(); Assertions.assertThat(md1.getAlgorithm()).isEqualTo(md2.getAlgorithm()); Assertions.assertThat(md1.getDigestLength()).isEqualTo(md2.getDigestLength()); Assertions.assertThat(md1.getProvider()).isEqualTo(md2.getProvider()); Assertions.assertThat(md1.toString()).isEqualTo(md2.toString()); }
@Override public int hashCode() { return intValue; }
@Test public void testHashCode() throws Exception { for (DnsRecordType t : allTypes()) { assertEquals(t.intValue(), t.hashCode()); } }
public Set<Long> register(final StorageTierAssoc globalStorageTierAssoc, final List<String> storageTierAliases, final Map<String, Long> totalBytesOnTiers, final Map<String, Long> usedBytesOnTiers, final Set<Long> blocks) { mUsage.updateUsage(globalStorageTierAssoc, storageTierAliases, totalBytesOnTiers, usedBytesOnTiers); Set<Long> removedBlocks; if (mIsRegistered) { // This is a re-register of an existing worker. Assume the new block ownership data is more // up-to-date and update the existing block information. LOG.info("re-registering an existing workerId: {}", mMeta.mId); // Compute the difference between the existing block data, and the new data. removedBlocks = Sets.difference(mBlocks, blocks); } else { removedBlocks = Collections.emptySet(); } // Set the new block information. mBlocks = blocks; mIsRegistered = true; return removedBlocks; }
@Test public void registerWithDifferentNumberOfTiers() { mThrown.expect(IllegalArgumentException.class); mThrown.expectMessage("totalBytesOnTiers and usedBytesOnTiers should have the same number of" + " tiers as storageTierAliases, but storageTierAliases has 2 tiers, while" + " totalBytesOnTiers has 2 tiers and usedBytesOnTiers has 1 tiers"); mInfo.register(GLOBAL_STORAGE_TIER_ASSOC, STORAGE_TIER_ALIASES, TOTAL_BYTES_ON_TIERS, ImmutableMap.of(Constants.MEDIUM_SSD, (long) Constants.KB), NEW_BLOCKS); }
public static boolean hasVariable( String variable ) { if ( variable == null ) { return false; } return checkForVariableDelimeters( variable, UNIX_OPEN, UNIX_CLOSE ) || checkForVariableDelimeters( variable, WINDOWS_OPEN, WINDOWS_CLOSE ) || checkForVariableDelimeters( variable, HEX_OPEN, HEX_CLOSE ); }
@Test public void testHasVariable() { assertTrue( StringUtil.hasVariable( "abc${foo}" ) ); assertTrue( StringUtil.hasVariable( "abc%%foo%%efg" ) ); assertTrue( StringUtil.hasVariable( "$[foo]abc" ) ); assertFalse( "Open and close ordered improperly", StringUtil.hasVariable( "a}bc${foo" ) ); assertFalse( "Variable is not closed", StringUtil.hasVariable( "abc${foo" ) ); assertFalse( "Variable is not opened", StringUtil.hasVariable( "abcfoo}" ) ); assertFalse( "no variable present to substitute", StringUtil.hasVariable( "abc${}foo" ) ); }
public static boolean extractMinionAllowDownloadFromServer(TableConfig tableConfig, String taskType, boolean defaultValue) { TableTaskConfig tableTaskConfig = tableConfig.getTaskConfig(); if (tableTaskConfig != null) { Map<String, String> configs = tableTaskConfig.getConfigsForTaskType(taskType); if (configs != null && !configs.isEmpty()) { return Boolean.parseBoolean( configs.getOrDefault(TableTaskConfig.MINION_ALLOW_DOWNLOAD_FROM_SERVER, String.valueOf(defaultValue))); } } return defaultValue; }
@Test public void testExtractMinionAllowDownloadFromServer() { Map<String, String> configs = new HashMap<>(); TableTaskConfig tableTaskConfig = new TableTaskConfig( Collections.singletonMap(MinionConstants.MergeRollupTask.TASK_TYPE, configs)); TableConfig tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName("sampleTable") .setTaskConfig(tableTaskConfig).build(); // Test when the configuration is not set, should return the default value which is false assertFalse(MinionTaskUtils.extractMinionAllowDownloadFromServer(tableConfig, MinionConstants.MergeRollupTask.TASK_TYPE, false)); // Test when the configuration is set to true configs.put(TableTaskConfig.MINION_ALLOW_DOWNLOAD_FROM_SERVER, "true"); tableTaskConfig = new TableTaskConfig(Collections.singletonMap(MinionConstants.MergeRollupTask.TASK_TYPE, configs)); tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName("sampleTable") .setTaskConfig(tableTaskConfig).build(); assertTrue(MinionTaskUtils.extractMinionAllowDownloadFromServer(tableConfig, MinionConstants.MergeRollupTask.TASK_TYPE, false)); // Test when the configuration is set to false configs.put(TableTaskConfig.MINION_ALLOW_DOWNLOAD_FROM_SERVER, "false"); tableTaskConfig = new TableTaskConfig(Collections.singletonMap(MinionConstants.MergeRollupTask.TASK_TYPE, configs)); tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName("sampleTable") .setTaskConfig(tableTaskConfig).build(); assertFalse(MinionTaskUtils.extractMinionAllowDownloadFromServer(tableConfig, MinionConstants.MergeRollupTask.TASK_TYPE, false)); }
@Override public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) { super.onDataReceived(device, data); final DSTOffset offset = readDSTOffset(data, 0); if (offset == null) { onInvalidDataReceived(device, data); return; } onDSTOffsetReceived(device, offset); }
@Test public void onDSTOffsetReceived_half() { final Data data = new Data(new byte[] { 2 }); callback.onDataReceived(null, data); assertTrue(success); assertSame(DSTOffsetCallback.DSTOffset.HALF_AN_HOUR_DAYLIGHT_TIME, result); }
@Override public DefaultAuthenticationContext build(Metadata metadata, GeneratedMessageV3 request) { try { DefaultAuthenticationContext context = new DefaultAuthenticationContext(); context.setChannelId(metadata.get(GrpcConstants.CHANNEL_ID)); context.setRpcCode(request.getDescriptorForType().getFullName()); String authorization = metadata.get(GrpcConstants.AUTHORIZATION); if (StringUtils.isEmpty(authorization)) { return context; } String datetime = metadata.get(GrpcConstants.DATE_TIME); if (StringUtils.isEmpty(datetime)) { throw new AuthenticationException("datetime is null."); } String[] result = authorization.split(CommonConstants.SPACE, 2); if (result.length != 2) { throw new AuthenticationException("authentication header is incorrect."); } String[] keyValues = result[1].split(CommonConstants.COMMA); for (String keyValue : keyValues) { String[] kv = keyValue.trim().split(CommonConstants.EQUAL, 2); int kvLength = kv.length; if (kv.length != 2) { throw new AuthenticationException("authentication keyValues length is incorrect, actual length={}.", kvLength); } String authItem = kv[0]; if (CREDENTIAL.equals(authItem)) { String[] credential = kv[1].split(CommonConstants.SLASH); int credentialActualLength = credential.length; if (credentialActualLength == 0) { throw new AuthenticationException("authentication credential length is incorrect, actual length={}.", credentialActualLength); } context.setUsername(credential[0]); continue; } if (SIGNATURE.equals(authItem)) { context.setSignature(this.hexToBase64(kv[1])); } } context.setContent(datetime.getBytes(StandardCharsets.UTF_8)); return context; } catch (AuthenticationException e) { throw e; } catch (Throwable e) { throw new AuthenticationException("create authentication context error.", e); } }
@Test public void build2() { when(channel.id()).thenReturn(mockChannelId("channel-id")); when(channelHandlerContext.channel()).thenReturn(channel); SendMessageRequestHeader requestHeader = new SendMessageRequestHeader(); requestHeader.setTopic("topic-test"); requestHeader.setQueueId(0); requestHeader.setBornTimestamp(117036786441330L); requestHeader.setBname("brokerName-1"); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.SEND_MESSAGE, requestHeader); request.setVersion(441); request.addExtField("AccessKey", "abc"); request.addExtField("Signature", "ZG26exJ5u9q1fwZlO4DCmz2Rs88="); request.makeCustomHeaderToNet(); DefaultAuthenticationContext context = builder.build(channelHandlerContext, request); Assert.assertNotNull(context); Assert.assertEquals("abc", context.getUsername()); Assert.assertEquals("ZG26exJ5u9q1fwZlO4DCmz2Rs88=", context.getSignature()); Assert.assertEquals("abcbrokerName-11170367864413300topic-test", new String(context.getContent(), StandardCharsets.UTF_8)); }
@Override public Set<String> codeVariants() { if (codeVariants == null) { return Set.of(); } else { return ImmutableSet.copyOf(codeVariants); } }
@Test void codeVariants_whenNull_shouldReturnEmptySet() { assertThat(issue.codeVariants()).isEmpty(); }
void fetchUpdateCheckHeaders(DownloadableFile downloadableFile) throws IOException, GeneralSecurityException { String url = downloadableFile.validatedUrl(urlGenerator); final HttpRequestBase request = new HttpHead(url); request.setConfig(RequestConfig.custom().setConnectTimeout(HTTP_TIMEOUT_IN_MILLISECONDS).build()); try ( CloseableHttpClient httpClient = httpClientBuilder.build(); CloseableHttpResponse response = httpClient.execute(request) ) { handleInvalidResponse(response, url); this.md5 = response.getFirstHeader(MD5_HEADER).getValue(); this.extraProperties = HeaderUtil.parseExtraProperties(response.getFirstHeader(AGENT_EXTRA_PROPERTIES_HEADER)); } }
@Test public void shouldFailIfServerIsNotAvailable() { ServerBinaryDownloader downloader = new ServerBinaryDownloader(new GoAgentServerHttpClientBuilder(null, SslVerificationMode.NONE, null, null, null), ServerUrlGeneratorMother.generatorWithoutSubPathFor("https://invalidserver:" + server.getSecurePort() + "/go")); assertThatThrownBy(() -> downloader.fetchUpdateCheckHeaders(DownloadableFile.AGENT)) .isExactlyInstanceOf(UnknownHostException.class) .hasMessageContaining("invalidserver"); }
@Override public void add(Double value) { this.count++; this.sum += value; }
@Test void testAdd() { AverageAccumulator average = new AverageAccumulator(); int i1; for (i1 = 0; i1 < 10; i1++) { average.add(i1); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); Integer i2; for (i2 = 0; i2 < 10; i2++) { average.add(i2); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); long i3; for (i3 = 0; i3 < 10; i3++) { average.add(i3); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); Long i4; for (i4 = 0L; i4 < 10; i4++) { average.add(i4); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); double i5; for (i5 = 0; i5 < 10; i5++) { average.add(i5); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); Double i6; for (i6 = 0.0; i6 < 10; i6++) { average.add(i6); } assertThat(average.getLocalValue()).isCloseTo(4.5, within(0.0)); average.resetLocal(); assertThat(average.getLocalValue()).isCloseTo(0.0, within(0.0)); }
public static double parseBytesToDouble(List data) { return parseBytesToDouble(data, 0); }
@Test public void parseBytesToDouble() { byte[] doubleValByte = {0x0A}; Assertions.assertEquals(0, Double.compare(4.9E-323, TbUtils.parseBytesToDouble(doubleValByte))); doubleValByte = new byte[]{64, -101, 4, -79, 12, -78, -107, -22}; Assertions.assertEquals(0, Double.compare(doubleVal, TbUtils.parseBytesToDouble(doubleValByte, 0))); Assertions.assertEquals(0, Double.compare(doubleValRev, TbUtils.parseBytesToDouble(doubleValByte, 0, 8, false))); List<Byte> doubleValList = Bytes.asList(doubleValByte); Assertions.assertEquals(0, Double.compare(doubleVal, TbUtils.parseBytesToDouble(doubleValList, 0))); Assertions.assertEquals(0, Double.compare(doubleValRev, TbUtils.parseBytesToDouble(doubleValList, 0, 8, false))); doubleValByte = new byte[]{0x7F, (byte) 0xC0, (byte) 0xFF, 0x00, 0x7F, (byte) 0xC0, (byte) 0xFF, 0x00}; double doubleExpectedBe = 2387013.651780523d; double doubleExpectedLe = 7.234601680440024E-304d; double actualBe = TbUtils.parseBytesToDouble(doubleValByte, 0, 8, true); BigDecimal bigDecimal = new BigDecimal(actualBe); // We move the decimal point to the left by 301 positions actualBe = bigDecimal.movePointLeft(301).doubleValue(); Assertions.assertEquals(0, Double.compare(doubleExpectedBe, actualBe)); Assertions.assertEquals(0, Double.compare(doubleExpectedLe, TbUtils.parseBytesToDouble(doubleValByte, 0, 8, false))); doubleValList = Bytes.asList(doubleValByte); actualBe = TbUtils.parseBytesToDouble(doubleValList, 0); bigDecimal = new BigDecimal(actualBe); actualBe = bigDecimal.movePointLeft(301).doubleValue(); Assertions.assertEquals(0, Double.compare(doubleExpectedBe, actualBe)); doubleExpectedLe = 26950.174646662283d; double actualLe = TbUtils.parseBytesToDouble(doubleValList, 0, 5, false); bigDecimal = new BigDecimal(actualLe); actualLe = bigDecimal.movePointRight(316).doubleValue(); Assertions.assertEquals(0, Double.compare(doubleExpectedLe, actualLe)); // 4 294 967 295L == {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF} doubleValByte = new 
byte[]{-1, -1, -1, -1, -1, -1, -1, -1}; String message = "is a Not-a-Number (NaN) value"; try { TbUtils.parseBytesToDouble(doubleValByte, 0, 8, true); Assertions.fail("Should throw NumberFormatException"); } catch (RuntimeException e) { Assertions.assertTrue(e.getMessage().contains(message)); } }
@Override public AttributedList<Path> search(final Path workdir, final Filter<Path> regex, final ListProgressListener listener) throws BackgroundException { try { return new DriveSearchListService(session, fileid, regex.toString()).list(workdir, listener); } catch(NotfoundException e) { return AttributedList.emptyList(); } }
@Test public void testSearchRoot() throws Exception { final String name = new AlphanumericRandomStringService().random(); final Path workdir = DriveHomeFinderService.MYDRIVE_FOLDER; final DriveFileIdProvider fileid = new DriveFileIdProvider(session); final Path file = new DriveTouchFeature(session, fileid).touch(new Path(workdir, name, EnumSet.of(Path.Type.file)), new TransferStatus()); final DriveSearchFeature feature = new DriveSearchFeature(session, fileid); assertTrue(feature.search(workdir, new SearchFilter(name), new DisabledListProgressListener()).contains(file)); // Supports prefix matching only assertFalse(feature.search(workdir, new SearchFilter(StringUtils.substring(name, 2)), new DisabledListProgressListener()).contains(file)); assertTrue(feature.search(workdir, new SearchFilter(StringUtils.substring(name, 0, name.length() - 2)), new DisabledListProgressListener()).contains(file)); final Path subdir = new Path(workdir, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); assertFalse(feature.search(subdir, new SearchFilter(name), new DisabledListProgressListener()).contains(file)); new DriveDeleteFeature(session, fileid).delete(Arrays.asList(file), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
List<Set<UiNode>> splitByLayer(List<String> layerTags, Set<? extends UiNode> nodes) { final int nLayers = layerTags.size(); if (!layerTags.get(nLayers - 1).equals(LAYER_DEFAULT)) { throw new IllegalArgumentException(E_DEF_NOT_LAST); } List<Set<UiNode>> splitList = new ArrayList<>(layerTags.size()); Map<String, Set<UiNode>> byLayer = new HashMap<>(layerTags.size()); for (String tag : layerTags) { Set<UiNode> set = new HashSet<>(); byLayer.put(tag, set); splitList.add(set); } for (UiNode n : nodes) { String which = n.layer(); if (!layerTags.contains(which)) { which = LAYER_DEFAULT; } byLayer.get(which).add(n); } return splitList; }
@Test public void twoLayers() { title("twoLayers()"); List<Set<UiNode>> result = t2.splitByLayer(PKT_DEF_TAGS, NODES); print(result); assertEquals("wrong split size", 2, result.size()); Set<UiNode> pkt = result.get(0); Set<UiNode> def = result.get(1); assertEquals("pkt bad size", 2, pkt.size()); assertEquals("missing node B", true, pkt.contains(NODE_B)); assertEquals("missing node E", true, pkt.contains(NODE_E)); assertEquals("def bad size", 4, def.size()); assertEquals("missing node D", true, def.contains(NODE_D)); assertEquals("missing node F", true, def.contains(NODE_F)); assertEquals("missing node A", true, def.contains(NODE_A)); assertEquals("missing node C", true, def.contains(NODE_C)); }
@Override public Messages process(Messages messages) { try (Timer.Context ignored = executionTime.time()) { final State latestState = stateUpdater.getLatestState(); if (latestState.enableRuleMetrics()) { return process(messages, new RuleMetricsListener(metricRegistry), latestState); } return process(messages, new NoopInterpreterListener(), latestState); } }
@Test public void testMatchPassContinuesIfOneRuleMatched() { final RuleService ruleService = mock(MongoDbRuleService.class); when(ruleService.loadAll()).thenReturn(ImmutableList.of(RULE_TRUE, RULE_FALSE, RULE_ADD_FOOBAR)); final PipelineService pipelineService = mock(MongoDbPipelineService.class); when(pipelineService.loadAll()).thenReturn(Collections.singleton( PipelineDao.create("p1", "title", "description", "pipeline \"pipeline\"\n" + "stage 0 match pass\n" + " rule \"true\";\n" + " rule \"false\";\n" + "stage 1 match pass\n" + " rule \"add_foobar\";\n" + "end\n", Tools.nowUTC(), null) )); final Map<String, Function<?>> functions = ImmutableMap.of(SetField.NAME, new SetField()); final PipelineInterpreter interpreter = createPipelineInterpreter(ruleService, pipelineService, functions); final Messages processed = interpreter.process(messageInDefaultStream("message", "test")); final List<Message> messages = ImmutableList.copyOf(processed); assertThat(messages).hasSize(1); final Message actualMessage = messages.get(0); assertThat(actualMessage.getFieldAs(String.class, "foobar")).isEqualTo("covfefe"); }
public static String encodeHexString(final byte[] data) { StringBuilder result = new StringBuilder(data.length * 2); for (byte b : data) { result.append(BASE16_CHARS2[(b >>> 4) & 0xF]).append(BASE16_CHARS2[b & 0xF]); } return result.toString(); }
@Test public void testEncodeHexString() { assertEquals( StringUtils.encodeHexString(new byte[] {1, 2, 10, 20, 30, 40, 50}), "01020a141e2832"); }
@Nullable @Override public Message decode(@Nonnull RawMessage rawMessage) { final byte[] payload = rawMessage.getPayload(); final JsonNode event; try { event = objectMapper.readTree(payload); if (event == null || event.isMissingNode()) { throw new IOException("null result"); } } catch (IOException e) { LOG.error("Couldn't decode raw message {}", rawMessage); return null; } return parseEvent(event); }
@Test public void decodeMessagesHandlesGenericBeatWithDocker() throws Exception { final Message message = codec.decode(messageFromJson("generic-with-docker.json")); assertThat(message).isNotNull(); assertThat(message.getMessage()).isEqualTo("-"); assertThat(message.getSource()).isEqualTo("unknown"); assertThat(message.getTimestamp()).isEqualTo(new DateTime(2016, 4, 1, 0, 0, DateTimeZone.UTC)); assertThat(message.getField("beats_type")).isEqualTo("beat"); assertThat(message.getField("beat_foo")).isEqualTo("bar"); assertThat(message.getField("beat_docker_id")).isEqualTo("123"); assertThat(message.getField("beat_docker_name")).isEqualTo("container-1"); assertThat(message.getField("beat_docker_labels_docker-kubernetes-pod")).isEqualTo("hello"); }
@Override public SchemaResult getKeySchema( final Optional<String> topicName, final Optional<Integer> schemaId, final FormatInfo expectedFormat, final SerdeFeatures serdeFeatures ) { return getSchema(topicName, schemaId, expectedFormat, serdeFeatures, true); }
@Test public void shouldReturnErrorFromGetKeyWithIdSchemaIfNotFound() throws Exception { // Given: when(srClient.getSchemaBySubjectAndId(any(), anyInt())) .thenThrow(notFoundException()); // When: final SchemaResult result = supplier.getKeySchema(Optional.of(TOPIC_NAME), Optional.of(42), expectedFormat, SerdeFeatures.of(SerdeFeature.UNWRAP_SINGLES)); // Then: assertThat(result.schemaAndId, is(Optional.empty())); assertThat(result.failureReason, is(not(Optional.empty()))); verifyFailureMessageForKey(result, Optional.of(42)); }
@Override protected Mono<Void> handleSelectorIfNull(final String pluginName, final ServerWebExchange exchange, final ShenyuPluginChain chain) { return WebFluxResultUtils.noSelectorResult(pluginName, exchange); }
@Test public void handleSelectorIfNullTest() { assertNotNull(dividePlugin.handleSelectorIfNull(PluginEnum.DIVIDE.getName(), exchange, chain)); }
@Override public Output run(RunContext runContext) throws Exception { URI from = new URI(runContext.render(this.from)); final PebbleExpressionPredicate predicate = getExpressionPredication(runContext); final Path path = runContext.workingDir().createTempFile(".ion"); long processedItemsTotal = 0L; long droppedItemsTotal = 0L; try (final BufferedWriter writer = Files.newBufferedWriter(path); final BufferedReader reader = newBufferedReader(runContext, from)) { String item; while ((item = reader.readLine()) != null) { IllegalVariableEvaluationException exception = null; Boolean match = null; try { match = predicate.apply(item); } catch (IllegalVariableEvaluationException e) { exception = e; } FilterType action = this.filterType; if (match == null) { switch (errorOrNullBehavior) { case FAIL -> { if (exception != null) { throw exception; } else { throw new IllegalVariableEvaluationException(String.format( "Expression `%s` return `null` on item `%s`", filterCondition, item )); } } case INCLUDE -> action = FilterType.INCLUDE; case EXCLUDE -> action = FilterType.EXCLUDE; } match = true; } if (!match) { action = action.reverse(); } switch (action) { case INCLUDE -> { writer.write(item); writer.newLine(); } case EXCLUDE -> droppedItemsTotal++; } processedItemsTotal++; } } URI uri = runContext.storage().putFile(path.toFile()); return Output.builder() .uri(uri) .processedItemsTotal(processedItemsTotal) .droppedItemsTotal(droppedItemsTotal) .build(); }
@Test void shouldThrowExceptionGivenInvalidRecordsForFail() throws Exception { // Given RunContext runContext = runContextFactory.of(); FilterItems task = FilterItems .builder() .from(generateKeyValueFile(TEST_INVALID_ITEMS, runContext).toString()) .filterCondition(" {{ value % 2 == 0 }}") .filterType(FilterItems.FilterType.INCLUDE) .errorOrNullBehavior(FilterItems.ErrorOrNullBehavior.FAIL) .build(); // When/Then Assertions.assertThrows(IllegalVariableEvaluationException.class, () -> task.run(runContext)); }
public static <T> ProviderBootstrap<T> from(ProviderConfig<T> providerConfig) { String bootstrap = providerConfig.getBootstrap(); if (StringUtils.isEmpty(bootstrap)) { // Use default provider bootstrap bootstrap = RpcConfigs.getStringValue(RpcOptions.DEFAULT_PROVIDER_BOOTSTRAP); providerConfig.setBootstrap(bootstrap); } ProviderBootstrap providerBootstrap = ExtensionLoaderFactory.getExtensionLoader(ProviderBootstrap.class) .getExtension(bootstrap, new Class[] { ProviderConfig.class }, new Object[] { providerConfig }); return (ProviderBootstrap<T>) providerBootstrap; }
@Test public void from() throws Exception { ProviderConfig providerConfig = new ProviderConfig().setBootstrap("test"); ProviderBootstrap bootstrap = Bootstraps.from(providerConfig); Assert.assertEquals(TestProviderBootstrap.class, bootstrap.getClass()); Assert.assertEquals(providerConfig, bootstrap.getProviderConfig()); // if not set bootstrap providerConfig = new ProviderConfig(); bootstrap = Bootstraps.from(providerConfig); Assert.assertEquals(TestProviderBootstrap.class, bootstrap.getClass()); Assert.assertEquals(providerConfig, bootstrap.getProviderConfig()); }
public Map<String, Collection<String>> allPermissionsMap() { return allPermissionsMap; }
@Test public void testPluginPermissions() throws Exception { final ImmutableSet<Permission> pluginPermissions = ImmutableSet.of( Permission.create("foo:bar", "bar"), Permission.create("foo:baz", "baz"), Permission.create("hello:world", "hello") ); final PermissionsPluginPermissions plugin = new PermissionsPluginPermissions(pluginPermissions); final Permissions permissions = new Permissions(ImmutableSet.of(restPermissions, plugin)); assertThat(permissions.allPermissionsMap().get("foo")) .containsOnly("bar", "baz"); assertThat(permissions.allPermissionsMap().get("hello")) .containsOnly("world"); }
public Collection<Service> getSubscribedService() { return subscriberIndexes.keySet(); }
@Test void testGetSubscribedService() { Collection<Service> subscribedService = clientServiceIndexesManager.getSubscribedService(); assertNotNull(subscribedService); assertEquals(1, subscribedService.size()); }
@Override public Object getValueFromResultSet( ResultSet rs, ValueMetaInterface val, int index ) throws KettleDatabaseException { Object data; try { if ( val.getType() == ValueMetaInterface.TYPE_BINARY ) { data = rs.getString( index + 1 ); } else { return super.getValueFromResultSet( rs, val, index ); } if ( rs.wasNull() ) { data = null; } } catch ( SQLException e ) { throw new KettleDatabaseException( "Unable to get value '" + val.toStringMeta() + "' from database resultset, index " + index, e ); } return data; }
@Test public void testGetValueFromResultSet() throws SQLException, KettleDatabaseException { ResultSet rs = mock( ResultSet.class ); //Binary Data Mockito.when( rs.getString( 1 ) ).thenReturn( "HODBACXXXXAAA" ); ValueMetaBinary tb = new ValueMetaBinary( "HODBACXXXXAAA" ); assertEquals( "HODBACXXXXAAA", dbMeta.getValueFromResultSet( rs,tb,0 ) ); //Super class function calling Mockito.when( rs.getString( 2 ) ).thenReturn( "AzureDB" ); ValueMetaString ts = new ValueMetaString( "AzureDB" ); assertEquals( "AzureDB", dbMeta.getValueFromResultSet( rs,ts,1 ) ); //ResultSet was null Mockito.when( rs.wasNull() ).thenReturn( true ); assertNull( dbMeta.getValueFromResultSet( rs,tb,2 ) ); }
public T add(String str) { requireNonNull(str, JVM_OPTION_NOT_NULL_ERROR_MESSAGE); String value = str.trim(); if (isInvalidOption(value)) { throw new IllegalArgumentException("a JVM option can't be empty and must start with '-'"); } checkMandatoryOptionOverwrite(value); options.add(value); return castThis(); }
@Test @UseDataProvider("variousEmptyStrings") public void add_throws_IAE_if_argument_is_empty(String emptyString) { expectJvmOptionNotEmptyAndStartByDashIAE(() -> underTest.add(emptyString)); }
public Duration getServerTimeoutOrThrow() { // readTimeout = DOWNSTREAM_OVERHEAD + serverTimeout TimeBudget serverBudget = readBudget().withReserved(DOWNSTREAM_OVERHEAD); if (serverBudget.timeLeft().get().compareTo(MIN_SERVER_TIMEOUT) < 0) throw new UncheckedTimeoutException("Timed out after " + timeBudget.originalTimeout().get()); return serverBudget.timeLeft().get(); }
@Test public void alreadyTimedOut() { clock.advance(Duration.ofSeconds(4)); try { timeouts.getServerTimeoutOrThrow(); fail(); } catch (UncheckedTimeoutException e) { assertEquals("Timed out after PT3S", e.getMessage()); } }
public static Type resolveClassIndexedParameter(Type type, Class<?> source, int index) { return calculateParameterValue(resolveParameterValues(type), source.getTypeParameters()[index]); }
@Test void test() { assertSame(TypeParameterResolver.resolveClassIndexedParameter(Mapper1.class, Mapper.class, 0), null); assertSame(TypeParameterResolver.resolveClassIndexedParameter(Mapper2.class, Mapper.class, 0), MyEntity.class); assertSame(TypeParameterResolver.resolveClassIndexedParameter(Mapper3.class, Mapper.class, 0), MyEntity.class); assertSame(TypeParameterResolver.resolveClassIndexedParameter(Mapper5.class, Mapper.class, 0), MyEntity.class); assertSame(TypeParameterResolver.resolveClassIndexedParameter(MyEntity.class, CA.class, 0), Number.class); assertSame(TypeParameterResolver.resolveClassIndexedParameter(MyEntity.class, CB.class, 1), Number.class); }
public MessageExtBrokerInner renewHalfMessageInner(MessageExt msgExt) { MessageExtBrokerInner msgInner = new MessageExtBrokerInner(); msgInner.setTopic(msgExt.getTopic()); msgInner.setBody(msgExt.getBody()); msgInner.setQueueId(msgExt.getQueueId()); msgInner.setMsgId(msgExt.getMsgId()); msgInner.setSysFlag(msgExt.getSysFlag()); msgInner.setTags(msgExt.getTags()); msgInner.setTagsCode(MessageExtBrokerInner.tagsString2tagsCode(msgInner.getTags())); MessageAccessor.setProperties(msgInner, msgExt.getProperties()); msgInner.setPropertiesString(MessageDecoder.messageProperties2String(msgExt.getProperties())); msgInner.setBornTimestamp(msgExt.getBornTimestamp()); msgInner.setBornHost(msgExt.getBornHost()); msgInner.setStoreHost(msgExt.getStoreHost()); msgInner.setWaitStoreMsgOK(false); return msgInner; }
@Test public void testRenewHalfMessageInner() { MessageExt messageExt = new MessageExt(); long bornTimeStamp = messageExt.getBornTimestamp(); MessageExt messageExtRes = transactionBridge.renewHalfMessageInner(messageExt); assertThat(messageExtRes.getBornTimestamp()).isEqualTo(bornTimeStamp); }
public String getContainerId() { return containerId; }
@Test public void testGetContainerId() { assertEquals(CONTAINER_ID, deletionTask.getContainerId()); }
static MD5Hash getFileClient(URL infoServer, String queryString, List<File> localPaths, Storage dstStorage, boolean getChecksum) throws IOException { URL url = new URL(infoServer, ImageServlet.PATH_SPEC + "?" + queryString); LOG.info("Opening connection to " + url); return doGetUrl(url, localPaths, dstStorage, getChecksum); }
@Test public void testClientSideException() throws IOException { Configuration conf = new HdfsConfiguration(); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf) .numDataNodes(0).build(); NNStorage mockStorage = Mockito.mock(NNStorage.class); List<File> localPath = Collections.singletonList( new File("/xxxxx-does-not-exist/blah")); try { URL fsName = DFSUtil.getInfoServer( cluster.getNameNode().getServiceRpcAddress(), conf, DFSUtil.getHttpClientScheme(conf)).toURL(); String id = "getimage=1&txid=0"; TransferFsImage.getFileClient(fsName, id, localPath, mockStorage, false); fail("Didn't get an exception!"); } catch (IOException ioe) { Mockito.verify(mockStorage).reportErrorOnFile(localPath.get(0)); assertTrue( "Unexpected exception: " + StringUtils.stringifyException(ioe), ioe.getMessage().contains("Unable to download to any storage")); } finally { cluster.shutdown(); } }
public ValueCollection values() { if (null == valueCollection) { valueCollection = new ValueCollection(); } return valueCollection; }
@Test void removeIfOnValuesCollection() { final Predicate<String> filter = (value) -> value.contains("e"); final UnsupportedOperationException exception = assertThrowsExactly(UnsupportedOperationException.class, () -> cache.values().removeIf(filter)); assertEquals("Cannot remove from ValueCollection", exception.getMessage()); }
public QueryCacheConfig setEntryListenerConfigs(List<EntryListenerConfig> listenerConfigs) { checkNotNull(listenerConfigs, "listenerConfig cannot be null"); this.entryListenerConfigs = listenerConfigs; return this; }
@Test(expected = NullPointerException.class) public void testSetEntryListenerConfigs_throwsException_whenNull() { QueryCacheConfig config = new QueryCacheConfig(); config.setEntryListenerConfigs(null); }
@Override public OAuth2AccessTokenDO refreshAccessToken(String refreshToken, String clientId) { // 查询访问令牌 OAuth2RefreshTokenDO refreshTokenDO = oauth2RefreshTokenMapper.selectByRefreshToken(refreshToken); if (refreshTokenDO == null) { throw exception0(GlobalErrorCodeConstants.BAD_REQUEST.getCode(), "无效的刷新令牌"); } // 校验 Client 匹配 OAuth2ClientDO clientDO = oauth2ClientService.validOAuthClientFromCache(clientId); if (ObjectUtil.notEqual(clientId, refreshTokenDO.getClientId())) { throw exception0(GlobalErrorCodeConstants.BAD_REQUEST.getCode(), "刷新令牌的客户端编号不正确"); } // 移除相关的访问令牌 List<OAuth2AccessTokenDO> accessTokenDOs = oauth2AccessTokenMapper.selectListByRefreshToken(refreshToken); if (CollUtil.isNotEmpty(accessTokenDOs)) { oauth2AccessTokenMapper.deleteBatchIds(convertSet(accessTokenDOs, OAuth2AccessTokenDO::getId)); oauth2AccessTokenRedisDAO.deleteList(convertSet(accessTokenDOs, OAuth2AccessTokenDO::getAccessToken)); } // 已过期的情况下,删除刷新令牌 if (DateUtils.isExpired(refreshTokenDO.getExpiresTime())) { oauth2RefreshTokenMapper.deleteById(refreshTokenDO.getId()); throw exception0(GlobalErrorCodeConstants.UNAUTHORIZED.getCode(), "刷新令牌已过期"); } // 创建访问令牌 return createOAuth2AccessToken(refreshTokenDO, clientDO); }
/**
 * Refreshing with an expired refresh token must raise error 401 and delete the
 * expired refresh-token row.
 */
@Test
public void testRefreshAccessToken_expired() {
    // Prepare arguments
    String refreshToken = randomString();
    String clientId = randomString();
    // Mock collaborators
    OAuth2ClientDO clientDO = randomPojo(OAuth2ClientDO.class).setClientId(clientId);
    when(oauth2ClientService.validOAuthClientFromCache(eq(clientId))).thenReturn(clientDO);
    // Seed data: a refresh token that expired one day ago
    OAuth2RefreshTokenDO refreshTokenDO = randomPojo(OAuth2RefreshTokenDO.class)
            .setRefreshToken(refreshToken).setClientId(clientId)
            .setExpiresTime(LocalDateTime.now().minusDays(1));
    oauth2RefreshTokenMapper.insert(refreshTokenDO);
    // Invoke and assert the expected service exception
    assertServiceException(() -> oauth2TokenService.refreshAccessToken(refreshToken, clientId),
            new ErrorCode(401, "刷新令牌已过期"));
    // The expired refresh token must have been removed
    assertEquals(0, oauth2RefreshTokenMapper.selectCount());
}
@Override public void unsubscribe(URL url, NotifyListener listener) { if (url == null) { throw new IllegalArgumentException("unsubscribe url == null"); } if (listener == null) { throw new IllegalArgumentException("unsubscribe listener == null"); } if (logger.isInfoEnabled()) { logger.info("Unsubscribe: " + url); } Set<NotifyListener> listeners = subscribed.get(url); if (listeners != null) { listeners.remove(listener); } // do not forget remove notified notified.remove(url); }
/** Unsubscribing with a null URL must throw {@link IllegalArgumentException}. */
@Test
void testUnsubscribeIfUrlNull() {
    Assertions.assertThrows(IllegalArgumentException.class, () -> {
        final AtomicReference<Boolean> notified = new AtomicReference<Boolean>(false);
        NotifyListener listener = urls -> notified.set(Boolean.TRUE);
        abstractRegistry.unsubscribe(null, listener);
        // Reaching this line means no exception was thrown — fail explicitly.
        Assertions.fail("unsubscribe url == null");
    });
}
/**
 * Rewrites {@code expression} by running it through the configured rewriter.
 * The unchecked cast assumes the rewriter returns the same concrete expression
 * type it was given — TODO confirm against the rewriter's contract.
 */
@SuppressWarnings("unchecked")
public <T extends Expression> T rewrite(final T expression, final C context) {
    return (T) rewriter.process(expression, context);
}
/**
 * A LIKE predicate should be rewritten by processing its value and pattern
 * sub-expressions while preserving the escape character and source location.
 */
@Test
public void shouldRewriteLikePredicate() {
    // Given:
    final LikePredicate parsed = parseExpression("col1 LIKE '%foo%' ESCAPE '!'");
    when(processor.apply(parsed.getValue(), context)).thenReturn(expr1);
    when(processor.apply(parsed.getPattern(), context)).thenReturn(expr2);
    // When:
    final Expression rewritten = expressionRewriter.rewrite(parsed, context);
    // Then:
    assertThat(rewritten,
            equalTo(new LikePredicate(parsed.getLocation(), expr1, expr2, Optional.of('!'))));
}
/**
 * Starts a scoped span named {@code name}, parented on the trace context that is
 * current at the time of the call (may be null, i.e. a new root).
 */
public ScopedSpan startScopedSpan(String name) { return startScopedSpanWithParent(name, currentTraceContext.get()); }
/** A name set after the span is started must override the name given at start. */
@Test
void startScopedSpan_overrideName() {
    ScopedSpan scoped = tracer.startScopedSpan("foo");
    try {
        scoped.name("bar");
    } finally {
        scoped.finish();
    }
    assertThat(spans.get(0).name()).isEqualTo("bar");
}
/**
 * Returns the flush threshold: the number of buffered events that triggers a flush.
 */
@Override
public int getFlushBulkSize() {
    final int flushBulkSize = 100;
    return flushBulkSize;
}
/** The flush bulk size is fixed at 100. */
@Test
public void getFlushBulkSize() {
    Assert.assertEquals(100, mSensorsAPI.getFlushBulkSize());
}
/**
 * Walks the parse-tree starting at {@code top} and builds the corresponding
 * linked list of converters, returning its head. Literal nodes become
 * {@link LiteralConverter}s; keyword nodes are resolved via the converter map,
 * with a %PARSER_ERROR literal substituted when resolution fails.
 */
Converter<E> compile() {
    head = tail = null;
    for (Node n = top; n != null; n = n.next) {
        switch (n.type) {
        case Node.LITERAL:
            addToList(new LiteralConverter<E>((String) n.getValue()));
            break;
        case Node.COMPOSITE_KEYWORD:
            CompositeNode cn = (CompositeNode) n;
            CompositeConverter<E> compositeConverter = createCompositeConverter(cn);
            if (compositeConverter == null) {
                // Unknown composite keyword: report and emit an error marker instead.
                addError("Failed to create converter for [%" + cn.getValue() + "] keyword");
                addToList(new LiteralConverter<E>("%PARSER_ERROR[" + cn.getValue() + "]"));
                break;
            }
            compositeConverter.setFormattingInfo(cn.getFormatInfo());
            compositeConverter.setOptionList(cn.getOptions());
            // Recursively compile the composite's child sub-pattern.
            Compiler<E> childCompiler = new Compiler<E>(cn.getChildNode(), converterMap);
            childCompiler.setContext(context);
            Converter<E> childConverter = childCompiler.compile();
            compositeConverter.setChildConverter(childConverter);
            addToList(compositeConverter);
            break;
        case Node.SIMPLE_KEYWORD:
            SimpleKeywordNode kn = (SimpleKeywordNode) n;
            DynamicConverter<E> dynaConverter = createConverter(kn);
            if (dynaConverter != null) {
                dynaConverter.setFormattingInfo(kn.getFormatInfo());
                dynaConverter.setOptionList(kn.getOptions());
                addToList(dynaConverter);
            } else {
                // if the appropriate dynaconverter cannot be found, then replace
                // it with a dummy LiteralConverter indicating an error.
                Converter<E> errConveter = new LiteralConverter<E>("%PARSER_ERROR[" + kn.getValue() + "]");
                addStatus(new ErrorStatus("[" + kn.getValue() + "] is not a valid conversion word", this));
                addToList(errConveter);
            }
        }
    }
    return head;
}
/** Compiles simple patterns mixing literals and registered conversion words. */
@Test
public void testBasic() throws Exception {
    {
        // Single keyword after a literal prefix.
        Parser<Object> p = new Parser<Object>("abc %hello");
        p.setContext(context);
        Node t = p.parse();
        Converter<Object> head = p.compile(t, converterMap);
        String result = write(head, new Object());
        assertEquals("abc Hello", result);
    }
    {
        // Two keywords in one pattern.
        Parser<Object> p = new Parser<Object>("abc %hello %OTT");
        p.setContext(context);
        Node t = p.parse();
        Converter<Object> head = p.compile(t, converterMap);
        String result = write(head, new Object());
        assertEquals("abc Hello 123", result);
    }
}
/**
 * Transforms an uncorrelated ApplyNode carrying exactly one IN-subquery
 * assignment into an equivalent SemiJoinNode; otherwise does not fire.
 */
@Override
public Result apply(ApplyNode applyNode, Captures captures, Context context) {
    // Only fire for a single subquery assignment.
    if (applyNode.getSubqueryAssignments().size() != 1) {
        return Result.empty();
    }
    RowExpression expression = getOnlyElement(applyNode.getSubqueryAssignments().getExpressions());
    // The single assignment must be an IN-subquery; EXISTS etc. are left alone.
    if (!(expression instanceof InSubqueryExpression)) {
        return Result.empty();
    }
    InSubqueryExpression inPredicate = (InSubqueryExpression) expression;
    VariableReferenceExpression semiJoinVariable = getOnlyElement(applyNode.getSubqueryAssignments().getVariables());
    SemiJoinNode replacement = new SemiJoinNode(
            applyNode.getSourceLocation(),
            context.getIdAllocator().getNextId(),
            applyNode.getInput(),
            applyNode.getSubquery(),
            inPredicate.getValue(),
            inPredicate.getSubquery(),
            semiJoinVariable,
            Optional.empty(),
            Optional.empty(),
            Optional.empty(),
            ImmutableMap.of());
    return Result.ofPlanNode(replacement);
}
/** The rule must not fire when the subquery expression is EXISTS rather than IN. */
@Test
public void testDoesNotFireOnNonInPredicateSubquery() {
    tester().assertThat(new TransformUncorrelatedInPredicateSubqueryToSemiJoin())
            .on(p -> p.apply(
                    assignment(p.variable("x"), new ExistsExpression(Optional.empty(), TRUE_CONSTANT)),
                    emptyList(),
                    p.values(),
                    p.values()))
            .doesNotFire();
}
/**
 * Warns when an &lt;if&gt; element is nested inside an &lt;appender&gt;, &lt;logger&gt;
 * or &lt;root&gt; element, emitting one warning per offending parent/child pair.
 */
@Override
public void check(Model model) {
    if (model == null)
        return;
    // Collect all appender/logger/root models anywhere under the top model.
    List<Model> secondPhaseModels = new ArrayList<>();
    deepFindAllModelsOfType(AppenderModel.class, secondPhaseModels, model);
    deepFindAllModelsOfType(LoggerModel.class, secondPhaseModels, model);
    deepFindAllModelsOfType(RootLoggerModel.class, secondPhaseModels, model);
    // Find <if> models nested within any of the collected models.
    List<Pair<Model, Model>> nestedPairs = deepFindNestedSubModelsOfType(IfModel.class, secondPhaseModels);
    if (nestedPairs.isEmpty())
        return;
    addWarn("<if> elements cannot be nested within an <appender>, <logger> or <root> element");
    addWarn("See also " + NESTED_IF_WARNING_URL);
    // One warning per parent/child pair, with line numbers for both.
    for (Pair<Model, Model> pair : nestedPairs) {
        Model p = pair.first;
        int pLine = p.getLineNumber();
        Model s = pair.second;
        int sLine = s.getLineNumber();
        addWarn("Element <" + p.getTag() + "> at line " + pLine + " contains a nested <" + s.getTag()
                + "> element at line " + sLine);
    }
}
@Test public void singleLoggerWithNestedIf() { ClassicTopModel topModel = new ClassicTopModel(); Model rootLoggerModel = setupModel(new RootLoggerModel(), "root", 1); topModel.addSubModel(rootLoggerModel); Model ifModel0 = setupModel(new IfModel(), "if", 2); rootLoggerModel.addSubModel(ifModel0); Model loggerModel = setupModel(new LoggerModel(), "logger", 3); topModel.addSubModel(loggerModel); Model ifModel1 = setupModel(new IfModel(), "if", 4); loggerModel.addSubModel(ifModel1); Model appenderModel = setupModel(new LoggerModel(), "appender", 5); topModel.addSubModel(appenderModel); Model ifModel2 = setupModel(new IfModel(), "if", 6); appenderModel.addSubModel(ifModel2); inwspeChecker.check(topModel); StatusPrinter.print(context); // Element <root> at line 1 contains a nested <if> element at line 2 String regex0 = "Element <root> at line 1 contains a nested <if> element at line 2"; statusChecker.assertContainsMatch(Status.WARN, regex0); String regex1 = "Element <logger> at line 3 contains a nested <if> element at line 4"; statusChecker.assertContainsMatch(Status.WARN, regex1); String regex2 = "Element <appender> at line 5 contains a nested <if> element at line 6"; statusChecker.assertContainsMatch(Status.WARN, regex2); }
/**
 * Determines which of the four possible recovery IDs reconstructs this key's
 * public key from the given signature over {@code hash}.
 *
 * @throws RuntimeException if no recovery ID reproduces this key (should never happen)
 */
public byte findRecoveryId(Sha256Hash hash, ECDSASignature sig) {
    for (byte candidate = 0; candidate < 4; candidate++) {
        ECKey recovered = ECKey.recoverFromSignature(candidate, sig, hash, isCompressed());
        if (recovered != null && recovered.pub.equals(pub)) {
            return candidate;
        }
    }
    throw new RuntimeException("Could not construct a recoverable key. This should never happen.");
}
/**
 * The recovery ID found for a fresh signature must be one of the four legal
 * values, and must be derivable from the public key alone.
 */
@Test
public void findRecoveryId() {
    ECKey key = new ECKey();
    String message = "Hello World!";
    Sha256Hash hash = Sha256Hash.of(message.getBytes());
    ECKey.ECDSASignature sig = key.sign(hash);
    // Drop the private key: recovery must work with the public part only.
    key = ECKey.fromPublicOnly(key);
    List<Byte> possibleRecIds = Lists.newArrayList((byte) 0, (byte) 1, (byte) 2, (byte) 3);
    byte recId = key.findRecoveryId(hash, sig);
    assertTrue(possibleRecIds.contains(recId));
}
/**
 * Inserts the (value1, value2) pair for key (key1, key2) only if the key is
 * absent.
 *
 * @return {@code true} if the pair was inserted, {@code false} if the key already existed
 */
public boolean putIfAbsent(long key1, long key2, long value1, long value2) {
    // Same precondition as put(): the first key and first value must be non-negative.
    checkBiggerEqualZero(key1);
    checkBiggerEqualZero(value1);
    final long keyHash = hash(key1, key2);
    return getSection(keyHash).put(key1, key2, value1, value2, (int) keyHash, true);
}
/** putIfAbsent inserts on first call and leaves the existing mapping untouched afterwards. */
@Test
public void testPutIfAbsent() {
    ConcurrentLongLongPairHashMap map = ConcurrentLongLongPairHashMap.newBuilder()
            .build();
    assertTrue(map.putIfAbsent(1, 1, 11, 11));
    assertEquals(map.get(1, 1), new LongPair(11, 11));
    // Second insert for the same key must be rejected and keep the original value.
    assertFalse(map.putIfAbsent(1, 1, 111, 111));
    assertEquals(map.get(1, 1), new LongPair(11, 11));
}
/**
 * Builds the resource model for {@code resourceClass} as a root resource
 * (no parent), delegating to the two-argument overload.
 */
public static ResourceModel processResource(final Class<?> resourceClass) {
    final ResourceModel rootModel = processResource(resourceClass, null);
    return rootModel;
}
/** Verifies the main properties of a simple keyless root resource model. */
@Test(description = "verifies simple resource for main properties")
public void simpleRootResource() {
    final String expectedNamespace = "";
    final String expectedName = "foo";
    final String expectedD2ServiceName = "foo3";
    final Class<? extends RecordTemplate> expectedValueClass = EmptyRecord.class;
    final ResourceModel model = RestLiAnnotationReader.processResource(FooResource3.class);
    Assert.assertNotNull(model);
    Assert.assertTrue(model.isRoot());
    Assert.assertEquals(expectedName, model.getName());
    Assert.assertEquals(expectedNamespace, model.getNamespace());
    Assert.assertEquals(expectedD2ServiceName, model.getD2ServiceName());
    Assert.assertNull(model.getParentResourceClass());
    Assert.assertNull(model.getParentResourceModel());
    // keys — a root resource without keys exposes empty key metadata
    Assert.assertEquals(0, model.getKeys().size());
    Assert.assertEquals(0, model.getKeyNames().size());
    Assert.assertEquals(0, model.getKeyClasses().size());
    // primary key
    Assert.assertNull(model.getPrimaryKey());
    // alternative key
    Assert.assertTrue(model.getAlternativeKeys().isEmpty());
    // model
    Assert.assertNotNull(model.getValueClass());
    Assert.assertEquals(expectedValueClass, model.getValueClass());
}
/**
 * Loads the highest-versioned mount-table file found directly under
 * {@code mountTableConfigPath} into {@code conf}. File names are expected to
 * follow the pattern {@code mount-table.<versionNumber>.xml}; files that do not
 * parse to a version are skipped with a warning.
 */
@Override
public void load(String mountTableConfigPath, Configuration conf) throws IOException {
    this.mountTable = new Path(mountTableConfigPath);
    String scheme = mountTable.toUri().getScheme();
    FsGetter fsGetter = new ViewFileSystemOverloadScheme.ChildFsGetter(scheme);
    try (FileSystem fs = fsGetter.getNewInstance(mountTable.toUri(), conf)) {
        // Non-recursive listing: only files directly under the mount-table dir count.
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(mountTable, false);
        LocatedFileStatus lfs = null;
        int higherVersion = -1;
        while (listFiles.hasNext()) {
            LocatedFileStatus curLfs = listFiles.next();
            String cur = curLfs.getPath().getName();
            String[] nameParts = cur.split(REGEX_DOT);
            if (nameParts.length < 2) {
                logInvalidFileNameFormat(cur);
                continue; // invalid file name
            }
            int curVersion = higherVersion;
            try {
                // The version number is the second-to-last dot-separated component.
                curVersion = Integer.parseInt(nameParts[nameParts.length - 2]);
            } catch (NumberFormatException nfe) {
                logInvalidFileNameFormat(cur);
                continue;
            }
            // Track the file with the highest version seen so far.
            if (curVersion > higherVersion) {
                higherVersion = curVersion;
                lfs = curLfs;
            }
        }
        if (lfs == null) {
            // No valid mount table file found.
            // TODO: Should we fail? Currently viewfs init will fail if no mount
            // links anyway.
            LOGGER.warn("No valid mount-table file exist at: {}. At least one "
                    + "mount-table file should present with the name format: "
                    + "mount-table.<versionNumber>.xml", mountTableConfigPath);
            return;
        }
        // Latest version file.
        Path latestVersionMountTable = lfs.getPath();
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Loading the mount-table {} into configuration.",
                    latestVersionMountTable);
        }
        try (FSDataInputStream open = fs.open(latestVersionMountTable)) {
            Configuration newConf = new Configuration(false);
            newConf.addResource(open);
            // This will add configuration props as resource, instead of stream
            // itself. So, that stream can be closed now.
            conf.addResource(newConf);
        }
    }
}
/**
 * A mount-table file whose version component is not numeric must be ignored:
 * none of its mount links may leak into the configuration.
 */
@Test
public void testMountTableFileWithInvalidFormat() throws Exception {
    Path path = new Path(new URI(
            targetTestRoot.toString() + "/testMountTableFileWithInvalidFormat/"));
    fsTarget.mkdirs(path);
    // "InvalidVersion" cannot be parsed as an int, so the loader must skip this file.
    File invalidMountFileName = new File(new URI(path.toString() + "/table.InvalidVersion.xml"));
    invalidMountFileName.createNewFile();
    // Adding mount links to make sure it will not read it.
    ViewFsTestSetup.addMountLinksToFile(TABLE_NAME,
            new String[] {SRC_ONE, SRC_TWO }, new String[] {TARGET_ONE, TARGET_TWO },
            new Path(invalidMountFileName.toURI()), conf);
    // Pass mount table directory
    loader.load(path.toString(), conf);
    Assert.assertEquals(null, conf.get(MOUNT_LINK_KEY_SRC_TWO));
    Assert.assertEquals(null, conf.get(MOUNT_LINK_KEY_SRC_ONE));
    invalidMountFileName.delete();
}
/**
 * Assigns a segment to instances within its replica-group partition. When no
 * partition column is configured (or there is a single partition), everything
 * maps to partition 0; otherwise the partition ID comes from segment metadata
 * and is folded into the instance-partition count.
 */
@Override
public List<String> assignSegment(String segmentName, Map<String, Map<String, String>> currentAssignment,
    InstancePartitions instancePartitions, InstancePartitionsType instancePartitionsType) {
    int numPartitions = instancePartitions.getNumPartitions();
    checkReplication(instancePartitions, _replication, _tableName);
    int partitionId;
    if (_partitionColumn == null || numPartitions == 1) {
        partitionId = 0;
    } else {
        // Uniformly spray the segment partitions over the instance partitions
        if (_tableConfig.getTableType() == TableType.OFFLINE) {
            partitionId = SegmentAssignmentUtils
                .getOfflineSegmentPartitionId(segmentName, _tableName, _helixManager, _partitionColumn) % numPartitions;
        } else {
            partitionId = SegmentAssignmentUtils
                .getRealtimeSegmentPartitionId(segmentName, _tableName, _helixManager, _partitionColumn) % numPartitions;
        }
    }
    return SegmentAssignmentUtils.assignSegmentWithReplicaGroup(currentAssignment, instancePartitions, partitionId);
}
/**
 * End-to-end check of replica-group assignment with one replica and multiple
 * partitions: assignment placement, rebalance stability, and bootstrap
 * reassignment ordering.
 */
@Test
public void testOneReplicaWithPartition() {
    // Mock HelixManager and a property store holding per-segment partition metadata.
    ZkHelixPropertyStore<ZNRecord> propertyStore = mock(ZkHelixPropertyStore.class);
    List<ZNRecord> segmentZKMetadataZNRecords = new ArrayList<>(NUM_SEGMENTS);
    for (int segmentId = 0; segmentId < NUM_SEGMENTS; segmentId++) {
        String segmentName = SEGMENTS.get(segmentId);
        SegmentZKMetadata segmentZKMetadata = new SegmentZKMetadata(segmentName);
        // Partition assignment cycles through partitions by segment index.
        int partitionId = segmentId % NUM_PARTITIONS;
        segmentZKMetadata.setPartitionMetadata(new SegmentPartitionMetadata(Collections.singletonMap(PARTITION_COLUMN,
            new ColumnPartitionMetadata(null, NUM_PARTITIONS, Collections.singleton(partitionId), null))));
        ZNRecord segmentZKMetadataZNRecord = segmentZKMetadata.toZNRecord();
        when(propertyStore.get(
            eq(ZKMetadataProvider.constructPropertyStorePathForSegment(OFFLINE_TABLE_NAME_WITH_PARTITION, segmentName)),
            any(), anyInt())).thenReturn(segmentZKMetadataZNRecord);
        segmentZKMetadataZNRecords.add(segmentZKMetadataZNRecord);
    }
    when(propertyStore
        .getChildren(eq(ZKMetadataProvider.constructPropertyStorePathForResource(OFFLINE_TABLE_NAME_WITH_PARTITION)),
            any(), anyInt(), anyInt(), anyInt())).thenReturn(segmentZKMetadataZNRecords);
    HelixManager helixManager = mock(HelixManager.class);
    when(helixManager.getHelixPropertyStore()).thenReturn(propertyStore);
    int numInstancesPerPartition = NUM_INSTANCES / NUM_PARTITIONS;
    ReplicaGroupStrategyConfig replicaGroupStrategyConfig =
        new ReplicaGroupStrategyConfig(PARTITION_COLUMN, numInstancesPerPartition);
    TableConfig tableConfigWithPartitions =
        new TableConfigBuilder(TableType.OFFLINE).setTableName(RAW_TABLE_NAME_WITH_PARTITION).setNumReplicas(1)
            .setSegmentAssignmentStrategy(AssignmentStrategy.REPLICA_GROUP_SEGMENT_ASSIGNMENT_STRATEGY).build();
    tableConfigWithPartitions.getValidationConfig().setReplicaGroupStrategyConfig(replicaGroupStrategyConfig);
    SegmentAssignment segmentAssignment =
        SegmentAssignmentFactory.getSegmentAssignment(helixManager, tableConfigWithPartitions, null);
    // Instance partitions layout (partition -> instances):
    // {
    //   0_0=[instance_0, instance_1, instance_2, instance_3, instance_4, instance_5],
    //   1_0=[instance_6, instance_7, instance_8, instance_9, instance_10, instance_11],
    //   2_0=[instance_12, instance_13, instance_14, instance_15, instance_16, instance_17],
    // }
    InstancePartitions instancePartitions = new InstancePartitions(INSTANCE_PARTITIONS_NAME_WITH_PARTITION);
    int instanceIdToAdd = 0;
    for (int partitionId = 0; partitionId < NUM_PARTITIONS; partitionId++) {
        List<String> instancesForPartition = new ArrayList<>(numInstancesPerPartition);
        for (int i = 0; i < numInstancesPerPartition; i++) {
            instancesForPartition.add(INSTANCES.get(instanceIdToAdd++));
        }
        instancePartitions.setInstances(partitionId, 0, instancesForPartition);
    }
    Map<InstancePartitionsType, InstancePartitions> instancePartitionsMap =
        Collections.singletonMap(InstancePartitionsType.OFFLINE, instancePartitions);
    // Test assignment
    Map<String, Map<String, String>> currentAssignment = new TreeMap<>();
    for (int segmentId = 0; segmentId < NUM_SEGMENTS; segmentId++) {
        String segmentName = SEGMENTS.get(segmentId);
        List<String> instancesAssigned =
            segmentAssignment.assignSegment(segmentName, currentAssignment, instancePartitionsMap);
        assertEquals(instancesAssigned.size(), 1);
        // Segment 0 (partition 0) should be assigned to instance 0
        // Segment 1 (partition 1) should be assigned to instance 6
        // Segment 2 (partition 2) should be assigned to instance 12
        // Segment 3 (partition 0) should be assigned to instance 1
        // Segment 4 (partition 1) should be assigned to instance 7
        // Segment 5 (partition 2) should be assigned to instance 13
        // Segment 6 (partition 0) should be assigned to instance 2
        // Segment 7 (partition 1) should be assigned to instance 8
        // ...
        int partitionId = segmentId % NUM_PARTITIONS;
        int expectedAssignedInstanceId =
            (segmentId % NUM_INSTANCES) / NUM_PARTITIONS + partitionId * numInstancesPerPartition;
        assertEquals(instancesAssigned.get(0), INSTANCES.get(expectedAssignedInstanceId));
        currentAssignment
            .put(segmentName, SegmentAssignmentUtils.getInstanceStateMap(instancesAssigned, SegmentStateModel.ONLINE));
    }
    // Current assignment should already be balanced
    assertEquals(
        segmentAssignment.rebalanceTable(currentAssignment, instancePartitionsMap, null, null, new RebalanceConfig()),
        currentAssignment);
    // Test bootstrap
    // Bootstrap table should reassign all segments based on their alphabetical order within the partition
    RebalanceConfig rebalanceConfig = new RebalanceConfig();
    rebalanceConfig.setBootstrap(true);
    Map<String, Map<String, String>> newAssignment =
        segmentAssignment.rebalanceTable(currentAssignment, instancePartitionsMap, null, null, rebalanceConfig);
    assertEquals(newAssignment.size(), NUM_SEGMENTS);
    int numSegmentsPerPartition = NUM_SEGMENTS / NUM_PARTITIONS;
    String[][] partitionIdToSegmentsMap = new String[NUM_PARTITIONS][numSegmentsPerPartition];
    for (int i = 0; i < NUM_SEGMENTS; i++) {
        partitionIdToSegmentsMap[i % NUM_PARTITIONS][i / NUM_PARTITIONS] = SEGMENTS.get(i);
    }
    // Sort each partition's segments alphabetically to derive the expected bootstrap order.
    String[][] partitionIdToSortedSegmentsMap = new String[NUM_PARTITIONS][numSegmentsPerPartition];
    for (int i = 0; i < NUM_PARTITIONS; i++) {
        String[] sortedSegments = new String[numSegmentsPerPartition];
        System.arraycopy(partitionIdToSegmentsMap[i], 0, sortedSegments, 0, numSegmentsPerPartition);
        Arrays.sort(sortedSegments);
        partitionIdToSortedSegmentsMap[i] = sortedSegments;
    }
    for (int i = 0; i < NUM_PARTITIONS; i++) {
        for (int j = 0; j < numSegmentsPerPartition; j++) {
            assertEquals(newAssignment.get(partitionIdToSortedSegmentsMap[i][j]),
                currentAssignment.get(partitionIdToSegmentsMap[i][j]));
        }
    }
}
/**
 * Routes the statement with the single-table rule. When the context already has
 * route units and the statement is not a SELECT, routing happens in a scratch
 * context that is then merged back into the original.
 */
@Override
public void route(final RouteContext routeContext, final SingleRule singleRule) {
    final boolean routeInPlace = routeContext.getRouteUnits().isEmpty() || sqlStatement instanceof SelectStatement;
    if (routeInPlace) {
        routeStatement(routeContext, singleRule);
        return;
    }
    RouteContext scratchContext = new RouteContext();
    routeStatement(scratchContext, singleRule);
    combineRouteContext(routeContext, scratchContext);
}
/**
 * Routing a CREATE TABLE with an empty single-rule configuration must still
 * produce exactly one route unit mapping t_order onto itself.
 */
@Test
void assertRouteWithoutSingleRule() throws SQLException {
    SingleStandardRouteEngine engine =
            new SingleStandardRouteEngine(mockQualifiedTables(), new MySQLCreateTableStatement(false));
    SingleRule singleRule = new SingleRule(new SingleRuleConfiguration(), DefaultDatabase.LOGIC_NAME,
            new MySQLDatabaseType(), createDataSourceMap(), Collections.emptyList());
    RouteContext routeContext = new RouteContext();
    engine.route(routeContext, singleRule);
    List<RouteUnit> routeUnits = new ArrayList<>(routeContext.getRouteUnits());
    assertThat(routeContext.getRouteUnits().size(), is(1));
    assertThat(routeUnits.get(0).getTableMappers().size(), is(1));
    Iterator<RouteMapper> tableMappers = routeUnits.get(0).getTableMappers().iterator();
    RouteMapper tableMapper0 = tableMappers.next();
    // Logical and actual table names coincide for single-table routing.
    assertThat(tableMapper0.getActualName(), is("t_order"));
    assertThat(tableMapper0.getLogicName(), is("t_order"));
}
/**
 * Appends one mapped buffer to this result, updating byte totals, the logical
 * message count, and the commercial (billing) message count.
 */
public void addMessage(final SelectMappedBufferResult mapedBuffer) {
    final int bufferSize = mapedBuffer.getSize();
    this.messageMapedList.add(mapedBuffer);
    this.messageBufferList.add(mapedBuffer.getByteBuffer());
    this.bufferTotalSize += bufferSize;
    // Billing counts ceil(size / commercialSizePerMsg) logical messages per buffer.
    this.msgCount4Commercial += (int) Math.ceil(bufferSize / (double) commercialSizePerMsg);
    this.messageCount++;
}
/**
 * Adding buffers of 1x, 2x and 4x the commercial unit size must accumulate
 * counts, commercial message counts and byte totals consistently.
 */
@Test
public void testAddMessage() {
    GetMessageResult getMessageResult = new GetMessageResult();
    SelectMappedBufferResult mappedBufferResult1 = new SelectMappedBufferResult(0, null, 4 * 1024, null);
    getMessageResult.addMessage(mappedBufferResult1);
    SelectMappedBufferResult mappedBufferResult2 = new SelectMappedBufferResult(0, null, 2 * 4 * 1024, null);
    getMessageResult.addMessage(mappedBufferResult2, 0);
    SelectMappedBufferResult mappedBufferResult3 = new SelectMappedBufferResult(0, null, 4 * 4 * 1024, null);
    getMessageResult.addMessage(mappedBufferResult3, 0, 2);
    Assert.assertEquals(getMessageResult.getMessageQueueOffset().size(), 2);
    Assert.assertEquals(getMessageResult.getMessageBufferList().size(), 3);
    Assert.assertEquals(getMessageResult.getMessageMapedList().size(), 3);
    Assert.assertEquals(getMessageResult.getMessageCount(), 4);
    // Commercial count = sum of ceil(size / unit) for each buffer: 1 + 2 + 4.
    Assert.assertEquals(getMessageResult.getMsgCount4Commercial(), 1 + 2 + 4);
    Assert.assertEquals(getMessageResult.getBufferTotalSize(), (1 + 2 + 4) * 4 * 1024);
}
/**
 * Looks up the VFS connection provider registered under {@code key} in the
 * manager, or {@code null} if none is registered. The unchecked cast assumes
 * providers registered for VFS keys are VFS providers — TODO confirm this is
 * guaranteed by the registration path.
 */
@SuppressWarnings( "unchecked" )
@Nullable
public <T extends VFSConnectionDetails> VFSConnectionProvider<T> getProvider( @NonNull ConnectionManager manager, @Nullable String key ) {
    return (VFSConnectionProvider<T>) manager.getConnectionProvider( key );
}
/** A details object whose type has no registered provider must yield null. */
@Test
public void testGetProviderOfDetailsReturnsNullForNonExistingProviderInManager() {
    String provider1Key = "missingProvider1";
    VFSConnectionDetails details1 = mock( VFSConnectionDetails.class );
    doReturn( provider1Key ).when( details1 ).getType();
    VFSConnectionProvider<VFSConnectionDetails> result =
            vfsConnectionManagerHelper.getProvider( connectionManager, details1 );
    assertNull( result );
}
/**
 * Creates a mutation detector for {@code value} using {@code coder} to snapshot
 * it. A null value needs no tracking, so a no-op detector is returned.
 */
public static <T> MutationDetector forValueWithCoder(T value, Coder<T> coder) throws CoderException {
    return value == null
            ? noopMutationDetector()
            : new CodedValueMutationDetector<>(value, coder);
}
/** An array left untouched after detector creation must pass verification. */
@Test
public void testUnmodifiedArray() throws Exception {
    byte[] value = new byte[] {0x1, 0x2, 0x3, 0x4};
    MutationDetector detector = MutationDetectors.forValueWithCoder(value, ByteArrayCoder.of());
    detector.verifyUnmodified();
}
/**
 * Builds an IPv4 prefix from an address given as a packed 32-bit integer and a
 * prefix length.
 */
public static Ip4Prefix valueOf(int address, int prefixLength) {
    final Ip4Address ipAddress = Ip4Address.valueOf(address);
    return new Ip4Prefix(ipAddress, prefixLength);
}
/**
 * Parsing "a.b.c.d/len" must mask the host bits, so the canonical string shows
 * only the network portion for the given prefix length.
 */
@Test
public void testValueOfStringIPv4() {
    Ip4Prefix ipPrefix;
    ipPrefix = Ip4Prefix.valueOf("1.2.3.4/24");
    assertThat(ipPrefix.toString(), is("1.2.3.0/24"));
    ipPrefix = Ip4Prefix.valueOf("1.2.3.4/32");
    assertThat(ipPrefix.toString(), is("1.2.3.4/32"));
    ipPrefix = Ip4Prefix.valueOf("1.2.3.5/32");
    assertThat(ipPrefix.toString(), is("1.2.3.5/32"));
    ipPrefix = Ip4Prefix.valueOf("0.0.0.0/0");
    assertThat(ipPrefix.toString(), is("0.0.0.0/0"));
    ipPrefix = Ip4Prefix.valueOf("0.0.0.0/32");
    assertThat(ipPrefix.toString(), is("0.0.0.0/32"));
    // A /0 prefix masks every bit regardless of the input address.
    ipPrefix = Ip4Prefix.valueOf("255.255.255.255/0");
    assertThat(ipPrefix.toString(), is("0.0.0.0/0"));
    ipPrefix = Ip4Prefix.valueOf("255.255.255.255/16");
    assertThat(ipPrefix.toString(), is("255.255.0.0/16"));
    ipPrefix = Ip4Prefix.valueOf("255.255.255.255/32");
    assertThat(ipPrefix.toString(), is("255.255.255.255/32"));
}
/** This getter's results must never be cached. */
@Override
boolean isCacheable() { return false; }
/** The null multi-value getter must report itself as non-cacheable. */
@Test
public void test_isCacheable() {
    boolean cacheable = NullMultiValueGetter.NULL_MULTIVALUE_GETTER.isCacheable();
    assertFalse(cacheable);
}
/**
 * Creates a scheduled job: validates the cron expression, handler uniqueness and
 * handler existence, persists the job record, registers it with Quartz, and
 * marks it NORMAL.
 *
 * @param createReqVO the job definition
 * @return the ID of the created job
 * @throws SchedulerException if Quartz registration fails
 */
@Override
@Transactional(rollbackFor = Exception.class)
public Long createJob(JobSaveReqVO createReqVO) throws SchedulerException {
    validateCronExpression(createReqVO.getCronExpression());
    // 1.1 Ensure the handler name is unique
    if (jobMapper.selectByHandlerName(createReqVO.getHandlerName()) != null) {
        throw exception(JOB_HANDLER_EXISTS);
    }
    // 1.2 Ensure the JobHandler bean actually exists
    validateJobHandlerExists(createReqVO.getHandlerName());
    // 2. Insert the JobDO in INIT status
    JobDO job = BeanUtils.toBean(createReqVO, JobDO.class);
    job.setStatus(JobStatusEnum.INIT.getStatus());
    fillJobMonitorTimeoutEmpty(job);
    jobMapper.insert(job);
    // 3.1 Register the job with Quartz
    schedulerManager.addJob(job.getId(), job.getHandlerName(), job.getHandlerParam(), job.getCronExpression(),
            createReqVO.getRetryCount(), createReqVO.getRetryInterval());
    // 3.2 Flip the JobDO status to NORMAL now that scheduling succeeded
    JobDO updateObj = JobDO.builder().id(job.getId()).status(JobStatusEnum.NORMAL.getStatus()).build();
    jobMapper.updateById(updateObj);
    return job.getId();
}
/**
 * Creating a job with a valid cron expression must persist it in NORMAL status
 * and register it with the scheduler.
 */
@Test
public void testCreateJob_success() throws SchedulerException {
    // Prepare arguments with an explicit cron expression
    JobSaveReqVO reqVO = randomPojo(JobSaveReqVO.class, o -> o.setCronExpression("0 0/1 * * * ? *"))
            .setId(null);
    try (MockedStatic<SpringUtil> springUtilMockedStatic = mockStatic(SpringUtil.class)) {
        // The handler bean lookup must resolve to an existing job bean
        springUtilMockedStatic.when(() -> SpringUtil.getBean(eq(reqVO.getHandlerName())))
                .thenReturn(jobLogCleanJob);
        // Invoke
        Long jobId = jobService.createJob(reqVO);
        // Assert the returned ID
        assertNotNull(jobId);
        // Verify the persisted record matches the request and is NORMAL
        JobDO job = jobMapper.selectById(jobId);
        assertPojoEquals(reqVO, job, "id");
        assertEquals(JobStatusEnum.NORMAL.getStatus(), job.getStatus());
        // Verify Quartz registration was invoked with the job's fields
        verify(schedulerManager).addJob(eq(job.getId()), eq(job.getHandlerName()), eq(job.getHandlerParam()),
                eq(job.getCronExpression()), eq(reqVO.getRetryCount()), eq(reqVO.getRetryInterval()));
    }
}
/**
 * Decodes a raw syslog message into a {@link Message}, timing the parse and
 * passing along the sender address (when resolvable) and receive timestamp.
 * May return {@code null} per the codec contract.
 */
@Nullable
@Override
public Message decode(@Nonnull RawMessage rawMessage) {
    final String msg = new String(rawMessage.getPayload(), charset);
    // Time the whole parse; the Timer.Context closes (and records) automatically.
    try (Timer.Context ignored = this.decodeTime.time()) {
        final ResolvableInetSocketAddress address = rawMessage.getRemoteAddress();
        final InetSocketAddress remoteAddress;
        if (address == null) {
            remoteAddress = null;
        } else {
            remoteAddress = address.getInetSocketAddress();
        }
        return parse(msg, remoteAddress == null ? null : remoteAddress.getAddress(), rawMessage.getTimestamp());
    }
}
/**
 * Regression test for issue 845: a structured syslog message must have its
 * SD-params promoted to message fields alongside the standard syslog fields.
 */
@Test
public void testDecodeStructuredIssue845() throws Exception {
    final Message message = codec.decode(buildRawMessage(STRUCTURED_ISSUE_845));
    assertNotNull(message);
    assertEquals("User page 13 requested", message.getMessage());
    assertEquals(new DateTime("2015-01-06T20:56:33.287Z", DateTimeZone.UTC),
            ((DateTime) message.getField("timestamp")).withZone(DateTimeZone.UTC));
    assertEquals("app-1", message.getField("source"));
    assertEquals(6, message.getField("level"));
    assertEquals("local7", message.getField("facility"));
    // Structured-data parameters become individual message fields.
    assertEquals("::ffff:132.123.15.30", message.getField("ip"));
    assertEquals("{c.corp.Handler}", message.getField("logger"));
    assertEquals("4ot7", message.getField("session"));
    assertEquals("user@example.com", message.getField("user"));
    assertEquals("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.2.5 (KHTML, like Gecko) Version/7.1.2 Safari/537.85.11",
            message.getField("user-agent"));
    assertEquals("app", message.getField("application_name"));
    assertEquals(23, message.getField("facility_num"));
}