focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Thread-level metric: registers and returns the "create-task" sensor (invocation rate + total
// count, INFO recording level) by delegating to the shared invocationRateAndCountSensor helper.
public static Sensor createTaskSensor(final String threadId, final StreamsMetricsImpl streamsMetrics) { return invocationRateAndCountSensor( threadId, CREATE_TASK, CREATE_TASK_RATE_DESCRIPTION, CREATE_TASK_TOTAL_DESCRIPTION, RecordingLevel.INFO, streamsMetrics ); }
// Verifies that createTaskSensor obtains the thread-level sensor for operation "task-created" and,
// via the mocked static StreamsMetricsImpl, adds the invocation rate/count with the expected
// descriptions, finally returning that same sensor instance.
@Test public void shouldGetCreateTaskSensor() { final String operation = "task-created"; final String totalDescription = "The total number of newly created tasks"; final String rateDescription = "The average per-second number of newly created tasks"; when(streamsMetrics.threadLevelSensor(THREAD_ID, operation, RecordingLevel.INFO)).thenReturn(expectedSensor); when(streamsMetrics.threadLevelTagMap(THREAD_ID)).thenReturn(tagMap); try (final MockedStatic<StreamsMetricsImpl> streamsMetricsStaticMock = mockStatic(StreamsMetricsImpl.class)) { final Sensor sensor = ThreadMetrics.createTaskSensor(THREAD_ID, streamsMetrics); streamsMetricsStaticMock.verify( () -> StreamsMetricsImpl.addInvocationRateAndCountToSensor( expectedSensor, THREAD_LEVEL_GROUP, tagMap, operation, rateDescription, totalDescription ) ); assertThat(sensor, is(expectedSensor)); } }
/**
 * Finds the stores among {@code storeCandidates} whose current leader count is no greater than
 * the smallest leader count recorded in {@code leaderTable} (the "laziest" worker).
 *
 * @param storeCandidates store ids to consider; may be null or empty
 * @return pairs of (storeId, leaderCount) for candidates at or below the minimum leader count,
 *         or an empty list when there are no candidates or no leader records at all
 */
public List<Pair<Long /* storeId */, Integer /* leaderCount */>> findLazyWorkerStores(final Collection<Long> storeCandidates) {
    if (storeCandidates == null || storeCandidates.isEmpty()) {
        return Collections.emptyList();
    }
    final Set<Map.Entry<Long, Set<Long>>> values = this.leaderTable.entrySet();
    if (values.isEmpty()) {
        return Collections.emptyList();
    }
    // Pick the worker with the fewest leaders; the comparator treats a null leader set as zero.
    final Map.Entry<Long, Set<Long>> lazyWorker = Collections.min(values, (o1, o2) -> {
        final int o1Val = o1.getValue() == null ? 0 : o1.getValue().size();
        final int o2Val = o2.getValue() == null ? 0 : o2.getValue().size();
        return Integer.compare(o1Val, o2Val);
    });
    // Fix: the minimum entry's value may itself be null (the comparator above explicitly allows
    // it); guard here instead of calling size() unconditionally, which would NPE.
    final Set<Long> lazyLeaders = lazyWorker.getValue();
    final int minLeaderCount = lazyLeaders == null ? 0 : lazyLeaders.size();
    final List<Pair<Long, Integer>> lazyCandidates = Lists.newArrayList();
    for (final Long storeId : storeCandidates) {
        // A store with no leader record counts as zero leaders and therefore always qualifies.
        final Set<Long> regionTable = this.leaderTable.get(storeId);
        final int leaderCount = regionTable == null ? 0 : regionTable.size();
        if (leaderCount <= minLeaderCount) {
            lazyCandidates.add(Pair.of(storeId, leaderCount));
        }
    }
    return lazyCandidates;
}
// Exercises findLazyWorkerStores: with leader counts {10:3, 11:2, 12:1} only store 12 (count 1)
// is at the minimum; after adding one more leader to store 10, the minimum is recomputed and two
// stores qualify.
@Test public void findLazyWorkerStoresTest() { final ClusterStatsManager manager = ClusterStatsManager.getInstance(1); manager.addOrUpdateLeader(10, 101); manager.addOrUpdateLeader(10, 102); manager.addOrUpdateLeader(10, 103); manager.addOrUpdateLeader(11, 104); manager.addOrUpdateLeader(11, 105); manager.addOrUpdateLeader(12, 106); final Collection<Long> storeCandidates = Lists.newArrayList(); storeCandidates.add(10L); storeCandidates.add(11L); storeCandidates.add(12L); List<Pair<Long, Integer>> result = manager.findLazyWorkerStores(storeCandidates); Assert.assertNotNull(result); Assert.assertEquals(1, result.size()); Assert.assertEquals(Long.valueOf(12), result.get(0).getKey()); Assert.assertEquals(Integer.valueOf(1), result.get(0).getValue()); manager.addOrUpdateLeader(10, 105); result = manager.findLazyWorkerStores(storeCandidates); Assert.assertNotNull(result); Assert.assertEquals(2, result.size()); Assert.assertEquals(Integer.valueOf(1), result.get(0).getValue()); }
// Reconciles all Kafka Exporter resources as a sequential future chain: service account ->
// certificates secret -> network policy -> deployment -> wait for deployment readiness.
public Future<Void> reconcile(boolean isOpenShift, ImagePullPolicy imagePullPolicy, List<LocalObjectReference> imagePullSecrets, Clock clock) { return serviceAccount() .compose(i -> certificatesSecret(clock)) .compose(i -> networkPolicy()) .compose(i -> deployment(isOpenShift, imagePullPolicy, imagePullSecrets)) .compose(i -> waitForDeploymentReadiness()); }
// With the exporter disabled (supplierWithMocks(false)), reconcile() is expected to delete each
// resource: every mocked reconcile call captures exactly one value, and each captured resource
// (service account, secret, network policy, deployment) is null.
@Test public void reconcileWithDisabledExporter(VertxTestContext context) { ResourceOperatorSupplier supplier = ResourceUtils.supplierWithMocks(false); ServiceAccountOperator mockSaOps = supplier.serviceAccountOperations; ArgumentCaptor<ServiceAccount> saCaptor = ArgumentCaptor.forClass(ServiceAccount.class); when(mockSaOps.reconcile(any(), eq(NAMESPACE), eq(KafkaExporterResources.componentName(NAME)), saCaptor.capture())).thenReturn(Future.succeededFuture()); SecretOperator mockSecretOps = supplier.secretOperations; ArgumentCaptor<Secret> secretCaptor = ArgumentCaptor.forClass(Secret.class); when(mockSecretOps.reconcile(any(), eq(NAMESPACE), eq(KafkaExporterResources.secretName(NAME)), secretCaptor.capture())).thenReturn(Future.succeededFuture()); NetworkPolicyOperator mockNetPolicyOps = supplier.networkPolicyOperator; ArgumentCaptor<NetworkPolicy> netPolicyCaptor = ArgumentCaptor.forClass(NetworkPolicy.class); when(mockNetPolicyOps.reconcile(any(), eq(NAMESPACE), eq(KafkaExporterResources.componentName(NAME)), netPolicyCaptor.capture())).thenReturn(Future.succeededFuture()); DeploymentOperator mockDepOps = supplier.deploymentOperations; ArgumentCaptor<Deployment> depCaptor = ArgumentCaptor.forClass(Deployment.class); when(mockDepOps.reconcile(any(), eq(NAMESPACE), eq(KafkaExporterResources.componentName(NAME)), depCaptor.capture())).thenReturn(Future.succeededFuture()); when(mockDepOps.waitForObserved(any(), eq(NAMESPACE), eq(KafkaExporterResources.componentName(NAME)), anyLong(), anyLong())).thenReturn(Future.succeededFuture()); when(mockDepOps.readiness(any(), eq(NAMESPACE), eq(KafkaExporterResources.componentName(NAME)), anyLong(), anyLong())).thenReturn(Future.succeededFuture()); KafkaExporterReconciler reconciler = new KafkaExporterReconciler( Reconciliation.DUMMY_RECONCILIATION, ResourceUtils.dummyClusterOperatorConfig(), supplier, KAFKA, VERSIONS, CLUSTER_CA ); Checkpoint async = context.checkpoint(); reconciler.reconcile(false, null, null, 
Clock.systemUTC()) .onComplete(context.succeeding(v -> context.verify(() -> { assertThat(saCaptor.getAllValues().size(), is(1)); assertThat(saCaptor.getValue(), is(nullValue())); assertThat(secretCaptor.getAllValues().size(), is(1)); assertThat(secretCaptor.getAllValues().get(0), is(nullValue())); assertThat(netPolicyCaptor.getAllValues().size(), is(1)); assertThat(netPolicyCaptor.getValue(), is(nullValue())); assertThat(depCaptor.getAllValues().size(), is(1)); assertThat(depCaptor.getValue(), is(nullValue())); async.flag(); }))); }
@Override
public Hotel getHotel(String id) {
    // Resolve the hotel by id, failing fast with a not-found exception when absent.
    final String notFoundMessage = "Hotel no encontrado con el id: " + id;
    return hotelRepository.findById(id)
            .orElseThrow(() -> new ResourceNotFoundException(notFoundMessage));
}
@Test
void testGetHotelNotFound() {
    // Given
    String hotelId = "1";
    // When hotelRepository.findById is called with hotelId, return Optional.empty()
    when(hotelRepository.findById(hotelId)).thenReturn(Optional.empty());
    // When, Then
    // Verify that calling hotelService.getHotel with hotelId throws ResourceNotFoundException
    ResourceNotFoundException exception = org.junit.jupiter.api.Assertions.assertThrows(
            ResourceNotFoundException.class,
            () -> hotelService.getHotel(hotelId)
    );
    // Verify that the exception message is the expected one
    assertThat(exception.getMessage()).isEqualTo("Hotel no encontrado con el id: " + hotelId);
}
// Drops producer entries that have expired as of currentTimeMs, refreshes the cached producer-id
// count, and prunes verification states older than producerIdExpirationMs.
public void removeExpiredProducers(long currentTimeMs) { producers.entrySet().removeIf(entry -> isProducerExpired(currentTimeMs, entry.getValue())); producerIdCount = producers.size(); verificationStates.entrySet().removeIf(entry -> (currentTimeMs - entry.getValue().timestamp()) >= producerStateManagerConfig.producerIdExpirationMs() ); }
@Test
public void testProducerStateAfterFencingAbortMarker() {
    appendClientEntry(stateManager, producerId, epoch, defaultSequence, 0L, true);
    // An abort marker from a bumped epoch fences the original producer entry.
    appendEndTxnMarker(stateManager, producerId, (short) (epoch + 1), ControlRecordType.ABORT, 1L);
    ProducerStateEntry lastEntry = getLastEntryOrElseThrownByProducerId(stateManager, producerId);
    assertEquals(OptionalLong.empty(), lastEntry.currentTxnFirstOffset());
    assertEquals(-1, lastEntry.lastDataOffset());
    assertEquals(-1, lastEntry.firstDataOffset());
    // The producer should not be expired because we want to preserve fencing epochs
    stateManager.removeExpiredProducers(time.milliseconds());
    assertDoesNotThrow(() -> getLastEntryOrElseThrownByProducerId(stateManager, producerId));
}
// Returns the constant identifier (ID) for this implementation.
@NonNull @Override public String getId() { return ID; }
// POSTing to the Bitbucket server scm endpoint without an apiUrl parameter must yield HTTP 400.
@Test public void getBitbucketScmWithoutApiUrlParam() throws IOException, UnirestException { new RequestBuilder(baseUrl) .crumb(crumb) .status(400) .jwtToken(getJwtToken(j.jenkins, authenticatedUser.getId(), authenticatedUser.getId())) .post("/organizations/jenkins/scm/"+BitbucketServerScm.ID+"/") .build(Map.class); }
// int overload: widens the argument to long and delegates to isLessThan(long).
public final void isLessThan(int other) { isLessThan((long) other); }
// 2 is not less than 1, so the int-overload assertion is expected to fail (captured by
// expectFailureWhenTestingThat rather than thrown).
@Test public void isLessThan_int_strictly() { expectFailureWhenTestingThat(2L).isLessThan(1); }
/**
 * Returns a task that computes a result for every key in {@code keys} within the given batching
 * {@code group}, mapping each key to a {@link Try} of its value.
 */
public abstract Task<Map<K, Try<T>>> taskForBatch(G group, Set<K> keys);
// Keys 0 and 2 share a batch (classified by key % 2); key 1 is a singleton whose 10ms timeout
// fires before the 250ms-delayed batch value, so its result is recovered to the
// TimeoutException class name. Expects exactly one batch and one singleton execution.
@Test public void testBatchAndSingletonWithTimeout() { RecordingTaskStrategy<Integer, Integer, String> strategy = new RecordingTaskStrategy<Integer, Integer, String>(key -> Success.of(String.valueOf(key)), key -> key % 2) { @Override public Task<Map<Integer, Try<String>>> taskForBatch(Integer group, Set<Integer> keys) { return super.taskForBatch(group, keys).flatMap(map -> delayedValue(map, 250, TimeUnit.MILLISECONDS)); } }; _batchingSupport.registerStrategy(strategy); Task<String> task = Task.par(strategy.batchable(0), strategy.batchable(1).withTimeout(10, TimeUnit.MILLISECONDS).recover("toExceptionName", e -> e.getClass().getName()), strategy.batchable(2)) .map("concat", (s0, s1, s2) -> s0 + s1 + s2); String result = runAndWait("TestTaskBatchingStrategy.testBatchAndSingletonWithTimeout", task); assertEquals(result, "0java.util.concurrent.TimeoutException2"); assertTrue(strategy.getClassifiedKeys().contains(0)); assertTrue(strategy.getClassifiedKeys().contains(1)); assertTrue(strategy.getClassifiedKeys().contains(2)); assertEquals(strategy.getExecutedBatches().size(), 1); assertEquals(strategy.getExecutedSingletons().size(), 1); }
/**
 * Validates that the given role name and code are not already used by another role (excluding
 * the role with the given id), and that the reserved super-admin code is not used at all.
 */
@VisibleForTesting
void validateRoleDuplicate(String name, String code, Long id) {
    // 0. Creating a role with the super-administrator code is not allowed
    if (RoleCodeEnum.isSuperAdmin(code)) {
        throw exception(ROLE_ADMIN_CODE_ERROR, code);
    }
    // 1. The name is already used by another role
    RoleDO role = roleMapper.selectByName(name);
    if (role != null && !role.getId().equals(id)) {
        throw exception(ROLE_NAME_DUPLICATE, name);
    }
    // 2. Is there another role with the same code? (blank codes are not checked)
    if (!StringUtils.hasText(code)) {
        return;
    }
    // The code is already used by another role
    role = roleMapper.selectByCode(code);
    if (role != null && !role.getId().equals(id)) {
        throw exception(ROLE_CODE_DUPLICATE, code);
    }
}
@Test
public void testValidateRoleDuplicate_codeDuplicate() {
    // mock data: an existing role already using code "code"
    RoleDO roleDO = randomPojo(RoleDO.class, o -> o.setCode("code"));
    roleMapper.insert(roleDO);
    // prepare arguments
    String code = "code";
    // invoke and assert that the duplicate-code service exception is raised
    assertServiceException(() -> roleService.validateRoleDuplicate(randomString(), code, null),
            ROLE_CODE_DUPLICATE, code);
}
// Writes the aggregated long decimal into the output block: a null decimal appends SQL NULL; a
// non-zero overflow flag or an out-of-range value raises an arithmetic overflow before writing.
public static void outputLongDecimal(LongDecimalWithOverflowState state, BlockBuilder out) { Slice decimal = state.getLongDecimal(); if (decimal == null) { out.appendNull(); } else { if (state.getOverflow() != 0) { throwOverflowException(); } throwIfOverflows(decimal); LONG_DECIMAL_TYPE.writeSlice(out, decimal); } }
// Summing 2^126 twice sets the overflow flag to 1, so outputLongDecimal must throw
// ArithmeticException when emitting the result.
@Test(expectedExceptions = ArithmeticException.class) public void testOverflowOnOutput() { addToState(state, TWO.pow(126)); addToState(state, TWO.pow(126)); assertEquals(state.getOverflow(), 1); DecimalSumAggregation.outputLongDecimal(state, new VariableWidthBlockBuilder(null, 10, 100)); }
// Asserts the subject has exactly expectedSize elements; negative expected sizes are rejected
// up front with an IllegalArgumentException.
public final void hasSize(int expectedSize) { checkArgument(expectedSize >= 0, "expectedSize (%s) must be >= 0", expectedSize); check("size()").that(checkNotNull(actual).size()).isEqualTo(expectedSize); }
// An empty map must report size 0.
@Test public void hasSizeZero() { assertThat(ImmutableMap.of()).hasSize(0); }
// Populates the DMN context from a JSON-derived map: keys matching an InputData or Decision node
// are routed through the corresponding processor; unknown keys are set on the context as-is.
public DMNContext populateContextWith(Map<String, Object> json) { for (Entry<String, Object> kv : json.entrySet()) { InputDataNode idn = model.getInputByName(kv.getKey()); if (idn != null) { processInputDataNode(kv, idn); } else { DecisionNode dn = model.getDecisionByName(kv.getKey()); if (dn != null) { processDecisionNode(kv, dn); } else { LOG.debug("The key {} was not a InputData nor a Decision to override, setting it as-is.", kv.getKey()); context.set(kv.getKey(), kv.getValue()); } } } return context; }
// End-to-end check that each FEEL data type round-trips from JSON through the dynamic context
// builder: every "Input*" entry in the JSON feeds its corresponding "Decision*" result after
// evaluating the OneOfEachType model.
@Test void oneOfEachType() throws Exception { DMNRuntime runtime = createRuntime("OneOfEachType.dmn", DMNRuntimeTypesTest.class); final DMNModel dmnModel = runtime.getModel("http://www.trisotech.com/definitions/_4f5608e9-4d74-4c22-a47e-ab657257fc9c", "OneOfEachType"); assertThat(dmnModel).isNotNull(); assertThat(dmnModel.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnModel.getMessages())).isFalse(); DMNContext context = runtime.newContext(); final String JSON = "{\n" + " \"InputBoolean\": true,\n" + " \"InputDTDuration\": \"P1D\",\n" + " \"InputDate\": \"2020-04-02\",\n" + " \"InputDateAndTime\": \"2020-04-02T09:00:00\",\n" + " \"InputNumber\": 1,\n" + " \"InputString\": \"John Doe\",\n" + " \"InputTime\": \"09:00\",\n" + " \"InputYMDuration\": \"P1M\"\n" + "}"; new DynamicDMNContextBuilder(context, dmnModel).populateContextWith(readJSON(JSON)); final DMNResult dmnResult = runtime.evaluateAll(dmnModel, context); LOG.debug("{}", dmnResult); assertThat(dmnResult.hasErrors()).as(DMNRuntimeUtil.formatMessages(dmnResult.getMessages())).isFalse(); assertThat(dmnResult.getDecisionResultByName("DecisionString").getResult()).isEqualTo("Hello, John Doe"); assertThat(dmnResult.getDecisionResultByName("DecisionNumber").getResult()).isEqualTo(new BigDecimal(2)); assertThat(dmnResult.getDecisionResultByName("DecisionBoolean").getResult()).isEqualTo(Boolean.FALSE); assertThat(dmnResult.getDecisionResultByName("DecisionDTDuration").getResult()).isEqualTo(Duration.parse("P2D")); assertThat(dmnResult.getDecisionResultByName("DecisionYMDuration").getResult()).isEqualTo(ComparablePeriod.parse("P2M")); assertThat(dmnResult.getDecisionResultByName("DecisionDateAndTime").getResult()).isEqualTo(LocalDateTime.of(2020, 4, 2, 10, 0)); assertThat(dmnResult.getDecisionResultByName("DecisionDate").getResult()).isEqualTo(LocalDate.of(2020, 4, 3)); assertThat(dmnResult.getDecisionResultByName("DecisionTime").getResult()).isEqualTo(LocalTime.of(10, 0)); }
/**
 * Resolves the function overload best matching {@code arguments}: an exact match is tried first,
 * then (when the index supports it) a second pass allowing implicit casts. Throws the
 * no-matching-function exception when neither pass finds a candidate.
 */
T getFunction(final List<SqlArgument> arguments) {
    // first try to get the candidates without any implicit casting
    Optional<T> candidate = findMatchingCandidate(arguments, false);
    if (candidate.isPresent()) {
        return candidate.get();
    } else if (!supportsImplicitCasts) {
        throw createNoMatchingFunctionException(arguments);
    }
    // if none were found (candidate isn't present) try again with implicit casting
    candidate = findMatchingCandidate(arguments, true);
    if (candidate.isPresent()) {
        return candidate.get();
    }
    throw createNoMatchingFunctionException(arguments);
}
// Two all-null arguments could match either overload; the index must prefer the non-vararg
// (STRING, INT) candidate over the STRING_VARARGS one.
@Test
public void shouldChooseNonVarargWithNullValuesOfDifferingSchemas() {
    // Given:
    givenFunctions(
        function(EXPECTED, -1, STRING, INT),
        function(OTHER, 0, STRING_VARARGS)
    );
    // When:
    final KsqlScalarFunction fun = udfIndex.getFunction(Arrays.asList(SqlArgument.of(null, null), SqlArgument.of(null, null)));
    // Then:
    assertThat(fun.name(), equalTo(EXPECTED));
}
// Drops an Iceberg table from the Hive metastore. When purge is requested, the current table
// metadata is loaded first (best effort — a NotFoundException only logs a warning) so data and
// metadata files can be removed after the HMS drop. Returns false for invalid identifiers or
// missing tables; Thrift failures become RuntimeException and interruption re-sets the flag.
@Override public boolean dropTable(TableIdentifier identifier, boolean purge) { if (!isValidIdentifier(identifier)) { return false; } String database = identifier.namespace().level(0); TableOperations ops = newTableOps(identifier); TableMetadata lastMetadata = null; if (purge) { try { lastMetadata = ops.current(); } catch (NotFoundException e) { LOG.warn( "Failed to load table metadata for table: {}, continuing drop without purge", identifier, e); } } try { clients.run(client -> { client.dropTable(database, identifier.name(), false /* do not delete data */, false /* throw NoSuchObjectException if the table doesn't exist */); return null; }); if (purge && lastMetadata != null) { CatalogUtil.dropTableData(ops.io(), lastMetadata); } LOG.info("Dropped table: {}", identifier); return true; } catch (NoSuchTableException | NoSuchObjectException e) { LOG.info("Skipping drop, table does not exist: {}", identifier, e); return false; } catch (TException e) { throw new RuntimeException("Failed to drop " + identifier, e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException("Interrupted in call to dropTable", e); } }
// Creates a table with a custom ascending NULLS_FIRST sort order on "id" and verifies the order
// id, field count, direction, null order, identity transform, and the serialized order stored in
// the HMS table parameters; the table is dropped in the finally block.
@Test public void testCreateTableCustomSortOrder() throws Exception { Schema schema = getTestSchema(); PartitionSpec spec = PartitionSpec.builderFor(schema).bucket("data", 16).build(); SortOrder order = SortOrder.builderFor(schema).asc("id", NULLS_FIRST).build(); TableIdentifier tableIdent = TableIdentifier.of(DB_NAME, "tbl"); try { Table table = catalog.buildTable(tableIdent, schema) .withPartitionSpec(spec) .withSortOrder(order) .create(); SortOrder sortOrder = table.sortOrder(); assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(1); assertThat(sortOrder.fields()).as("Order must have 1 field").hasSize(1); assertThat(sortOrder.fields().get(0).direction()).as("Direction must match ").isEqualTo(ASC); assertThat(sortOrder.fields().get(0).nullOrder()) .as("Null order must match ") .isEqualTo(NULLS_FIRST); Transform<?, ?> transform = Transforms.identity(Types.IntegerType.get()); assertThat(sortOrder.fields().get(0).transform()) .as("Transform must match") .isEqualTo(transform); assertThat(hmsTableParameters()) .containsEntry(DEFAULT_SORT_ORDER, SortOrderParser.toJson(table.sortOrder())); } finally { catalog.dropTable(tableIdent); } }
/**
 * Returns this projection with the fields selected by {@code other} removed.
 * NOTE(review): per the accompanying tests, remaining indices appear to be remapped into the
 * reduced index space and overlapping nested projections are rejected — confirm against the
 * concrete implementation.
 */
public abstract Projection difference(Projection other);
// difference removes the fields selected by the argument and remaps the surviving indices;
// nested projections are supported, and differencing with an overlapping nested projection is
// rejected with IllegalArgumentException.
@Test void testDifference() { assertThat( Projection.of(new int[] {4, 1, 0, 3, 2}) .difference(Projection.of(new int[] {4, 2}))) .isEqualTo(Projection.of(new int[] {1, 0, 2})); assertThat( Projection.of( new int[][] { new int[] {4}, new int[] {1, 3}, new int[] {0}, new int[] {3, 1}, new int[] {2} }) .difference(Projection.of(new int[] {4, 2}))) .isEqualTo( Projection.of( new int[][] {new int[] {1, 3}, new int[] {0}, new int[] {2, 1}})); assertThatThrownBy( () -> Projection.of(new int[] {1, 2, 3, 4}) .difference( Projection.of( new int[][] { new int[] {2}, new int[] {3, 4} }))) .isInstanceOf(IllegalArgumentException.class); }
// Returns the stored session identifier.
@Override public String getSessionId() { return sessionID; }
// delete-config targeting the "running" datastore over a chunked-framing session is expected to
// be rejected (deleteConfig returns false); any NetconfException fails the test.
@Test public void testDeleteConfigRequestWithRunningDatastoreIdDurationWithChunkedFraming() { log.info("Starting delete-config async"); assertNotNull("Incorrect sessionId", session3.getSessionId()); try { assertFalse("NETCONF delete-config command failed", session3.deleteConfig(RUNNING)); } catch (NetconfException e) { e.printStackTrace(); fail("NETCONF delete-config test failed: " + e.getMessage()); } log.info("Finishing delete-config async"); }
// Factory for the map-view transform. NOTE(review): the boolean ctor argument presumably toggles
// multimap/duplicate-key behavior — confirm against AsMap's constructor.
public static <K, V> AsMap<K, V> asMap() { return new AsMap<>(false); }
// A map side input built with a non-deterministic key coder must still support lookups: each
// main-input word is joined with the side-input value keyed by its first letter.
@Test @Category({ValidatesRunner.class}) public void testMapSideInputWithNonDeterministicKeyCoder() { final PCollectionView<Map<String, Integer>> view = pipeline .apply( "CreateSideInput", Create.of(KV.of("a", 1), KV.of("b", 3)) .withCoder(KvCoder.of(new NonDeterministicStringCoder(), VarIntCoder.of()))) .apply(View.asMap()); PCollection<KV<String, Integer>> output = pipeline .apply("CreateMainInput", Create.of("apple", "banana", "blackberry")) .apply( "OutputSideInputs", ParDo.of( new DoFn<String, KV<String, Integer>>() { @ProcessElement public void processElement(ProcessContext c) { c.output( KV.of( c.element(), c.sideInput(view).get(c.element().substring(0, 1)))); } }) .withSideInputs(view)); PAssert.that(output) .containsInAnyOrder(KV.of("apple", 1), KV.of("banana", 3), KV.of("blackberry", 3)); pipeline.run(); }
// Builds the partition spec (column -> value) for a row: each configured partition column's field
// is stringified, with null or empty values replaced by defaultPartValue; LinkedHashMap preserves
// the configured partition-column order.
@Override public LinkedHashMap<String, String> generatePartValues(Row in) throws Exception { LinkedHashMap<String, String> partSpec = new LinkedHashMap<>(); for (int i = 0; i < partitionIndexes.length; i++) { int index = partitionIndexes[i]; Object field = in.getField(index); String partitionValue = field != null ? field.toString() : null; if (partitionValue == null || "".equals(partitionValue)) { partitionValue = defaultPartValue; } partSpec.put(partitionColumns[i], partitionValue); } return partSpec; }
// p1/p2 are taken from row fields 1 and 2; a null partition field falls back to "myDefaultname".
@Test void testComputePartition() throws Exception { RowPartitionComputer computer = new RowPartitionComputer( "myDefaultname", new String[] {"f1", "p1", "p2", "f2"}, new String[] {"p1", "p2"}); assertThat(generatePartitionPath(computer.generatePartValues(Row.of(1, 2, 3, 4)))) .isEqualTo("p1=2/p2=3/"); assertThat(generatePartitionPath(computer.generatePartValues(Row.of(1, null, 3, 4)))) .isEqualTo("p1=myDefaultname/p2=3/"); }
// Translates a Pipeline to its Runner API proto using SdkComponents derived from the pipeline's
// options.
public static RunnerApi.Pipeline toProto(Pipeline pipeline) { return toProto(pipeline, SdkComponents.create(pipeline.getOptions())); }
// A DoFn requiring stable input must surface REQUIRES_STABLE_INPUT_URN in the translated proto's
// requirements list. NOTE(review): this calls a two-arg toProto(pipeline, boolean) overload not
// shown alongside the single-arg focal method.
@Test public void testRequirements() { Pipeline pipeline = Pipeline.create(); pipeline.apply(Create.of(1, 2, 3)).apply(ParDo.of(new DoFnRequiringStableInput())); RunnerApi.Pipeline pipelineProto = PipelineTranslation.toProto(pipeline, false); assertThat( pipelineProto.getRequirementsList(), hasItem(ParDoTranslation.REQUIRES_STABLE_INPUT_URN)); }
// Determines the Kafka version change as a future chain: list the pods, detect the from/to
// versions from them, then prepare the version change.
public Future<KafkaVersionChange> reconcile() { return getPods() .compose(this::detectToAndFromVersions) .compose(i -> prepareVersionChange()); }
// Pods lacking the strimzi.io/kafka-version annotation must fail reconcile with a
// KafkaUpgradeException carrying the documented message.
@Test public void testExistingClusterWithPodWithoutAnnotations(VertxTestContext context) { VersionChangeCreator vcc = mockVersionChangeCreator( mockKafka(VERSIONS.defaultVersion().version(), VERSIONS.defaultVersion().metadataVersion(), VERSIONS.defaultVersion().metadataVersion()), mockRos(mockUniformPods(null)) ); Checkpoint async = context.checkpoint(); vcc.reconcile().onComplete(context.failing(c -> context.verify(() -> { assertThat(c, is(instanceOf(KafkaUpgradeException.class))); assertThat(c.getMessage(), is("Kafka Pods exist, but do not contain the strimzi.io/kafka-version annotation to detect their version. Kafka upgrade cannot be detected.")); async.flag(); }))); }
// Merges newColStats into aggregateColStats for date columns: widens low/high bounds, sums null
// counts, merges the NDV estimators (writing the merged estimator back), merges the KLL histogram
// estimators, then stores the updated date stats on the aggregate object.
@Override public void merge(ColumnStatisticsObj aggregateColStats, ColumnStatisticsObj newColStats) { LOG.debug("Merging statistics: [aggregateColStats:{}, newColStats: {}]", aggregateColStats, newColStats); DateColumnStatsDataInspector aggregateData = dateInspectorFromStats(aggregateColStats); DateColumnStatsDataInspector newData = dateInspectorFromStats(newColStats); Date lowValue = mergeLowValue(getLowValue(aggregateData), getLowValue(newData)); if (lowValue != null) { aggregateData.setLowValue(lowValue); } Date highValue = mergeHighValue(getHighValue(aggregateData), getHighValue(newData)); if (highValue != null) { aggregateData.setHighValue(highValue); } aggregateData.setNumNulls(mergeNumNulls(aggregateData.getNumNulls(), newData.getNumNulls())); NumDistinctValueEstimator oldNDVEst = aggregateData.getNdvEstimator(); NumDistinctValueEstimator newNDVEst = newData.getNdvEstimator(); List<NumDistinctValueEstimator> ndvEstimatorsList = Arrays.asList(oldNDVEst, newNDVEst); aggregateData.setNumDVs(mergeNumDistinctValueEstimator(aggregateColStats.getColName(), ndvEstimatorsList, aggregateData.getNumDVs(), newData.getNumDVs())); aggregateData.setNdvEstimator(ndvEstimatorsList.get(0)); KllHistogramEstimator oldKllEst = aggregateData.getHistogramEstimator(); KllHistogramEstimator newKllEst = newData.getHistogramEstimator(); aggregateData.setHistogramEstimator(mergeHistogramEstimator(aggregateColStats.getColName(), oldKllEst, newKllEst)); aggregateColStats.getStatsData().setDateStats(aggregateData); }
// Two successive merges: bounds widen to [DATE_1, DATE_3], null counts sum to 6, and the merged
// HLL/KLL sketches accumulate all observed day values, yielding 3 distinct values.
@Test public void testMergeNonNullValues() { ColumnStatisticsObj aggrObj = createColumnStatisticsObj(new ColStatsBuilder<>(Date.class) .low(DATE_2) .high(DATE_2) .numNulls(2) .numDVs(1) .hll(DATE_2.getDaysSinceEpoch()) .kll(DATE_2.getDaysSinceEpoch()) .build()); ColumnStatisticsObj newObj = createColumnStatisticsObj(new ColStatsBuilder<>(Date.class) .low(DATE_3) .high(DATE_3) .numNulls(3) .numDVs(1) .hll(DATE_3.getDaysSinceEpoch()) .kll(DATE_3.getDaysSinceEpoch()) .build()); merger.merge(aggrObj, newObj); newObj = createColumnStatisticsObj(new ColStatsBuilder<>(Date.class) .low(DATE_1) .high(DATE_1) .numNulls(1) .numDVs(1) .hll(DATE_1.getDaysSinceEpoch(), DATE_1.getDaysSinceEpoch()) .kll(DATE_1.getDaysSinceEpoch(), DATE_1.getDaysSinceEpoch()) .build()); merger.merge(aggrObj, newObj); ColumnStatisticsData expectedColumnStatisticsData = new ColStatsBuilder<>(Date.class) .low(DATE_1) .high(DATE_3) .numNulls(6) .numDVs(3) .hll(DATE_2.getDaysSinceEpoch(), DATE_3.getDaysSinceEpoch(), DATE_1.getDaysSinceEpoch(), DATE_1.getDaysSinceEpoch()) .kll(DATE_2.getDaysSinceEpoch(), DATE_3.getDaysSinceEpoch(), DATE_1.getDaysSinceEpoch(), DATE_1.getDaysSinceEpoch()) .build(); assertEquals(expectedColumnStatisticsData, aggrObj.getStatsData()); }
// INT overload of cot: null-safe widening of the argument to Double, then delegates to cot(Double).
@Udf(description = "Returns the cotangent of an INT value") public Double cot( @UdfParameter( value = "value", description = "The value in radians to get the cotangent of." ) final Integer value ) { return cot(value == null ? null : value.doubleValue()); }
// Exercises cot on positive inputs across the double/int/long overloads; cot has poles at
// multiples of PI, hence the astronomically large expected values near PI and 2*PI.
@Test public void shouldHandlePositive() { assertThat(udf.cot(0.43), closeTo(2.1804495406685085, 0.000000000000001)); assertThat(udf.cot(Math.PI), closeTo(-8.165619676597685E15, 0.000000000000001)); assertThat(udf.cot(Math.PI * 2), closeTo(-4.0828098382988425E15, 0.000000000000001)); assertThat(udf.cot(6), closeTo(-3.436353004180128, 0.000000000000001)); assertThat(udf.cot(6L), closeTo(-3.436353004180128, 0.000000000000001)); }
// Initializes (or returns the already-initialized) process-wide JvmMetrics singleton for the
// given process name and session id.
public static JvmMetrics initSingleton(String processName, String sessionId) { return Singleton.INSTANCE.init(processName, sessionId); }
// A second initSingleton call with a different process name must return the same instance and
// keep the process name from the first initialization.
@Test public void testJvmMetricsSingletonWithDifferentProcessNames() { final String process1Name = "process1"; JvmMetrics jvmMetrics1 = org.apache.hadoop.metrics2.source.JvmMetrics .initSingleton(process1Name, null); final String process2Name = "process2"; JvmMetrics jvmMetrics2 = org.apache.hadoop.metrics2.source.JvmMetrics .initSingleton(process2Name, null); Assert.assertEquals("initSingleton should return the singleton instance", jvmMetrics1, jvmMetrics2); Assert.assertEquals("unexpected process name of the singleton instance", process1Name, jvmMetrics1.processName); Assert.assertEquals("unexpected process name of the singleton instance", process1Name, jvmMetrics2.processName); }
// Rejects load timeouts outside [minLoadTimeOutSecond, maxLoadTimeoutSecond] by reporting an
// ERR_INVALID_VALUE analysis exception that names the valid range.
public static void checkValidTimeoutSecond(long timeoutSecond, int maxLoadTimeoutSecond, int minLoadTimeOutSecond) throws AnalysisException { if (timeoutSecond > maxLoadTimeoutSecond || timeoutSecond < minLoadTimeOutSecond) { ErrorReport.reportAnalysisException(ErrorCode.ERR_INVALID_VALUE, "timeout", timeoutSecond, String.format("between %d and %d seconds", minLoadTimeOutSecond, maxLoadTimeoutSecond)); } }
// timeout 1 is below the [2, 3] range and must raise the documented AnalysisException message.
@Test public void testCheckValidTimeoutSecond() { ExceptionChecker.expectThrowsWithMsg(AnalysisException.class, "Invalid timeout: '1'. Expected values should be between 2 and 3 seconds", () -> GlobalTransactionMgr.checkValidTimeoutSecond(1, 3, 2)); }
// Entry point for the async Feign fluent builder.
public static <C> AsyncBuilder<C> builder() { return new AsyncBuilder<>(); }
// Proxies built from the same target are equal and share hashCode/toString; proxies with a
// different URL or interface differ. A proxy is not equal to its target, yet mirrors the
// target's hashCode and toString.
@Test void equalsHashCodeAndToStringWork() { Target<TestInterfaceAsync> t1 = new HardCodedTarget<>(TestInterfaceAsync.class, "http://localhost:8080"); Target<TestInterfaceAsync> t2 = new HardCodedTarget<>(TestInterfaceAsync.class, "http://localhost:8888"); Target<OtherTestInterfaceAsync> t3 = new HardCodedTarget<>(OtherTestInterfaceAsync.class, "http://localhost:8080"); TestInterfaceAsync i1 = AsyncFeign.builder().target(t1); TestInterfaceAsync i2 = AsyncFeign.builder().target(t1); TestInterfaceAsync i3 = AsyncFeign.builder().target(t2); OtherTestInterfaceAsync i4 = AsyncFeign.builder().target(t3); assertThat(i1).isEqualTo(i2).isNotEqualTo(i3).isNotEqualTo(i4); assertThat(i1.hashCode()).isEqualTo(i2.hashCode()).isNotEqualTo(i3.hashCode()) .isNotEqualTo(i4.hashCode()); assertThat(i1.toString()).isEqualTo(i2.toString()).isNotEqualTo(i3.toString()) .isNotEqualTo(i4.toString()); assertThat(t1).isNotEqualTo(i1); assertThat(t1.hashCode()).isEqualTo(i1.hashCode()); assertThat(t1.toString()).isEqualTo(i1.toString()); }
// Convenience overload: delegates to the Stream-based bestMatch over the supported types.
public static String bestMatch(Collection<String> supported, String header) { return bestMatch(supported.stream(), header); }
// A server supporting JSON and PSON matched against a json+pson+html Accept header must not
// resolve to the empty type.
@Test public void testBestMatchForForNonNullMatch() { Assert.assertNotEquals(MIMEParse.bestMatch(Arrays.asList(new String[] { JSON_TYPE, PSON_TYPE }), JSON_PSON_HTML_HEADER), EMPTY_TYPE); }
public void validateDocumentGraph(List<SDDocumentType> documents) {
    // Validate every document type as a root of the document graph.
    documents.forEach(this::validateRoot);
}
// A diamond-shaped reference DAG (son/daughter inherit father+mother, who inherit grandfather,
// plus extra document references) must pass validation; note "son" appears twice in the input
// list, so duplicates are tolerated.
@Test void complex_dag_is_allowed() { Schema grandfather = createSearchWithName("grandfather"); Schema father = createSearchWithName("father", grandfather); Schema mother = createSearchWithName("mother", grandfather); createDocumentReference(father, mother, "wife_ref"); Schema son = createSearchWithName("son", father, mother); Schema daughter = createSearchWithName("daughter", father, mother); createDocumentReference(daughter, son, "brother_ref"); Schema randomGuy1 = createSearchWithName("randomguy1"); Schema randomGuy2 = createSearchWithName("randomguy2"); createDocumentReference(randomGuy1, mother, "secret_ref"); DocumentGraphValidator validator = new DocumentGraphValidator(); validator.validateDocumentGraph(documentListOf(son, father, grandfather, son, daughter, randomGuy1, randomGuy2)); }
/**
 * Builds and configures a JibContainerBuilder from the raw plugin configuration: base image and
 * app root, file modification times, image format/platforms, entrypoint and program arguments,
 * environment, ports, volumes, labels, user, creation time, working directory, and extra
 * directory layers.
 */
@VisibleForTesting
static JibContainerBuilder processCommonConfiguration(
    RawConfiguration rawConfiguration,
    InferredAuthProvider inferredAuthProvider,
    ProjectProperties projectProperties)
    throws InvalidFilesModificationTimeException, InvalidAppRootException,
        IncompatibleBaseImageJavaVersionException, IOException, InvalidImageReferenceException,
        InvalidContainerizingModeException, MainClassInferenceException, InvalidPlatformException,
        InvalidContainerVolumeException, InvalidWorkingDirectoryException,
        InvalidCreationTimeException, ExtraDirectoryNotFoundException {
  // Create and configure JibContainerBuilder
  ModificationTimeProvider modificationTimeProvider =
      createModificationTimeProvider(rawConfiguration.getFilesModificationTime());
  JavaContainerBuilder javaContainerBuilder =
      getJavaContainerBuilderWithBaseImage(rawConfiguration, projectProperties, inferredAuthProvider)
          .setAppRoot(getAppRootChecked(rawConfiguration, projectProperties))
          .setModificationTimeProvider(modificationTimeProvider);
  JibContainerBuilder jibContainerBuilder =
      projectProperties.createJibContainerBuilder(
          javaContainerBuilder, getContainerizingModeChecked(rawConfiguration, projectProperties));
  jibContainerBuilder
      .setFormat(rawConfiguration.getImageFormat())
      .setPlatforms(getPlatformsSet(rawConfiguration))
      .setEntrypoint(computeEntrypoint(rawConfiguration, projectProperties, jibContainerBuilder))
      .setProgramArguments(rawConfiguration.getProgramArguments().orElse(null))
      .setEnvironment(rawConfiguration.getEnvironment())
      .setExposedPorts(Ports.parse(rawConfiguration.getPorts()))
      .setVolumes(getVolumesSet(rawConfiguration))
      .setLabels(rawConfiguration.getLabels())
      .setUser(rawConfiguration.getUser().orElse(null))
      .setCreationTime(getCreationTime(rawConfiguration.getCreationTime(), projectProperties));
  getWorkingDirectoryChecked(rawConfiguration)
      .ifPresent(jibContainerBuilder::setWorkingDirectory);

  // Adds all the extra files.
  for (ExtraDirectoriesConfiguration extraDirectory : rawConfiguration.getExtraDirectories()) {
    Path from = extraDirectory.getFrom();
    if (Files.exists(from)) {
      jibContainerBuilder.addFileEntriesLayer(
          JavaContainerBuilderHelper.extraDirectoryLayerConfiguration(
              from,
              AbsoluteUnixPath.get(extraDirectory.getInto()),
              extraDirectory.getIncludesList(),
              extraDirectory.getExcludesList(),
              rawConfiguration.getExtraDirectoryPermissions(),
              modificationTimeProvider));
    } else if (!from.endsWith(DEFAULT_JIB_DIR)) {
      // A missing extra directory is only tolerated for the default jib directory.
      throw new ExtraDirectoryNotFoundException(from.toString(), from.toString());
    }
  }
  return jibContainerBuilder;
}
// With an explicit entrypoint configured, jvmFlags are ignored and an informational log message
// explains which options are dropped.
@Test public void testEntrypoint_warningOnJvmFlags() throws InvalidImageReferenceException, IOException, MainClassInferenceException, InvalidAppRootException, InvalidWorkingDirectoryException, InvalidPlatformException, InvalidContainerVolumeException, IncompatibleBaseImageJavaVersionException, NumberFormatException, InvalidContainerizingModeException, InvalidFilesModificationTimeException, InvalidCreationTimeException, ExtraDirectoryNotFoundException { when(rawConfiguration.getEntrypoint()) .thenReturn(Optional.of(Arrays.asList("custom", "entrypoint"))); when(rawConfiguration.getJvmFlags()).thenReturn(Collections.singletonList("jvmFlag")); ContainerBuildPlan buildPlan = processCommonConfiguration(); assertThat(buildPlan.getEntrypoint()).containsExactly("custom", "entrypoint").inOrder(); verify(projectProperties) .log( LogEvent.info( "mainClass, extraClasspath, jvmFlags, and expandClasspathDependencies are ignored " + "when entrypoint is specified")); }
public abstract HttpHeaders set(String name, Object value);
@Test public void testSetNullHeaderValueValidate() { final HttpHeaders headers = new DefaultHttpHeaders(true); assertThrows(NullPointerException.class, new Executable() { @Override public void execute() { headers.set(of("test"), (CharSequence) null); } }); }
/**
 * Parses game/spam chat messages to track charge counts of degradable items (dodgy necklace,
 * amulet of chemistry/bounty, binding necklace, ring of forging, chronicle, slayer bracelets,
 * blood essence, bracelet of clay) and fires configured notifications when items break.
 *
 * NOTE(review): branch order matters — several "break" messages arrive before the matching
 * "used" message, so some branches reset charges to MAX + 1 to stay in sync.
 */
@Subscribe
public void onChatMessage(ChatMessage event)
{
	if (event.getType() == ChatMessageType.GAMEMESSAGE || event.getType() == ChatMessageType.SPAM)
	{
		String message = Text.removeTags(event.getMessage());
		// One matcher per tracked item message format.
		Matcher dodgyCheckMatcher = DODGY_CHECK_PATTERN.matcher(message);
		Matcher dodgyProtectMatcher = DODGY_PROTECT_PATTERN.matcher(message);
		Matcher dodgyBreakMatcher = DODGY_BREAK_PATTERN.matcher(message);
		Matcher bindingNecklaceCheckMatcher = BINDING_CHECK_PATTERN.matcher(message);
		Matcher bindingNecklaceUsedMatcher = BINDING_USED_PATTERN.matcher(message);
		Matcher ringOfForgingCheckMatcher = RING_OF_FORGING_CHECK_PATTERN.matcher(message);
		Matcher amuletOfChemistryCheckMatcher = AMULET_OF_CHEMISTRY_CHECK_PATTERN.matcher(message);
		Matcher amuletOfChemistryUsedMatcher = AMULET_OF_CHEMISTRY_USED_PATTERN.matcher(message);
		Matcher amuletOfChemistryBreakMatcher = AMULET_OF_CHEMISTRY_BREAK_PATTERN.matcher(message);
		Matcher amuletOfBountyCheckMatcher = AMULET_OF_BOUNTY_CHECK_PATTERN.matcher(message);
		Matcher amuletOfBountyUsedMatcher = AMULET_OF_BOUNTY_USED_PATTERN.matcher(message);
		Matcher chronicleAddMatcher = CHRONICLE_ADD_PATTERN.matcher(message);
		Matcher chronicleUseAndCheckMatcher = CHRONICLE_USE_AND_CHECK_PATTERN.matcher(message);
		Matcher slaughterActivateMatcher = BRACELET_OF_SLAUGHTER_ACTIVATE_PATTERN.matcher(message);
		Matcher slaughterCheckMatcher = BRACELET_OF_SLAUGHTER_CHECK_PATTERN.matcher(message);
		Matcher expeditiousActivateMatcher = EXPEDITIOUS_BRACELET_ACTIVATE_PATTERN.matcher(message);
		Matcher expeditiousCheckMatcher = EXPEDITIOUS_BRACELET_CHECK_PATTERN.matcher(message);
		Matcher bloodEssenceCheckMatcher = BLOOD_ESSENCE_CHECK_PATTERN.matcher(message);
		Matcher bloodEssenceExtractMatcher = BLOOD_ESSENCE_EXTRACT_PATTERN.matcher(message);
		Matcher braceletOfClayCheckMatcher = BRACELET_OF_CLAY_CHECK_PATTERN.matcher(message);

		if (message.contains(RING_OF_RECOIL_BREAK_MESSAGE))
		{
			notifier.notify(config.recoilNotification(), "Your Ring of Recoil has shattered");
		}
		// Dodgy necklace: break resets to max; check/protect messages carry the remaining count.
		else if (dodgyBreakMatcher.find())
		{
			notifier.notify(config.dodgyNotification(), "Your dodgy necklace has crumbled to dust.");
			updateDodgyNecklaceCharges(MAX_DODGY_CHARGES);
		}
		else if (dodgyCheckMatcher.find())
		{
			updateDodgyNecklaceCharges(Integer.parseInt(dodgyCheckMatcher.group(1)));
		}
		else if (dodgyProtectMatcher.find())
		{
			updateDodgyNecklaceCharges(Integer.parseInt(dodgyProtectMatcher.group(1)));
		}
		else if (amuletOfChemistryCheckMatcher.find())
		{
			updateAmuletOfChemistryCharges(Integer.parseInt(amuletOfChemistryCheckMatcher.group(1)));
		}
		else if (amuletOfChemistryUsedMatcher.find())
		{
			final String match = amuletOfChemistryUsedMatcher.group(1);
			// The game spells out "one" instead of the digit 1.
			int charges = 1;
			if (!match.equals("one"))
			{
				charges = Integer.parseInt(match);
			}
			updateAmuletOfChemistryCharges(charges);
		}
		else if (amuletOfChemistryBreakMatcher.find())
		{
			notifier.notify(config.amuletOfChemistryNotification(), "Your amulet of chemistry has crumbled to dust.");
			updateAmuletOfChemistryCharges(MAX_AMULET_OF_CHEMISTRY_CHARGES);
		}
		else if (amuletOfBountyCheckMatcher.find())
		{
			updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyCheckMatcher.group(1)));
		}
		else if (amuletOfBountyUsedMatcher.find())
		{
			updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyUsedMatcher.group(1)));
		}
		else if (message.equals(AMULET_OF_BOUNTY_BREAK_TEXT))
		{
			updateAmuletOfBountyCharges(MAX_AMULET_OF_BOUNTY_CHARGES);
		}
		else if (message.contains(BINDING_BREAK_TEXT))
		{
			notifier.notify(config.bindingNotification(), BINDING_BREAK_TEXT);
			// This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
			updateBindingNecklaceCharges(MAX_BINDING_CHARGES + 1);
		}
		else if (bindingNecklaceUsedMatcher.find())
		{
			final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
			if (equipment.contains(ItemID.BINDING_NECKLACE))
			{
				updateBindingNecklaceCharges(getItemCharges(ItemChargeConfig.KEY_BINDING_NECKLACE) - 1);
			}
		}
		else if (bindingNecklaceCheckMatcher.find())
		{
			final String match = bindingNecklaceCheckMatcher.group(1);
			int charges = 1;
			if (!match.equals("one"))
			{
				charges = Integer.parseInt(match);
			}
			updateBindingNecklaceCharges(charges);
		}
		else if (ringOfForgingCheckMatcher.find())
		{
			final String match = ringOfForgingCheckMatcher.group(1);
			int charges = 1;
			if (!match.equals("one"))
			{
				charges = Integer.parseInt(match);
			}
			updateRingOfForgingCharges(charges);
		}
		else if (message.equals(RING_OF_FORGING_USED_TEXT) || message.equals(RING_OF_FORGING_VARROCK_PLATEBODY))
		{
			final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);
			final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);

			// Determine if the player smelted with a Ring of Forging equipped.
			if (equipment == null)
			{
				return;
			}
			if (equipment.contains(ItemID.RING_OF_FORGING) && (message.equals(RING_OF_FORGING_USED_TEXT) || inventory.count(ItemID.IRON_ORE) > 1))
			{
				// Clamp so the count never goes negative.
				int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_RING_OF_FORGING) - 1, 0, MAX_RING_OF_FORGING_CHARGES);
				updateRingOfForgingCharges(charges);
			}
		}
		else if (message.equals(RING_OF_FORGING_BREAK_TEXT))
		{
			notifier.notify(config.ringOfForgingNotification(), "Your ring of forging has melted.");
			// This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
			updateRingOfForgingCharges(MAX_RING_OF_FORGING_CHARGES + 1);
		}
		else if (chronicleAddMatcher.find())
		{
			final String match = chronicleAddMatcher.group(1);
			if (match.equals("one"))
			{
				setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
			}
			else
			{
				setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(match));
			}
		}
		else if (chronicleUseAndCheckMatcher.find())
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(chronicleUseAndCheckMatcher.group(1)));
		}
		else if (message.equals(CHRONICLE_ONE_CHARGE_TEXT))
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
		}
		else if (message.equals(CHRONICLE_EMPTY_TEXT) || message.equals(CHRONICLE_NO_CHARGES_TEXT))
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 0);
		}
		else if (message.equals(CHRONICLE_FULL_TEXT))
		{
			setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1000);
		}
		// Slayer bracelets: activate message with no capture group means the bracelet broke.
		else if (slaughterActivateMatcher.find())
		{
			final String found = slaughterActivateMatcher.group(1);
			if (found == null)
			{
				updateBraceletOfSlaughterCharges(MAX_SLAYER_BRACELET_CHARGES);
				notifier.notify(config.slaughterNotification(), BRACELET_OF_SLAUGHTER_BREAK_TEXT);
			}
			else
			{
				updateBraceletOfSlaughterCharges(Integer.parseInt(found));
			}
		}
		else if (slaughterCheckMatcher.find())
		{
			updateBraceletOfSlaughterCharges(Integer.parseInt(slaughterCheckMatcher.group(1)));
		}
		else if (expeditiousActivateMatcher.find())
		{
			final String found = expeditiousActivateMatcher.group(1);
			if (found == null)
			{
				updateExpeditiousBraceletCharges(MAX_SLAYER_BRACELET_CHARGES);
				notifier.notify(config.expeditiousNotification(), EXPEDITIOUS_BRACELET_BREAK_TEXT);
			}
			else
			{
				updateExpeditiousBraceletCharges(Integer.parseInt(found));
			}
		}
		else if (expeditiousCheckMatcher.find())
		{
			updateExpeditiousBraceletCharges(Integer.parseInt(expeditiousCheckMatcher.group(1)));
		}
		else if (bloodEssenceCheckMatcher.find())
		{
			updateBloodEssenceCharges(Integer.parseInt(bloodEssenceCheckMatcher.group(1)));
		}
		else if (bloodEssenceExtractMatcher.find())
		{
			updateBloodEssenceCharges(getItemCharges(ItemChargeConfig.KEY_BLOOD_ESSENCE) - Integer.parseInt(bloodEssenceExtractMatcher.group(1)));
		}
		else if (message.contains(BLOOD_ESSENCE_ACTIVATE_TEXT))
		{
			updateBloodEssenceCharges(MAX_BLOOD_ESSENCE_CHARGES);
		}
		else if (braceletOfClayCheckMatcher.find())
		{
			updateBraceletOfClayCharges(Integer.parseInt(braceletOfClayCheckMatcher.group(1)));
		}
		else if (message.equals(BRACELET_OF_CLAY_USE_TEXT) || message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN))
		{
			final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);

			// Determine if the player mined with a Bracelet of Clay equipped.
			if (equipment != null && equipment.contains(ItemID.BRACELET_OF_CLAY))
			{
				final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);

				// Charge is not used if only 1 inventory slot is available when mining in Prifddinas
				boolean ignore = inventory != null
					&& inventory.count() == 27
					&& message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN);

				if (!ignore)
				{
					int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_BRACELET_OF_CLAY) - 1, 0, MAX_BRACELET_OF_CLAY_CHARGES);
					updateBraceletOfClayCharges(charges);
				}
			}
		}
		else if (message.equals(BRACELET_OF_CLAY_BREAK_TEXT))
		{
			notifier.notify(config.braceletOfClayNotification(), "Your bracelet of clay has crumbled to dust");
			updateBraceletOfClayCharges(MAX_BRACELET_OF_CLAY_CHARGES);
		}
	}
}
@Test public void testChemistryUsed2() { ChatMessage chatMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", USED_AMULET_OF_CHEMISTRY_2_DOSES, "", 0); itemChargePlugin.onChatMessage(chatMessage); verify(configManager).setRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_AMULET_OF_CHEMISTRY, 1); }
@Nonnull public <T> T getInstance(@Nonnull Class<T> type) { return getInstance(new Key<>(type)); }
@Test public void whenArrayRequested_mayReturnMultiplePlugins() throws Exception { MultiThing[] multiThings = injector.getInstance(MultiThing[].class); // X comes first because it has a higher priority assertThat(classesOf(multiThings)) .containsExactly(MultiThingX.class, MultiThingA.class) .inOrder(); }
public static String getDoneFileName(JobIndexInfo indexInfo) throws IOException { return getDoneFileName(indexInfo, JHAdminConfig.DEFAULT_MR_HS_JOBNAME_LIMIT); }
/**
 * Verifies that job names containing multibyte UTF-8 characters are trimmed at a character
 * boundary (never mid-codepoint) when building the history done-file name, for 1- to 4-byte
 * characters and a mixed sequence. Each case round-trips the trimmed name through UTF-8 to
 * prove no partial characters were produced.
 */
@Test
public void testJobNameWithMultibyteChars() throws IOException {
    JobIndexInfo info = new JobIndexInfo();
    JobID oldJobId = JobID.forName(JOB_ID);
    JobId jobId = TypeConverter.toYarn(oldJobId);
    info.setJobId(jobId);
    info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
    info.setUser(USER_NAME);
    StringBuilder sb = new StringBuilder();
    info.setFinishTime(Long.parseLong(FINISH_TIME));
    info.setNumMaps(Integer.parseInt(NUM_MAPS));
    info.setNumReduces(Integer.parseInt(NUM_REDUCES));
    info.setJobStatus(JOB_STATUS);
    info.setQueueName(QUEUE_NAME);
    info.setJobStartTime(Long.parseLong(JOB_START_TIME));

    // Test for 1 byte UTF-8 character
    // which is encoded into 1 x 3 = 3 characters by URL encode.
    for (int i = 0; i < 100; i++) {
        sb.append('%');
    }
    String longJobName = sb.toString();
    info.setJobName(longJobName);

    String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 50);
    assertTrue(jobHistoryFile.length() <= 255);
    String trimedJobName = jobHistoryFile.split(
        FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name
    // 3 x 16 < 50 < 3 x 17 so the length of trimedJobName should be 48
    assertEquals(48, trimedJobName.getBytes(UTF_8).length);

    // validate whether trimmedJobName by testing reversibility
    byte[] trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
    String reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
    assertArrayEquals(trimedJobNameInByte,
        reEncodedTrimedJobName.getBytes(UTF_8));
    sb.setLength(0);

    // Test for 2 bytes UTF-8 character
    // which is encoded into 2 x 3 = 6 characters by URL encode.
    for (int i = 0; i < 100; i++) {
        sb.append('\u03A9'); // large omega
    }
    longJobName = sb.toString();
    info.setJobName(longJobName);

    jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 27);
    assertTrue(jobHistoryFile.length() <= 255);
    trimedJobName = jobHistoryFile.split(
        FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name
    // 6 x 4 < 27 < 6 x 5 so the length of trimedJobName should be 24
    assertEquals(24, trimedJobName.getBytes(UTF_8).length);

    // validate whether trimmedJobName by testing reversibility
    trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
    reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
    assertArrayEquals(trimedJobNameInByte,
        reEncodedTrimedJobName.getBytes(UTF_8));
    sb.setLength(0);

    // Test for 3 bytes UTF-8 character
    // which is encoded into 3 x 3 = 9 characters by URL encode.
    for (int i = 0; i < 100; i++) {
        sb.append('\u2192'); // rightwards arrow
    }
    longJobName = sb.toString();
    info.setJobName(longJobName);

    jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 40);
    assertTrue(jobHistoryFile.length() <= 255);
    trimedJobName = jobHistoryFile.split(
        FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name
    // 9 x 4 < 40 < 9 x 5 so the length of trimedJobName should be 36
    assertEquals(36, trimedJobName.getBytes(UTF_8).length);

    // validate whether trimmedJobName by testing reversibility
    trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
    reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
    assertArrayEquals(trimedJobNameInByte,
        reEncodedTrimedJobName.getBytes(UTF_8));
    sb.setLength(0);

    // Test for 4 bytes UTF-8 character
    // which is encoded into 4 x 3 = 12 characters by URL encode.
    for (int i = 0; i < 100; i++) {
        sb.append("\uD867\uDE3D"); // Mugil cephalus in Kanji.
    }
    longJobName = sb.toString();
    info.setJobName(longJobName);

    jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 49);
    assertTrue(jobHistoryFile.length() <= 255);
    trimedJobName = jobHistoryFile.split(
        FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name
    // 12 x 4 < 49 < 12 x 5 so the length of trimedJobName should be 48
    assertEquals(48, trimedJobName.getBytes(UTF_8).length);

    // validate whether trimmedJobName by testing reversibility
    trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
    reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
    assertArrayEquals(trimedJobNameInByte,
        reEncodedTrimedJobName.getBytes(UTF_8));
    sb.setLength(0);

    // Test for the combination of 1 to 4 bytes UTF-8 characters
    sb.append('\u732B') // cat in Kanji (encoded into 3 bytes x 3 characters)
        .append("[") // (encoded into 1 byte x 3 characters)
        .append('\u03BB') // small lambda (encoded into 2 bytes x 3 characters)
        .append('/') // (encoded into 1 byte x 3 characters)
        .append('A') // not url-encoded (1 byte x 1 character)
        .append("\ud867\ude49") // flying fish in
        // Kanji (encoded into 4 bytes x 3 characters)
        .append('\u72AC'); // dog in Kanji (encoded into 3 bytes x 3 characters)
    longJobName = sb.toString();
    info.setJobName(longJobName);

    jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 23);
    assertTrue(jobHistoryFile.length() <= 255);
    trimedJobName = jobHistoryFile.split(
        FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name
    // total size of the first 5 characters = 22
    // 23 < total size of the first 6 characters
    assertEquals(22, trimedJobName.getBytes(UTF_8).length);

    // validate whether trimmedJobName by testing reversibility
    trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
    reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
    assertArrayEquals(trimedJobNameInByte,
        reEncodedTrimedJobName.getBytes(UTF_8));
}
public Set<MapperConfig> load(InputStream inputStream) throws IOException { final PrometheusMappingConfig config = ymlMapper.readValue(inputStream, PrometheusMappingConfig.class); return config.metricMappingConfigs() .stream() .flatMap(this::mapMetric) .collect(Collectors.toSet()); }
@Test void loadBytes() throws Exception { final Map<String, ImmutableList<Serializable>> config = Collections.singletonMap("metric_mappings", ImmutableList.of( ImmutableMap.of( "metric_name", "test1", "match_pattern", "foo.*.bar", "wildcard_extract_labels", ImmutableList.of("first"), "additional_labels", ImmutableMap.of("another", "label") ), ImmutableMap.of( "metric_name", "test2", "match_pattern", "hello.world", "additional_labels", ImmutableMap.of("another", "label") ), ImmutableMap.of( "metric_name", "test3", "match_pattern", "one.*.three", "wildcard_extract_labels", ImmutableList.of("two") ) )); assertThat(configLoader.load(new ByteArrayInputStream(objectMapper.writeValueAsBytes(config)))).containsExactlyInAnyOrder( new MapperConfig( "foo.*.bar", "gl_test1", ImmutableMap.of( "node", "5ca1ab1e-0000-4000-a000-000000000000", "another", "label", "first", "${0}" ) ), new MapperConfig( "hello.world", "gl_test2", ImmutableMap.of( "node", "5ca1ab1e-0000-4000-a000-000000000000", "another", "label" ) ), new MapperConfig( "one.*.three", "gl_test3", ImmutableMap.of( "node", "5ca1ab1e-0000-4000-a000-000000000000", "two", "${0}" ) ) ); }
@Override @CheckForNull public EmailMessage format(Notification notif) { if (!(notif instanceof ChangesOnMyIssuesNotification)) { return null; } ChangesOnMyIssuesNotification notification = (ChangesOnMyIssuesNotification) notif; if (notification.getChange() instanceof AnalysisChange) { checkState(!notification.getChangedIssues().isEmpty(), "changedIssues can't be empty"); return formatAnalysisNotification(notification.getChangedIssues().keySet().iterator().next(), notification); } return formatMultiProject(notification); }
/**
 * For an analysis change, issue links must be grouped by rule: up to 40 issues get a single
 * "See all N issues" link, while larger groups are split into 40-issue pages ("1-40", "41-80",
 * then a single-issue link for the remainder).
 */
@Test
public void formats_returns_html_message_with_multiple_links_by_rule_of_groups_of_up_to_40_issues_when_analysis_change() {
    Project project1 = newProject("1");
    Rule rule1 = newRandomNotAHotspotRule("1");
    Rule rule2 = newRandomNotAHotspotRule("a");
    String host = randomAlphabetic(15);
    String issueStatusClosed = STATUS_CLOSED;
    String otherIssueStatus = STATUS_RESOLVED;
    // 39 and 40 issues stay on a single link; 81 issues paginate as 1-40 / 41-80 / 81.
    List<ChangedIssue> changedIssues = Stream.of(
        IntStream.range(0, 39).mapToObj(i -> newChangedIssue("39_" + i, issueStatusClosed, project1, rule1)),
        IntStream.range(0, 40).mapToObj(i -> newChangedIssue("40_" + i, issueStatusClosed, project1, rule2)),
        IntStream.range(0, 81).mapToObj(i -> newChangedIssue("1-40_41-80_1_" + i, otherIssueStatus, project1, rule2)),
        IntStream.range(0, 6).mapToObj(i -> newChangedIssue("6_" + i, otherIssueStatus, project1, rule1)))
        .flatMap(t -> t)
        .collect(toList());
    Collections.shuffle(changedIssues);
    AnalysisChange analysisChange = newAnalysisChange();
    when(emailSettings.getServerBaseURL()).thenReturn(host);

    EmailMessage emailMessage = underTest.format(new ChangesOnMyIssuesNotification(analysisChange, ImmutableSet.copyOf(changedIssues)));

    HtmlFragmentAssert.assertThat(emailMessage.getMessage())
        .hasParagraph().hasParagraph() // skip header
        .hasParagraph("Closed issues:") // skip title based on status
        .hasList(
            "Rule " + rule1.getName() + " - See all 39 issues",
            "Rule " + rule2.getName() + " - See all 40 issues")
        .withLink("See all 39 issues",
            host + "/project/issues?id=" + project1.getKey()
                + "&issues=" + IntStream.range(0, 39).mapToObj(i -> "39_" + i).sorted().collect(joining("%2C")))
        .withLink("See all 40 issues",
            host + "/project/issues?id=" + project1.getKey()
                + "&issues=" + IntStream.range(0, 40).mapToObj(i -> "40_" + i).sorted().collect(joining("%2C")))
        .hasParagraph("Open issues:")
        .hasList(
            "Rule " + rule2.getName() + " - See issues 1-40 41-80 81",
            "Rule " + rule1.getName() + " - See all 6 issues")
        .withLink("1-40",
            host + "/project/issues?id=" + project1.getKey()
                + "&issues=" + IntStream.range(0, 81).mapToObj(i -> "1-40_41-80_1_" + i).sorted().limit(40).collect(joining("%2C")))
        .withLink("41-80",
            host + "/project/issues?id=" + project1.getKey()
                + "&issues=" + IntStream.range(0, 81).mapToObj(i -> "1-40_41-80_1_" + i).sorted().skip(40).limit(40).collect(joining("%2C")))
        .withLink("81",
            host + "/project/issues?id=" + project1.getKey()
                + "&issues=" + "1-40_41-80_1_9"
                + "&open=" + "1-40_41-80_1_9")
        .withLink("See all 6 issues",
            host + "/project/issues?id=" + project1.getKey()
                + "&issues=" + IntStream.range(0, 6).mapToObj(i -> "6_" + i).sorted().collect(joining("%2C")))
        .hasParagraph().hasParagraph() // skip footer
        .noMoreBlock();
}
@Override @Transactional(rollbackFor = Exception.class) public void updateSpu(ProductSpuSaveReqVO updateReqVO) { // 校验 SPU 是否存在 validateSpuExists(updateReqVO.getId()); // 校验分类、品牌 validateCategory(updateReqVO.getCategoryId()); brandService.validateProductBrand(updateReqVO.getBrandId()); // 校验SKU List<ProductSkuSaveReqVO> skuSaveReqList = updateReqVO.getSkus(); productSkuService.validateSkuList(skuSaveReqList, updateReqVO.getSpecType()); // 更新 SPU ProductSpuDO updateObj = BeanUtils.toBean(updateReqVO, ProductSpuDO.class); initSpuFromSkus(updateObj, skuSaveReqList); productSpuMapper.updateById(updateObj); // 批量更新 SKU productSkuService.updateSkuList(updateObj.getId(), updateReqVO.getSkus()); }
@Test public void testUpdateSpu_success() { // 准备参数 ProductSpuDO createReqVO = randomPojo(ProductSpuDO.class,o->{ o.setCategoryId(generateId()); o.setBrandId(generateId()); o.setDeliveryTemplateId(generateId()); o.setSort(RandomUtil.randomInt(1,100)); // 限制排序范围 o.setGiveIntegral(generaInt()); // 限制范围为正整数 o.setVirtualSalesCount(generaInt()); // 限制范围为正整数 o.setPrice(generaInt()); // 限制范围为正整数 o.setMarketPrice(generaInt()); // 限制范围为正整数 o.setCostPrice(generaInt()); // 限制范围为正整数 o.setStock(generaInt()); // 限制范围为正整数 o.setGiveIntegral(generaInt()); // 限制范围为正整数 o.setSalesCount(generaInt()); // 限制范围为正整数 o.setBrowseCount(generaInt()); // 限制范围为正整数 }); productSpuMapper.insert(createReqVO); // 准备参数 ProductSkuSaveReqVO skuCreateOrUpdateReqVO = randomPojo(ProductSkuSaveReqVO.class, o->{ // 限制范围为正整数 o.setCostPrice(generaInt()); o.setPrice(generaInt()); o.setMarketPrice(generaInt()); o.setStock(generaInt()); o.setFirstBrokeragePrice(generaInt()); o.setSecondBrokeragePrice(generaInt()); // 限制分数为两位数 o.setWeight(RandomUtil.randomDouble(10,2, RoundingMode.HALF_UP)); o.setVolume(RandomUtil.randomDouble(10,2, RoundingMode.HALF_UP)); }); // 准备参数 ProductSpuSaveReqVO reqVO = randomPojo(ProductSpuSaveReqVO.class, o -> { o.setId(createReqVO.getId()); // 设置更新的 ID o.setCategoryId(generateId()); o.setBrandId(generateId()); o.setSort(RandomUtil.randomInt(1,100)); // 限制排序范围 o.setGiveIntegral(generaInt()); // 限制范围为正整数 o.setVirtualSalesCount(generaInt()); // 限制范围为正整数 o.setGiveIntegral(generaInt()); // 限制范围为正整数 o.setSalesCount(generaInt()); // 限制范围为正整数 o.setBrowseCount(generaInt()); // 限制范围为正整数 o.setSkus(newArrayList(skuCreateOrUpdateReqVO,skuCreateOrUpdateReqVO,skuCreateOrUpdateReqVO)); }); when(categoryService.getCategoryLevel(eq(reqVO.getCategoryId()))).thenReturn(2); // 调用 productSpuService.updateSpu(reqVO); // 校验是否更新正确 ProductSpuDO spu = productSpuMapper.selectById(reqVO.getId()); // 获取最新的 assertPojoEquals(reqVO, spu); }
public Struct put(String fieldName, Object value) { Field field = lookupField(fieldName); return put(field, value); }
@Test public void testInvalidMapKeyElements() { assertThrows(DataException.class, () -> new Struct(NESTED_SCHEMA).put("map", Collections.singletonMap("should fail because keys should be int8s", (byte) 12))); }
@Override public void run() { try { // make sure we call afterRun() even on crashes // and operate countdown latches, else we may hang the parallel runner if (steps == null) { beforeRun(); } if (skipped) { return; } int count = steps.size(); int index = 0; while ((index = nextStepIndex()) < count) { currentStep = steps.get(index); execute(currentStep); if (currentStepResult != null) { // can be null if debug step-back or hook skip result.addStepResult(currentStepResult); } } } catch (Exception e) { if (currentStepResult != null) { result.addStepResult(currentStepResult); } logError("scenario [run] failed\n" + StringUtils.throwableToString(e)); currentStepResult = result.addFakeStepResult("scenario [run] failed", e); } finally { if (!skipped) { afterRun(); if (isFailed() && engine.getConfig().isAbortSuiteOnFailure()) { featureRuntime.suite.abort(); } } if (caller.isNone()) { logAppender.close(); // reclaim memory } } }
@Test void testContinueOnStepFailure3() { fail = true; // bad idea to continue/ignore anything else other than match but ... run( "def var = 'foo'", "configure continueOnStepFailure = { enabled: true, continueAfter: true, keywords: ['match', 'def'] }", "match var == 'bar'", "def var2 = function() { syntax error in here };", "match var == 'pub'", "match var == 'crawl'", "match var == 'foo'", "configure continueOnStepFailure = { enabled: false }", "match var == 'foo'", "match var == 'bar2'", "match var == 'foo'" ); assertEquals("match var == 'bar2'", sr.result.getFailedStep().getStep().getText()); }
/**
 * Renders this ITSP header as a multi-line, human-readable dump of every field, one field
 * per line, with short explanations of the less obvious values.
 *
 * @return the formatted header description
 */
@Override
public String toString() {
    // Hoisted: the original looked up the line separator once per appended field.
    // System.lineSeparator() returns the same value as getProperty("line.separator").
    final String sep = System.lineSeparator();
    StringBuilder sb = new StringBuilder();
    sb.append("[ signature:=")
            .append(new String(getSignature(), UTF_8))
            .append(sep);
    sb.append("version:=\t")
            .append(getVersion())
            .append(sep);
    sb.append("header_len:=\t")
            .append(getHeader_len())
            .append(sep);
    sb.append("unknown_00c:=\t")
            .append(getUnknown_000c())
            .append(sep);
    sb.append("block_len:=\t")
            .append(getBlock_len())
            .append(" [directory chunk size]")
            .append(sep);
    sb.append("blockidx_intvl:=")
            .append(getBlockidx_intvl())
            .append(", density of quickref section, usually 2")
            .append(sep);
    sb.append("index_depth:=\t")
            .append(getIndex_depth())
            .append(", depth of the index tree - 1 there is no index, 2 if there is one level of PMGI")
            .append(" chunk")
            .append(sep);
    sb.append("index_root:=\t")
            .append(getIndex_root())
            .append(", chunk number of root index chunk, -1 if there is none")
            .append(sep);
    sb.append("index_head:=\t")
            .append(getIndex_head())
            .append(", chunk number of first PMGL (listing) chunk")
            .append(sep);
    sb.append("unknown_0024:=\t")
            .append(getUnknown_0024())
            .append(", chunk number of last PMGL (listing) chunk")
            .append(sep);
    sb.append("num_blocks:=\t")
            .append(getNum_blocks())
            .append(", -1 (unknown)")
            .append(sep);
    sb.append("unknown_002c:=\t")
            .append(getUnknown_002c())
            .append(", number of directory chunks (total)")
            .append(sep);
    sb.append("lang_id:=\t")
            .append(getLang_id())
            .append(" - ")
            .append(ChmCommons.getLanguage(getLang_id()))
            .append(sep);
    sb.append("system_uuid:=")
            .append(Arrays.toString(getSystem_uuid()))
            .append(sep);
    sb.append("unknown_0044:=")
            .append(Arrays.toString(getUnknown_0044()))
            .append(" ]");
    return sb.toString();
}
@Test public void testToString() { assertTrue(chmItspHeader.toString().contains(TestParameters.VP_ISTP_SIGNATURE)); }
@Override public void export(RegisterTypeEnum registerType) { if (this.exported) { return; } if (getScopeModel().isLifeCycleManagedExternally()) { // prepare model for reference getScopeModel().getDeployer().prepare(); } else { // ensure start module, compatible with old api usage getScopeModel().getDeployer().start(); } synchronized (this) { if (this.exported) { return; } if (!this.isRefreshed()) { this.refresh(); } if (this.shouldExport()) { this.init(); if (shouldDelay()) { // should register if delay export doDelayExport(); } else if (Integer.valueOf(-1).equals(getDelay()) && Boolean.parseBoolean(ConfigurationUtils.getProperty( getScopeModel(), CommonConstants.DUBBO_MANUAL_REGISTER_KEY, "false"))) { // should not register by default doExport(RegisterTypeEnum.MANUAL_REGISTER); } else { doExport(registerType); } } } }
@Test void testProxy() throws Exception { service2.export(); assertThat(service2.getExportedUrls(), hasSize(1)); assertEquals(2, TestProxyFactory.count); // local injvm and registry protocol, so expected is 2 TestProxyFactory.count = 0; }
@Deprecated public void disableGenericTypes() { serializerConfig.setGenericTypes(false); }
@Test void testDisableGenericTypes() { SerializerConfigImpl conf = new SerializerConfigImpl(); TypeInformation<Object> typeInfo = new GenericTypeInfo<>(Object.class); // by default, generic types are supported TypeSerializer<Object> serializer = typeInfo.createSerializer(conf); assertThat(serializer).isInstanceOf(KryoSerializer.class); // expect an exception when generic types are disabled conf.setGenericTypes(false); assertThatThrownBy( () -> typeInfo.createSerializer(conf), "should have failed with an exception") .isInstanceOf(UnsupportedOperationException.class); }
public String locType() { String l = get(LATITUDE, null); String x = get(GRID_X, null); String def = l != null ? LOC_TYPE_GEO : (x != null ? LOC_TYPE_GRID : LOC_TYPE_NONE); return get(LOC_TYPE, def); }
@Test public void geoLocationType() { cfg.locType(GEO); print(cfg); assertEquals("not geo", GEO, cfg.locType()); }
/**
 * Builds the Rest.li response data for a CREATE result: sets the Location and id headers when
 * an id is present, validates the status and (for create-and-get) the entity, applies field
 * projection, and wraps everything in a {@link CreateResponseEnvelope}.
 *
 * @throws RestLiServiceException if the resource returned a null status, or a null entity for
 *         a create-and-get request (both developer errors)
 */
@Override
public RestLiResponseData<CreateResponseEnvelope> buildRestLiResponseData(Request request,
    RoutingResult routingResult, Object result, Map<String, String> headers, List<HttpCookie> cookies)
{
  CreateResponse createResponse = (CreateResponse) result;
  boolean isGetAfterCreate = createResponse instanceof CreateKVResponse;

  if (createResponse.hasError())
  {
    RestLiServiceException exception = createResponse.getError();
    return new RestLiResponseDataImpl<>(new CreateResponseEnvelope(exception, isGetAfterCreate), headers, cookies);
  }

  Object id = null;
  if (createResponse.hasId())
  {
    id = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(createResponse.getId(), routingResult);
    final ProtocolVersion protocolVersion = routingResult.getContext().getRestliProtocolVersion();
    String stringKey = URIParamUtils.encodeKeyForUri(id, UriComponent.Type.PATH_SEGMENT, protocolVersion);
    UriBuilder uribuilder = UriBuilder.fromUri(request.getURI());
    uribuilder.path(stringKey);
    uribuilder.replaceQuery(null);
    if (routingResult.getContext().hasParameter(RestConstants.ALT_KEY_PARAM))
    {
      // add altkey param to location URI
      uribuilder.queryParam(RestConstants.ALT_KEY_PARAM, routingResult.getContext().getParameter(RestConstants.ALT_KEY_PARAM));
    }
    headers.put(RestConstants.HEADER_LOCATION, uribuilder.build((Object) null).toString());
    headers.put(HeaderUtil.getIdHeaderName(protocolVersion), URIParamUtils.encodeKeyForHeader(id, protocolVersion));
  }

  // Verify that a null status was not passed into the CreateResponse. If so, this is a developer error.
  if (createResponse.getStatus() == null)
  {
    throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
        "Unexpected null encountered. HttpStatus is null inside of a CreateResponse from the resource method: "
            + routingResult.getResourceMethod());
  }

  final ResourceContext resourceContext = routingResult.getContext();
  RecordTemplate idResponse;
  if (createResponse instanceof CreateKVResponse && resourceContext.isReturnEntityRequested())
  {
    RecordTemplate entity = ((CreateKVResponse<?, ?>) createResponse).getEntity();
    // Verify that a null entity was not passed into the CreateKVResponse. If so, this is a developer error.
    if (entity == null)
    {
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Unexpected null encountered. Entity is null inside of a CreateKVResponse when the entity should be returned. In resource method: "
              + routingResult.getResourceMethod());
    }
    DataMap entityData = entity.data();

    // Apply field projection to the returned entity, timing the operation.
    TimingContextUtil.beginTiming(resourceContext.getRawRequestContext(),
        FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key());
    final DataMap data = RestUtils.projectFields(entityData, resourceContext);
    TimingContextUtil.endTiming(resourceContext.getRawRequestContext(),
        FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key());

    idResponse = new AnyRecord(data);
    // Ideally, we should set an IdEntityResponse to the envelope. But we are keeping AnyRecord
    // to make sure the runtime object is backwards compatible.
    // idResponse = new IdEntityResponse<>(id, new AnyRecord(data));
  }
  else //Instance of idResponse
  {
    idResponse = new IdResponse<>(id);
  }

  return new RestLiResponseDataImpl<>(new CreateResponseEnvelope(createResponse.getStatus(), idResponse, isGetAfterCreate), headers, cookies);
}
@Test public void testBuilderException() throws URISyntaxException { CompoundKey compoundKey = new CompoundKey().append("a", "a").append("b", 1); CreateResponse createResponse = new CreateResponse(compoundKey, null); RestRequest restRequest = new RestRequestBuilder(new URI("/foo")).build(); ProtocolVersion protocolVersion = AllProtocolVersions.RESTLI_PROTOCOL_1_0_0.getProtocolVersion(); Map<String, String> headers = ResponseBuilderUtil.getHeaders(); headers.put(RestConstants.HEADER_RESTLI_PROTOCOL_VERSION, protocolVersion.toString()); ResourceMethodDescriptor mockDescriptor = getMockResourceMethodDescriptor(null); ServerResourceContext mockContext = getMockResourceContext(protocolVersion, null); RoutingResult routingResult = new RoutingResult(mockContext, mockDescriptor); CreateResponseBuilder createResponseBuilder = new CreateResponseBuilder(); try { createResponseBuilder.buildRestLiResponseData(restRequest, routingResult, createResponse, headers, Collections.emptyList()); Assert.fail("buildRestLiResponseData should have thrown an exception because the status is null!"); } catch (RestLiServiceException e) { Assert.assertTrue(e.getMessage().contains("Unexpected null encountered. HttpStatus is null inside of a CreateResponse from the resource method: ")); } }
// Triggers a manual Sentinel failover of the given master: Sentinel promotes one of
// the master's replicas without asking the other Sentinels for agreement.
@Override
public void failover(NamedNode master) {
    connection.sync(RedisCommands.SENTINEL_FAILOVER, master.getName());
}
// Integration test: requests a Sentinel failover and verifies that, after waiting for
// the election to complete, a different node (different port) is reported as master.
@Test
public void testFailover() throws InterruptedException {
    Collection<RedisServer> masters = connection.masters();
    connection.failover(masters.iterator().next());
    // Give Sentinel time to finish the failover; 10s is an empirical upper bound.
    Thread.sleep(10000);
    RedisServer newMaster = connection.masters().iterator().next();
    assertThat(masters.iterator().next().getPort()).isNotEqualTo(newMaster.getPort());
}
// Deep-copies a GenericJson object by round-tripping it through its JSON factory
// (serialize to string, then parse back into a fresh instance of the same type).
@VisibleForTesting
static <T extends GenericJson> T clone(T cloudObject, Class<T> type) throws IOException {
    return cloudObject.getFactory().fromString(cloudObject.toString(), type);
}
// Verifies clone() produces a distinct instance (not the same reference) that is
// value-equal to the original.
@Test
public void testClone() throws Exception {
    ParallelInstruction copy = LengthPrefixUnknownCoders.clone(instruction, ParallelInstruction.class);
    assertNotSame(instruction, copy);
    assertEquals(instruction, copy);
}
// Collects the split segments into a list, optionally trimming whitespace from each
// segment (delegates to the Function-accepting overload).
public List<String> toList(boolean trim) {
    return toList((str) -> trim ? StrUtil.trim(str) : str);
}
// Splitting an empty string should yield a single (empty) element rather than an
// empty list.
@Test
public void splitToSingleTest(){
    String text = "";
    SplitIter splitIter = new SplitIter(text, new CharFinder(':'), 3, false );
    final List<String> strings = splitIter.toList(false);
    assertEquals(1, strings.size());
}
// Sets the field's value after validation. Fields with export values (an /Opt entry)
// must be updated via the option list; otherwise the raw value is applied directly.
// applyChange() propagates the update to the document (e.g. regenerating appearances).
@Override
public void setValue(String value) throws IOException {
    checkValue(value);
    // if there are export values/an Opt entry there is a different
    // approach to setting the value
    if (!getExportValues().isEmpty()) {
        updateByOption(value);
    } else {
        updateByValue(value);
    }
    applyChange();
}
// A value that is not among the checkbox's allowed values must be rejected with an
// IllegalArgumentException (thrown by the checkValue precondition).
@Test
void setCheckboxInvalidValue() throws IOException {
    PDCheckBox checkbox = (PDCheckBox) acrobatAcroForm.getField("Checkbox");
    // Set a value which doesn't match the radio button list
    assertThrows(IllegalArgumentException.class, () -> checkbox.setValue("InvalidValue"));
}
// Handles the "dir" command from the Python side: depending on the sub-command it
// lists public fields, public methods, public static members, or JVM-view names.
// The names are joined with '\n' and returned as a single string ReturnObject; any
// failure is converted to a protocol-level error response rather than propagated.
@Override
public void execute(String commandName, BufferedReader reader, BufferedWriter writer) throws Py4JException, IOException {
    String subCommand = safeReadLine(reader);
    boolean unknownSubCommand = false;
    String param = reader.readLine();
    String returnCommand = null;
    try {
        final String[] names;
        if (subCommand.equals(DIR_FIELDS_SUBCOMMAND_NAME)) {
            // Fields of a bound target object.
            Object targetObject = gateway.getObject(param);
            names = reflectionEngine.getPublicFieldNames(targetObject);
        } else if (subCommand.equals(DIR_METHODS_SUBCOMMAND_NAME)) {
            // Methods of a bound target object.
            Object targetObject = gateway.getObject(param);
            names = reflectionEngine.getPublicMethodNames(targetObject);
        } else if (subCommand.equals(DIR_STATIC_SUBCOMMAND_NAME)) {
            // Static members of a class named by 'param'.
            Class<?> clazz = TypeUtil.forName(param);
            names = reflectionEngine.getPublicStaticNames(clazz);
        } else if (subCommand.equals(DIR_JVMVIEW_SUBCOMMAND_NAME)) {
            names = getJvmViewNames(param, reader);
        } else {
            names = null;
            unknownSubCommand = true;
        }
        // Read and discard end of command
        reader.readLine();
        if (unknownSubCommand) {
            returnCommand = Protocol.getOutputErrorCommand("Unknown Array SubCommand Name: " + subCommand);
        } else if (names == null) {
            // JVM-view sub-command may legitimately return null names.
            ReturnObject returnObject = gateway.getReturnObject(null);
            returnCommand = Protocol.getOutputCommand(returnObject);
        } else {
            // Join names with '\n'; trailing separator is stripped below.
            StringBuilder namesJoinedBuilder = new StringBuilder();
            for (String name : names) {
                namesJoinedBuilder.append(name);
                namesJoinedBuilder.append("\n");
            }
            final String namesJoined;
            if (namesJoinedBuilder.length() > 0) {
                namesJoined = namesJoinedBuilder.substring(0, namesJoinedBuilder.length() - 1);
            } else {
                namesJoined = "";
            }
            ReturnObject returnObject = gateway.getReturnObject(namesJoined);
            returnCommand = Protocol.getOutputCommand(returnObject);
        }
    } catch (Exception e) {
        // Errors are reported back over the protocol, never thrown to the caller.
        logger.log(Level.FINEST, "Error in a dir subcommand", e);
        returnCommand = Protocol.getOutputErrorCommand();
    }
    logger.finest("Returning command: " + returnCommand);
    writer.write(returnCommand);
    writer.flush();
}
// Exercises the "f" (fields) sub-command of the dir command and checks the returned
// field names match the expected set for the example class.
@Test
public void testDirFields() throws Exception {
    // Protocol layout: sub-command, target id, end-of-command marker.
    String inputCommand = "f\n" + target + "\ne\n";
    assertTrue(gateway.getBindings().containsKey(target));
    command.execute("d", new BufferedReader(new StringReader(inputCommand)), writer);
    Set<String> fields = convertResponse(sWriter.toString());
    assertEquals(ExampleClassFields, fields);
}
// Converts a SQL expression AST into its Java source-code representation.
public String process(final Expression expression) {
    return formatExpression(expression);
}
// A simple binary addition should be rendered as a parenthesized Java expression
// with each column reference cast to its Java type.
@Test
public void shouldProcessBasicJavaMath() {
    // Given:
    final Expression expression = new ArithmeticBinaryExpression(Operator.ADD, COL0, COL3);
    // When:
    final String javaExpression = sqlToJavaVisitor.process(expression);
    // Then:
    assertThat(javaExpression, equalTo("(((java.lang.Long) arguments.get(\"COL0\")) + ((java.lang.Double) arguments.get(\"COL3\")))"));
}
// Looks up the SMS template by code (from cache, for efficiency) and fails with
// SMS_SEND_TEMPLATE_NOT_EXISTS if no such template is configured.
@VisibleForTesting
SmsTemplateDO validateSmsTemplate(String templateCode) {
    // Fetch from cache rather than hitting the database on every send.
    SmsTemplateDO template = smsTemplateService.getSmsTemplateByCodeFromCache(templateCode);
    // Template does not exist.
    if (template == null) {
        throw exception(SMS_SEND_TEMPLATE_NOT_EXISTS);
    }
    return template;
}
// With no template mocked into the cache, validateSmsTemplate must raise the
// SMS_SEND_TEMPLATE_NOT_EXISTS service exception.
@Test
public void testCheckSmsTemplateValid_notExists() {
    // Prepare arguments.
    String templateCode = randomString();
    // No mocking: cache lookup will return null.
    // Invoke and assert the expected service exception.
    assertServiceException(() -> smsSendService.validateSmsTemplate(templateCode), SMS_SEND_TEMPLATE_NOT_EXISTS);
}
// Returns all target indices that appear in more than one source mapping.
// A BitSet tracks first occurrences; any repeat is collected as ambiguous.
public Set<Integer> getAmbiguousTargets() {
    final Set<Integer> ambiguousTargets = CollectionUtil.newHashSetWithExpectedSize(numberOfTargets);
    final BitSet usedTargets = new BitSet(numberOfTargets);
    for (int[] targets : mappings) {
        for (int target : targets) {
            if (usedTargets.get(target)) {
                // Seen before under a different (or the same) source: ambiguous.
                ambiguousTargets.add(target);
            } else {
                usedTargets.set(target);
            }
        }
    }
    return ambiguousTargets;
}
// Targets 2 and 4 each appear in two different mappings, so they are the only
// ambiguous targets.
@Test
void testAmbiguousTargets() {
    RescaleMappings mapping = mappings(to(0), to(1, 2), to(), to(2, 3, 4), to(4, 5), to());
    assertThat(mapping.getAmbiguousTargets()).containsExactly(2, 4);
}
// Token-id embedding is intentionally unsupported; callers must use the
// tensor-producing embed overload instead.
@Override
public List<Integer> embed(String text, Context context) {
    throw new UnsupportedOperationException("This embedder only supports embed with tensor type");
}
// Verifies that embedding results are cached per input string: a second embed of the
// same input with a smaller dimension reuses the cached computation (no new model
// invocation), the overlapping dimensions agree, and out-of-range cells read as 0.
@Test
public void testCachingFloat() {
    int initialEmbeddingsDone = runtime.embeddingsDone;
    var context = new Embedder.Context("schema.indexing");
    var input = "This is a test string to embed";
    var t1 = (MixedTensor) embedder.embed(input, context,TensorType.fromSpec("tensor<float>(dt{},x[8])"));
    assertEquals(initialEmbeddingsDone + 1, runtime.embeddingsDone);
    // Same input, narrower tensor type: must be served from the cache.
    var t2 = (MixedTensor)embedder.embed(input, context,TensorType.fromSpec("tensor<float>(dt{},x[4])"));
    assertEquals("Cached value was used", initialEmbeddingsDone + 1, runtime.embeddingsDone);
    assertNotEquals(t1,t2);
    for(int token = 0; token < 7; token ++) {
        for(int dim = 0; dim < 4; dim++) { // the four first should be equal
            assertEquals(t1.get(TensorAddress.of(token,dim)),t2.get(TensorAddress.of(token,dim)), 1e-6);
        }
    }
    // t2 only has 4 dimensions so this should be out of bounds which returns 0
    assertEquals(0, t2.get(TensorAddress.of(1,4)), 1e-6);
    // A different input must trigger a fresh embedding computation.
    input = "This is a different test string to embed";
    embedder.embed(input, context,TensorType.fromSpec("tensor<float>(dt{},x[8])"));
    assertEquals(initialEmbeddingsDone + 2, runtime.embeddingsDone);
}
// Notifies every registered auth-data subscriber that each AppAuthData in the batch
// has been removed (unSubscribe), fanning out per-datum to all subscribers.
@Override
protected void doDelete(final List<AppAuthData> dataList) {
    dataList.forEach(appAuthData -> authDataSubscribers.forEach(authDataSubscriber -> authDataSubscriber.unSubscribe(appAuthData)));
}
// Every subscriber must receive exactly one unSubscribe call per deleted datum.
@Test
public void testDoDelete() {
    List<AppAuthData> appAuthDataList = createFakerAppAuthDataObjects(3);
    authDataHandler.doDelete(appAuthDataList);
    appAuthDataList.forEach(appAuthData -> authDataSubscribers.forEach(authDataSubscriber -> verify(authDataSubscriber).unSubscribe(appAuthData)));
}
// Parses an unbound partition spec from JSON, then binds it to the given schema
// (resolving source column ids and assigning any missing field ids).
public static PartitionSpec fromJson(Schema schema, JsonNode json) {
    return fromJson(json).bind(schema);
}
// A spec JSON without explicit field-id entries must receive the default
// sequential assignment starting at 1000.
@TestTemplate
public void testFromJsonWithoutFieldId() {
    String specString = "{\n" + "  \"spec-id\" : 1,\n" + "  \"fields\" : [ {\n" + "    \"name\" : \"id_bucket\",\n" + "    \"transform\" : \"bucket[8]\",\n" + "    \"source-id\" : 1\n" + "  }, {\n" + "    \"name\" : \"data_bucket\",\n" + "    \"transform\" : \"bucket[16]\",\n" + "    \"source-id\" : 2\n" + "  } ]\n" + "}";
    PartitionSpec spec = PartitionSpecParser.fromJson(table.schema(), specString);
    assertThat(spec.fields()).hasSize(2);
    // should be the default assignment
    assertThat(spec.fields().get(0).fieldId()).isEqualTo(1000);
    assertThat(spec.fields().get(1).fieldId()).isEqualTo(1001);
}
// Builds the vertex parallelism store for a job execution. In REACTIVE mode the
// reactive-specific store (parallelism derived from available resources) is used;
// otherwise the standard SchedulerBase computation applies.
@VisibleForTesting
static VertexParallelismStore computeVertexParallelismStoreForExecution(
        JobGraph jobGraph,
        SchedulerExecutionMode executionMode,
        Function<JobVertex, Integer> defaultMaxParallelismFunc) {
    if (executionMode == SchedulerExecutionMode.REACTIVE) {
        return computeReactiveModeVertexParallelismStore(
                jobGraph.getVertices(), defaultMaxParallelismFunc, false);
    }
    return SchedulerBase.computeVertexParallelismStore(
            jobGraph.getVertices(), defaultMaxParallelismFunc);
}
// In reactive mode each vertex keeps its configured parallelism and max parallelism
// in the resulting store.
@Test
void testComputeVertexParallelismStoreForExecutionInReactiveMode() {
    JobVertex v1 = createNoOpVertex("v1", 1, 50);
    JobVertex v2 = createNoOpVertex("v2", 50, 50);
    JobGraph graph = streamingJobGraph(v1, v2);
    VertexParallelismStore parallelismStore = AdaptiveScheduler.computeVertexParallelismStoreForExecution(graph, SchedulerExecutionMode.REACTIVE, SchedulerBase::getDefaultMaxParallelism);
    for (JobVertex vertex : graph.getVertices()) {
        VertexParallelismInformation info = parallelismStore.getParallelismInfo(vertex.getID());
        assertThat(info.getParallelism()).isEqualTo(vertex.getParallelism());
        assertThat(info.getMaxParallelism()).isEqualTo(vertex.getMaxParallelism());
    }
}
// Returns the configured output file path (may be null if not set).
public String getOutFile() {
    return outFile;
}
// The outFile URI option must be propagated to the created exec endpoint.
@Test
@DirtiesContext
public void testCreateEndpointWithOutFile() throws Exception {
    String outFile = "output.txt";
    ExecEndpoint e = createExecEndpoint("exec:test?outFile=" + outFile);
    assertEquals(outFile, e.getOutFile());
}
// Updates a GoView project: first verifies the target id exists (throws otherwise),
// then converts the request VO to a DO and persists the changes.
@Override
public void updateProject(GoViewProjectUpdateReqVO updateReqVO) {
    // Validate existence.
    validateProjectExists(updateReqVO.getId());
    // Perform the update.
    GoViewProjectDO updateObj = GoViewProjectConvert.INSTANCE.convert(updateReqVO);
    goViewProjectMapper.updateById(updateObj);
}
// Updating a project whose id does not exist must fail with GO_VIEW_PROJECT_NOT_EXISTS.
@Test
public void testUpdateProject_notExists() {
    // Prepare arguments.
    GoViewProjectUpdateReqVO reqVO = randomPojo(GoViewProjectUpdateReqVO.class);
    // Invoke and assert the expected service exception.
    assertServiceException(() -> goViewProjectService.updateProject(reqVO), GO_VIEW_PROJECT_NOT_EXISTS);
}
// Returns the precomputed entry-set view. The assert documents the invariant that
// the backing base directory must be initialized before any entry access.
@Override
public Set<Entry<Integer, R>> entrySet() {
    assert baseDirInitialized();
    return entrySet;
}
// Materializing the whole entrySet at once must load each build record eagerly and
// in order (values 5, 3, 1 correspond to the stored builds).
@Test
public void eagerLoading() {
    Map.Entry[] b = a.entrySet().toArray(new Map.Entry[3]);
    ((Build) b[0].getValue()).asserts(5);
    ((Build) b[1].getValue()).asserts(3);
    ((Build) b[2].getValue()).asserts(1);
}
// Masks every known credential-bearing property (in place) across all supported
// cloud providers and catalogs so that secrets are not exposed in logs or UIs.
public static void maskCredential(Map<String, String> properties) {
    // Mask for aws's credential
    doMask(properties, CloudConfigurationConstants.AWS_S3_ACCESS_KEY);
    doMask(properties, CloudConfigurationConstants.AWS_S3_SECRET_KEY);
    doMask(properties, CloudConfigurationConstants.AWS_GLUE_ACCESS_KEY);
    doMask(properties, CloudConfigurationConstants.AWS_GLUE_SECRET_KEY);
    // Mask for azure's credential
    doMask(properties, CloudConfigurationConstants.AZURE_BLOB_SHARED_KEY);
    doMask(properties, CloudConfigurationConstants.AZURE_BLOB_SAS_TOKEN);
    doMask(properties, CloudConfigurationConstants.AZURE_ADLS1_OAUTH2_CREDENTIAL);
    doMask(properties, CloudConfigurationConstants.AZURE_ADLS2_SHARED_KEY);
    doMask(properties, CloudConfigurationConstants.AZURE_ADLS2_OAUTH2_CLIENT_SECRET);
    // Mask for gcs's credential
    doMask(properties, CloudConfigurationConstants.GCP_GCS_SERVICE_ACCOUNT_PRIVATE_KEY);
    // Mask for aliyun's credential
    doMask(properties, CloudConfigurationConstants.ALIYUN_OSS_ACCESS_KEY);
    doMask(properties, CloudConfigurationConstants.ALIYUN_OSS_SECRET_KEY);
    // Mask for iceberg rest catalog credential
    doMask(properties, IcebergRESTCatalog.KEY_CREDENTIAL_WITH_PREFIX);
    // Mask for odps catalog credential
    doMask(properties, OdpsProperties.ACCESS_ID);
    doMask(properties, OdpsProperties.ACCESS_KEY);
}
// The Iceberg REST catalog credential key must be masked, keeping only the first and
// last two characters visible.
@Test
public void testMaskIcebergRestCatalogCredential() {
    Map<String, String> properties = new HashMap<>();
    properties.put(IcebergRESTCatalog.KEY_CREDENTIAL_WITH_PREFIX, "7758258");
    CredentialUtil.maskCredential(properties);
    Assert.assertEquals("77******58", properties.get(IcebergRESTCatalog.KEY_CREDENTIAL_WITH_PREFIX));
}
// Factory mapping an AnnotationStyle to the concrete Annotator implementation.
// JACKSON and JACKSON2 intentionally share the Jackson2 annotator; NONE yields a
// no-op annotator; unknown styles are a caller error.
public Annotator getAnnotator(AnnotationStyle style) {
    switch (style) {
        case JACKSON:
        case JACKSON2:
            return new Jackson2Annotator(generationConfig);
        case JSONB1:
            return new Jsonb1Annotator(generationConfig);
        case JSONB2:
            return new Jsonb2Annotator(generationConfig);
        case GSON:
            return new GsonAnnotator(generationConfig);
        case MOSHI1:
            return new Moshi1Annotator(generationConfig);
        case NONE:
            return new NoopAnnotator();
        default:
            throw new IllegalArgumentException("Unrecognised annotation style: " + style);
    }
}
// The varargs overload must wrap the supplied annotators in a CompositeAnnotator,
// preserving order.
@Test
public void canCreateCompositeAnnotator() {
    Annotator annotator1 = mock(Annotator.class);
    Annotator annotator2 = mock(Annotator.class);
    CompositeAnnotator composite = factory.getAnnotator(annotator1, annotator2);
    assertThat(composite.annotators.length, equalTo(2));
    assertThat(composite.annotators[0], is(equalTo(annotator1)));
    assertThat(composite.annotators[1], is(equalTo(annotator2)));
}
// Parses a dynamic voter description of the form "id@host:port:directoryId", where
// host may be a bracketed IPv6 literal ("[...]"). Each malformed component yields an
// IllegalArgumentException with a message identifying the failing part.
public static DynamicVoter parse(String input) {
    input = input.trim();
    // --- node id: everything before the '@' ---
    int atIndex = input.indexOf("@");
    if (atIndex < 0) {
        throw new IllegalArgumentException("No @ found in dynamic voter string.");
    }
    if (atIndex == 0) {
        throw new IllegalArgumentException("Invalid @ at beginning of dynamic voter string.");
    }
    String idString = input.substring(0, atIndex);
    int nodeId;
    try {
        nodeId = Integer.parseInt(idString);
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Failed to parse node id in dynamic voter string.", e);
    }
    if (nodeId < 0) {
        throw new IllegalArgumentException("Invalid negative node id " + nodeId + " in dynamic voter string.");
    }
    input = input.substring(atIndex + 1);
    if (input.isEmpty()) {
        throw new IllegalArgumentException("No hostname found after node id.");
    }
    // --- host: either a bracketed IPv6 literal or everything up to the next ':' ---
    String host;
    if (input.startsWith("[")) {
        int endBracketIndex = input.indexOf("]");
        if (endBracketIndex < 0) {
            throw new IllegalArgumentException("Hostname began with left bracket, but no right " + "bracket was found.");
        }
        host = input.substring(1, endBracketIndex);
        input = input.substring(endBracketIndex + 1);
    } else {
        int endColonIndex = input.indexOf(":");
        if (endColonIndex < 0) {
            throw new IllegalArgumentException("No colon following hostname could be found.");
        }
        host = input.substring(0, endColonIndex);
        input = input.substring(endColonIndex);
    }
    // --- port: between the colon after the host and the next colon ---
    if (!input.startsWith(":")) {
        throw new IllegalArgumentException("Port section must start with a colon.");
    }
    input = input.substring(1);
    int endColonIndex = input.indexOf(":");
    if (endColonIndex < 0) {
        throw new IllegalArgumentException("No colon following port could be found.");
    }
    String portString = input.substring(0, endColonIndex);
    int port;
    try {
        port = Integer.parseInt(portString);
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Failed to parse port in dynamic voter string.", e);
    }
    if (port < 0 || port > 65535) {
        throw new IllegalArgumentException("Invalid port " + port + " in dynamic voter string.");
    }
    // --- directory id: the remainder after the port, parsed as a Uuid ---
    String directoryIdString = input.substring(endColonIndex + 1);
    Uuid directoryId;
    try {
        directoryId = Uuid.fromString(directoryIdString);
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Failed to parse directory ID in dynamic voter string.", e);
    }
    return new DynamicVoter(directoryId, nodeId, host, port);
}
// A malformed trailing directory id (invalid Uuid) must produce the dedicated
// "Failed to parse directory ID" error message.
@Test
public void testFailedToParseDirectoryId() {
    assertEquals("Failed to parse directory ID in dynamic voter string.",
        assertThrows(IllegalArgumentException.class,
            () -> DynamicVoter.parse("5@[2001:4860:4860::8888]:8020:%_0IZ-0DRNazJ49kCZ1EMQ")).
                getMessage());
}
// Skips up to n bytes on the underlying input and returns the number actually
// skipped. RandomAccessFile only supports int-sized skips, so large requests are
// looped in Integer.MAX_VALUE-sized chunks; DataInputStream delegates to skip(long).
public long skipBytes(long n) throws IOException {
    if (input instanceof RandomAccessFile) {
        long total = 0;
        int expected = 0;
        int actual = 0;
        do {
            // Clamp the remaining request to an int chunk.
            expected = (n-total) > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) (n-total);
            actual = ((RandomAccessFile) input).skipBytes(expected);  // RandomAccessFile::skipBytes supports int
            total = total + actual;
        } while (total < n && actual > 0);  // stop at EOF (actual == 0)
        return total;
    } else if (input instanceof DataInputStream) {
        return ((DataInputStream) input).skip(n);  // InputStream::skip supports long
    } else {
        throw new UnsupportedOperationException("Unknown Hollow Blob Input type");
    }
}
// Documents the asymmetry between the two input modes: the stream-backed input
// reports the full requested skip even past EOF, while the RandomAccessFile-backed
// input stops at the end of the file.
@Test
public void testSkipBytes() throws IOException {
    HollowBlobInput inStream = HollowBlobInput.modeBasedSelector(MemoryMode.ON_HEAP, mockBlob);
    assertEquals(1l, inStream.skipBytes(1));
    assertEquals(1, inStream.read()); // next byte read is 1
    assertEquals(2000, inStream.skipBytes(2000)); // successfully skips past end of file for FileInputStream
    HollowBlobInput inBuffer = HollowBlobInput.modeBasedSelector(MemoryMode.SHARED_MEMORY_LAZY, mockBlob);
    assertEquals(1l, inBuffer.skipBytes(1));
    assertEquals(1, inBuffer.read()); // next byte read is 1
    assertEquals(12, inBuffer.skipBytes(2000)); // stops at end of file for RandomAccessFile
}
// Fully validates a GitLab ALM setting: decrypts the stored personal access token and
// runs the COMPLETE validation (URL + token + permissions) against the GitLab API.
public void validate(AlmSettingDto almSettingDto) {
    String gitlabUrl = almSettingDto.getUrl();
    String accessToken = almSettingDto.getDecryptedPersonalAccessToken(encryption);
    validate(ValidationMode.COMPLETE, gitlabUrl, accessToken);
}
// COMPLETE mode must check the URL plus token validity and both read and write
// permissions against the GitLab API.
@Test
public void validate_whenCompleteMode_validatesToken() {
    underTest.validate(COMPLETE, GITLAB_API_URL, ACCESS_TOKEN);
    verify(gitlabHttpClient).checkUrl(GITLAB_API_URL);
    verify(gitlabHttpClient).checkToken(GITLAB_API_URL, ACCESS_TOKEN);
    verify(gitlabHttpClient).checkReadPermission(GITLAB_API_URL, ACCESS_TOKEN);
    verify(gitlabHttpClient).checkWritePermission(GITLAB_API_URL, ACCESS_TOKEN);
}
// Validates that a property value is non-null and equals the expected value.
// Throws NullPointerException for null, IllegalArgumentException for a mismatch;
// both messages name the offending property.
public static void checkEquals(
    @Nullable String value, String propertyName, String expectedValue) {
    Preconditions.checkNotNull(value, "Property '" + propertyName + "' cannot be null");
    Preconditions.checkArgument(
        value.equals(expectedValue),
        "Property '" + propertyName + "' must be '" + expectedValue + "' but is '" + value + "'");
}
// A null value must raise NullPointerException with the property-naming message.
@Test
public void testCheckEquals_failsNull() {
    try {
        Validator.checkEquals(null, "test", "something");
        Assert.fail();
    } catch (NullPointerException npe) {
        Assert.assertEquals("Property 'test' cannot be null", npe.getMessage());
    }
}
// Builds a fully-configured ANTLR parser for a FEEL 1.1 expression: lexes the source,
// registers additional functions in the built-in scope, installs error handling that
// reports through the events manager instead of the console, pre-defines the input
// variables/types as symbols, and optionally wires in a custom type registry.
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) {
    CharStream input = CharStreams.fromString(source);
    FEEL_1_1Lexer lexer = new FEEL_1_1Lexer( input );
    CommonTokenStream tokens = new CommonTokenStream( lexer );
    FEEL_1_1Parser parser = new FEEL_1_1Parser( tokens );
    ParserHelper parserHelper = new ParserHelper(eventsManager);
    additionalFunctions.forEach(f -> parserHelper.getSymbolTable().getBuiltInScope().define(f.getSymbol()));
    parser.setHelper(parserHelper);
    parser.setErrorHandler( new FEELErrorHandler() );
    parser.removeErrorListeners(); // removes the error listener that prints to the console
    parser.addErrorListener( new FEELParserErrorListener( eventsManager ) );
    // pre-loads the parser with symbols
    defineVariables( inputVariableTypes, inputVariables, parser );
    if (typeRegistry != null) {
        parserHelper.setTypeRegistry(typeRegistry);
    }
    return parser;
}
// Parses a literal list expression and checks node type, result type, source text,
// and the AST node kind of each of the three elements.
@Test
void expressionList() {
    // TODO review this test is potentially wrong as the list is not homogeneous
    String inputExpression = "[ 10, foo * bar, true ]";
    BaseNode list = parse( inputExpression );
    assertThat( list).isInstanceOf(ListNode.class);
    assertThat( list.getResultType()).isEqualTo(BuiltInType.LIST);
    assertThat( list.getText()).isEqualTo( "10, foo * bar, true");
    ListNode ln = (ListNode) list;
    assertThat( ln.getElements()).hasSize(3);
    assertThat( ln.getElements().get( 0 )).isInstanceOf(NumberNode.class);
    assertThat( ln.getElements().get( 1 )).isInstanceOf(InfixOpNode.class);
    assertThat( ln.getElements().get( 2 )).isInstanceOf(BooleanNode.class);
}
/**
 * Reads the next eight bytes from {@code buf} as a little-endian signed 64-bit
 * integer, advancing the buffer position by eight.
 *
 * <p>Note: as in the original, this also switches the buffer's byte order to
 * little-endian as a side effect.
 *
 * @param buf buffer to read from
 * @return the decoded long value
 * @throws BufferUnderflowException if fewer than eight bytes remain
 */
public static long readInt64(ByteBuffer buf) throws BufferUnderflowException {
    final ByteBuffer littleEndian = buf.order(ByteOrder.LITTLE_ENDIAN);
    return littleEndian.getLong();
}
// A negative offset into the byte-array overload must raise
// ArrayIndexOutOfBoundsException.
@Test(expected = ArrayIndexOutOfBoundsException.class)
public void testReadInt64ThrowsException3() {
    ByteUtils.readInt64(new byte[]{1, 2, 3, 4, 5, 6, 7, 8, 9}, -1);
}
// Creates a reader for this source. In FILEPATTERN mode the pattern is expanded to
// its matching files, one per-file reader is created over each file's full range,
// and (unless exactly one file matched) they are combined into a FilePatternReader.
// In single-file mode a reader over just this source's range is returned.
@Override
public final BoundedReader<T> createReader(PipelineOptions options) throws IOException {
    // Validate the current source prior to creating a reader for it.
    this.validate();
    String fileOrPattern = fileOrPatternSpec.get();
    if (mode == Mode.FILEPATTERN) {
        long startTime = System.currentTimeMillis();
        List<Metadata> fileMetadata = FileSystems.match(fileOrPattern, emptyMatchTreatment).metadata();
        LOG.info("Matched {} files for pattern {}", fileMetadata.size(), fileOrPattern);
        List<FileBasedReader<T>> fileReaders = new ArrayList<>();
        for (Metadata metadata : fileMetadata) {
            long endOffset = metadata.sizeBytes();
            // Each matched file is read in full: sub-range [0, size).
            fileReaders.add(createForSubrangeOfFile(metadata, 0, endOffset).createSingleFileReader(options));
        }
        LOG.debug("Creating a reader for file pattern {} took {} ms", fileOrPattern, System.currentTimeMillis() - startTime);
        if (fileReaders.size() == 1) {
            // Avoid the composite wrapper when only one file matched.
            return fileReaders.get(0);
        }
        return new FilePatternReader(this, fileReaders);
    } else {
        return createSingleFileReader(options);
    }
}
// Over a three-file pattern, getFractionConsumed must start at 0, increase strictly
// monotonically as records are read, stay below 1/3 while inside the first file, and
// reach 1.0 when all files are exhausted.
@Test
public void testFractionConsumedWhenReadingFilepattern() throws IOException {
    List<String> data1 = createStringDataset(3, 1000);
    File file1 = createFileWithData("file1", data1);
    List<String> data2 = createStringDataset(3, 1000);
    createFileWithData("file2", data2);
    List<String> data3 = createStringDataset(3, 1000);
    createFileWithData("file3", data3);
    TestFileBasedSource source = new TestFileBasedSource(file1.getParent() + "/" + "file*", 1024, null);
    try (BoundedSource.BoundedReader<String> reader = source.createReader(null)) {
        double lastFractionConsumed = 0.0;
        assertEquals(0.0, reader.getFractionConsumed(), 1e-6);
        assertTrue(reader.start());
        assertTrue(reader.advance());
        assertTrue(reader.advance());
        // We're inside the first file. Should be in [0, 1/3).
        assertTrue(reader.getFractionConsumed() > 0.0);
        assertTrue(reader.getFractionConsumed() < 1.0 / 3.0);
        while (reader.advance()) {
            double fractionConsumed = reader.getFractionConsumed();
            assertTrue(fractionConsumed > lastFractionConsumed);
            lastFractionConsumed = fractionConsumed;
        }
        assertEquals(1.0, reader.getFractionConsumed(), 1e-6);
    }
}
// Rewrites this step's file name for resource export: resolves the (possibly
// variable-containing) relative path to a FileObject and replaces fileName with the
// name chosen by the resource naming service. Returns null: no XML fragment is
// contributed to the export. Any failure is wrapped in a KettleException.
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
    try {
        // The object that we're modifying here is a copy of the original!
        // So let's change the filename from relative to absolute by grabbing the file object...
        //
        if ( !Utils.isEmpty( fileName ) ) {
            FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName ), space );
            fileName = resourceNamingInterface.nameResource( fileObject, space, true );
        }
        return null;
    } catch ( Exception e ) {
        throw new KettleException( e );
    }
}
// exportResources must replace the meta's file name with whatever the resource
// naming service returns.
@Test
public void testExportResources() throws Exception {
    XMLOutputMeta xmlOutputMeta = new XMLOutputMeta();
    xmlOutputMeta.setDefault();
    ResourceNamingInterface resourceNamingInterface = mock( ResourceNamingInterface.class );
    Variables space = new Variables();
    when( resourceNamingInterface.nameResource( any( FileObject.class ), eq( space ), eq( true ) ) ).thenReturn( "exportFile" );
    xmlOutputMeta.exportResources( space, null, resourceNamingInterface, null, null );
    assertEquals( "exportFile", xmlOutputMeta.getFileName() );
}
// Debug-logs the resumable upload ID for a GCS upload response. Skipped entirely
// unless debug logging is enabled, the upload header is present, and the URL marks
// the initial upload request (upload type set, no upload id yet) — which limits
// output to one log line per upload.
@Override
public void interceptResponse(HttpResponse response) throws IOException {
    if (!LOG.isDebugEnabled()) {
        return;
    }
    String uploadId = response.getHeaders().getFirstHeaderStringValue(UPLOAD_HEADER);
    if (uploadId == null) {
        return;
    }
    GenericUrl url = response.getRequest().getUrl();
    // The check for no upload id limits the output to one log line per upload.
    // The check for upload type makes sure this is an upload and not a read.
    if (url.get(UPLOAD_ID_PARAM) == null && url.get(UPLOAD_TYPE_PARAM) != null) {
        LOG.debug("Upload ID for url {} on worker {} is {}", url, System.getProperty("worker_id"), uploadId);
    }
}
// None of these header/parameter combinations should satisfy the logging conditions,
// so no log output may be produced.
@Test
public void testResponseNoLogging() throws IOException {
    new UploadIdResponseInterceptor().interceptResponse(buildHttpResponse(null, null, null));
    new UploadIdResponseInterceptor().interceptResponse(buildHttpResponse("hh", "a", null));
    new UploadIdResponseInterceptor().interceptResponse(buildHttpResponse(null, "h", null));
    new UploadIdResponseInterceptor().interceptResponse(buildHttpResponse("hh", null, null));
    new UploadIdResponseInterceptor().interceptResponse(buildHttpResponse(null, null, "type"));
    new UploadIdResponseInterceptor().interceptResponse(buildHttpResponse("hh", "a", "type"));
    new UploadIdResponseInterceptor().interceptResponse(buildHttpResponse(null, "h", "type"));
    expectedLogs.verifyNotLogged("");
}
// Lists the input files for the job: resolves the configured input paths, obtains
// delegation tokens for their filesystems, applies the hidden-file filter plus any
// user-supplied path filter, and enumerates (optionally recursively). Listing is
// single-threaded or parallel depending on LIST_STATUS_NUM_THREADS.
protected List<FileStatus> listStatus(JobContext job) throws IOException {
    Path[] dirs = getInputPaths(job);
    if (dirs.length == 0) {
        throw new IOException("No input paths specified in job");
    }
    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(job.getCredentials(), dirs, job.getConfiguration());
    // Whether we need to recursive look into the directory structure
    boolean recursive = getInputDirRecursive(job);
    // creates a MultiPathFilter with the hiddenFileFilter and the
    // user provided one (if any).
    List<PathFilter> filters = new ArrayList<PathFilter>();
    filters.add(hiddenFileFilter);
    PathFilter jobFilter = getInputPathFilter(job);
    if (jobFilter != null) {
        filters.add(jobFilter);
    }
    PathFilter inputFilter = new MultiPathFilter(filters);
    List<FileStatus> result = null;
    int numThreads = job.getConfiguration().getInt(LIST_STATUS_NUM_THREADS, DEFAULT_LIST_STATUS_NUM_THREADS);
    StopWatch sw = new StopWatch().start();
    if (numThreads == 1) {
        result = singleThreadedListStatus(job, dirs, inputFilter, recursive);
    } else {
        // Parallel listing via LocatedFileStatusFetcher.
        Iterable<FileStatus> locatedFiles = null;
        try {
            LocatedFileStatusFetcher locatedFileStatusFetcher = new LocatedFileStatusFetcher(job.getConfiguration(), dirs, recursive, inputFilter, true);
            locatedFiles = locatedFileStatusFetcher.getFileStatuses();
        } catch (InterruptedException e) {
            // Preserve the interruption as an IOException with the original cause.
            throw (IOException) new InterruptedIOException("Interrupted while getting file statuses").initCause(e);
        }
        result = Lists.newArrayList(locatedFiles);
    }
    sw.stop();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Time taken to get FileStatuses: " + sw.now(TimeUnit.MILLISECONDS));
    }
    LOG.info("Total input files to process : " + result.size());
    return result;
}
// Non-recursive listing of a nested directory layout must return exactly the
// top-level entries prepared by the fixture.
@Test
public void testListStatusNestedNonRecursive() throws IOException {
    Configuration conf = new Configuration();
    conf.setInt(FileInputFormat.LIST_STATUS_NUM_THREADS, numThreads);
    List<Path> expectedPaths = configureTestNestedNonRecursive(conf, localFs);
    Job job = Job.getInstance(conf);
    FileInputFormat<?, ?> fif = new TextInputFormat();
    List<FileStatus> statuses = fif.listStatus(job);
    verifyFileStatuses(expectedPaths, statuses, localFs);
}
// Empties the set: clears the parent structure, drops the linked-list head/tail
// references, and resets the iteration bookmark so it no longer points at removed
// elements.
@Override
public void clear() {
    super.clear();
    this.head = null;
    this.tail = null;
    this.resetBookmark();
}
// After clear(): size is 0, the (previously advanced) bookmark iterator is empty,
// poll operations return nothing, and a fresh iterator has no elements.
@Test
public void testClear() {
    LOG.info("Test clear");
    // use addAll
    set.addAll(list);
    assertEquals(NUM, set.size());
    assertFalse(set.isEmpty());
    // Advance the bookmark.
    Iterator<Integer> bkmrkIt = set.getBookmark();
    for (int i=0; i<set.size()/2+1; i++) {
        bkmrkIt.next();
    }
    assertTrue(bkmrkIt.hasNext());
    // clear the set
    set.clear();
    assertEquals(0, set.size());
    assertTrue(set.isEmpty());
    bkmrkIt = set.getBookmark();
    assertFalse(bkmrkIt.hasNext());
    // poll should return an empty list
    assertEquals(0, set.pollAll().size());
    assertEquals(0, set.pollN(10).size());
    assertNull(set.pollFirst());
    // iterator should be empty
    Iterator<Integer> iter = set.iterator();
    assertFalse(iter.hasNext());
    LOG.info("Test clear - DONE");
}
// Cluster-aware RENAME: when source and destination keys hash to the same slot the
// plain RENAME is used; across slots it is emulated as DUMP + (PTTL) + RESTORE on
// the new key followed by DEL of the old key, since Redis Cluster cannot rename
// across slots directly.
@Override
public Flux<BooleanResponse<RenameCommand>> rename(Publisher<RenameCommand> commands) {
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getNewName(), "New name must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        byte[] newKeyBuf = toByteArray(command.getNewName());
        if (executorService.getConnectionManager().calcSlot(keyBuf) == executorService.getConnectionManager().calcSlot(newKeyBuf)) {
            // Same slot: delegate to the standard single-slot implementation.
            return super.rename(commands);
        }
        return read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.DUMP, keyBuf)
            .filter(Objects::nonNull)
            .zipWith(
                // Carry the remaining TTL along (0 means "no expiry" for RESTORE).
                Mono.defer(() -> pTtl(command.getKey())
                    .filter(Objects::nonNull)
                    .map(ttl -> Math.max(0, ttl))
                    .switchIfEmpty(Mono.just(0L))
                )
            )
            .flatMap(valueAndTtl -> {
                return write(newKeyBuf, StringCodec.INSTANCE, RedisCommands.RESTORE, newKeyBuf, valueAndTtl.getT2(), valueAndTtl.getT1());
            })
            .thenReturn(new BooleanResponse<>(command, true))
            // Delete the source key only after the restore succeeded.
            .doOnSuccess((ignored) -> del(command.getKey()));
    });
}
// Renaming a non-existent key: same-slot renames surface the Redis error (standard
// behavior), while cross-slot renames report success but leave the new key absent.
@Test
public void testRename_keyNotExist() {
    Integer originalSlot = getSlotForKey(originalKey);
    newKey = getNewKeyForSlot(new String(originalKey.array()), getTargetSlot(originalSlot));
    if (sameSlot) {
        // This is a quirk of the implementation - since same-slot renames use the non-cluster version,
        // the result is a Redis error. This behavior matches other spring-data-redis implementations
        assertThatThrownBy(() -> connection.keyCommands().rename(originalKey, newKey).block())
            .isInstanceOf(RedisSystemException.class);
    } else {
        Boolean response = connection.keyCommands().rename(originalKey, newKey).block();
        assertThat(response).isTrue();
        final ByteBuffer newKeyValue = connection.stringCommands().get(newKey).block();
        assertThat(newKeyValue).isEqualTo(null);
    }
}
// Builds an MPLS-pop instruction; the resulting Ethernet type after the pop is
// MPLS unicast.
public static Instruction popMpls() {
    return new L2ModificationInstruction.ModMplsHeaderInstruction(
        L2ModificationInstruction.L2SubType.MPLS_POP,
        EthType.EtherType.MPLS_UNICAST.ethType());
}
// popMpls() must yield an L2MODIFICATION instruction of subtype MPLS_POP carrying
// the MPLS unicast Ethernet type.
@Test
public void testPopMplsMethod() {
    final Instruction instruction = Instructions.popMpls();
    final L2ModificationInstruction.ModMplsHeaderInstruction pushHeaderInstruction = checkAndConvert(instruction, Instruction.Type.L2MODIFICATION, L2ModificationInstruction.ModMplsHeaderInstruction.class);
    assertThat(pushHeaderInstruction.ethernetType().toString(), is(EthType.EtherType.MPLS_UNICAST.toString()));
    assertThat(pushHeaderInstruction.subtype(), is(L2ModificationInstruction.L2SubType.MPLS_POP));
}
// Checks whether the existing table already has a foreign key whose referencing
// column and referenced table match the given ones. Comparison is case-insensitive
// so schema migration does not recreate FKs that differ only in identifier casing.
boolean equivalentForeignKeyExistsInDatabase(TableInformation tableInformation, String referencingColumn, String referencedTable) {
    return StreamSupport.stream( tableInformation.getForeignKeys().spliterator(), false )
        // Flatten all column-reference mappings of all foreign keys.
        .flatMap( foreignKeyInformation -> StreamSupport.stream( foreignKeyInformation.getColumnReferenceMappings().spliterator(), false ) )
        .anyMatch( columnReferenceMapping -> {
            final ColumnInformation referencingColumnMetadata = columnReferenceMapping.getReferencingColumnMetadata();
            final ColumnInformation referencedColumnMetadata = columnReferenceMapping.getReferencedColumnMetadata();
            final String existingReferencingColumn = referencingColumnMetadata.getColumnIdentifier().getText();
            final String existingReferencedTable = referencedColumnMetadata.getContainingTableInformation().getName().getTableName().getCanonicalName();
            return referencingColumn.equalsIgnoreCase( existingReferencingColumn ) && referencedTable.equalsIgnoreCase( existingReferencedTable );
        } );
}
// HHH-13779: the FK pre-existence check must be case-insensitive. The metadata is
// mocked with lower-case table/column names while the query uses UPPER-case.
@Test
@TestForIssue(jiraKey = "HHH-13779")
public void testForeignKeyPreExistenceDetectionIgnoresCaseForTableAndColumnName() {
    // Minimal concrete subclass: the abstract migration hook is irrelevant here.
    final AbstractSchemaMigrator schemaMigrator = new AbstractSchemaMigrator(null, null) {
        @Override
        protected NameSpaceTablesInformation performTablesMigration(Metadata metadata, DatabaseInformation existingDatabase, ExecutionOptions options,ContributableMatcher contributableInclusionFilter, Dialect dialect, Formatter formatter, Set<String> exportIdentifiers, boolean tryToCreateCatalogs, boolean tryToCreateSchemas, Set<Identifier> exportedCatalogs, Namespace namespace, SqlStringGenerationContext sqlStringGenerationContext, GenerationTarget[] targets) {
            return null;
        }
    };
    final TableInformation existingTableInformation = mock(TableInformation.class);
    final ArrayList<ForeignKeyInformation.ColumnReferenceMapping> columnReferenceMappings = new ArrayList<>();
    final TableInformation destinationTableInformation = mock(TableInformation.class);
    doReturn(new QualifiedTableName(toIdentifier("catalog"), toIdentifier("schema"), toIdentifier("referenced_table"))) // Table name is lower case
        .when(destinationTableInformation).getName();
    columnReferenceMappings.add(new ForeignKeyInformationImpl.ColumnReferenceMappingImpl(
        new ColumnInformationImpl(null, toIdentifier("referencing_column"), // column name is lower case
            0, "typeName", 255, 0, TruthValue.TRUE),
        new ColumnInformationImpl(destinationTableInformation, null, 1, "typeName", 0, 0, TruthValue.TRUE)));
    doReturn(singletonList(new ForeignKeyInformationImpl(toIdentifier("FKp8mpamfw2inhj88hwhty1eipm"), columnReferenceMappings)))
        .when(existingTableInformation).getForeignKeys();
    final boolean existInDatabase = schemaMigrator.equivalentForeignKeyExistsInDatabase(
        existingTableInformation,
        "REFERENCING_COLUMN", "REFERENCED_TABLE"); // Table and column names are UPPER-case here, to prove the test
    assertThat("Expected ForeignKey pre-existence check to be case-insensitive", existInDatabase, is(true));
}
// Reports whether an upload may append to the remote file: append is allowed exactly
// when the file already exists; the transfer status is carried along for the caller.
@Override public Write.Append append(final Path file, final TransferStatus status) throws BackgroundException { return new Write.Append(status.isExists()).withStatus(status); }
// Verifies append() reports an appendable status for a freshly created (existing) file,
// then cleans the file up.
@Test public void testAppend() throws Exception { final Path f = new Path(new FTPWorkdirService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new FTPTouchFeature(session).touch(f, new TransferStatus()); assertTrue(new FTPUploadFeature(session).append(f, new TransferStatus().exists(true).withLength(0L).withRemote(new FTPAttributesFinderFeature(session).find(f))).append); new FTPDeleteFeature(session).delete(Collections.singletonList(f), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Minimal URL-style escaping for group keys: only {@code '+'} and {@code '%'}
 * are percent-encoded; every other character is copied through unchanged.
 *
 * @param str input string to encode (must not be null)
 * @param sb  builder the encoded output is appended to
 */
static void urlEncode(String str, StringBuilder sb) {
    for (char ch : str.toCharArray()) {
        switch (ch) {
            case '+':
                sb.append("%2B");
                break;
            case '%':
                sb.append("%25");
                break;
            default:
                sb.append(ch);
        }
    }
}
// A null StringBuilder must surface as a NullPointerException when an escape is appended.
@Test void testUrlEncodeForNullStringBuilder() { assertThrows(NullPointerException.class, () -> { GroupKey.urlEncode("+", null); // Method is not expected to return due to exception thrown }); // Method is not expected to return due to exception thrown }
// Returns the human-readable full version string, ensuring the static build
// metadata has been loaded first.
public static String fullVersion() { ensureLoaded(); return fullVersion; }
// Verifies the full version string is composed from the short version and the git commit id.
@Test public void testFullVersion() { assertThat(IcebergBuild.fullVersion()) .as("Should build full version from version and commit ID") .isEqualTo( "Apache Iceberg " + IcebergBuild.version() + " (commit " + IcebergBuild.gitCommitId() + ")"); }
/**
 * Renders a millisecond duration as a parenthesized human-friendly suffix,
 * e.g. {@code " (2 minutes)"} — but only when the value divides evenly all
 * the way up to that unit. Anything that does not land exactly on a unit
 * boundary (or zero) yields the empty string.
 *
 * @param millis duration in milliseconds
 * @return suffix such as " (1 hour)" or "" when no clean unit applies
 */
static String niceTimeUnits(long millis) {
    final long[] divisors = {1000, 60, 60, 24};
    final String[] units = {"second", "minute", "hour", "day"};
    long remaining = millis;
    int unitIndex = 0;
    // Promote to the next-larger unit while the value divides evenly; stop at
    // the first remainder (or when days are reached).
    while (remaining != 0 && unitIndex < divisors.length && remaining % divisors[unitIndex] == 0) {
        remaining /= divisors[unitIndex];
        unitIndex++;
    }
    if (unitIndex == 0) {
        return "";
    }
    final String closing = remaining > 1 ? "s)" : ")";
    return " (" + remaining + " " + units[unitIndex - 1] + closing;
}
// Exercises exact unit boundaries (second/minute/hour/day) and off-by-one values
// that must render as empty.
@Test public void testNiceTimeUnits() { assertEquals("", ConfigDef.niceTimeUnits(0)); assertEquals("", ConfigDef.niceTimeUnits(Duration.ofSeconds(1).toMillis() - 1)); assertEquals(" (1 second)", ConfigDef.niceTimeUnits(Duration.ofSeconds(1).toMillis())); assertEquals("", ConfigDef.niceTimeUnits(Duration.ofSeconds(1).toMillis() + 1)); assertEquals(" (2 seconds)", ConfigDef.niceTimeUnits(Duration.ofSeconds(2).toMillis())); assertEquals(" (1 minute)", ConfigDef.niceTimeUnits(Duration.ofMinutes(1).toMillis())); assertEquals(" (2 minutes)", ConfigDef.niceTimeUnits(Duration.ofMinutes(2).toMillis())); assertEquals(" (1 hour)", ConfigDef.niceTimeUnits(Duration.ofHours(1).toMillis())); assertEquals(" (2 hours)", ConfigDef.niceTimeUnits(Duration.ofHours(2).toMillis())); assertEquals(" (1 day)", ConfigDef.niceTimeUnits(Duration.ofDays(1).toMillis())); assertEquals(" (2 days)", ConfigDef.niceTimeUnits(Duration.ofDays(2).toMillis())); assertEquals(" (7 days)", ConfigDef.niceTimeUnits(Duration.ofDays(7).toMillis())); assertEquals(" (365 days)", ConfigDef.niceTimeUnits(Duration.ofDays(365).toMillis())); }
// Decides whether a local symlink should be re-created on the server (true) or
// followed and uploaded as a regular file (false). Links are followed when the
// user preference requests resolving, or when the host has no symlink feature;
// otherwise the link is only recreated when its target is itself part of the
// upload set (checked against each transfer root).
@Override public boolean resolve(final Local file) throws NotfoundException, LocalAccessDeniedException { if(PreferencesFactory.get().getBoolean("local.symboliclink.resolve")) { // Follow links instead return false; } // Create symbolic link only if supported by the host if(feature != null) { final Local target = file.getSymlinkTarget(); // Only create symbolic link if target is included in the upload for(TransferItem root : files) { if(this.findTarget(target, root.local)) { if(log.isDebugEnabled()) { log.debug(String.format("Resolved target %s for %s", target, file)); } return true; } } } return false; //Follow links instead }
// A symlink whose target lies inside the uploaded directory must be recreated (true);
// one pointing outside the transfer set must be followed (false).
@Test public void testResolve() throws Exception { final ArrayList<TransferItem> files = new ArrayList<>(); final Path a = new Path("/a", EnumSet.of(Path.Type.directory)); files.add(new TransferItem(a, new NullLocal(System.getProperty("java.io.tmpdir"), "a") { @Override public boolean isFile() { return false; } @Override public boolean isDirectory() { return true; } })); UploadSymlinkResolver resolver = new UploadSymlinkResolver(new Symlink() { @Override public void symlink(final Path file, final String target) { // } }, files); assertTrue(resolver.resolve(new NullLocal(System.getProperty("java.io.tmpdir"), "a" + File.separator + "b") { @Override public boolean isSymbolicLink() { return true; } @Override public boolean isFile() { return true; } @Override public Local getSymlinkTarget() { return new NullLocal(System.getProperty("java.io.tmpdir"), "a" + File.separator + "c"); } })); assertFalse(resolver.resolve(new NullLocal("/a/b") { public boolean isSymbolicLink() { return true; } @Override public boolean isFile() { return true; } @Override public Local getSymlinkTarget() { return new NullLocal("/b/c"); } })); }
/**
 * Plan sanity check: a well-formed plan contains exactly one {@code OutputNode}.
 * Counts every OutputNode reachable from the root and fails otherwise.
 */
@Override
public void validate(PlanNode plan, Session session, Metadata metadata, SqlParser sqlParser, TypeProvider types, WarningCollector warningCollector) {
    int outputNodeCount = searchFrom(plan)
            .where(node -> node instanceof OutputNode)
            .findAll()
            .size();
    checkState(outputNodeCount == 1, "Expected plan to have single instance of OutputNode");
}
// Builds a plan containing two OutputNodes (one nested under ExplainAnalyze) and
// expects the validator to reject it with IllegalStateException.
@Test(expectedExceptions = IllegalStateException.class) public void testValidateFailed() { // random plan with 2 output nodes PlanNode root = new OutputNode(Optional.empty(), idAllocator.getNextId(), new ExplainAnalyzeNode(Optional.empty(), idAllocator.getNextId(), new OutputNode(Optional.empty(), idAllocator.getNextId(), new ProjectNode(idAllocator.getNextId(), new ValuesNode(Optional.empty(), idAllocator.getNextId(), ImmutableList.of(), ImmutableList.of(), Optional.empty()), Assignments.of() ), ImmutableList.of(), ImmutableList.of() ), new VariableReferenceExpression(Optional.empty(), "a", BIGINT), false, ExplainFormat.Type.TEXT), ImmutableList.of(), ImmutableList.of()); new VerifyOnlyOneOutputNode().validate(root, null, null, null, null, WarningCollector.NOOP); }
/**
 * Returns a new Topic containing every token except the first.
 * An empty topic yields another empty topic; the receiver is never mutated.
 */
public Topic exceptHeadToken() {
    List<Token> tokens = getTokens();
    if (tokens.isEmpty()) {
        return new Topic(Collections.emptyList());
    }
    // Copy the tail view into a fresh list so the new Topic owns its tokens.
    List<Token> tail = new ArrayList<>(tokens.subList(1, tokens.size()));
    return new Topic(tail);
}
// Dropping the head token of a leading-slash topic must yield the remaining tokens.
@Test public void exceptHeadToken() { assertEquals(Topic.asTopic("token"), Topic.asTopic("/token").exceptHeadToken()); assertEquals(Topic.asTopic("a/b"), Topic.asTopic("/a/b").exceptHeadToken()); }
// Convenience overload: a successful result with an empty message and no payload,
// delegating to success(String).
public static ShenyuAdminResult success() { return success(""); }
// success(data) must carry code 200, a null message and the supplied data payload.
@Test public void testSuccessWithData() { final ShenyuAdminResult result = ShenyuAdminResult.success(Collections.singletonList("data")); assertEquals(CommonErrorCode.SUCCESSFUL, result.getCode().intValue()); assertNull(result.getMessage()); assertNotNull(result.getData()); assertEquals("ShenyuAdminResult{code=200, message='null', data=[data]}", result.toString()); }
/**
 * Records the list of file names to include and switches file filtering on.
 * A null or empty array is ignored and leaves the current state untouched.
 */
@Override
public void includeFiles(String[] filenames) {
    if (filenames == null || filenames.length == 0) {
        return;
    }
    INCFILE = filenames;
    this.FILEFILTER = true;
}
// Runs every test file through the include filter and checks acceptance/rejection
// against each entry's expected include flag.
@Test public void testIncludeFiles() { testf.includeFiles(INCL); for (TestData td : TESTDATA) { String theFile = td.file; boolean expect = td.inclfile; testf.isFiltered(theFile, null); String line = testf.filter(theFile); if (line != null) { assertTrue(expect, "Expect to accept " + theFile); } else { assertFalse(expect, "Expect to reject " + theFile); } } }
// Revokes an OAuth2 access token: authenticates the calling client via its HTTP
// Basic credentials, then delegates token deletion to the grant service and
// returns whether a token was actually removed.
@DeleteMapping("/token") @PermitAll @Operation(summary = "删除访问令牌") @Parameter(name = "token", required = true, description = "访问令牌", example = "biu") public CommonResult<Boolean> revokeToken(HttpServletRequest request, @RequestParam("token") String token) { // 校验客户端 String[] clientIdAndSecret = obtainBasicAuthorization(request); OAuth2ClientDO client = oauth2ClientService.validOAuthClientFromCache(clientIdAndSecret[0], clientIdAndSecret[1], null, null, null); // 删除访问令牌 return success(oauth2GrantService.revokeToken(client.getClientId(), token)); }
// Mocks client validation and grant-service revocation, then asserts the endpoint
// returns a success result carrying true.
@Test public void testRevokeToken() { // 准备参数 HttpServletRequest request = mockRequest("demo_client_id", "demo_client_secret"); String token = randomString(); // mock 方法(client) OAuth2ClientDO client = randomPojo(OAuth2ClientDO.class).setClientId("demo_client_id"); when(oauth2ClientService.validOAuthClientFromCache(eq("demo_client_id"), eq("demo_client_secret"), isNull(), isNull(), isNull())).thenReturn(client); // mock 方法(移除) when(oauth2GrantService.revokeToken(eq("demo_client_id"), eq(token))).thenReturn(true); // 调用 CommonResult<Boolean> result = oauth2OpenController.revokeToken(request, token); // 断言 assertEquals(0, result.getCode()); assertTrue(result.getData()); }
/**
 * Finds the master node whose slot range covers the given hash slot.
 * Returns null when no master currently owns the slot.
 */
@Override
public RedisClusterNode clusterGetNodeForSlot(int slot) {
    for (RedisClusterNode node : clusterGetNodes()) {
        boolean ownsSlot = node.isMaster() && node.getSlotRange().contains(slot);
        if (ownsSlot) {
            return node;
        }
    }
    return null;
}
// Slots 1 and 16000 must resolve to two distinct master nodes.
@Test public void testClusterGetNodeForSlot() { RedisClusterNode node1 = connection.clusterGetNodeForSlot(1); RedisClusterNode node2 = connection.clusterGetNodeForSlot(16000); assertThat(node1.getId()).isNotEqualTo(node2.getId()); }
// Builds a Statement from the parse tree, first extracting the source relations
// from the tree so the build can resolve names against them.
public Statement buildStatement(final ParserRuleContext parseTree) { return build(Optional.of(getSources(parseTree)), parseTree); }
// "alias.*" with an alias not bound to any source must fail with a descriptive KsqlException.
@Test public void shouldThrowOnUnknownStarAlias() { // Given: final SingleStatementContext stmt = givenQuery("SELECT unknown.* FROM TEST1;"); // When: final Exception e = assertThrows( KsqlException.class, () -> builder.buildStatement(stmt) ); // Then: assertThat(e.getMessage(), containsString("'UNKNOWN' is not a valid stream/table name or alias.")); }
// Returns all known property keys — the key set of the property-to-bundle index.
public Set<String> getPropertyKeys() { return propertyToBundles.keySet(); }
// Known keys (including English-only ones) must be present; unknown keys must not.
@Test public void introspect_all_available_properties() { assertThat(underTest.getPropertyKeys().contains("any")).isTrue(); // Only in english assertThat(underTest.getPropertyKeys().contains("assignee")).isTrue(); assertThat(underTest.getPropertyKeys().contains("sqale.page")).isTrue(); assertThat(underTest.getPropertyKeys().contains("bla_bla_bla")).isFalse(); }
// Configures the connection max-idle time. Any negative value is accepted
// (per the guard, negatives bypass the minimum check — presumably meaning
// "idle detection disabled"; confirm against ConnectionPool); non-negative
// values must be at least ConnectionPool.IDLE_DETECTION_INTERVAL_SECONDS_MIN.
@Override public ClientBuilder connectionMaxIdleSeconds(int connectionMaxIdleSeconds) { checkArgument(connectionMaxIdleSeconds < 0 || connectionMaxIdleSeconds >= ConnectionPool.IDLE_DETECTION_INTERVAL_SECONDS_MIN, "Connection idle detect interval seconds at least " + ConnectionPool.IDLE_DETECTION_INTERVAL_SECONDS_MIN + "."); conf.setConnectionMaxIdleSeconds(connectionMaxIdleSeconds); return this; }
// -1 (disable) and 60 are accepted; a positive value below the minimum must be rejected.
@Test public void testConnectionMaxIdleSeconds() throws Exception { // test config disabled. PulsarClient.builder().connectionMaxIdleSeconds(-1); // test config correct PulsarClient.builder().connectionMaxIdleSeconds(60); // test config not correct. try { PulsarClient.builder().connectionMaxIdleSeconds(14); fail(); } catch (IllegalArgumentException e){ } }
// Writes one record as a complete message between startMessage/endMessage. On
// failure the offending message is logged (building it first when a Builder was
// passed) and the original exception is rethrown unchanged.
@Override public void write(T record) { recordConsumer.startMessage(); try { messageWriter.writeTopLevelMessage(record); } catch (RuntimeException e) { Message m = (record instanceof Message.Builder) ? ((Message.Builder) record).build() : (Message) record; LOG.error("Cannot write message {}: {}", e.getMessage(), m); throw e; } recordConsumer.endMessage(); }
// Proto3 scalar fields must be written in declaration order, with unset string
// fields emitted as empty binary values; interaction order is verified strictly.
@Test public void testProto3SimplestMessage() throws Exception { RecordConsumer readConsumerMock = Mockito.mock(RecordConsumer.class); ProtoWriteSupport<TestProto3.InnerMessage> instance = createReadConsumerInstance(TestProto3.InnerMessage.class, readConsumerMock); TestProto3.InnerMessage.Builder msg = TestProto3.InnerMessage.newBuilder(); msg.setOne("oneValue"); instance.write(msg.build()); InOrder inOrder = Mockito.inOrder(readConsumerMock); inOrder.verify(readConsumerMock).startMessage(); inOrder.verify(readConsumerMock).startField("one", 0); inOrder.verify(readConsumerMock).addBinary(Binary.fromString("oneValue")); inOrder.verify(readConsumerMock).endField("one", 0); inOrder.verify(readConsumerMock).startField("two", 1); inOrder.verify(readConsumerMock).addBinary(Binary.fromString("")); inOrder.verify(readConsumerMock).endField("two", 1); inOrder.verify(readConsumerMock).startField("three", 2); inOrder.verify(readConsumerMock).addBinary(Binary.fromString("")); inOrder.verify(readConsumerMock).endField("three", 2); inOrder.verify(readConsumerMock).endMessage(); Mockito.verifyNoMoreInteractions(readConsumerMock); }
/**
 * Resolves the type name a data connection was registered under by mapping its
 * implementation class back through the class-to-type registry.
 *
 * @param name registered data connection name
 * @return the type string for the connection's implementation class
 * @throws HazelcastException when the connection is unknown, or its
 *         implementation class has no registered type
 */
@Override
public String typeForDataConnection(String name) {
    DataConnectionEntry dataConnection = dataConnections.get(name);
    if (dataConnection == null) {
        throw new HazelcastException("Data connection '" + name + "' not found");
    }
    // The registry is keyed by the implementation class of the connection
    // instance, so look up — and report — that class.
    Class<?> instanceClass = dataConnection.instance.getClass();
    String type = dataConnectionClassToType.get(instanceClass);
    if (type == null) {
        // Bug fix: the original message printed dataConnection.getClass()
        // (the wrapper DataConnectionEntry class) instead of the instance
        // class actually used as the lookup key.
        throw new HazelcastException("Data connection type for class '" + instanceClass + "' is not known");
    }
    return type;
}
// Looking up the type of an unknown data connection must fail with a descriptive
// HazelcastException naming the missing connection.
@Test public void type_for_data_connection_should_throw_when_data_connection_does_not_exist() { assertThatThrownBy(() -> dataConnectionService.typeForDataConnection("non-existing-data-connection")) .isInstanceOf(HazelcastException.class) .hasMessage("Data connection 'non-existing-data-connection' not found"); }
/**
 * Resolves a resource by delegating to each chained class loader in order and
 * returning the first successful lookup. Returns null (after an optional trace
 * log) when no loader can find the resource.
 */
@Override
public URL getResource(String name) {
    for (ClassLoader classLoader : classLoaders) {
        URL found = classLoader.getResource(name);
        if (found != null) {
            return found;
        }
    }
    if (LOG.isTraceEnabled()) {
        LOG.trace("Resource " + name + " not found.");
    }
    return null;
}
// A resource missing from every chained loader must yield null, not an exception.
@Test public void getResourceReturnsNullIfResourceDoesNotExist() throws Exception { final ChainingClassLoader chainingClassLoader = new ChainingClassLoader(getClass().getClassLoader()); final URL resource = chainingClassLoader.getResource("ThisClassHopeFullyDoesNotExist" + Instant.now().toEpochMilli()); assertThat(resource).isNull(); }
// UDAF factory: creates a Collect-based aggregator gathering distinct values of
// type T into a single list.
@UdafFactory(description = "collect distinct values of a Bigint field into a single Array") public static <T> Udaf<T, List<T>, List<T>> createCollectSetT() { return new Collect<>(); }
// Aggregating two distinct timestamps must retain both in insertion order.
@Test public void shouldCollectDistinctTimestamps() { final Udaf<Timestamp, List<Timestamp>, List<Timestamp>> udaf = CollectSetUdaf.createCollectSetT(); final Timestamp[] values = new Timestamp[] {new Timestamp(1), new Timestamp(2)}; List<Timestamp> runningList = udaf.initialize(); for (final Timestamp i : values) { runningList = udaf.aggregate(i, runningList); } assertThat(runningList, contains(new Timestamp(1), new Timestamp(2))); }
/**
 * Coerces a JSON node to a double. Numeric nodes convert directly; text nodes
 * are parsed, with parse failures mapped to the standard string-coercion error.
 * Any other node type is an invalid conversion.
 */
static double toDouble(final JsonNode object) {
    if (object instanceof NumericNode) {
        return object.doubleValue();
    }
    if (!(object instanceof TextNode)) {
        throw invalidConversionException(object, SqlBaseType.DOUBLE);
    }
    try {
        return Double.parseDouble(object.textValue());
    } catch (final NumberFormatException e) {
        throw failedStringCoercionException(SqlBaseType.DOUBLE);
    }
}
// A non-numeric text node must be rejected when coerced to double.
@Test(expected = IllegalArgumentException.class) public void shouldNotConvertIncorrectStringToDouble() { JsonSerdeUtils.toDouble(JsonNodeFactory.instance.textNode("1!::")); }
// Delegates to TypeFactory to obtain the array type for the given element type.
public static Type arrayOf(Type elementType) { return TypeFactory.arrayOf(elementType); }
// arrayOf(Person.class) must yield the Person[] class.
@Test public void createObjectArrayType() { assertThat(Types.arrayOf(Person.class)).isEqualTo(Person[].class); }
// Positional read. Only the initial closed-stream check is synchronized — the
// read itself is deliberately unsynchronized so concurrent preads on a shared
// stream (HBase-style random reads) can proceed in parallel. When
// bufferedPreadDisabled is set the shared buffer/cursor path is bypassed and
// the range is fetched directly via readRemote, updating filesystem and stream
// statistics with the bytes read; otherwise the buffered superclass
// implementation handles the request. A zero-length request returns 0 after
// argument validation.
@Override public int read(long position, byte[] buffer, int offset, int length) throws IOException { // When bufferedPreadDisabled = true, this API does not use any shared buffer, // cursor position etc. So this is implemented as NOT synchronized. HBase // kind of random reads on a shared file input stream will greatly get // benefited by such implementation. // Strict close check at the begin of the API only not for the entire flow. synchronized (this) { if (closed) { throw new IOException(FSExceptionMessages.STREAM_IS_CLOSED); } } LOG.debug("pread requested offset = {} len = {} bufferedPreadDisabled = {}", offset, length, bufferedPreadDisabled); if (!bufferedPreadDisabled) { return super.read(position, buffer, offset, length); } validatePositionedReadArgs(position, buffer, offset, length); if (length == 0) { return 0; } if (streamStatistics != null) { streamStatistics.readOperationStarted(); } int bytesRead = readRemote(position, buffer, offset, length, tracingContext); if (statistics != null) { statistics.incrementBytesRead(bytesRead); } if (streamStatistics != null) { streamStatistics.bytesRead(bytesRead); } return bytesRead; }
// Queues read-aheads that succeed (later calls are stubbed to fail), waits for the
// background threads, then verifies a subsequent getBlock is served entirely from
// the read-ahead buffer — the remote read count must not grow past the initial 3.
@Test public void testReadAheadManagerForSuccessfulReadAhead() throws Exception { // Mock failure for client.read() AbfsClient client = getMockAbfsClient(); // Success operation mock AbfsRestOperation op = getMockRestOp(); // Stub : // Pass all readAheads and fail the post eviction request to // prove ReadAhead buffer is used doReturn(op) .doReturn(op) .doReturn(op) .doThrow(new TimeoutException("Internal Server error for RAH-X")) // for post eviction request .doThrow(new TimeoutException("Internal Server error for RAH-Y")) .doThrow(new TimeoutException("Internal Server error for RAH-Z")) .when(client) .read(any(String.class), any(Long.class), any(byte[].class), any(Integer.class), any(Integer.class), any(String.class), any(String.class), any(), any(TracingContext.class)); AbfsInputStream inputStream = getAbfsInputStream(client, "testSuccessfulReadAhead.txt"); queueReadAheads(inputStream); // AbfsInputStream Read would have waited for the read-ahead for the requested offset // as we are testing from ReadAheadManager directly, sleep for a sec to // get the read ahead threads to complete Thread.sleep(1000); // Only the 3 readAhead threads should have triggered client.read verifyReadCallCount(client, 3); // getBlock for a new read should return the buffer read-ahead int bytesRead = ReadBufferManager.getBufferManager().getBlock( inputStream, ONE_KB, ONE_KB, new byte[ONE_KB]); Assert.assertTrue("bytesRead should be non-zero from the " + "buffer that was read-ahead", bytesRead > 0); // Once created, mock will remember all interactions. // As the above read should not have triggered any server calls, total // number of read calls made at this point will be same as last. verifyReadCallCount(client, 3); // Stub will throw exception for client.read() for 4th and later calls // if not using the read-ahead buffer exception will be thrown on read checkEvictedStatus(inputStream, 0, true); }
// Registers a new service: validates the form, assembles the service metadata
// (protect threshold, selector, extended metadata, ephemeral flag), creates the
// service via the operator, then publishes a registration trace event.
@PostMapping() @TpsControl(pointName = "NamingServiceRegister", name = "HttpNamingServiceRegister") @Secured(action = ActionTypes.WRITE) public Result<String> create(ServiceForm serviceForm) throws Exception { serviceForm.validate(); ServiceMetadata serviceMetadata = new ServiceMetadata(); serviceMetadata.setProtectThreshold(serviceForm.getProtectThreshold()); serviceMetadata.setSelector(parseSelector(serviceForm.getSelector())); serviceMetadata.setExtendData(UtilsAndCommons.parseMetadata(serviceForm.getMetadata())); serviceMetadata.setEphemeral(serviceForm.getEphemeral()); serviceOperatorV2.create(Service.newService(serviceForm.getNamespaceId(), serviceForm.getGroupName(), serviceForm.getServiceName(), serviceForm.getEphemeral()), serviceMetadata); NotifyCenter.publishEvent( new RegisterServiceTraceEvent(System.currentTimeMillis(), serviceForm.getNamespaceId(), serviceForm.getGroupName(), serviceForm.getServiceName())); return Result.success("ok"); }
// Submits a default ephemeral service form and verifies the operator is invoked
// with the expected Service plus metadata, returning success("ok").
@Test void testCreate() throws Exception { ServiceForm serviceForm = new ServiceForm(); serviceForm.setNamespaceId(Constants.DEFAULT_NAMESPACE_ID); serviceForm.setServiceName("service"); serviceForm.setGroupName(Constants.DEFAULT_GROUP); serviceForm.setEphemeral(true); serviceForm.setProtectThreshold(0.0F); serviceForm.setMetadata(""); serviceForm.setSelector(""); Result<String> actual = serviceController.create(serviceForm); verify(serviceOperatorV2).create(eq(Service.newService(Constants.DEFAULT_NAMESPACE_ID, Constants.DEFAULT_GROUP, "service")), any(ServiceMetadata.class)); assertEquals(ErrorCode.SUCCESS.getCode(), actual.getCode()); assertEquals("ok", actual.getData()); }
// Transient blobs are not scoped to a job, so resolve the file with a null job id.
@Override public File getFile(TransientBlobKey key) throws IOException { return getFileInternal(null, key); }
// A cache pointed at a pre-populated storage directory must serve the existing
// transient blob without contacting a blob server (server address is null).
@Test void transientBlobCacheCanServeFilesFromPrepopulatedStorageDirectory( @TempDir Path storageDirectory) throws IOException { final JobID jobId = new JobID(); final TransientBlobKey blobKey = TestingBlobUtils.writeTransientBlob( storageDirectory, jobId, new byte[] {1, 2, 3, 4}); try (final TransientBlobCache transientBlobCache = new TransientBlobCache(new Configuration(), storageDirectory.toFile(), null)) { transientBlobCache.getFile(jobId, blobKey); } }
/**
 * Validates a metric definition, failing fast on the first violation:
 * null metric, unknown function, missing required field, or an attempt to
 * sort by an unsortable metric.
 *
 * @throws ValidationException describing the first violated rule
 */
public void validate(final Metric metric) {
    if (metric == null) {
        throw new ValidationException("Metric cannot be null");
    }
    final String function = metric.functionName();
    // The function must be one of the registered metric types.
    if (!isValidFunction(function)) {
        throw new ValidationException("Unrecognized metric : " + function + ", valid metrics : " + availableMetricTypes);
    }
    // Field-based metrics must carry their field name ("function:field").
    if (!hasFieldIfFunctionNeedsIt(metric)) {
        throw new ValidationException(function + " metric requires field name to be provided after a colon, i.e. " + function + ":http_status_code");
    }
    // Certain metrics cannot drive aggregation sort order.
    if (metric.sort() != null && UNSORTABLE_METRICS.contains(function)) {
        throw new ValidationException(function + " metric cannot be used to sort aggregations");
    }
}
// A null metric must be rejected with a ValidationException.
@Test void throwsExceptionOnNullMetric() { assertThrows(ValidationException.class, () -> toTest.validate(null)); }
// Applies the TPS rule registered for the request's point, if any. Any throwable
// thrown by the point check is logged and — like an unregistered point — yields
// a permissive CHECK_SKIP response, so TPS control never blocks traffic on
// internal errors.
public TpsCheckResponse check(TpsCheckRequest tpsRequest) { if (points.containsKey(tpsRequest.getPointName())) { try { return points.get(tpsRequest.getPointName()).applyTps(tpsRequest); } catch (Throwable throwable) { Loggers.TPS.warn("[{}]apply tps error,error={}", tpsRequest.getPointName(), throwable); } } return new TpsCheckResponse(true, TpsResultCode.CHECK_SKIP, "skip"); }
// Registers a point with an intercepting 5-per-second rule and verifies the first
// check within the window passes.
@Test void testCheck() { NacosTpsControlManager nacosTpsControlManager = new NacosTpsControlManager(); nacosTpsControlManager.registerTpsPoint("test"); final TpsControlRule tpsLimitRule = new TpsControlRule(); RuleDetail ruleDetail = new RuleDetail(); ruleDetail.setMaxCount(5); ruleDetail.setMonitorType(MonitorType.INTERCEPT.getType()); ruleDetail.setPeriod(TimeUnit.SECONDS); tpsLimitRule.setPointRule(ruleDetail); tpsLimitRule.setPointName("test"); nacosTpsControlManager.applyTpsRule("test", tpsLimitRule); long timeMillis = System.currentTimeMillis(); TpsCheckRequest tpsCheckRequest = new TpsCheckRequest(); tpsCheckRequest.setPointName("test"); tpsCheckRequest.setTimestamp(timeMillis); TpsCheckResponse check = nacosTpsControlManager.check(tpsCheckRequest); assertTrue(check.isSuccess()); }
// Converts a persisted LiveMeasureDto into a domain Measure, dispatching on the
// metric's value type to a dedicated converter that takes the numeric value
// and/or string data as appropriate. Returns empty when no measure row exists;
// the metric itself must not be null; an unsupported value type is a programming
// error.
public Optional<Measure> toMeasure(@Nullable LiveMeasureDto measureDto, Metric metric) { requireNonNull(metric); if (measureDto == null) { return Optional.empty(); } Double value = measureDto.getValue(); String data = measureDto.getDataAsString(); switch (metric.getType().getValueType()) { case INT: return toIntegerMeasure(value, data); case LONG: return toLongMeasure(value, data); case DOUBLE: return toDoubleMeasure(value, data); case BOOLEAN: return toBooleanMeasure(value, data); case STRING: return toStringMeasure(data); case LEVEL: return toLevelMeasure(data); case NO_VALUE: return toNoValueMeasure(); default: throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType()); } }
// toMeasure must NPE on a null metric even when a measure DTO is supplied.
@Test public void toMeasure_throws_NPE_if_metric_argument_is_null() { assertThatThrownBy(() -> underTest.toMeasure(EMPTY_MEASURE_DTO, null)) .isInstanceOf(NullPointerException.class); }
/**
 * Creates (or fetches) the DEBUG-level task sensor tracking the active task's
 * buffered-record count and attaches the corresponding value metric to it.
 *
 * @return the registered sensor
 */
public static Sensor activeBufferedRecordsSensor(final String threadId,
                                                 final String taskId,
                                                 final StreamsMetricsImpl streamsMetrics) {
    final String sensorName = ACTIVE_TASK_PREFIX + BUFFER_COUNT;
    final Sensor sensor =
        streamsMetrics.taskLevelSensor(threadId, taskId, sensorName, Sensor.RecordingLevel.DEBUG);
    addValueMetricToSensor(
        sensor,
        TASK_LEVEL_GROUP,
        streamsMetrics.taskLevelTagMap(threadId, taskId),
        sensorName,
        NUM_BUFFERED_RECORDS_DESCRIPTION);
    return sensor;
}
// Verifies the sensor is created at DEBUG level and the buffer-count value metric
// is attached with task-level tags via the static helper.
@Test public void shouldGetActiveBufferCountSensor() { final String operation = "active-buffer-count"; when(streamsMetrics.taskLevelSensor(THREAD_ID, TASK_ID, operation, RecordingLevel.DEBUG)) .thenReturn(expectedSensor); final String countDescription = "The count of buffered records that are polled " + "from consumer and not yet processed for this active task"; when(streamsMetrics.taskLevelTagMap(THREAD_ID, TASK_ID)).thenReturn(tagMap); try (final MockedStatic<StreamsMetricsImpl> streamsMetricsStaticMock = mockStatic(StreamsMetricsImpl.class)) { final Sensor sensor = TaskMetrics.activeBufferedRecordsSensor(THREAD_ID, TASK_ID, streamsMetrics); streamsMetricsStaticMock.verify( () -> StreamsMetricsImpl.addValueMetricToSensor( expectedSensor, TASK_LEVEL_GROUP, tagMap, operation, countDescription ) ); assertThat(sensor, is(expectedSensor)); } }