focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/** Static factory: returns a new {@code Reshuffle} transform for the given key/value types. */
public static <K, V> Reshuffle<K, V> of() {
    return new Reshuffle<>();
}
// Verifies that Reshuffle preserves both the grouped contents and the windowing
// strategy of its input after a windowed GroupByKey.
// NOTE(review): the test name says "SlidingWindows" but the pipeline applies
// FixedWindows — confirm whether the name or the window fn is intended.
@Test
@Category(ValidatesRunner.class)
public void testReshuffleAfterSlidingWindowsAndGroupByKey() {
    PCollection<KV<String, Iterable<Integer>>> input = pipeline
        .apply(
            Create.of(GBK_TESTABLE_KVS)
                .withCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of())))
        .apply(Window.into(FixedWindows.of(Duration.standardMinutes(10L))))
        .apply(GroupByKey.create());
    PCollection<KV<String, Iterable<Integer>>> output = input.apply(Reshuffle.of());
    PAssert.that(output).satisfies(new AssertThatHasExpectedContents());
    // Reshuffle must not alter the windowing strategy.
    assertEquals(input.getWindowingStrategy(), output.getWindowingStrategy());
    pipeline.run();
}
/**
 * Starts the RPC connection by creating and launching a new registration attempt.
 * Uses an atomic CAS on the pending-registration field so that only one of several
 * concurrent start() calls actually begins registering; the losers cancel their
 * freshly created registration.
 *
 * @throws IllegalStateException if the connection is closed or already started
 */
public void start() {
    checkState(!closed, "The RPC connection is already closed");
    checkState(
        !isConnected() && pendingRegistration == null, "The RPC connection is already started");
    final RetryingRegistration<F, G, S, R> newRegistration = createNewRegistration();
    if (REGISTRATION_UPDATER.compareAndSet(this, null, newRegistration)) {
        newRegistration.startRegistration();
    } else {
        // concurrent start operation: another thread won the CAS, discard ours
        newRegistration.cancel();
    }
}
// Verifies that a gateway which answers registration attempts with a rejection
// completes the connection future with a Right(TestRegistrationRejection)
// carrying the expected rejection reason.
@Test
void testRpcConnectionRejectionCallsOnRegistrationRejection() {
    TestRegistrationGateway testRegistrationGateway =
        DefaultTestRegistrationGateway.newBuilder()
            .setRegistrationFunction(
                (uuid, aLong) ->
                    CompletableFuture.completedFuture(
                        new TestRegistrationRejection(
                            TestRegistrationRejection.RejectionReason.REJECTED)))
            .build();
    rpcService.registerGateway(testRegistrationGateway.getAddress(), testRegistrationGateway);
    TestRpcConnection connection =
        new TestRpcConnection(
            testRegistrationGateway.getAddress(),
            UUID.randomUUID(),
            rpcService.getScheduledExecutor(),
            rpcService);
    connection.start();
    final Either<TestRegistrationSuccess, TestRegistrationRejection> connectionResult =
        connection.getConnectionFuture().join();
    assertThat(connectionResult.isRight()).isTrue();
    final TestRegistrationRejection registrationRejection = connectionResult.right();
    assertThat(registrationRejection.getRejectionReason())
        .isEqualTo(TestRegistrationRejection.RejectionReason.REJECTED);
}
/** Parses a JSON string into a {@link ViewRepresentation} via the parser's node-based overload. */
static ViewRepresentation fromJson(String json) {
    return JsonUtil.parse(json, ViewRepresentationParser::fromJson);
}
// A representation JSON without the required "type" field must fail with a
// descriptive IllegalArgumentException.
@Test
public void testViewRepresentationMissingType() {
    assertThatThrownBy(() -> ViewRepresentationParser.fromJson("{\"sql\":\"select * from foo\"}"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Cannot parse missing string: type");
}
/**
 * Returns a cached Dubbo {@code ReferenceConfig} for the metadata's path, or builds a
 * fresh one when the cache entry is absent/unusable.
 * A cached entry is only reused when its interface name is non-blank; otherwise (or on
 * a cache load failure) we fall back to {@code build(metaData, "")}.
 */
public ReferenceConfig<GenericService> initRef(final MetaData metaData) {
    try {
        ReferenceConfig<GenericService> referenceConfig = cache.get(metaData.getPath());
        if (StringUtils.isNoneBlank(referenceConfig.getInterface())) {
            return referenceConfig;
        }
    } catch (ExecutionException e) {
        // cache loader failed; log and rebuild below
        LOG.error("init dubbo ref exception", e);
    }
    return build(metaData, "");
}
// NOTE(review): this test stubs initRef on a Mockito mock and then asserts the stub's
// return value — it exercises the mock, not the real ApacheDubboConfigCache logic.
// Consider calling the real instance instead.
@Test
public void testInitRef() {
    MetaData metaData = new MetaData();
    metaData.setPath("/test");
    ApacheDubboConfigCache apacheDubboConfigCacheMock = mock(ApacheDubboConfigCache.class);
    when(apacheDubboConfigCacheMock.initRef(metaData))
        .thenReturn(new org.apache.dubbo.config.ReferenceConfig<>());
    assertNotNull(apacheDubboConfigCacheMock.initRef(metaData));
}
/**
 * Decodes the byte array into a String using the named charset, or the JVM's
 * platform default charset when {@code charsetName} is null.
 * NOTE(review): the platform-default path makes the result environment-dependent,
 * and a null {@code bytes} throws NullPointerException — confirm callers rely on
 * the current behavior before tightening.
 *
 * @throws UnsupportedEncodingException if the named charset is not supported
 */
public static String toString(byte[] bytes, String charsetName) throws UnsupportedEncodingException {
    return charsetName == null ? new String(bytes) : new String(bytes, charsetName);
}
// Covers the null/non-null/default-value branches of StringUtils.toString and
// the array-joining behavior of objectsToString.
@Test
public void testToString() {
    Assert.assertEquals(null, StringUtils.toString(null));
    Assert.assertEquals("Bean:11", StringUtils.toString(new Bean("11")));
    Assert.assertEquals(null, StringUtils.toString((Object) null, null));
    Assert.assertEquals("1", StringUtils.toString((Object) null, "1"));
    Assert.assertEquals("Bean:11", StringUtils.toString(new Bean("11"), null));
    Assert.assertEquals(null, StringUtils.objectsToString(null));
    Assert.assertEquals("[]", StringUtils.objectsToString(new Object[0]));
    Assert.assertEquals("[1,22]", StringUtils.objectsToString(new Object[] { 1, "22" }));
    Assert.assertEquals("[1,Bean:11]", StringUtils.objectsToString(new Object[] { 1, new Bean("11") }));
}
/**
 * Creates a {@link PathOutputCommitter} for the output path by delegating to the
 * committer factory resolved from the task's configuration.
 *
 * @throws IOException if the factory fails to create the committer
 */
public static PathOutputCommitter createCommitter(Path outputPath, TaskAttemptContext context) throws IOException {
    return getCommitterFactory(outputPath, context.getConfiguration())
        .createOutputCommitter(outputPath, context);
}
// With an unmodified ("bonded") configuration the factory is expected to fall back
// to the default FileOutputCommitter implementation.
@Test
public void testCommitterFallbackDefault() throws Throwable {
    createCommitter(FileOutputCommitter.class, HDFS_PATH, taskAttempt(newBondedConfiguration()));
}
/**
 * Updates the issue's clean-code attribute if it differs from the previous one.
 * Records a field-change diff, stamps the update date from the change context,
 * and marks the issue changed.
 *
 * @return true when the attribute actually changed, false when it was identical
 * @throws NullPointerException if the raw issue carries no clean-code attribute
 */
public boolean setCleanCodeAttribute(DefaultIssue raw, @Nullable CleanCodeAttribute previousCleanCodeAttribute, IssueChangeContext changeContext) {
    CleanCodeAttribute newCleanCodeAttribute = requireNonNull(raw.getCleanCodeAttribute());
    if (Objects.equals(previousCleanCodeAttribute, newCleanCodeAttribute)) {
        return false;
    }
    raw.setFieldChange(changeContext, CLEAN_CODE_ATTRIBUTE, previousCleanCodeAttribute, newCleanCodeAttribute.name());
    raw.setCleanCodeAttribute(newCleanCodeAttribute);
    raw.setUpdateDate(changeContext.date());
    raw.setChanged(true);
    return true;
}
// Changing the attribute from COMPLETE to CLEAR must report an update, keep the new
// attribute on the issue, and record an old->new diff in the current change.
@Test
void setCleanCodeAttribute_whenCleanCodeAttributeChanged_shouldUpdateIssue() {
    issue.setCleanCodeAttribute(CleanCodeAttribute.CLEAR);
    boolean updated = underTest.setCleanCodeAttribute(issue, CleanCodeAttribute.COMPLETE, context);
    assertThat(updated).isTrue();
    assertThat(issue.getCleanCodeAttribute()).isEqualTo(CleanCodeAttribute.CLEAR);
    assertThat(issue.currentChange().get("cleanCodeAttribute"))
        .extracting(FieldDiffs.Diff::oldValue, FieldDiffs.Diff::newValue)
        .containsExactly(CleanCodeAttribute.COMPLETE, CleanCodeAttribute.CLEAR.name());
}
/**
 * Formats a single CE activity DTO as a WS {@code Ce.Task}, loading its related
 * entities through a one-element DTO cache.
 */
public Ce.Task formatActivity(DbSession dbSession, CeActivityDto dto, @Nullable String scannerContext) {
    return formatActivity(dto, DtoCache.forActivityDtos(dbClient, dbSession, singletonList(dto)), scannerContext);
}
// End-to-end formatting of a FAILED report activity: verifies every mapped field and
// that absent scanner context / warnings come back empty rather than null.
@Test
public void formatActivity() {
    UserDto user = db.users().insertUser();
    CeActivityDto dto = newActivity("UUID", "COMPONENT_UUID", CeActivityDto.Status.FAILED, user);
    Ce.Task wsTask = underTest.formatActivity(db.getSession(), dto, null);
    assertThat(wsTask.getType()).isEqualTo(CeTaskTypes.REPORT);
    assertThat(wsTask.getId()).isEqualTo("UUID");
    assertThat(wsTask.getNodeName()).isEqualTo(NODE_NAME);
    assertThat(wsTask.getStatus()).isEqualTo(Ce.TaskStatus.FAILED);
    assertThat(wsTask.getSubmittedAt()).isEqualTo(DateUtils.formatDateTime(new Date(1_450_000_000_000L)));
    assertThat(wsTask.getSubmitterLogin()).isEqualTo(user.getLogin());
    assertThat(wsTask.getExecutionTimeMs()).isEqualTo(500L);
    assertThat(wsTask.getAnalysisId()).isEqualTo("U1");
    assertThat(wsTask.hasScannerContext()).isFalse();
    assertThat(wsTask.getWarningCount()).isZero();
    assertThat(wsTask.getWarningsList()).isEmpty();
    assertThat(wsTask.getInfoMessagesList()).isEmpty();
}
/** Convenience overload: validates the connector config with error reporting enabled. */
@Override
public void validateConnectorConfig(Map<String, String> connectorProps, Callback<ConfigInfos> callback) {
    validateConnectorConfig(connectorProps, callback, true);
}
// A sink connector whose DLQ topic equals one of its consumed topics must produce
// exactly one validation error on the "topics" config.
@Test
public void testConfigValidationTopicsWithDlq() {
    final Class<? extends Connector> connectorClass = SampleSinkConnector.class;
    AbstractHerder herder = createConfigValidationHerder(connectorClass, noneConnectorClientConfigOverridePolicy);
    Map<String, String> config = new HashMap<>();
    config.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, connectorClass.getName());
    config.put(SinkConnectorConfig.TOPICS_CONFIG, "topic1");
    config.put(SinkConnectorConfig.DLQ_TOPIC_NAME_CONFIG, "topic1");
    ConfigInfos validation = herder.validateConnectorConfig(config, s -> null, false);
    ConfigInfo topicsListInfo = findInfo(validation, SinkConnectorConfig.TOPICS_CONFIG);
    assertNotNull(topicsListInfo);
    assertEquals(1, topicsListInfo.configValue().errors().size());
    verifyValidationIsolation();
}
/**
 * Applies the JSON-schema "required" rule to a generated member: required members get
 * a "(Required)" Javadoc note plus, when configured and the target is a field,
 * a JSR-303 {@code @NotNull} (jakarta or javax) and/or a JSR-305 {@code @Nonnull};
 * non-required fields get {@code @Nullable} when JSR-305 is enabled.
 *
 * @return the same generatable type, for chaining
 */
@Override
public JDocCommentable apply(String nodeName, JsonNode node, JsonNode parent, JDocCommentable generatableType, Schema schema) {
    if (node.asBoolean()) {
        generatableType.javadoc().append("\n(Required)");
        if (ruleFactory.getGenerationConfig().isIncludeJsr303Annotations() && generatableType instanceof JFieldVar) {
            // Pick the jakarta or javax flavour of @NotNull based on configuration.
            final Class<? extends Annotation> notNullClass = ruleFactory.getGenerationConfig().isUseJakartaValidation() ? NotNull.class : javax.validation.constraints.NotNull.class;
            ((JFieldVar) generatableType).annotate(notNullClass);
        }
        if (ruleFactory.getGenerationConfig().isIncludeJsr305Annotations() && generatableType instanceof JFieldVar) {
            ((JFieldVar) generatableType).annotate(Nonnull.class);
        }
    } else {
        if (ruleFactory.getGenerationConfig().isIncludeJsr305Annotations() && generatableType instanceof JFieldVar) {
            ((JFieldVar) generatableType).annotate(Nullable.class);
        }
    }
    return generatableType;
}
// A boolean "true" required node must append exactly one "\n(Required)" entry to
// the class's Javadoc and return the same commentable instance.
@Test
public void applyAddsTextWhenRequired() throws JClassAlreadyExistsException {
    JDefinedClass jclass = new JCodeModel()._class(TARGET_CLASS_NAME);
    ObjectMapper mapper = new ObjectMapper();
    BooleanNode descriptionNode = mapper.createObjectNode().booleanNode(true);
    JDocCommentable result = rule.apply("fooBar", descriptionNode, null, jclass, null);
    assertThat(result.javadoc(), sameInstance(jclass.javadoc()));
    assertThat(result.javadoc().size(), is(1));
    assertThat((String) result.javadoc().get(0), is("\n(Required)"));
}
/**
 * Formats the name node storage: allocates a new namespace/block-pool id, applies the
 * requested cluster id, formats storage and non-file journals, and persists an initial
 * fsimage. Must be called on an uninitialized namesystem (only the root inode present).
 *
 * @throws IOException on storage or journal format failure
 */
void format(FSNamesystem fsn, String clusterId, boolean force) throws IOException {
    long fileCount = fsn.getFilesTotal();
    // Expect 1 file, which is the root inode
    Preconditions.checkState(fileCount == 1, "FSImage.format should be called with an uninitialized namesystem, has " + fileCount + " files");
    NamespaceInfo ns = NNStorage.newNamespaceInfo();
    LOG.info("Allocated new BlockPoolId: " + ns.getBlockPoolID());
    ns.clusterID = clusterId;
    storage.format(ns);
    editLog.formatNonFileJournals(ns, force);
    saveFSImageInAllDirs(fsn, 0);
}
// Plants a stale fsimage.ckpt file in the secondary name node's image directory and
// verifies the next checkpoint cleans it up. Cluster and secondary are always shut
// down in the finally block.
@Test
public void testRemovalStaleFsimageCkpt() throws IOException {
    MiniDFSCluster cluster = null;
    SecondaryNameNode secondary = null;
    Configuration conf = new HdfsConfiguration();
    try {
        cluster = new MiniDFSCluster.Builder(conf).
            numDataNodes(1).format(true).build();
        conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, "0.0.0.0:0");
        secondary = new SecondaryNameNode(conf);
        // Do checkpointing
        secondary.doCheckpoint();
        NNStorage storage = secondary.getFSImage().storage;
        File currentDir = FSImageTestUtil.
            getCurrentDirs(storage, NameNodeDirType.IMAGE).get(0);
        // Create a stale fsimage.ckpt file
        File staleCkptFile = new File(currentDir.getPath() + "/fsimage.ckpt_0000000000000000002");
        staleCkptFile.createNewFile();
        assertTrue(staleCkptFile.exists());
        // After checkpoint stale fsimage.ckpt file should be deleted
        secondary.doCheckpoint();
        assertFalse(staleCkptFile.exists());
    } finally {
        if (secondary != null) {
            secondary.shutdown();
            secondary = null;
        }
        if (cluster != null) {
            cluster.shutdown();
            cluster = null;
        }
    }
}
/**
 * Builds the SELECT statement for the read: either a full-table scan over the
 * escaped table name or the caller-supplied query. Exactly one of the two
 * arguments must be non-null.
 *
 * @throws IllegalArgumentException if both or neither argument is provided
 */
public static String getSelectQuery(@Nullable String table, @Nullable String query) {
    final boolean hasTable = table != null;
    final boolean hasQuery = query != null;
    if (hasTable && hasQuery) {
        throw new IllegalArgumentException("withTable() can not be used together with withQuery()");
    }
    if (hasTable) {
        return "SELECT * FROM " + SingleStoreUtil.escapeIdentifier(table);
    }
    if (hasQuery) {
        return query;
    }
    throw new IllegalArgumentException("One of withTable() or withQuery() is required");
}
// A table name containing a backtick must be escaped by doubling it inside the
// backtick-quoted identifier.
@Test
public void testGetSelectQueryNonNullTable() {
    assertEquals("SELECT * FROM `ta``ble`", SingleStoreUtil.getSelectQuery("ta`ble", null));
}
/**
 * Returns the breadth-first order of the right-most leaf in the subtree rooted at
 * {@code nodeOrder} in a perfect binary tree of the given depth. A leaf is its own
 * right-most leaf; otherwise the answer is the subtree's left-most leaf plus the
 * number of leaves under the node minus one.
 */
static int getRightMostLeafUnderNode(int nodeOrder, int depth) {
    if (isLeaf(nodeOrder, depth)) {
        return nodeOrder;
    }
    int levelOfNode = getLevelOfNode(nodeOrder);
    // how many levels lie between this node and the leaf level
    int distanceFromLeafLevel = depth - levelOfNode - 1;
    int leftMostLeafUnderNode = getLeftMostLeafUnderNode(nodeOrder, depth);
    int leavesOfSubtreeUnderNode = getNodesOnLevel(distanceFromLeafLevel);
    return leftMostLeafUnderNode + leavesOfSubtreeUnderNode - 1;
}
// Exhaustively checks the right-most-leaf computation for every node of trees of
// depth 1 through 4.
@Test
public void testGetRightMostLeafUnderNode() {
    // depth 1
    assertEquals(0, MerkleTreeUtil.getRightMostLeafUnderNode(0, 1));
    // depth 2
    assertEquals(2, MerkleTreeUtil.getRightMostLeafUnderNode(0, 2));
    assertEquals(1, MerkleTreeUtil.getRightMostLeafUnderNode(1, 2));
    assertEquals(2, MerkleTreeUtil.getRightMostLeafUnderNode(2, 2));
    // depth 3
    assertEquals(6, MerkleTreeUtil.getRightMostLeafUnderNode(0, 3));
    assertEquals(4, MerkleTreeUtil.getRightMostLeafUnderNode(1, 3));
    assertEquals(6, MerkleTreeUtil.getRightMostLeafUnderNode(2, 3));
    // depth 4
    assertEquals(14, MerkleTreeUtil.getRightMostLeafUnderNode(0, 4));
    assertEquals(10, MerkleTreeUtil.getRightMostLeafUnderNode(1, 4));
    assertEquals(14, MerkleTreeUtil.getRightMostLeafUnderNode(2, 4));
    assertEquals(8, MerkleTreeUtil.getRightMostLeafUnderNode(3, 4));
    assertEquals(10, MerkleTreeUtil.getRightMostLeafUnderNode(4, 4));
    assertEquals(12, MerkleTreeUtil.getRightMostLeafUnderNode(5, 4));
    assertEquals(14, MerkleTreeUtil.getRightMostLeafUnderNode(6, 4));
}
/**
 * Deletes the seckill-end notice flag for the given seckill id and task id.
 * Returns the Redis DEL result (true when a key was actually removed).
 * NOTE(review): there is no ':' between "notice" and {@code seckillId}, unlike the
 * separator before {@code taskId} — confirm the key format matches the code that
 * writes this flag.
 */
public Boolean clearSeckillEndFlag(long seckillId, String taskId) {
    return stringRedisTemplate.delete("goodskill:seckill:end:notice" + seckillId + ":" + taskId);
}
// Deleting a flag that was never set must return FALSE (no key removed).
@Test
void testClearSeckillEndFlag() {
    Boolean result = redisService.clearSeckillEndFlag(0L, "1");
    Assertions.assertEquals(Boolean.FALSE, result);
}
/**
 * Derives a metric name for a JDBI statement from {@code @Timed} annotations.
 * Precedence: a method-level annotation wins (absolute names bypass the class
 * prefix); otherwise a class-level annotation names the timer; otherwise null
 * (no timing).
 */
@Override
public String getStatementName(StatementContext statementContext) {
    final ExtensionMethod extensionMethod = statementContext.getExtensionMethod();
    if (extensionMethod == null) {
        return null;
    }
    final Class<?> clazz = extensionMethod.getType();
    final Timed classTimed = clazz.getAnnotation(Timed.class);
    final Method method = extensionMethod.getMethod();
    final Timed methodTimed = method.getAnnotation(Timed.class);
    // If the method is timed, figure out the name
    if (methodTimed != null) {
        String methodName = methodTimed.name().isEmpty() ? method.getName() : methodTimed.name();
        if (methodTimed.absolute()) {
            return methodName;
        } else {
            // We need to check if the class has a custom timer name
            return classTimed == null || classTimed.name().isEmpty() ? MetricRegistry.name(clazz, methodName) : MetricRegistry.name(classTimed.name(), methodName);
        }
    } else if (classTimed != null) {
        // Maybe the class is timed?
        return classTimed.name().isEmpty() ? MetricRegistry.name(clazz, method.getName()) : MetricRegistry.name(classTimed.name(), method.getName());
    } else {
        // No timers neither on the method or the class
        return null;
    }
}
// A statement context without an extension method must yield no statement name.
@Test
public void testNoMethod() {
    assertThat(timedAnnotationNameStrategy.getStatementName(ctx)).isNull();
}
/**
 * Renders the level modifications as a comma-separated "name=value" logctl
 * mod-spec string, in the map's iteration order.
 */
public String toLogctlModSpec() {
    final var spec = new StringBuilder();
    for (var entry : levelMods.entrySet()) {
        // Separator before every entry except the first.
        if (spec.length() > 0) {
            spec.append(",");
        }
        spec.append(entry.getKey()).append("=").append(entry.getValue());
    }
    return spec.toString();
}
// A freshly constructed LevelsModSpec must serialize to the documented default levels.
@Test
public void hasCorrectDefault() {
    String wanted = "fatal=on,error=on,warning=on,info=on,event=on,config=on,debug=off,spam=off";
    var l = new LevelsModSpec();
    assertEquals(wanted, l.toLogctlModSpec());
}
/**
 * Deprecated adapter: resolves the Iceberg table for the HMS table and delegates to
 * the snapshot-list overload to decide whether only append snapshots exist after
 * {@code since}.
 */
@Deprecated
@Override
public Boolean hasAppendsOnly(org.apache.hadoop.hive.ql.metadata.Table hmsTable, SnapshotContext since) {
    TableDesc tableDesc = Utilities.getTableDesc(hmsTable);
    Table table = IcebergTableUtil.getTable(conf, tableDesc.getProperties());
    return hasAppendsOnly(table.snapshots(), since);
}
// A snapshot history containing a delete after the reference snapshot must report false.
@Test
public void testHasAppendsOnlyFalseWhenNotOnlyAppendsAfterGivenSnapshot() {
    SnapshotContext since = new SnapshotContext(42);
    List<Snapshot> snapshotList = Arrays.asList(anySnapshot, appendSnapshot, deleteSnapshot);
    HiveIcebergStorageHandler storageHandler = new HiveIcebergStorageHandler();
    Boolean result = storageHandler.hasAppendsOnly(snapshotList, since);
    assertThat(result, is(false));
}
/**
 * Equality: same reference, or another {@code BatchInstanceRedoData} that is equal
 * per the superclass AND has an equal instance list. Using {@code instanceof}
 * (rather than exact class) keeps this symmetric with subclasses only if they do
 * not add state.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (!(o instanceof BatchInstanceRedoData)) {
        return false;
    }
    if (!super.equals(o)) {
        return false;
    }
    BatchInstanceRedoData redoData = (BatchInstanceRedoData) o;
    return Objects.equals(instances, redoData.instances);
}
// Covers reflexivity, value equality, inequality after mutating a nested instance,
// null comparison, and inequality on differing identity fields.
// NOTE(review): assertNotEquals(redoData1, redoData2) is asserted twice in a row —
// probably one of the duplicates was meant to check something else.
@Test
@SuppressWarnings("all")
void testEquals() {
    BatchInstanceRedoData redoData1 = new BatchInstanceRedoData("a", "b");
    redoData1.setInstances(Collections.singletonList(new Instance()));
    BatchInstanceRedoData redoData2 = new BatchInstanceRedoData("a", "b");
    redoData2.setInstances(Collections.singletonList(new Instance()));
    assertEquals(redoData1, redoData1);
    assertEquals(redoData1, redoData2);
    redoData2.getInstances().get(0).setIp("1.1.1.1");
    assertNotEquals(null, redoData1);
    assertNotEquals(redoData1, redoData2);
    assertNotEquals(redoData1, redoData2);
    BatchInstanceRedoData redoData3 = new BatchInstanceRedoData("c", "b");
    assertNotEquals(redoData1, redoData3);
}
/**
 * AOP interceptor: pushes the method/class {@code @DataPermission} annotation (if any)
 * onto the context holder for the duration of the call and always pops it afterwards,
 * even when the target method throws.
 */
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
    // Push: register the annotation for this invocation, if present
    DataPermission dataPermission = this.findAnnotation(methodInvocation);
    if (dataPermission != null) {
        DataPermissionContextHolder.add(dataPermission);
    }
    try {
        // Execute the intercepted method
        return methodInvocation.proceed();
    } finally {
        // Pop: remove exactly what we pushed, exception or not
        if (dataPermission != null) {
            DataPermissionContextHolder.remove();
        }
    }
}
@Test // 在 Class 上有 @DataPermission 注解 public void testInvoke_class() throws Throwable { // 参数 mockMethodInvocation(TestClass.class); // 调用 Object result = interceptor.invoke(methodInvocation); // 断言 assertEquals("class", result); assertEquals(1, interceptor.getDataPermissionCache().size()); assertFalse(CollUtil.getFirst(interceptor.getDataPermissionCache().values()).enable()); }
/** Convenience overload: unsubscribes the listener with an empty cluster list. */
@Override
public void unsubscribe(String serviceName, EventListener listener) throws NacosException {
    unsubscribe(serviceName, new ArrayList<>(), listener);
}
// When no subscription remains after deregistering the listener, the client must
// also unsubscribe from the server proxy.
@Test
void testUnSubscribe4() throws NacosException {
    //given
    String serviceName = "service1";
    String groupName = "group1";
    List<String> clusterList = Arrays.asList("cluster1", "cluster2");
    EventListener listener = event -> {
    };
    when(changeNotifier.isSubscribed(groupName, serviceName)).thenReturn(false);
    //when
    client.unsubscribe(serviceName, groupName, clusterList, listener);
    NamingSelectorWrapper wrapper = new NamingSelectorWrapper(NamingSelectorFactory.newClusterSelector(clusterList), listener);
    //then
    verify(changeNotifier, times(1)).deregisterListener(groupName, serviceName, wrapper);
    verify(proxy, times(1)).unsubscribe(serviceName, groupName, Constants.NULL);
}
/** Returns this config's eviction settings. */
public EvictionConfig getEvictionConfig() {
    return evictionConfig;
}
// A default MapConfig's eviction size must equal the documented default max size.
@Test
public void testGetMaxSize() {
    assertEquals(MapConfig.DEFAULT_MAX_SIZE, new MapConfig().getEvictionConfig().getSize());
}
/** Static factory: creates a new typed attribute key with the given name. */
public static <T> Key<T> newKey(String keyName) {
    return new Key<>(keyName);
}
// A null key name must be rejected with NullPointerException.
@Test
void newKeyFailsOnNull() {
    assertThrows(NullPointerException.class, () -> Attrs.newKey(null));
}
/** Static factory: wraps the given Retry policy in a reactive transformer. */
public static <T> RetryTransformer<T> of(Retry retry) {
    return new RetryTransformer<>(retry);
}
// Two failing Flowable subscriptions through the same retry (3 attempts each) must
// surface the original exception, invoke the service 6 times in total, and record
// two failed-with-retry calls in the metrics.
@Test
public void returnOnCompleteUsingFlowable() throws InterruptedException {
    RetryConfig config = retryConfig();
    Retry retry = Retry.of("testName", config);
    RetryTransformer<Object> retryTransformer = RetryTransformer.of(retry);
    given(helloWorldService.returnHelloWorld())
        .willThrow(new HelloWorldException());
    Flowable.fromCallable(helloWorldService::returnHelloWorld)
        .compose(retryTransformer)
        .test()
        .await()
        .assertError(HelloWorldException.class)
        .assertNotComplete()
        .assertSubscribed();
    Flowable.fromCallable(helloWorldService::returnHelloWorld)
        .compose(retryTransformer)
        .test()
        .await()
        .assertError(HelloWorldException.class)
        .assertNotComplete()
        .assertSubscribed();
    then(helloWorldService).should(times(6)).returnHelloWorld();
    Retry.Metrics metrics = retry.getMetrics();
    assertThat(metrics.getNumberOfFailedCallsWithRetryAttempt()).isEqualTo(2);
    assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isZero();
}
/**
 * Validates that {@code value} is non-null and non-empty after trimming, and
 * returns the trimmed string.
 *
 * @throws NullPointerException     if {@code value} is null
 * @throws IllegalArgumentException if the trimmed value is empty
 */
public static String checkNonEmptyAfterTrim(final String value, final String name) {
    checkNotNull(value, name);
    return checkNonEmpty(value.trim(), name);
}
// Exercises all checkNonEmptyAfterTrim outcomes: null -> NPE, valid value -> no
// exception, empty string -> IAE, whitespace-only string -> IAE.
@Test
public void testCheckNonEmptyAfterTrim() {
    Exception actualEx = null;
    try {
        ObjectUtil.checkNonEmptyAfterTrim((String) NULL_OBJECT, NULL_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
    assertTrue(actualEx instanceof NullPointerException, TEST_RESULT_EXTYPE_NOK);
    actualEx = null;
    try {
        ObjectUtil.checkNonEmptyAfterTrim((String) NON_NULL_OBJECT, NON_NULL_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNull(actualEx, TEST_RESULT_NULLEX_NOK);
    actualEx = null;
    try {
        ObjectUtil.checkNonEmptyAfterTrim(NON_NULL_EMPTY_STRING, NON_NULL_EMPTY_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
    assertTrue(actualEx instanceof IllegalArgumentException, TEST_RESULT_EXTYPE_NOK);
    actualEx = null;
    try {
        ObjectUtil.checkNonEmptyAfterTrim(NON_NULL_WHITESPACE_STRING, NON_NULL_EMPTY_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
    assertTrue(actualEx instanceof IllegalArgumentException, TEST_RESULT_EXTYPE_NOK);
}
/**
 * HTTP endpoint: restarts a trigger by (1) emitting a kill event for any running
 * evaluation and (2) re-emitting the trigger with its worker id / running date /
 * last date cleared. Returns 404 when the trigger does not exist for the resolved
 * tenant.
 */
@ExecuteOn(TaskExecutors.IO)
@Post(uri = "/{namespace}/{flowId}/{triggerId}/restart")
@Operation(tags = {"Triggers"}, summary = "Restart a trigger")
public HttpResponse<?> restart(
    @Parameter(description = "The namespace") @PathVariable String namespace,
    @Parameter(description = "The flow id") @PathVariable String flowId,
    @Parameter(description = "The trigger id") @PathVariable String triggerId
) throws HttpStatusException {
    Optional<Trigger> triggerOpt = triggerRepository.findLast(TriggerContext.builder()
        .tenantId(tenantService.resolveTenant())
        .namespace(namespace)
        .flowId(flowId)
        .triggerId(triggerId)
        .build());
    if (triggerOpt.isEmpty()) {
        return HttpResponse.notFound();
    }
    // Reset the execution-tracking fields so the trigger is considered idle.
    var trigger = triggerOpt.get().toBuilder()
        .workerId(null)
        .evaluateRunningDate(null)
        .date(null)
        .build();
    this.executionKilledQueue.emit(ExecutionKilledTrigger
        .builder()
        .tenantId(trigger.getTenantId())
        .namespace(trigger.getNamespace())
        .flowId(trigger.getFlowId())
        .triggerId(trigger.getTriggerId())
        .build()
    );
    // this will make the trigger restarting
    // be careful that, as everything is asynchronous, it can be restarted before it is killed
    this.triggerQueue.emit(trigger);
    return HttpResponse.ok(trigger);
}
// Restarting an existing trigger returns 200; restarting an unknown one returns
// an HTTP error (HttpClientResponseException).
@Test
void restart() {
    Flow flow = generateFlow("flow-with-triggers");
    jdbcFlowRepository.create(flow, flow.generateSource(), flow);
    Trigger trigger = Trigger.builder()
        .flowId(flow.getId())
        .namespace(flow.getNamespace())
        .triggerId("trigger-to-restart")
        .executionId(IdUtils.create())
        .disabled(true)
        .build();
    jdbcTriggerRepository.create(trigger);
    HttpResponse<?> restarted = client.toBlocking().exchange(HttpRequest.POST(("/api/v1/triggers/io.kestra.tests.schedule/flow-with-triggers/trigger-to-restart/restart"), null));
    assertThat(restarted.getStatus(), is(HttpStatus.OK));
    assertThrows(HttpClientResponseException.class, () -> client.toBlocking().exchange(HttpRequest.POST(("/api/v1/triggers/notfound/notfound/notfound/restart"), null)));
}
/** Boxed-Integer overload: unboxes and delegates to the primitive {@code add(int)}. */
@Override
public boolean add(final Integer value) {
    return add(value.intValue());
}
// After adding two elements, the set's iterator must yield exactly those elements
// (checked by the shared assertIteratorHasElements helper).
@Test
public void iteratorsListElements() {
    set.add(1);
    set.add(2);
    assertIteratorHasElements();
}
/** Counts persisted inputs flagged as global ({@code global: true}). */
@Override
public long globalCount() {
    return count(InputImpl.class, new BasicDBObject(MessageInput.FIELD_GLOBAL, true));
}
// The fixture contains exactly one global input; globalCount must report it.
@Test
@MongoDBFixtures("InputServiceImplTest.json")
public void globalCountReturnsNumberOfGlobalInputs() {
    assertThat(inputService.globalCount()).isEqualTo(1);
}
/**
 * Assigns SCM-derived author and assignee to a new issue. Skips issues that already
 * carry an author. The guessed SCM account sets the author only when it fits the DB
 * column; the assignee (when unset) is resolved from the SCM account's user or the
 * project's default assignee.
 */
@Override
public void onIssue(Component component, DefaultIssue issue) {
    if (issue.authorLogin() != null) {
        return;
    }
    loadScmChangesets(component);
    Optional<String> scmAuthor = guessScmAuthor(issue, component);
    if (scmAuthor.isPresent()) {
        if (scmAuthor.get().length() <= IssueDto.AUTHOR_MAX_SIZE) {
            issueUpdater.setNewAuthor(issue, scmAuthor.get(), changeContext);
        } else {
            // Oversized accounts would not fit the column; keep author unset.
            LOGGER.debug("SCM account '{}' is too long to be stored as issue author", scmAuthor.get());
        }
    }
    if (issue.assignee() == null) {
        UserIdDto userId = scmAuthor.map(scmAccountToUser::getNullable).orElse(defaultAssignee.loadDefaultAssigneeUserId());
        issueUpdater.setNewAssignee(issue, userId, changeContext);
    }
}
// A file-level issue (no specific line) must be assigned to the author of the most
// recent changeset of the file ("henry", mapped to user u123).
@Test
void assign_to_last_committer_of_file_if_issue_is_global_to_file() {
    addScmUser("henry", buildUserId("u123", "Henry V"));
    Changeset changeset1 = Changeset.newChangesetBuilder()
        .setAuthor("john")
        .setDate(1_000L)
        .setRevision("rev-1")
        .build();
    // Latest changeset
    Changeset changeset2 = Changeset.newChangesetBuilder()
        .setAuthor("henry")
        .setDate(2_000L)
        .setRevision("rev-2")
        .build();
    scmInfoRepository.setScmInfo(FILE_REF, changeset1, changeset2, changeset1);
    DefaultIssue issue = newIssueOnLines();
    underTest.onIssue(FILE, issue);
    assertThat(issue.assignee()).isEqualTo("u123");
    assertThat(issue.assigneeLogin()).isEqualTo("Henry V");
}
/**
 * Records a request latency sample against the per-node latency sensor, if one is
 * registered. An empty node id is ignored.
 */
public void recordLatency(String node, long requestLatencyMs) {
    if (node.isEmpty()) {
        return;
    }
    final Sensor nodeRequestTime = this.metrics.getSensor("node-" + node + ".latency");
    if (nodeRequestTime != null) {
        nodeRequestTime.record(requestLatencyMs);
    }
}
// Verifies that latency metrics for two nodes are tracked independently: averages
// and maxima per node, window expiry to NaN after idle periods, and recovery once
// new samples arrive.
@Test
public void testMultiNodeLatency() {
    String connectionId0 = "0";
    MetricName nodeLatencyAvg0 = metrics.metricName("request-latency-avg", group, genericTag(connectionId0));
    MetricName nodeLatencyMax0 = metrics.metricName("request-latency-max", group, genericTag(connectionId0));
    registerNodeLatencyMetric(connectionId0, nodeLatencyAvg0, nodeLatencyMax0);
    adminFetchMetricsManager.recordLatency(connectionId0, 5);
    adminFetchMetricsManager.recordLatency(connectionId0, 8);
    // Record metric against another node.
    String connectionId1 = "1";
    MetricName nodeLatencyAvg1 = metrics.metricName("request-latency-avg", group, genericTag(connectionId1));
    MetricName nodeLatencyMax1 = metrics.metricName("request-latency-max", group, genericTag(connectionId1));
    registerNodeLatencyMetric(connectionId1, nodeLatencyAvg1, nodeLatencyMax1);
    adminFetchMetricsManager.recordLatency(connectionId1, 105);
    adminFetchMetricsManager.recordLatency(connectionId1, 108);
    assertEquals(6.5, metricValue(nodeLatencyAvg0), EPSILON);
    assertEquals(8, metricValue(nodeLatencyMax0), EPSILON);
    assertEquals(106.5, metricValue(nodeLatencyAvg1), EPSILON);
    assertEquals(108, metricValue(nodeLatencyMax1), EPSILON);
    mockSleepTimeWindow();
    adminFetchMetricsManager.recordLatency(connectionId0, 11);
    adminFetchMetricsManager.recordLatency(connectionId1, 111);
    assertEquals(8, metricValue(nodeLatencyAvg0), EPSILON);
    assertEquals(11, metricValue(nodeLatencyMax0), EPSILON);
    assertEquals(108, metricValue(nodeLatencyAvg1), EPSILON);
    assertEquals(111, metricValue(nodeLatencyMax1), EPSILON);
    mockSleepTimeWindow();
    // After a full idle window both nodes' metrics decay to NaN.
    assertEquals(11, metricValue(nodeLatencyAvg0), EPSILON);
    assertEquals(11, metricValue(nodeLatencyMax0), EPSILON);
    assertEquals(111, metricValue(nodeLatencyAvg1), EPSILON);
    assertEquals(111, metricValue(nodeLatencyMax1), EPSILON);
    mockSleepTimeWindow();
    assertTrue(Double.isNaN(metricValue(nodeLatencyAvg0)));
    assertTrue(Double.isNaN(metricValue(nodeLatencyMax0)));
    assertTrue(Double.isNaN(metricValue(nodeLatencyAvg1)));
    assertTrue(Double.isNaN(metricValue(nodeLatencyMax1)));
    adminFetchMetricsManager.recordLatency(connectionId0, 500);
    adminFetchMetricsManager.recordLatency(connectionId0, 600);
    mockSleepTimeWindow();
    adminFetchMetricsManager.recordLatency(connectionId1, 800);
    adminFetchMetricsManager.recordLatency(connectionId1, 900);
    assertEquals(550, metricValue(nodeLatencyAvg0), EPSILON);
    assertEquals(600, metricValue(nodeLatencyMax0), EPSILON);
    assertEquals(850, metricValue(nodeLatencyAvg1), EPSILON);
    assertEquals(900, metricValue(nodeLatencyMax1), EPSILON);
    mockSleepTimeWindow();
    assertTrue(Double.isNaN(metricValue(nodeLatencyAvg0)));
    assertTrue(Double.isNaN(metricValue(nodeLatencyMax0)));
    assertEquals(850, metricValue(nodeLatencyAvg1), EPSILON);
    assertEquals(900, metricValue(nodeLatencyMax1), EPSILON);
    mockSleepTimeWindow();
    assertTrue(Double.isNaN(metricValue(nodeLatencyAvg1)));
    assertTrue(Double.isNaN(metricValue(nodeLatencyMax1)));
}
/**
 * Skips up to {@code n} bytes in the buffer, clamped to the bytes remaining before
 * {@code size}. Returns the number of bytes actually skipped; non-positive {@code n}
 * skips nothing.
 */
@Override
public final int skipBytes(final int n) {
    if (n <= 0) {
        return 0;
    }
    final int pos = position();
    // Math.min avoids the int overflow that "pos + n > size" could hit for
    // n close to Integer.MAX_VALUE with a large position.
    final int skip = Math.min(n, size - pos);
    position(pos + skip);
    return skip;
}
// Negative skips return 0, a one-byte skip returns 1, and skipping past the end is
// clamped to the available byte count.
@Test
public void testSkipBytes() {
    int s1 = in.skipBytes(-1);
    int s2 = in.skipBytes(1);
    in.position(0);
    int maxSkipBytes = in.available();
    int s3 = in.skipBytes(INIT_DATA.length);
    assertEquals(0, s1);
    assertEquals(1, s2);
    //skipBytes skips at most available bytes
    assertEquals(maxSkipBytes, s3);
}
/** Config setter (JSON-bound): pattern used to name rolled/archived log files. */
@JsonProperty
public void setArchivedLogFilenamePattern(String archivedLogFilenamePattern) {
    this.archivedLogFilenamePattern = archivedLogFilenamePattern;
}
// The built appender must carry the canonical "async-file-appender" name; the
// appender is stopped in finally to release the file handle.
@Test
void appenderNameIsSet(@TempDir Path tempDir) {
    final Logger root = (Logger) LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME);
    final FileAppenderFactory<ILoggingEvent> appenderFactory = new FileAppenderFactory<>();
    appenderFactory.setArchivedLogFilenamePattern(tempDir.resolve("example-%d.log.gz").toString());
    Appender<ILoggingEvent> appender = null;
    try {
        appender = appenderFactory.build(root.getLoggerContext(), "test", new DropwizardLayoutFactory(), new NullLevelFilterFactory<>(), new AsyncLoggingEventAppenderFactory());
        assertThat(appender.getName()).isEqualTo("async-file-appender");
    } finally {
        if (appender != null) {
            appender.stop();
        }
    }
}
/**
 * Walks the cause chain of {@code exception} and reports whether any throwable in
 * it is an instance of {@code type} (or a subtype). A null exception yields false.
 */
public static boolean containsType(
    @Nullable Throwable exception, Class<? extends Throwable> type) {
    if (exception == null) {
        return false;
    }
    // Match here, or recurse into the cause chain.
    return type.isAssignableFrom(exception.getClass())
        || containsType(exception.getCause(), type);
}
// A two-level cause chain containing neither the sought type nor a subtype must
// report false.
@Test
public void testContainsTypeNegativeWithNested() {
    assertThat(
        containsType(
            new IllegalStateException(
                "There is a bad state in the client",
                new IllegalArgumentException("RESOURCE_EXHAUSTED: Quota issues")),
            NoSuchFieldException.class))
        .isFalse();
}
/**
 * Resolves the configured plugins: parses the comma-separated plugin property into
 * PluginOptions (empty list when unset), then appends the publish plugin when it is
 * enabled.
 */
@Override
public List<Plugin> plugins() {
    List<Plugin> plugins = configurationParameters.get(PLUGIN_PROPERTY_NAME, s -> Arrays.stream(s.split(","))
        .map(String::trim)
        .map(PluginOption::parse)
        .map(pluginOption -> (Plugin) pluginOption)
        .collect(Collectors.toList()))
        .orElseGet(ArrayList::new);
    getPublishPlugin()
        .ifPresent(plugins::add);
    return plugins;
}
// With no configuration at all, the resolved plugin list must be empty.
@Test
void getPluginNamesWithNothingEnabled() {
    ConfigurationParameters config = new EmptyConfigurationParameters();
    assertThat(new CucumberEngineOptions(config).plugins().stream()
        .map(Options.Plugin::pluginString)
        .collect(toList()), empty());
}
/**
 * Returns the current reference count for {@code key}, or 0 when the key is not
 * tracked in the map.
 */
public long getReferenceCount(E key) {
    final ReferenceCounter counter = referenceMap.get(key);
    return counter == null ? 0 : counter.getRefCount();
}
// Two concurrent put threads must accumulate 2*LOOP_COUNTER references per feature;
// a subsequent remove thread must bring each count back down to LOOP_COUNTER.
@Test
public void testRefCountMapConcurrently() throws Exception {
    ReferenceCountMap<AclFeature> countMap = new ReferenceCountMap<>();
    PutThread putThread1 = new PutThread(countMap);
    putThread1.start();
    PutThread putThread2 = new PutThread(countMap);
    putThread2.start();
    RemoveThread removeThread1 = new RemoveThread(countMap);
    putThread1.join();
    putThread2.join();
    Assert.assertEquals(2 * LOOP_COUNTER, countMap.getReferenceCount(aclFeature1));
    Assert.assertEquals(2 * LOOP_COUNTER, countMap.getReferenceCount(aclFeature2));
    removeThread1.start();
    removeThread1.join();
    Assert.assertEquals(LOOP_COUNTER, countMap.getReferenceCount(aclFeature1));
    Assert.assertEquals(LOOP_COUNTER, countMap.getReferenceCount(aclFeature2));
}
/** KSQL UDF: extracts the path component of a URL (null for invalid input, per UrlParser). */
@Udf
public String extractPath(
    @UdfParameter(description = "a valid URL") final String input) {
    return UrlParser.extract(input, URI::getPath);
}
// The path must exclude the fragment ("#scalar-functions") and the scheme/host.
@Test
public void shouldExtractPathIfPresent() {
    assertThat(extractUdf.extractPath("https://docs.confluent.io/current/ksql/docs/syntax-reference.html#scalar-functions"),
        equalTo("/current/ksql/docs/syntax-reference.html"));
}
/**
 * Runs every crash-report {@code Rule}'s regex over the log and collects a
 * {@code Result} for each rule whose pattern matches.
 * NOTE: "anaylze" is a typo for "analyze", kept because callers reference this name.
 */
public static Set<Result> anaylze(String log) {
    Set<Result> results = new HashSet<>();
    for (Rule rule : Rule.values()) {
        Matcher matcher = rule.pattern.matcher(log);
        if (matcher.find()) {
            results.add(new Result(rule, log, matcher));
        }
    }
    return results;
}
// The FABRIC_WARNINGS rule must capture the full localized incompatibility reason
// from the fixture log (whitespace-insensitive comparison).
// NOTE(review): the method is named fabricWarnings2 but loads fabric_warnings3.txt —
// confirm which numbering is intended.
@Test
public void fabricWarnings2() throws IOException {
    CrashReportAnalyzer.Result result = findResultByRule(
        CrashReportAnalyzer.anaylze(loadLog("/logs/fabric_warnings3.txt")),
        CrashReportAnalyzer.Rule.FABRIC_WARNINGS);
    assertEquals(("net.fabricmc.loader.impl.FormattedException: Some of your mods are incompatible with the game or each other!\n" +
        "确定了一种可能的解决方法，这样做可能会解决你的问题：\n" +
        "\t - 安装 fabric-api，任意版本。\n" +
        "\t - 安装 sodium，0.5.6 及以上版本。\n" +
        "更多信息：\n" +
        "\t - 模组 'Sodium Extra' (sodium-extra) 0.5.4+mc1.20.4-build.116 需要 fabric-api 的 任意版本，但没有安装它！\n" +
        "\t - 模组 'Sodium Extra' (sodium-extra) 0.5.4+mc1.20.4-build.116 需要 sodium 的 0.5.6 及以上版本，但没有安装它！\n" +
        "\tat net.fabricmc.loader.impl.FormattedException.ofLocalized(FormattedException.java:51) ~").replaceAll("\\s+", ""),
        result.getMatcher().group("reason").replaceAll("\\s+", ""));
}
// YAML -> domain conversion. A non-empty ignoredType takes precedence over the matched flag;
// null input yields null.
@Override public TableDataConsistencyCheckResult swapToObject(final YamlTableDataConsistencyCheckResult yamlConfig) { if (null == yamlConfig) { return null; } if (!Strings.isNullOrEmpty(yamlConfig.getIgnoredType())) { return new TableDataConsistencyCheckResult(TableDataConsistencyCheckIgnoredType.valueOf(yamlConfig.getIgnoredType())); } return new TableDataConsistencyCheckResult(yamlConfig.isMatched()); }
// Empty string goes through the String-accepting overload and must not yield null.
@Test void assertSwapToObjectWithEmptyString() { assertNotNull(yamlTableDataConsistencyCheckResultSwapper.swapToObject("")); }
// Accessor for the AndroidModel this calculator was configured with.
public AndroidModel getModel() { return mModel; }
// An exact AndroidModel match should leave the calculator holding that model.
@Test public void testSelectsNexus4OnExactMatch() { org.robolectric.shadows.ShadowLog.stream = System.err; AndroidModel model = new AndroidModel("4.4.2", "KOT49H","Nexus 4","LGE"); ModelSpecificDistanceCalculator distanceCalculator = new ModelSpecificDistanceCalculator(null, null, model); assertEquals("should be Nexus 4", "Nexus 4", distanceCalculator.getModel().getModel()); }
// Returns the stored concurrency mode after verifying the result set is still open.
@Override public int getConcurrency() throws SQLException { checkClosed(); return concurrency; }
// Expects the metadata result set to be read-only.
@Test void assertGetConcurrency() throws SQLException { assertThat(databaseMetaDataResultSet.getConcurrency(), is(ResultSet.CONCUR_READ_ONLY)); }
// Writes the "current requests" section of a PDF report: flattens all per-JVM request contexts,
// rewires their parent counters, builds one PdfCounterReport per counter (Range is irrelevant
// for this part), then emits a paragraph per JVM. DocumentException is rethrown as IOException.
// NOTE(review): document.close() sits after the try/catch, so it is skipped when a
// DocumentException is converted to IOException — confirm whether close-on-error is intended.
public void writeAllCurrentRequestsAsPart( Map<JavaInformations, List<CounterRequestContext>> currentRequests, Collector collector, List<Counter> counters, long timeOfSnapshot) throws IOException { try { document.open(); // on remplace les parentCounters final List<CounterRequestContext> allCurrentRequests = new ArrayList<>(); for (final List<CounterRequestContext> rootCurrentContexts : currentRequests.values()) { allCurrentRequests.addAll(rootCurrentContexts); } CounterRequestContext.replaceParentCounters(allCurrentRequests, counters); final List<PdfCounterReport> pdfCounterReports = new ArrayList<>(); // ce range n'a pas d'importance pour ce pdf final Range range = Period.TOUT.getRange(); for (final Counter counter : counters) { final PdfCounterReport pdfCounterReport = new PdfCounterReport(collector, counter, range, false, document); pdfCounterReports.add(pdfCounterReport); } final Font normalFont = PdfFonts.NORMAL.getFont(); for (final Map.Entry<JavaInformations, List<CounterRequestContext>> entry : currentRequests .entrySet()) { final JavaInformations javaInformations = entry.getKey(); final List<CounterRequestContext> rootCurrentContexts = entry.getValue(); addParagraph(getString("Requetes_en_cours"), "hourglass.png"); if (rootCurrentContexts.isEmpty()) { addToDocument(new Phrase(getString("Aucune_requete_en_cours"), normalFont)); } else { final PdfCounterRequestContextReport pdfCounterRequestContextReport = new PdfCounterRequestContextReport( rootCurrentContexts, pdfCounterReports, javaInformations.getThreadInformationsList(), javaInformations.isStackTraceEnabled(), pdfDocumentFactory, document); pdfCounterRequestContextReport.setTimeOfSnapshot(timeOfSnapshot); pdfCounterRequestContextReport.writeContextDetails(); } } } catch (final DocumentException e) { throw createIOException(e); } document.close(); }
// Smoke test: an empty request list per JVM still produces non-empty PDF output.
@Test public void testWriteAllCurrentRequestsAsPart() throws IOException { final ByteArrayOutputStream output = new ByteArrayOutputStream(); final PdfOtherReport pdfOtherReport = new PdfOtherReport(TEST_APP, output); final Counter counter = new Counter("services", null); final Collector collector = new Collector(TEST_APP, List.of(counter)); final long timeOfSnapshot = System.currentTimeMillis(); final List<CounterRequestContext> requests = Collections.emptyList(); final JavaInformations javaInformations = new JavaInformations(null, true); final Map<JavaInformations, List<CounterRequestContext>> currentRequests = Collections .singletonMap(javaInformations, requests); pdfOtherReport.writeAllCurrentRequestsAsPart(currentRequests, collector, collector.getCounters(), timeOfSnapshot); assertNotEmptyAndClear(output); }
/**
 * Creates the given internal topics, retrying until all succeed or the retry deadline
 * (now + retryTimeoutMs) is exceeded.
 *
 * @param topicConfigs topic name -> configuration for every internal topic to create
 */
public void setup(final Map<String, InternalTopicConfig> topicConfigs) {
    log.info("Starting to setup internal topics {}.", topicConfigs.keySet());

    final long now = time.milliseconds();
    final long deadline = now + retryTimeoutMs;

    // Pre-compute the broker-side config map for each topic once, up front.
    final Map<String, Map<String, String>> streamsSideTopicConfigs = topicConfigs.values().stream()
        .collect(Collectors.toMap(
            InternalTopicConfig::name,
            topicConfig -> topicConfig.properties(defaultTopicConfigs, windowChangeLogAdditionalRetention)
        ));
    final Set<String> createdTopics = new HashSet<>();
    final Set<String> topicStillToCreate = new HashSet<>(topicConfigs.keySet());
    // Keep issuing createTopics requests for the remaining topics until all are created;
    // processCreateTopicResults() moves names from topicStillToCreate to createdTopics and
    // maybeSleep() enforces the deadline between attempts.
    while (!topicStillToCreate.isEmpty()) {
        final Set<NewTopic> newTopics = topicStillToCreate.stream()
            .map(topicName -> new NewTopic(
                topicName,
                topicConfigs.get(topicName).numberOfPartitions(),
                Optional.of(replicationFactor)
            ).configs(streamsSideTopicConfigs.get(topicName))
            ).collect(Collectors.toSet());

        // Use parameterized logging instead of string concatenation so the argument is only
        // rendered when INFO is enabled.
        log.info("Going to create internal topics: {}", newTopics);
        final CreateTopicsResult createTopicsResult = adminClient.createTopics(newTopics);

        processCreateTopicResults(createTopicsResult, topicStillToCreate, createdTopics, deadline);

        maybeSleep(Collections.singletonList(topicStillToCreate), deadline, "created");
    }

    log.info("Completed setup of internal topics {}.", topicConfigs.keySet());
}
// MockTime auto-advances a third of the poll interval per call, so repeated failing
// createTopics attempts eventually exceed the retry deadline and setup() throws TimeoutException.
@Test public void shouldThrowTimeoutExceptionWhenCreateTopicExceedsTimeout() { final AdminClient admin = mock(AdminClient.class); final MockTime time = new MockTime( (Integer) config.get(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG)) / 3 ); final StreamsConfig streamsConfig = new StreamsConfig(config); final InternalTopicManager topicManager = new InternalTopicManager(time, admin, streamsConfig); final KafkaFutureImpl<TopicMetadataAndConfig> createTopicFailFuture = new KafkaFutureImpl<>(); createTopicFailFuture.completeExceptionally(new TimeoutException()); final InternalTopicConfig internalTopicConfig = setupRepartitionTopicConfig(topic1, 1); final NewTopic newTopic = newTopic(topic1, internalTopicConfig, streamsConfig); when(admin.createTopics(mkSet(newTopic))) .thenAnswer(answer -> new MockCreateTopicsResult(mkMap(mkEntry(topic1, createTopicFailFuture)))); assertThrows( TimeoutException.class, () -> topicManager.setup(Collections.singletonMap(topic1, internalTopicConfig)) ); }
// Decodes the body as SMILE only when the Content-Type header matches MEDIA_TYPE_SMILE;
// otherwise returns a response carrying the raw bytes without a decoded value.
@Override public SmileResponse<T> handle(Request request, Response response) { byte[] bytes = readResponseBytes(response); String contentType = response.getHeader(CONTENT_TYPE); if ((contentType == null) || !MediaType.parse(contentType).is(MEDIA_TYPE_SMILE)) { return new SmileResponse<>(response.getStatusCode(), response.getHeaders(), bytes); } return new SmileResponse<>(response.getStatusCode(), response.getHeaders(), smileCodec, bytes); }
// No Content-Type header: response keeps raw bytes and has no decoded value or exception.
@Test public void testMissingContentType() { SmileResponse<User> response = handler.handle(null, new TestingResponse(OK, ImmutableListMultimap.of(), "hello".getBytes(UTF_8))); assertFalse(response.hasValue()); assertNull(response.getException()); assertNull(response.getSmileBytes()); assertEquals(response.getResponseBytes(), "hello".getBytes(UTF_8)); assertTrue(response.getHeaders().isEmpty()); }
// Copies each source (file or whole directory tree) into destDir, recreating paths relative
// to each source's parent; directories are created, files copied one-to-one.
public static void copy(ImmutableList<Path> sourceFiles, Path destDir) throws IOException { for (Path sourceFile : sourceFiles) { PathConsumer copyPathConsumer = path -> { // Creates the same path in the destDir. Path parent = Verify.verifyNotNull(sourceFile.getParent()); Path destPath = destDir.resolve(parent.relativize(path)); if (Files.isDirectory(path)) { Files.createDirectories(destPath); } else { Files.copy(path, destPath); } }; if (Files.isDirectory(sourceFile)) { new DirectoryWalker(sourceFile).walk(copyPathConsumer); } else { copyPathConsumer.accept(sourceFile); } } }
// Copies two jars plus a directory layer and verifies every file and nested directory landed.
@Test public void testCopy() throws IOException, URISyntaxException { Path destDir = temporaryFolder.newFolder().toPath(); Path libraryA = Paths.get(Resources.getResource("core/application/dependencies/libraryA.jar").toURI()); Path libraryB = Paths.get(Resources.getResource("core/application/dependencies/libraryB.jar").toURI()); Path dirLayer = Paths.get(Resources.getResource("core/layer").toURI()); FileOperations.copy(ImmutableList.of(libraryA, libraryB, dirLayer), destDir); assertFilesEqual(libraryA, destDir.resolve("libraryA.jar")); assertFilesEqual(libraryB, destDir.resolve("libraryB.jar")); Assert.assertTrue(Files.exists(destDir.resolve("layer").resolve("a").resolve("b"))); Assert.assertTrue(Files.exists(destDir.resolve("layer").resolve("c"))); assertFilesEqual( dirLayer.resolve("a").resolve("b").resolve("bar"), destDir.resolve("layer").resolve("a").resolve("b").resolve("bar")); assertFilesEqual( dirLayer.resolve("c").resolve("cat"), destDir.resolve("layer").resolve("c").resolve("cat")); assertFilesEqual(dirLayer.resolve("foo"), destDir.resolve("layer").resolve("foo")); }
// True when this node's local address equals the first (smallest) key of serverList.
public boolean isFirstIp() { return Objects.equals(serverList.firstKey(), this.localAddress); }
// Default test setup: the manager's local address is not the first entry.
@Test void testIsFirstIp() { assertFalse(serverMemberManager.isFirstIp()); }
// REST endpoint: returns cluster info as UTF-8 JSON or XML.
@GET @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) @Override public ClusterInfo get() { return getClusterInfo(); }
// Invalid URI must yield 404; responseStr stays "" because the GET throws before assignment.
@Test public void testInvalidUri2() throws JSONException, Exception { WebResource r = resource(); String responseStr = ""; try { responseStr = r.accept(MediaType.APPLICATION_JSON).get(String.class); fail("should have thrown exception on invalid uri"); } catch (UniformInterfaceException ue) { ClientResponse response = ue.getResponse(); assertResponseStatusCode(Status.NOT_FOUND, response.getStatusInfo()); WebServicesTestUtils.checkStringMatch( "error string exists and shouldn't", "", responseStr); } }
// Maps legacy BigtableOptions onto the write-options builder: attempt/operation timeouts and
// optional throttling target, batch sizes from the bulk options, and outstanding-element/byte
// caps computed as maxInflightRpcs times the per-batch limits.
static BigtableWriteOptions translateToBigtableWriteOptions( BigtableWriteOptions writeOptions, BigtableOptions options) { BigtableWriteOptions.Builder builder = writeOptions.toBuilder(); // configure timeouts if (options.getCallOptionsConfig().getMutateRpcAttemptTimeoutMs().isPresent()) { builder.setAttemptTimeout( org.joda.time.Duration.millis( options.getCallOptionsConfig().getMutateRpcAttemptTimeoutMs().get())); } if (options.getBulkOptions().isEnableBulkMutationThrottling()) { builder.setThrottlingTargetMs(options.getBulkOptions().getBulkMutationRpcTargetMs()); } builder.setOperationTimeout( org.joda.time.Duration.millis(options.getCallOptionsConfig().getMutateRpcTimeoutMs())); // configure batch size builder.setMaxElementsPerBatch(options.getBulkOptions().getBulkMaxRowKeyCount()); builder.setMaxBytesPerBatch(options.getBulkOptions().getBulkMaxRequestSize()); builder.setMaxOutstandingElements( options.getBulkOptions().getMaxInflightRpcs() * (long) options.getBulkOptions().getBulkMaxRowKeyCount()); builder.setMaxOutstandingBytes( options.getBulkOptions().getMaxInflightRpcs() * options.getBulkOptions().getBulkMaxRequestSize()); return builder.build(); }
// End-to-end check of the translation above: every timeout, batch size, and derived
// outstanding limit is asserted against the configured BigtableOptions.
@Test public void testBigtableOptionsToBigtableWriteOptions() throws Exception { BigtableOptions options = BigtableOptions.builder() .setCallOptionsConfig( CallOptionsConfig.builder() .setMutateRpcAttemptTimeoutMs(200) .setMutateRpcTimeoutMs(2000) .build()) .setRetryOptions( RetryOptions.builder() .setInitialBackoffMillis(15) .setBackoffMultiplier(2.5) .build()) .setBulkOptions( BulkOptions.builder() .setBulkMaxRequestSize(20) .setBulkMaxRowKeyCount(100) .setMaxInflightRpcs(5) .build()) .build(); BigtableWriteOptions writeOptions = BigtableWriteOptions.builder() .setTableId(ValueProvider.StaticValueProvider.of("table")) .build(); BigtableWriteOptions fromBigtableOptions = BigtableConfigTranslator.translateToBigtableWriteOptions(writeOptions, options); assertNotNull(fromBigtableOptions.getAttemptTimeout()); assertNotNull(fromBigtableOptions.getOperationTimeout()); assertNotNull(fromBigtableOptions.getMaxBytesPerBatch()); assertNotNull(fromBigtableOptions.getMaxElementsPerBatch()); assertNotNull(fromBigtableOptions.getMaxOutstandingElements()); assertNotNull(fromBigtableOptions.getMaxOutstandingBytes()); assertEquals(org.joda.time.Duration.millis(200), fromBigtableOptions.getAttemptTimeout()); assertEquals(org.joda.time.Duration.millis(2000), fromBigtableOptions.getOperationTimeout()); assertEquals(20, (long) fromBigtableOptions.getMaxBytesPerBatch()); assertEquals(100, (long) fromBigtableOptions.getMaxElementsPerBatch()); assertEquals(5 * 100, (long) fromBigtableOptions.getMaxOutstandingElements()); assertEquals(5 * 20, (long) fromBigtableOptions.getMaxOutstandingBytes()); }
// Builds a FAILED CDCResponse carrying the request id, error code, and error message.
public static CDCResponse failed(final String requestId, final String errorCode, final String errorMessage) { return CDCResponse.newBuilder().setStatus(Status.FAILED).setRequestId(requestId).setErrorCode(errorCode).setErrorMessage(errorMessage).build(); }
// Asserts each field of the failed response round-trips unchanged.
@Test void assertFailed() { CDCResponse actualResponse = CDCResponseUtils.failed("request_id_1", XOpenSQLState.GENERAL_ERROR.getValue(), "Error"); assertThat(actualResponse.getStatus(), is(CDCResponse.Status.FAILED)); assertThat(actualResponse.getRequestId(), is("request_id_1")); assertThat(actualResponse.getErrorCode(), is(XOpenSQLState.GENERAL_ERROR.getValue())); assertThat(actualResponse.getErrorMessage(), is("Error")); }
// Schedules tryBroadcastEvents at a fixed rate (delay/period in seconds).
@Override public void start() { this.executorService.scheduleAtFixedRate(this::tryBroadcastEvents, getInitialDelay(), getPeriod(), TimeUnit.SECONDS); }
// getClients() returns one client on the first poll, then an empty list; after all clients
// unregister a newly generated push event must not be broadcast.
@Test public void broadcast_should_stop_polling_for_events_when_all_clients_unregister() { var project = db.components().insertPrivateProject().getMainBranchComponent(); system2.setNow(1L); var sonarLintClient = mock(SonarLintClient.class); when(sonarLintClient.getClientProjectUuids()).thenReturn(Set.of(project.uuid())); when(clientsRegistry.getClients()).thenReturn(List.of(sonarLintClient), emptyList()); var underTest = new PushEventPollScheduler(executorService, clientsRegistry, db.getDbClient(), system2, config); underTest.start(); executorService.runCommand(); verify(clientsRegistry, times(0)).broadcastMessage(any(SonarLintPushEvent.class)); system2.tick(); // tick=2 generatePushEvent(project.uuid()); underTest.start(); executorService.runCommand(); // all clients have been unregistered, nothing to broadcast verify(clientsRegistry, times(0)).broadcastMessage(any(SonarLintPushEvent.class)); }
/**
 * Strips every trailing repetition of {@code suffix} from {@code str}.
 * Null or empty inputs are returned unchanged (via {@code str(str)}).
 *
 * @param str    source sequence, may be {@code null}
 * @param suffix suffix to strip repeatedly, may be {@code null}
 * @return the source without any trailing copies of the suffix
 */
public static String removeAllSuffix(CharSequence str, CharSequence suffix) {
    if (isEmpty(str) || isEmpty(suffix)) {
        return str(str);
    }

    final String source = str.toString();
    final String tail = suffix.toString();
    final int step = tail.length();

    // Walk backwards one suffix-length at a time while the suffix keeps matching.
    // (startsWith returns false for a negative offset, so no explicit lower bound is needed,
    // but the guard makes the loop's termination obvious.)
    int end = source.length();
    while (end >= step && source.startsWith(tail, end - step)) {
        end -= step;
    }
    return subPre(source, end);
}
// Covers repeated suffix, single suffix, no suffix, empty string, and null input.
@Test public void removeAllSuffixTest() { final String prefix = "ab"; String str = "cdefabab"; String result = CharSequenceUtil.removeAllSuffix(str, prefix); assertEquals("cdef", result); str = "cdefab"; result = CharSequenceUtil.removeAllSuffix(str, prefix); assertEquals("cdef", result); str = "cdef"; result = CharSequenceUtil.removeAllSuffix(str, prefix); assertEquals("cdef", result); str = ""; result = CharSequenceUtil.removeAllSuffix(str, prefix); assertEquals("", result); str = null; result = CharSequenceUtil.removeAllSuffix(str, prefix); assertNull(result); }
// Converts a scanner-report measure to a domain Measure, dispatching on the metric's value
// type; a null batch measure yields empty, an unknown value type throws.
public Optional<Measure> toMeasure(@Nullable ScannerReport.Measure batchMeasure, Metric metric) { Objects.requireNonNull(metric); if (batchMeasure == null) { return Optional.empty(); } Measure.NewMeasureBuilder builder = Measure.newMeasureBuilder(); switch (metric.getType().getValueType()) { case INT: return toIntegerMeasure(builder, batchMeasure); case LONG: return toLongMeasure(builder, batchMeasure); case DOUBLE: return toDoubleMeasure(builder, batchMeasure); case BOOLEAN: return toBooleanMeasure(builder, batchMeasure); case STRING: return toStringMeasure(builder, batchMeasure); case LEVEL: return toLevelMeasure(builder, batchMeasure); case NO_VALUE: return toNoValueMeasure(builder); default: throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType()); } }
// Null batch measure maps to an absent Optional.
@Test public void toMeasure_returns_absent_for_null_argument() { assertThat(underTest.toMeasure(null, SOME_INT_METRIC)).isNotPresent(); }
@Override public void suspend() { switch (state()) { case CREATED: transitToSuspend(); break; case RESTORING: transitToSuspend(); break; case RUNNING: try { // use try-catch to ensure state transition to SUSPENDED even if user code throws in `Processor#close()` closeTopology(); // we must clear the buffered records when suspending because upon resuming the consumer would // re-fetch those records starting from the committed position partitionGroup.clear(); } finally { transitToSuspend(); } break; case SUSPENDED: log.info("Skip suspending since state is {}", state()); break; case CLOSED: throw new IllegalStateException("Illegal state " + state() + " while suspending active task " + id); default: throw new IllegalStateException("Unknown state " + state() + " while suspending active task " + id); } }
// prepareCommit on a CLOSED task must throw IllegalStateException with the exact message.
@Test public void shouldFailOnCommitIfTaskIsClosed() { when(stateManager.taskType()).thenReturn(TaskType.ACTIVE); task = createStatelessTask(createConfig()); task.suspend(); task.transitionTo(Task.State.CLOSED); final IllegalStateException thrown = assertThrows( IllegalStateException.class, task::prepareCommit ); assertThat(thrown.getMessage(), is("Illegal state CLOSED while preparing active task 0_0 for committing")); }
// Accessor for the static proxy-home path.
public static String getProxyHome() { return proxyHome; }
// Proxy home must be either the mocked value or the default "./".
@Test public void testGetProxyHome() { // test configured proxy home assertThat(ConfigurationManager.getProxyHome()).isIn(mockProxyHome, "./"); }
// Factory for an IPv6-source match criterion.
public static Criterion matchIPv6Src(IpPrefix ip) { return new IPCriterion(ip, Type.IPV6_SRC); }
// Verifies criterion type and that the IP prefix round-trips.
@Test public void testMatchIPv6SrcMethod() { Criterion matchIpv6Src = Criteria.matchIPv6Src(ipv61); IPCriterion ipCriterion = checkAndConvert(matchIpv6Src, Criterion.Type.IPV6_SRC, IPCriterion.class); assertThat(ipCriterion.ip(), is(ipv61)); }
// Delegates size resolution to the SizeDeterminer, which invokes the callback when known.
@Override public final void getSize(@NonNull SizeReadyCallback cb) { sizeDeterminer.getSize(cb); }
// Layout params assigned after getSize(): the pre-draw pass must still deliver the size.
@Test public void testSizeCallbackIsCalledPreDrawIfNoDimensAndNoLayoutParamsButLayoutParamsSetLater() { target.getSize(cb); int width = 689; int height = 354; LayoutParams layoutParams = new FrameLayout.LayoutParams(width, height); view.setLayoutParams(layoutParams); view.requestLayout(); view.getViewTreeObserver().dispatchOnPreDraw(); verify(cb).onSizeReady(eq(width), eq(height)); }
// Static factory for a new map builder.
public static <V> Builder<V> newBuilder() { return new Builder<>(); }
// Invalid builder arguments — zero expected items, zero concurrency, and expected items
// smaller than the concurrency level — must each throw IllegalArgumentException.
@Test public void testConstructor() { try { ConcurrentLongHashMap.<String>newBuilder() .expectedItems(0) .build(); fail("should have thrown exception"); } catch (IllegalArgumentException e) { // ok } try { ConcurrentLongHashMap.<String>newBuilder() .expectedItems(16) .concurrencyLevel(0) .build(); fail("should have thrown exception"); } catch (IllegalArgumentException e) { // ok } try { ConcurrentLongHashMap.<String>newBuilder() .expectedItems(4) .concurrencyLevel(8) .build(); fail("should have thrown exception"); } catch (IllegalArgumentException e) { // ok } }
// REST endpoint: converts the validated request to a DTO, runs the command service, and
// wraps the result in a success envelope.
@Audit @Operation(summary = "command", description = "Command for component by [host,component,service,cluster]") @PostMapping public ResponseEntity<CommandVO> command(@RequestBody @Validated CommandReq commandReq) { CommandDTO commandDTO = CommandConverter.INSTANCE.fromReq2DTO(commandReq); CommandVO commandVO = commandService.command(commandDTO); return ResponseEntity.success(commandVO); }
// A null service result is still wrapped in a success envelope with null data.
@Test void commandHandlesInvalidRequest() { CommandReq commandReq = new CommandReq(); // Assuming this is invalid when(commandService.command(any(CommandDTO.class))).thenReturn(null); ResponseEntity<CommandVO> response = commandController.command(commandReq); assertTrue(response.isSuccess()); assertNull(response.getData()); }
// Jackson factory; @Min(1) enforces a positive index cap at validation time.
@JsonCreator public static ClosingRetentionStrategyConfig create(@JsonProperty(TYPE_FIELD) String type, @JsonProperty("max_number_of_indices") @Min(1) int maxNumberOfIndices) { return new AutoValue_ClosingRetentionStrategyConfig(type, maxNumberOfIndices); }
// Uses the single-argument create overload; asserts the index cap round-trips.
@Test public void testCreate() throws Exception { final ClosingRetentionStrategyConfig config = ClosingRetentionStrategyConfig.create(12); assertThat(config.maxNumberOfIndices()).isEqualTo(12); }
/**
 * Validates that the given entityID embeds the expected OIN in the
 * {@code urn:nl-eid-gdi:1.0:<role>:<oin>:entities:<n>} pattern.
 *
 * @param entityId the SAML entityID to check
 * @param oin      the OIN taken from the certificate
 * @throws MetadataParseException when the entityID does not match the OIN pattern
 */
public void checkOin(String entityId, String oin) {
    // Pattern.quote treats the OIN as a literal so regex metacharacters in it cannot widen
    // (or break) the match; the dot in "1.0" is escaped for the same reason.
    Pattern pattern = Pattern.compile("urn:nl-eid-gdi:1\\.0:\\w+:" + Pattern.quote(oin) + ":entities:\\d+");
    Matcher matcher = pattern.matcher(entityId);
    if (!matcher.matches()) {
        throw new MetadataParseException("OIN certificate does not match entityID");
    }
}
// Mismatched entityID/OIN must raise MetadataParseException.
@Test public void checkOinFailTest() { assertThrows(MetadataParseException.class, () -> { metadataProcessorServiceMock.checkOin("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS", "SSSSSSSSSSSSSSSSSSSS"); }); }
// Forwards the heartbeat to the RM heartbeat manager with a null payload.
@Override public CompletableFuture<Void> heartbeatFromResourceManager(final ResourceID resourceID) { return resourceManagerHeartbeatManager.requestHeartbeat(resourceID, null); }
// The RM gateway answers heartbeats with RecipientUnreachableException; the JobMaster must
// disconnect from the RM (disconnectedJobManagerFuture completes with the job id) and then
// attempt to re-register (registrationAttempts reaches zero on the second attempt).
@Test void testResourceManagerBecomesUnreachableTriggersDisconnect() throws Exception { final String resourceManagerAddress = "rm"; final ResourceManagerId resourceManagerId = ResourceManagerId.generate(); final ResourceID rmResourceId = new ResourceID(resourceManagerAddress); final TestingResourceManagerGateway resourceManagerGateway = new TestingResourceManagerGateway( resourceManagerId, rmResourceId, resourceManagerAddress, "localhost"); final CompletableFuture<JobID> disconnectedJobManagerFuture = new CompletableFuture<>(); final CountDownLatch registrationAttempts = new CountDownLatch(2); final Queue<CompletableFuture<RegistrationResponse>> connectionResponses = new ArrayDeque<>(2); connectionResponses.add( CompletableFuture.completedFuture( resourceManagerGateway.getJobMasterRegistrationSuccess())); connectionResponses.add(new CompletableFuture<>()); resourceManagerGateway.setRegisterJobManagerFunction( (jobMasterId, resourceID, s, jobID) -> { registrationAttempts.countDown(); return connectionResponses.poll(); }); resourceManagerGateway.setDisconnectJobManagerConsumer( tuple -> disconnectedJobManagerFuture.complete(tuple.f0)); resourceManagerGateway.setJobMasterHeartbeatFunction( ignored -> FutureUtils.completedExceptionally( new RecipientUnreachableException( "sender", "recipient", "resource manager is unreachable"))); rpcService.registerGateway(resourceManagerAddress, resourceManagerGateway); try (final JobMaster jobMaster = new JobMasterBuilder(jobGraph, rpcService) .withJobMasterId(jobMasterId) .withResourceId(jmResourceId) .withConfiguration(configuration) .withHighAvailabilityServices(haServices) .withHeartbeatServices(heartbeatServices) .createJobMaster()) { jobMaster.start(); // define a leader and see that a registration happens rmLeaderRetrievalService.notifyListener( resourceManagerAddress, resourceManagerId.toUUID()); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); CommonTestUtils.waitUntilCondition( () 
-> { jobMasterGateway.heartbeatFromResourceManager(rmResourceId); return disconnectedJobManagerFuture.isDone(); }, 50L); // heartbeat timeout should trigger disconnect JobManager from ResourceManager assertThat(disconnectedJobManagerFuture.join()).isEqualTo(jobGraph.getJobID()); // the JobMaster should try to reconnect to the RM registrationAttempts.await(); } }
// Entry point for building a fixed-type schema with the given name.
public static FixedBuilder<Schema> fixed(String name) { return builder().fixed(name); }
// Using the reserved primitive name "long" for a fixed schema must throw AvroTypeException.
@Test void nameReserved() { assertThrows(AvroTypeException.class, () -> { SchemaBuilder.fixed("long").namespace("").size(1); }); }
// Builds a filter top-aggregation: base filter from the FilterComputer (or an empty bool
// query), extra filters and sub-aggregations applied via the consumers; at least one
// sub-aggregation is mandatory (checkState).
public FilterAggregationBuilder buildTopAggregation(String topAggregationName, TopAggregationDefinition<?> topAggregation, Consumer<BoolQueryBuilder> extraFilters, Consumer<FilterAggregationBuilder> subAggregations) { BoolQueryBuilder filter = filterComputer.getTopAggregationFilter(topAggregation) .orElseGet(QueryBuilders::boolQuery); // optionally add extra filter(s) extraFilters.accept(filter); FilterAggregationBuilder res = AggregationBuilders.filter(topAggregationName, filter); subAggregations.accept(res); checkState( !res.getSubAggregations().isEmpty(), "no sub-aggregation has been added to top-aggregation %s", topAggregationName); return res; }
// The computer-provided filter must be used as-is (isSameAs), not copied or merged.
@Test public void buildTopAggregation_adds_filter_from_FiltersComputer_for_TopAggregation() { SimpleFieldTopAggregationDefinition topAggregation = new SimpleFieldTopAggregationDefinition("bar", false); SimpleFieldTopAggregationDefinition otherTopAggregation = new SimpleFieldTopAggregationDefinition("acme", false); BoolQueryBuilder computerFilter = boolQuery(); BoolQueryBuilder otherFilter = boolQuery(); when(filtersComputer.getTopAggregationFilter(topAggregation)).thenReturn(Optional.of(computerFilter)); when(filtersComputer.getTopAggregationFilter(otherTopAggregation)).thenReturn(Optional.of(otherFilter)); MinAggregationBuilder subAggregation = AggregationBuilders.min("donut"); String topAggregationName = randomAlphabetic(10); FilterAggregationBuilder aggregationBuilder = underTest.buildTopAggregation(topAggregationName, topAggregation, NO_EXTRA_FILTER, t -> t.subAggregation(subAggregation)); assertThat(aggregationBuilder.getName()).isEqualTo(topAggregationName); assertThat(aggregationBuilder.getFilter()).isSameAs(computerFilter); }
// Reads the FreePhysicalMemorySize attribute; -1 when unavailable.
public static long freePhysicalMemory() { return readLongAttribute("FreePhysicalMemorySize", -1L); }
// Value must be at least the -1 sentinel.
@Test public void testFreePhysicalMemory() { assertTrue(freePhysicalMemory() >= -1); }
// Static factory for an Applier over the given source and end-position table.
public static Applier fromSource(CharSequence source, EndPosTable endPositions) { return new Applier(source, endPositions); }
// Applying an empty fix yields null.
@Test public void shouldReturnNullOnEmptyFix() { AppliedFix fix = AppliedFix.fromSource("public class Foo {}", endPositions).apply(SuggestedFix.emptyFix()); assertThat(fix).isNull(); }
// Joins "", ROOT_NODE, COMPUTE_NODE, WORKER_ID with "/" (leading "" yields the leading slash).
public static String getInstanceWorkerIdRootNodePath() { return String.join("/", "", ROOT_NODE, COMPUTE_NODE, WORKER_ID); }
// Expects the literal registry path.
@Test void assertGetInstanceWorkerIdRootNodePath() { assertThat(ComputeNode.getInstanceWorkerIdRootNodePath(), is("/nodes/compute_nodes/worker_id")); }
// Builds all packets from the builder array, nulling each slot as it goes so builders become
// garbage-collectable while the packet list is assembled.
public List<MetricsPacket> getMetrics(List<VespaService> services, Instant startTime) { MetricsPacket.Builder[] builderArray = getMetricsBuildersAsArray(services, startTime, null); List<MetricsPacket> metricsPackets = new ArrayList<>(builderArray.length); for (int i = 0; i < builderArray.length; i++) { metricsPackets.add(builderArray[i].build()); builderArray[i] = null; // Set null to be able to GC the builder when packet has been created } return metricsPackets; }
// A down service yields exactly one packet with empty metrics but populated dimensions.
@Test public void service_that_is_down_has_a_separate_metrics_packet() { // Reset to use only the service that is down var downService = new DownService(HealthMetric.getDown("No response")); List<VespaService> testServices = List.of(downService); MetricsManager metricsManager = TestUtil.createMetricsManager(new VespaServices(testServices), getMetricsConsumers(),getApplicationDimensions(), getNodeDimensions()); List<MetricsPacket> packets = metricsManager.getMetrics(testServices, Instant.EPOCH); assertEquals(1, packets.size()); assertTrue(packets.get(0).metrics().isEmpty()); assertEquals(DownService.NAME, packets.get(0).dimensions().get(toDimensionId("instance"))); assertEquals("value", packets.get(0).dimensions().get(toDimensionId("global"))); }
// Slicing-by-8 CRC update: processes 8 bytes per iteration via the T8_* lookup tables, then
// drains the remainder one byte at a time. Note the embedded side effects: `localCrc >>>= 8`
// inside the c1..c3 expressions shifts the accumulator between table lookups, so evaluation
// order is load-bearing — do not reorder these statements.
@Override public void update(byte[] b, int off, int len) { int localCrc = crc; while (len > 7) { int c0 = b[off++] ^ localCrc; int c1 = b[off++] ^ (localCrc >>>= 8); int c2 = b[off++] ^ (localCrc >>>= 8); int c3 = b[off++] ^ (localCrc >>>= 8); localCrc = (T8_7[c0 & 0xff] ^ T8_6[c1 & 0xff]) ^ (T8_5[c2 & 0xff] ^ T8_4[c3 & 0xff]); localCrc ^= (T8_3[b[off++] & 0xff] ^ T8_2[b[off++] & 0xff]) ^ (T8_1[b[off++] & 0xff] ^ T8_0[b[off++] & 0xff]); len -= 8; } while (len > 0) { localCrc = (localCrc >>> 8) ^ T8_0[(localCrc ^ b[off++]) & 0xff]; len--; } // Publish crc out to object crc = localCrc; }
// Smoke test only — exercises update/getters without assertions. NOTE(review): consider
// asserting the known CRC32C value for the single-byte input to make this a real test.
@Test public void testUpdate() { Crc32C c = new Crc32C(); c.update(1); c.getMaskedValue(); c.getIntValue(); c.getValue(); }
// Delegates mapping-action JSON encoding to the helper class.
@Override public ObjectNode encode(MappingAction action, CodecContext context) { EncodeMappingActionCodecHelper encoder = new EncodeMappingActionCodecHelper(action, context); return encoder.encode(); }
// Drop action encodes to JSON that matches the action via the custom matcher.
@Test public void dropActionTest() { final DropMappingAction action = MappingActions.drop(); final ObjectNode actionJson = actionCodec.encode(action, context); assertThat(actionJson, matchesAction(action)); }
// ORs READ/WRITE/EXECUTE into the action according to the FTP file's permission bits for
// the given access group (user/group/world).
@VisibleForTesting FsAction getFsAction(int accessGroup, FTPFile ftpFile) { FsAction action = FsAction.NONE; if (ftpFile.hasPermission(accessGroup, FTPFile.READ_PERMISSION)) { action = action.or(FsAction.READ); } if (ftpFile.hasPermission(accessGroup, FTPFile.WRITE_PERMISSION)) { action = action.or(FsAction.WRITE); } if (ftpFile.hasPermission(accessGroup, FTPFile.EXECUTE_PERMISSION)) { action = action.or(FsAction.EXECUTE); } return action; }
// Exhaustive cross-product: every access group against every FsAction value.
// NOTE(review): the snippet appears to end without the method's closing brace — verify against
// the full source file.
@Test public void testGetFsAction(){ FTPFileSystem ftp = new FTPFileSystem(); int[] accesses = new int[] {FTPFile.USER_ACCESS, FTPFile.GROUP_ACCESS, FTPFile.WORLD_ACCESS}; FsAction[] actions = FsAction.values(); for(int i = 0; i < accesses.length; i++){ for(int j = 0; j < actions.length; j++){ enhancedAssertEquals(actions[j], ftp.getFsAction(accesses[i], getFTPFileOf(accesses[i], actions[j]))); } } }
// Falls back to DefaultBranchConfiguration when no loader is available; otherwise profiles
// the loader call and returns its result.
@Bean("BranchConfiguration") public BranchConfiguration provide(@Nullable BranchConfigurationLoader loader, ProjectConfiguration projectConfiguration, ProjectBranches branches) { if (loader == null) { return new DefaultBranchConfiguration(); } else { Profiler profiler = Profiler.create(LOG).startInfo(LOG_MSG); BranchConfiguration branchConfiguration = loader.load(projectConfiguration.getProperties(), branches); profiler.stopInfo(); return branchConfiguration; } }
// The loader's result must be returned unchanged (same instance).
@Test public void should_use_loader() { when(loader.load(eq(projectSettings), eq(branches))).thenReturn(config); BranchConfiguration result = provider.provide(loader, projectConfiguration, branches); assertThat(result).isSameAs(config); }
// Batch select by primary keys via the mapper.
@Override public List<AiVideoConfigDO> getAiVideoConfigList(Collection<Long> ids) { return aiVideoConfigMapper.selectBatchIds(ids); }
// Disabled scaffold test — placeholders (null) must be filled in before enabling; exercises
// the export-VO query overload with non-matching rows for each field.
@Test @Disabled // TODO 请修改 null 为需要的值,然后删除 @Disabled 注解 public void testGetAiVideoConfigList() { // mock 数据 AiVideoConfigDO dbAiVideoConfig = randomPojo(AiVideoConfigDO.class, o -> { // 等会查询到 o.setType(null); o.setValue(null); o.setStatus(null); o.setCreateTime(null); }); aiVideoConfigMapper.insert(dbAiVideoConfig); // 测试 type 不匹配 aiVideoConfigMapper.insert(cloneIgnoreId(dbAiVideoConfig, o -> o.setType(null))); // 测试 value 不匹配 aiVideoConfigMapper.insert(cloneIgnoreId(dbAiVideoConfig, o -> o.setValue(null))); // 测试 status 不匹配 aiVideoConfigMapper.insert(cloneIgnoreId(dbAiVideoConfig, o -> o.setStatus(null))); // 测试 createTime 不匹配 aiVideoConfigMapper.insert(cloneIgnoreId(dbAiVideoConfig, o -> o.setCreateTime(null))); // 准备参数 AiVideoConfigExportReqVO reqVO = new AiVideoConfigExportReqVO(); reqVO.setType(null); reqVO.setValue(null); reqVO.setStatus(null); reqVO.setCreateTime(buildBetweenTime(2023, 2, 1, 2023, 2, 28)); // 调用 List<AiVideoConfigDO> list = aiVideoConfigService.getAiVideoConfigList(reqVO); // 断言 assertEquals(1, list.size()); assertPojoEquals(dbAiVideoConfig, list.get(0)); }
// Factory for a dynamic-destination write with defaults: uncompressed, windowing honored,
// no auto-sharding, no spilling.
public static <DestT, InputT> Write<DestT, InputT> writeDynamic() { return new AutoValue_FileIO_Write.Builder<DestT, InputT>() .setDynamic(true) .setCompression(Compression.UNCOMPRESSED) .setIgnoreWindowing(false) .setAutoSharding(false) .setNoSpilling(false) .build(); }
// Regression test for BEAM-6407: file naming resolved via a side input must produce the
// expected shard-0 output file.
@Test @Category(NeedsRunner.class) public void testFileIoDynamicNaming() throws IOException { // Test for BEAM-6407. String outputFileName = tmpFolder.newFile().getAbsolutePath(); PCollectionView<String> outputFileNameView = p.apply("outputFileName", Create.of(outputFileName)).apply(View.asSingleton()); Contextful.Fn<String, FileIO.Write.FileNaming> fileNaming = (element, c) -> (window, pane, numShards, shardIndex, compression) -> c.sideInput(outputFileNameView) + "-" + shardIndex; p.apply(Create.of("")) .apply( "WriteDynamicFilename", FileIO.<String, String>writeDynamic() .by(SerializableFunctions.constant("")) .withDestinationCoder(StringUtf8Coder.of()) .via(TextIO.sink()) .withTempDirectory(tmpFolder.newFolder().getAbsolutePath()) .withNaming( Contextful.of( fileNaming, Requirements.requiresSideInputs(outputFileNameView)))); // We need to run the TestPipeline with the default options. p.run(PipelineOptionsFactory.create()).waitUntilFinish(); assertTrue( "Output file shard 0 exists after pipeline completes", new File(outputFileName + "-0").exists()); }
/**
 * Interpolates the value at (x1, x2) as a weighted sum over the training points,
 * plus the trailing term stored at index n of the weight vector.
 */
@Override
public double interpolate(double x1, double x2) {
    final int n = this.x1.length;
    // yvi[n] is an extra coefficient beyond the per-point weights.
    double sum = yvi[n];
    for (int i = 0; i < n; i++) {
        double dx = x1 - this.x1[i];
        double dy = x2 - this.x2[i];
        // NOTE(review): variogram is evaluated on the squared distance — confirm that
        // is the intended contract of variogram().
        sum += yvi[i] * variogram(dx * dx + dy * dy);
    }
    return sum;
}
@Test
public void testInterpolate() {
    System.out.println("interpolate");
    int n = data.length;
    double[] lat = new double[n];
    double[] lon = new double[n];
    double[] y = new double[n];
    // Columns of data[i]: {value, latitude, longitude}.
    for (int i = 0; i < n; i++) {
        y[i] = data[i][0];
        lat[i] = data[i][1];
        lon[i] = data[i][2];
    }
    KrigingInterpolation2D instance = new KrigingInterpolation2D(lat, lon, y, 1.1);
    // Expected value pinned to 1E-7 as a regression guard.
    assertEquals(4.5469676472147835, instance.interpolate(30.699, -85.327), 1E-7);
}
/** Creates an {@link InputFile} for the given S3 location, sharing this IO's client and metrics. */
@Override
public InputFile newInputFile(String path) {
    return S3InputFile.fromLocation(path, client(), s3FileIOProperties, metrics);
}
@Test
public void testNewInputFile() throws IOException {
    String location = "s3://bucket/path/to/file.txt";
    byte[] expected = new byte[1024 * 1024];
    random.nextBytes(expected);
    InputFile in = s3FileIO.newInputFile(location);
    // The file must not exist before anything is written.
    assertThat(in.exists()).isFalse();
    OutputFile out = s3FileIO.newOutputFile(location);
    try (OutputStream os = out.createOrOverwrite()) {
        IOUtil.writeFully(os, ByteBuffer.wrap(expected));
    }
    assertThat(in.exists()).isTrue();
    // Round-trip: reading back must yield exactly the bytes written.
    byte[] actual = new byte[1024 * 1024];
    try (InputStream is = in.newStream()) {
        IOUtil.readFully(is, actual, 0, expected.length);
    }
    assertThat(actual).isEqualTo(expected);
    // Deleting removes the object, so a fresh InputFile no longer exists.
    s3FileIO.deleteFile(in);
    assertThat(s3FileIO.newInputFile(location).exists()).isFalse();
}
/**
 * Encrypts {@code secretStr} with a freshly generated random salt and the given
 * iteration count. The result is "base64(salt):iterations:base64(ciphertext)",
 * so decryption can recover the salt and iterations from the prefix.
 */
protected String encrypt(String secretStr, int iterations) throws Exception {
    byte[] salt = new byte[saltLengthBytes];
    new SecureRandom().nextBytes(salt);
    byte[] cipherText =
            transform(Cipher.ENCRYPT_MODE, secretStr.getBytes(StandardCharsets.UTF_8), salt, iterations);
    Base64.Encoder encoder = Base64.getEncoder();
    return new String(encoder.encode(salt), StandardCharsets.UTF_8)
            + ":" + iterations + ":"
            + new String(encoder.encode(cipherText), StandardCharsets.UTF_8);
}
// Encrypting with a null password must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testEncryptionFailWithNullPassword() throws Exception {
    assumeDefaultAlgorithmsSupported();
    AbstractPbeReplacer replacer = createAndInitReplacer(null, new Properties());
    replacer.encrypt("test", 1);
}
/** Returns the key space for the scheme, delegating to the shared store-name prefix builder. */
@Override
@NonNull
public String getKeySpace(Scheme scheme) {
    return ExtensionStoreUtil.buildStoreNamePrefix(scheme);
}
// The key space is derived from the scheme (group + plural) under the /registry prefix.
@Test
void getKeySpace() {
    var keySpace = indexSpecRegistry.getKeySpace(scheme);
    assertThat(keySpace).isEqualTo("/registry/test.halo.run/fakes");
}
/** Returns a page of AI video templates matching the query conditions in {@code pageReqVO}. */
@Override
public PageResult<AiVideoTemplateDO> getAiVideoTemplatePage(AiVideoTemplatePageReqVO pageReqVO) {
    return aiVideoTemplateMapper.selectPage(pageReqVO);
}
@Test @Disabled // TODO 请修改 null 为需要的值,然后删除 @Disabled 注解 public void testGetAiVideoTemplatePage() { // mock 数据 AiVideoTemplateDO dbAiVideoTemplate = randomPojo(AiVideoTemplateDO.class, o -> { // 等会查询到 o.setType(null); o.setValue(null); o.setCreateTime(null); }); aiVideoTemplateMapper.insert(dbAiVideoTemplate); // 测试 type 不匹配 aiVideoTemplateMapper.insert(cloneIgnoreId(dbAiVideoTemplate, o -> o.setType(null))); // 测试 value 不匹配 aiVideoTemplateMapper.insert(cloneIgnoreId(dbAiVideoTemplate, o -> o.setValue(null))); // 测试 createTime 不匹配 aiVideoTemplateMapper.insert(cloneIgnoreId(dbAiVideoTemplate, o -> o.setCreateTime(null))); // 准备参数 AiVideoTemplatePageReqVO reqVO = new AiVideoTemplatePageReqVO(); reqVO.setType(null); reqVO.setValue(null); reqVO.setCreateTime(buildBetweenTime(2023, 2, 1, 2023, 2, 28)); // 调用 PageResult<AiVideoTemplateDO> pageResult = aiVideoTemplateService.getAiVideoTemplatePage(reqVO); // 断言 assertEquals(1, pageResult.getTotal()); assertEquals(1, pageResult.getList().size()); assertPojoEquals(dbAiVideoTemplate, pageResult.getList().get(0)); }
public static void compareContextState(String filterName, SessionContext context, SessionContext copy) { // TODO - only comparing Attributes. Need to compare the messages too. // Ensure that the routingDebug property already exists, otherwise we'll have a ConcurrentModificationException // below getRoutingDebug(context); Iterator<String> it = context.keySet().iterator(); String key = it.next(); while (key != null) { if ((!key.equals("routingDebug") && !key.equals("requestDebug"))) { Object newValue = context.get(key); Object oldValue = copy.get(key); if (!(newValue instanceof ReferenceCounted) && !(oldValue instanceof ReferenceCounted)) { if (oldValue == null && newValue != null) { addRoutingDebug(context, "{" + filterName + "} added " + key + "=" + newValue.toString()); } else if (oldValue != null && newValue != null) { if (!(oldValue.equals(newValue))) { addRoutingDebug(context, "{" + filterName + "} changed " + key + "=" + newValue.toString()); } } } } if (it.hasNext()) { key = it.next(); } else { key = null; } } }
// Regression test: comparing contexts must not throw ConcurrentModificationException,
// even though addRoutingDebug mutates the context while it is being iterated.
@Test
void testNoCMEWhenComparingContexts() {
    final SessionContext context = new SessionContext();
    final SessionContext copy = new SessionContext();
    context.set("foo", "bar");
    Debug.compareContextState("testfilter", context, copy);
}
/**
 * Instantiates the alarm callback class named by the configuration's type and
 * initializes it from the configuration's settings.
 *
 * @throws ClassNotFoundException if the configured type cannot be loaded
 * @throws AlarmCallbackConfigurationException if initialization fails
 */
public AlarmCallback create(AlarmCallbackConfiguration configuration)
        throws ClassNotFoundException, AlarmCallbackConfigurationException {
    final AlarmCallback callback = create(configuration.getType());
    callback.initialize(new Configuration(configuration.getConfiguration()));
    return callback;
}
// Creating a callback by fully-qualified class name yields the registered instance.
@Test
public void testCreateByClassName() throws Exception {
    String className = DummyAlarmCallback.class.getCanonicalName();
    AlarmCallback alarmCallback = alarmCallbackFactory.create(className);
    assertNotNull(alarmCallback);
    assertTrue(alarmCallback instanceof DummyAlarmCallback);
    assertEquals(dummyAlarmCallback, alarmCallback);
}
/** Tests whether {@code realPath} matches the pattern {@code matchUrls} using the shared matcher. */
public static boolean match(final String matchUrls, final String realPath) {
    return MATCHER.match(matchUrls, realPath);
}
// Exercises the Ant-style pattern syntax: exact, single-char '?', single-segment '*',
// and multi-segment '**' wildcards.
@Test
public void testPathMatch() {
    // test exact matching
    assertTrue(PathMatchUtils.match("test", "test"));
    assertTrue(PathMatchUtils.match("/test", "/test"));
    // test matching with ?'s
    assertTrue(PathMatchUtils.match("t?st", "test"));
    assertTrue(PathMatchUtils.match("??st", "test"));
    // test matching with *'s
    assertTrue(PathMatchUtils.match("*", "test"));
    assertTrue(PathMatchUtils.match("test*", "test"));
    assertTrue(PathMatchUtils.match("test*", "testTest"));
    assertFalse(PathMatchUtils.match("test*aaa", "testblaaab"));
    // test matching with **'s
    assertTrue(PathMatchUtils.match("/**", "/testing/testing"));
    assertTrue(PathMatchUtils.match("/test/**", "/test/test"));
}
/**
 * Starts a media-insertion request: cancels any in-flight local proxy, records the
 * request id and callback, and launches the picker activity for the given MIME types.
 */
@Override
public void startMediaRequest(
    @NonNull String[] mimeTypes, int requestId, @NonNull InsertionRequestCallback callback) {
  // Dispose the previous proxy so only the latest request can deliver a result.
  mCurrentRunningLocalProxy.dispose();
  mCurrentRequest = requestId;
  mCurrentCallback = callback;
  final Intent pickingIntent = getMediaInsertRequestIntent(mimeTypes, requestId);
  mContext.startActivity(pickingIntent);
}
@Test
public void testIncorrectEmptyIntent() {
  mUnderTest.startMediaRequest(new String[] {"media/png"}, 123, mCallback);
  // Locate the receiver registered by the implementation and feed it an empty intent.
  mShadowApplication.getRegisteredReceivers().stream()
      .filter(
          wrapper ->
              wrapper.broadcastReceiver
                  instanceof RemoteInsertionImpl.MediaInsertionAvailableReceiver)
      .map(ShadowApplication.Wrapper::getBroadcastReceiver)
      .findFirst()
      .get()
      .onReceive(
          ApplicationProvider.getApplicationContext(), createReceiverIntent(null, null, 0));
  // An empty/invalid result must never reach the callback.
  Mockito.verifyZeroInteractions(mCallback);
}
/**
 * Simulates a remote call with a random delay. Returns {@code value * 10} when the
 * delay is within the threshold, otherwise the FAILURE status value.
 */
@Override
public long doRemoteFunction(int value) {
    final long waitMillis = (long) Math.floor(randomProvider.random() * 1000);
    try {
        sleep(waitMillis);
    } catch (InterruptedException e) {
        LOGGER.error("Thread sleep state interrupted", e);
        // Restore the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
    }
    if (waitMillis <= THRESHOLD) {
        return value * 10;
    }
    return RemoteServiceStatus.FAILURE.getRemoteServiceStatusValue();
}
// With random = 0.21 the simulated wait exceeds the threshold, so the call reports FAILURE.
@Test
void testFailedCall() {
    var remoteService = new RemoteService(new StaticRandomProvider(0.21));
    var result = remoteService.doRemoteFunction(10);
    assertEquals(RemoteServiceStatus.FAILURE.getRemoteServiceStatusValue(), result);
}
/**
 * Returns a new list containing every element of {@code array} that does not equal
 * {@code victim} (null-safe comparison). Returns null for a null input array.
 */
@Udf
public <T> List<T> remove(
    @UdfParameter(description = "Array of values") final List<T> array,
    @UdfParameter(description = "Value to remove") final T victim) {
  return array == null
      ? null
      : array.stream()
          .filter(element -> !Objects.equals(element, victim))
          .collect(Collectors.toList());
}
// Every occurrence of the victim value (1.1) is removed; remaining order is preserved.
@Test
public void shouldRemoveDoubles() {
    final List<Double> input1 = Arrays.asList(1.1, 2.99, 1.1, 3.0);
    final Double input2 = 1.1;
    final List<Double> result = udf.remove(input1, input2);
    assertThat(result, contains(2.99, 3.0));
}
public static int getCloseTimeout(URL url) { String configuredCloseTimeout = System.getProperty(Constants.CLOSE_TIMEOUT_CONFIG_KEY); int defaultCloseTimeout = -1; if (StringUtils.isNotEmpty(configuredCloseTimeout)) { try { defaultCloseTimeout = Integer.parseInt(configuredCloseTimeout); } catch (NumberFormatException e) { // use default heartbeat } } if (defaultCloseTimeout < 0) { defaultCloseTimeout = getIdleTimeout(url); } int closeTimeout = url.getParameter(Constants.CLOSE_TIMEOUT_KEY, defaultCloseTimeout); int heartbeat = getHeartbeat(url); if (closeTimeout < heartbeat * 2) { throw new IllegalStateException("closeTimeout < heartbeatInterval * 2"); } return closeTimeout; }
// Verifies that the close-timeout system property overrides the default.
@Test
void testConfiguredClose() {
    System.setProperty(Constants.CLOSE_TIMEOUT_CONFIG_KEY, "180000");
    try {
        URL url = URL.valueOf("dubbo://127.0.0.1:12345");
        Assertions.assertEquals(180000, UrlUtils.getCloseTimeout(url));
    } finally {
        // Bug fix: the original cleared HEARTBEAT_CONFIG_KEY instead of the property it
        // set, leaking CLOSE_TIMEOUT_CONFIG_KEY into subsequent tests. Clean up in a
        // finally block so the property is removed even if the assertion fails.
        System.clearProperty(Constants.CLOSE_TIMEOUT_CONFIG_KEY);
    }
}
/**
 * Executes the request over HttpURLConnection and returns the response.
 * Responses whose status code is in the SUCCESS set are read from the input stream;
 * all others are read from the error stream.
 * NOTE(review): the connection is not explicitly disconnected here — presumably the
 * stream handling in toBytes covers cleanup; confirm.
 */
public HttpResponse execute(HttpRequest request) throws IOException {
    final HttpURLConnection urlConn = createConnection(request);
    urlConn.connect();
    final HttpHeaders headers = new HttpHeaders(urlConn.getHeaderFields());
    final HttpRespStatus status = HttpRespStatus.valueOf(urlConn.getResponseCode());
    if (SUCCESS.contains(status.code())) {
        return new HttpResponse(status, headers, toBytes(urlConn.getInputStream()));
    } else {
        return new HttpResponse(status, headers, toBytes(urlConn.getErrorStream()));
    }
}
// POSTs line-protocol data ten times and verifies the status and body each time.
@Test
public void testPost() {
    Map<String, List<String>> params = MapUtils.createHashMap(2);
    params.put("db", Collections.singletonList("http"));
    HttpRequest req = new HttpRequest.Builder()
            .url("localhost:8686/write")
            .params(params)
            .post("cpu_load_short,host=server01,region=us-west value=0.64 1434055562000000000\n"
                    + "cpu_load_short,host=server02,region=us-west value=0.96 1434055562000000000")
            .build();
    for (int i = 0; i < 10; i++) {
        try {
            HttpResponse resp = httpClient.execute(req);
            Assert.assertEquals(OK, resp.getStatus());
            Assert.assertEquals(RESPONSE_BODY, resp.getBodyString());
        } catch (Exception e) {
            // Bug fix: the original swallowed exceptions with printStackTrace(), so a
            // failing request still let the test pass. Fail loudly instead.
            throw new AssertionError("HTTP POST request failed on iteration " + i, e);
        }
    }
}
/**
 * Adds the given path to the cache if it resides under a known mount point.
 * Paths with no matching mount are silently ignored.
 */
@Override
public void addSinglePath(AlluxioURI path) {
    MountInfo mountInfo = getMountInfo(path);
    if (mountInfo == null) {
        return;
    }
    addCacheEntry(path.getPath(), mountInfo);
}
@Test
public void metricCacheSize() throws Exception {
    MetricsSystem.resetCountersAndGauges();
    AsyncUfsAbsentPathCache cache = new TestAsyncUfsAbsentPathCache(mMountTable, THREADS, 1);
    // this metric is cached, sleep some time before reading it
    Callable<Long> cacheSize = () -> {
        Thread.sleep(2);
        return (long) MetricsSystem.METRIC_REGISTRY.getGauges()
                .get(MetricKey.MASTER_ABSENT_CACHE_SIZE.getName()).getValue();
    };
    cache.addSinglePath(new AlluxioURI("/mnt/1"));
    assertEquals(1, (long) cacheSize.call());
    cache.addSinglePath(new AlluxioURI("/mnt/2"));
    assertEquals(2, (long) cacheSize.call());
    cache.addSinglePath(new AlluxioURI("/mnt/3"));
    assertEquals(3, (long) cacheSize.call());
    // NOTE(review): the size stays at 3 after the fourth add — presumably the cache is
    // capacity-bounded and evicts; confirm against TestAsyncUfsAbsentPathCache's config.
    cache.addSinglePath(new AlluxioURI("/mnt/4"));
    assertEquals(3, (long) cacheSize.call());
}
/**
 * Executes the given PMML input. When the runtime context is already a
 * {@link PMMLRuntimeContext} it is used directly; otherwise a PMML context is built
 * from the input data and the generated-resources map.
 */
public static Optional<EfestoOutputPMML> executeEfestoInput(EfestoInput<PMMLRequestData> toEvaluate,
                                                            EfestoRuntimeContext runtimeContext) {
    final PMMLRuntimeContext pmmlContext =
            runtimeContext instanceof PMMLRuntimeContext
                    ? (PMMLRuntimeContext) runtimeContext
                    : getPMMLRuntimeContext(toEvaluate.getInputData(),
                                            runtimeContext.getGeneratedResourcesMap());
    EfestoInputPMML efestoInputPMML = getEfestoInputPMML(toEvaluate.getModelLocalUriId(), pmmlContext);
    return executeEfestoInputPMML(efestoInputPMML, pmmlContext);
}
// End-to-end: an EfestoInput built from the model URI and request data evaluates to
// a present PMML output that matches the input.
@Test
void executeEfestoInput() {
    modelLocalUriId = getModelLocalUriIdFromPmmlIdFactory(FILE_NAME, MODEL_NAME);
    BaseEfestoInput<PMMLRequestData> inputPMML = new BaseEfestoInput<>(modelLocalUriId,
            getPMMLRequestDataWithInputData(MODEL_NAME, FILE_NAME));
    Optional<EfestoOutputPMML> retrieved = PMMLRuntimeHelper.executeEfestoInput(inputPMML,
            getPMMLContext(FILE_NAME, MODEL_NAME, memoryCompilerClassLoader));
    assertThat(retrieved).isNotNull().isPresent();
    commonEvaluateEfestoOutputPMML(retrieved.get(), inputPMML);
}
/**
 * Resolves the aggregate function for the given call against the schema: infers the
 * SQL types of the arguments, looks up the matching factory function, maps the
 * non-constant arguments to value-column indices, and instantiates the function.
 *
 * @throws KsqlException if the function cannot be created (wraps the original cause)
 */
public static KsqlAggregateFunction<?, ?, ?> resolveAggregateFunction(
    final FunctionRegistry functionRegistry,
    final FunctionCall functionCall,
    final LogicalSchema schema,
    final KsqlConfig config
) {
  try {
    final ExpressionTypeManager expressionTypeManager =
        new ExpressionTypeManager(schema, functionRegistry);
    final List<SqlType> args = functionCall.getArguments().stream()
        .map(expressionTypeManager::getExpressionSqlType)
        .collect(Collectors.toList());
    final AggregateFunctionFactory.FunctionSource func = functionRegistry
        .getAggregateFactory(functionCall.getName())
        .getFunction(args);
    final int totalArgs = functionCall.getArguments().size();
    // All non-constant UDAF arguments must be column references; the trailing
    // func.initArgs arguments are init args, not columns.
    final List<Integer> argIndices = functionCall.getArguments().stream()
        .limit(totalArgs - func.initArgs)
        .map((arg) -> {
          final Optional<Column> column;
          if (arg instanceof UnqualifiedColumnReferenceExp) {
            final UnqualifiedColumnReferenceExp colRef = (UnqualifiedColumnReferenceExp) arg;
            column = schema.findValueColumn(colRef.getColumnName());
          } else {
            // assume that it is a column reference with no alias
            column = schema.findValueColumn(ColumnName.of(arg.toString()));
          }
          return column.orElseThrow(
              () -> new KsqlException("Could not find column for expression: " + arg)
          );
        }).map(Column::index).collect(Collectors.toList());
    return func.source.apply(createAggregateFunctionInitArgs(
        func.initArgs,
        argIndices,
        functionCall,
        config
    ));
  } catch (final Exception e) {
    throw new KsqlException("Failed to create aggregate function: " + functionCall, e);
  }
}
// Resolving the function call returns the function instance supplied by the registry.
@Test
public void shouldResolveUDAF() {
    // When:
    final KsqlAggregateFunction returned =
        UdafUtil.resolveAggregateFunction(functionRegistry, FUNCTION_CALL, SCHEMA, KsqlConfig.empty());
    // Then:
    assertThat(returned, is(function));
}
/** Returns all extensions of the given extension-point type, resolved via the extension finder. */
@Override
public <T> List<T> getExtensions(Class<T> type) {
    return getExtensions(extensionFinder.find(type));
}
// A finder that reports one extension wrapper yields exactly one instantiated extension.
@Test
public void getExtensionsByType() {
    ExtensionFinder extensionFinder = mock(ExtensionFinder.class);
    List<ExtensionWrapper<TestExtensionPoint>> extensionList = new ArrayList<>(1);
    extensionList.add(new ExtensionWrapper<>(new ExtensionDescriptor(0, TestExtension.class),
            new DefaultExtensionFactory()));
    when(extensionFinder.find(TestExtensionPoint.class)).thenReturn(extensionList);
    pluginManager.extensionFinder = extensionFinder;
    List<TestExtensionPoint> extensions = pluginManager.getExtensions(TestExtensionPoint.class);
    assertEquals(1, extensions.size());
}
/**
 * Validates that the predicate is compatible with the schema by visiting it with a
 * {@code SchemaCompatibilityValidator}.
 *
 * @throws NullPointerException if predicate or schema is null
 */
public static void validate(FilterPredicate predicate, MessageType schema) {
    Objects.requireNonNull(predicate, "predicate cannot be null");
    Objects.requireNonNull(schema, "schema cannot be null");
    predicate.accept(new SchemaCompatibilityValidator(schema));
}
// A predicate that references the same column with two different types must be rejected.
@Test
public void testTwiceDeclaredColumn() {
    validate(eq(stringC, Binary.fromString("larry")), schema);
    try {
        validate(complexMixedType, schema);
        fail("this should throw");
    } catch (IllegalArgumentException e) {
        assertEquals(
                "Column: x.bar was provided with different types in the same predicate. Found both: (class java.lang.Integer, class java.lang.Long)",
                e.getMessage());
    }
}
/**
 * Extracts {@code bitSize} bits from {@code by}, starting at bit {@code startBit}
 * (bit 0 = least significant), returned right-aligned.
 */
static int getBitSeq(int by, int startBit, int bitSize) {
    final int shifted = by >>> startBit;
    return shifted & ((1 << bitSize) - 1);
}
// Exhaustively checks bit extraction: full-width, single-bit, and various
// (startBit, bitSize) windows, with expected values written in binary for clarity.
@Test
void testGetBitSeq() {
    assertEquals(Integer.parseInt("11111111", 2), getBitSeq(Integer.parseInt("11111111", 2), 0, 8));
    assertEquals(Integer.parseInt("00000000", 2), getBitSeq(Integer.parseInt("00000000", 2), 0, 8));
    assertEquals(Integer.parseInt("1", 2), getBitSeq(Integer.parseInt("11111111", 2), 0, 1));
    assertEquals(Integer.parseInt("0", 2), getBitSeq(Integer.parseInt("00000000", 2), 0, 1));
    assertEquals(Integer.parseInt("001", 2), getBitSeq(Integer.parseInt("00110001", 2), 0, 3));
    assertEquals(Integer.parseInt("10101010", 2), getBitSeq(Integer.parseInt("10101010", 2), 0, 8));
    assertEquals(Integer.parseInt("10", 2), getBitSeq(Integer.parseInt("10101010", 2), 0, 2));
    assertEquals(Integer.parseInt("01", 2), getBitSeq(Integer.parseInt("10101010", 2), 1, 2));
    assertEquals(Integer.parseInt("10", 2), getBitSeq(Integer.parseInt("10101010", 2), 2, 2));
    assertEquals(Integer.parseInt("101", 2), getBitSeq(Integer.parseInt("10101010", 2), 3, 3));
    assertEquals(Integer.parseInt("1010101", 2), getBitSeq(Integer.parseInt("10101010", 2), 1, 7));
    assertEquals(Integer.parseInt("01", 2), getBitSeq(Integer.parseInt("10101010", 2), 3, 2));
    assertEquals(Integer.parseInt("00110001", 2), getBitSeq(Integer.parseInt("00110001", 2), 0, 8));
    assertEquals(Integer.parseInt("10001", 2), getBitSeq(Integer.parseInt("00110001", 2), 0, 5));
    assertEquals(Integer.parseInt("0011", 2), getBitSeq(Integer.parseInt("00110001", 2), 4, 4));
    assertEquals(Integer.parseInt("110", 2), getBitSeq(Integer.parseInt("00110001", 2), 3, 3));
    assertEquals(Integer.parseInt("00", 2), getBitSeq(Integer.parseInt("00110001", 2), 6, 2));
    assertEquals(Integer.parseInt("1111", 2), getBitSeq(Integer.parseInt("11110000", 2), 4, 4));
    assertEquals(Integer.parseInt("11", 2), getBitSeq(Integer.parseInt("11110000", 2), 6, 2));
    assertEquals(Integer.parseInt("0000", 2), getBitSeq(Integer.parseInt("11110000", 2), 0, 4));
}
/** Reads a 32-bit little-endian integer from {@code data} starting at {@code offset}. */
static int readIntegerLittleEndian(byte[] data, int offset) {
    int result = 0;
    // Fold the four bytes most-significant first; data[offset] ends up least significant.
    for (int i = 3; i >= 0; i--) {
        result = (result << 8) | Byte.toUnsignedInt(data[offset + i]);
    }
    return result;
}
// Covers zero, small positives with and without an offset, near-MAX_VALUE, and a
// negative value (sign bit set in the most significant byte).
@Test
public void testReadIntegerLittleEndian() {
    testReadIntegerLittleEndian(bytes(0, 0, 0, 0), 0, 0);
    testReadIntegerLittleEndian(bytes(42, 0, 0, 0), 0, 42);
    testReadIntegerLittleEndian(bytes(13, 42, 0, 0, 0, 14), 1, 42);
    testReadIntegerLittleEndian(bytes(13, 0xFa, 0xFF, 0xFF, 0x7F, 14), 1, Integer.MAX_VALUE - 5);
    testReadIntegerLittleEndian(bytes(13, 0xF9, 0xFF, 0xFF, 0xFF, 14), 1, -7);
}
/**
 * Applies a single-input Beam PTransform to a Flink DataSet and returns the resulting
 * DataSet. The input is wired under the key "input" and the output extracted under
 * the key "output" of the internal map-based plumbing.
 */
public <InputT, OutputT, CollectionT extends PCollection<? extends InputT>>
    DataSet<OutputT> applyBeamPTransform(
        DataSet<InputT> input, PTransform<CollectionT, PCollection<OutputT>> transform) {
  return (DataSet)
      getNonNull(
          applyBeamPTransformInternal(
              ImmutableMap.of("input", input),
              (pipeline, map) -> (CollectionT) getNonNull(map, "input"),
              (output) -> ImmutableMap.of("output", output),
              transform,
              input.getExecutionEnvironment()),
          "output");
}
// A pipeline fragment that installs a custom coder on its output still round-trips
// correctly through the DataSet adapter.
@Test
public void testCustomCoder() throws Exception {
  ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
  DataSet<String> input = env.fromCollection(ImmutableList.of("a", "b", "c"));
  DataSet<String> result =
      new BeamFlinkDataSetAdapter()
          .applyBeamPTransform(
              input,
              new PTransform<PCollection<String>, PCollection<String>>() {
                @Override
                public PCollection<String> expand(PCollection<String> input) {
                  return input.apply(withPrefix("x")).setCoder(new MyCoder());
                }
              });
  assertThat(result.collect(), containsInAnyOrder("xa", "xb", "xc"));
}
/**
 * Executes DROP DATABASE: checks privileges, clears the session's current database
 * when the session is using the database being dropped, and removes the database if
 * it exists. Returns an update response header.
 */
@Override
public ResponseHeader execute() throws SQLException {
    check(sqlStatement, connectionSession.getConnectionContext().getGrantee());
    if (isDropCurrentDatabase(sqlStatement.getDatabaseName())) {
        checkSupportedDropCurrentDatabase(connectionSession);
        connectionSession.setCurrentDatabaseName(null);
    }
    if (ProxyContext.getInstance().databaseExists(sqlStatement.getDatabaseName())) {
        ProxyContext.getInstance().getContextManager().getPersistServiceFacade()
                .getMetaDataManagerPersistService().dropDatabase(sqlStatement.getDatabaseName());
    }
    return new UpdateResponseHeader(sqlStatement);
}
// Dropping a database other than the session's current one must not clear the
// session's current database name.
@Test
void assertExecuteDropOtherDatabase() throws SQLException {
    when(connectionSession.getUsedDatabaseName()).thenReturn("foo_db");
    when(sqlStatement.getDatabaseName()).thenReturn("bar_db");
    ResponseHeader responseHeader = handler.execute();
    verify(connectionSession, times(0)).setCurrentDatabaseName(null);
    assertThat(responseHeader, instanceOf(UpdateResponseHeader.class));
}
/** Trains an RBF network on (x, y) with the given basis functions, without normalization. */
public static <T> RBFNetwork<T> fit(T[] x, double[] y, RBF<T>[] rbf) {
    return fit(x, y, rbf, false);
}
// 10-fold cross-validation on the standardized CPU dataset; the RMSE is pinned
// (the seed makes results repeatable).
@Test
public void testCPU() {
    System.out.println("CPU");
    MathEx.setSeed(19650218); // to get repeatable results.
    double[][] x = MathEx.clone(CPU.x);
    MathEx.standardize(x);
    RegressionValidations<RBFNetwork<double[]>> result = CrossValidation.regression(10, x, CPU.y,
            (xi, yi) -> RBFNetwork.fit(xi, yi, RBF.fit(xi, 20, 5.0)));
    System.out.println(result);
    assertEquals(18.0997, result.avg.rmse, 1E-4);
}
/**
 * Processes a merge task: pages through all aggregated datum rows for the config,
 * merges them into a single config when any exist, or removes the config (plain or
 * tagged) when none remain, then publishes a data-change event. On failure the task
 * is re-queued. Always returns true (the task is considered handled either way).
 */
@Override
public boolean process(NacosTask task) {
    MergeDataTask mergeTask = (MergeDataTask) task;
    final String dataId = mergeTask.dataId;
    final String group = mergeTask.groupId;
    final String tenant = mergeTask.tenant;
    final String tag = mergeTask.tag;
    final String clientIp = mergeTask.getClientIp();
    try {
        List<ConfigInfoAggr> datumList = new ArrayList<>();
        int rowCount = configInfoAggrPersistService.aggrConfigInfoCount(dataId, group, tenant);
        int pageCount = (int) Math.ceil(rowCount * 1.0 / PAGE_SIZE);
        // Page through all aggregated rows for this config.
        for (int pageNo = 1; pageNo <= pageCount; pageNo++) {
            Page<ConfigInfoAggr> page = configInfoAggrPersistService.findConfigInfoAggrByPage(dataId, group, tenant,
                    pageNo, PAGE_SIZE);
            if (page != null) {
                datumList.addAll(page.getPageItems());
                LOGGER.info("[merge-query] {}, {}, size/total={}/{}", dataId, group, datumList.size(), rowCount);
            }
        }
        final Timestamp time = TimeUtils.getCurrentTime();
        if (datumList.size() > 0) {
            // merge
            ConfigInfo cf = merge(dataId, group, tenant, datumList);
            configInfoPersistService.insertOrUpdate(null, null, cf, null);
            LOGGER.info("[merge-ok] {}, {}, size={}, length={}, md5={}, content={}", dataId, group,
                    datumList.size(), cf.getContent().length(), cf.getMd5(),
                    ContentUtils.truncateContent(cf.getContent()));
            ConfigTraceService.logPersistenceEvent(dataId, group, tenant, null, time.getTime(),
                    InetUtils.getSelfIP(), ConfigTraceService.PERSISTENCE_EVENT,
                    ConfigTraceService.PERSISTENCE_TYPE_MERGE, cf.getContent());
        } else {
            String eventType;
            // remove: no datum rows remain, so delete the (possibly tagged) config
            if (StringUtils.isBlank(tag)) {
                eventType = ConfigTraceService.PERSISTENCE_EVENT;
                configInfoPersistService.removeConfigInfo(dataId, group, tenant, clientIp, null);
            } else {
                eventType = ConfigTraceService.PERSISTENCE_EVENT_TAG + "-" + tag;
                configInfoTagPersistService.removeConfigInfoTag(dataId, group, tenant, tag, clientIp, null);
            }
            LOGGER.warn("[merge-delete] delete config info because no datum. dataId=" + dataId + ", groupId=" + group);
            ConfigTraceService.logPersistenceEvent(dataId, group, tenant, null, time.getTime(),
                    InetUtils.getSelfIP(), eventType, ConfigTraceService.PERSISTENCE_TYPE_REMOVE, null);
        }
        NotifyCenter.publishEvent(new ConfigDataChangeEvent(false, dataId, group, tenant, tag, time.getTime()));
    } catch (Exception e) {
        // Re-queue the task so the merge is retried later.
        mergeService.addMergeTask(dataId, group, tenant, mergeTask.getClientIp());
        LOGGER.info("[merge-error] " + dataId + ", " + group + ", " + e.toString(), e);
    }
    return true;
}
// When counting aggregated rows throws, the processor must re-queue the merge task.
@Test
void testTagMergerError() throws InterruptedException {
    String dataId = "dataId12345";
    String group = "group123";
    String tenant = "tenant1234";
    when(configInfoAggrPersistService.aggrConfigInfoCount(eq(dataId), eq(group), eq(tenant)))
            .thenThrow(new NullPointerException());
    MergeDataTask mergeDataTask = new MergeDataTask(dataId, group, tenant, "127.0.0.1");
    mergeTaskProcessor.process(mergeDataTask);
    Mockito.verify(mergeDatumService, times(1)).addMergeTask(eq(dataId), eq(group), eq(tenant), eq("127.0.0.1"));
}
/**
 * Deserializes JSON bytes (optionally Schema-Registry framed) into the target type,
 * coercing fields to the declared schema. Returns null for null input.
 *
 * @throws SerializationException on any parse or coercion failure; parse locations
 *     are cleared so raw data is not leaked into logs
 */
@Override
public T deserialize(final String topic, final byte[] bytes) {
    try {
        if (bytes == null) {
            return null;
        }
        // don't use the JsonSchemaConverter to read this data because
        // we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS,
        // which is not currently available in the standard converters
        final JsonNode value = isJsonSchema
                ? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class)
                : MAPPER.readTree(bytes);
        final Object coerced = enforceFieldType(
                "$",
                new JsonValueContext(value, schema)
        );
        if (LOG.isTraceEnabled()) {
            LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced);
        }
        return SerdeUtils.castToTargetType(coerced, targetType);
    } catch (final Exception e) {
        // Clear location in order to avoid logging data, for security reasons
        if (e instanceof JsonParseException) {
            ((JsonParseException) e).clearLocation();
        }
        throw new SerializationException(
                "Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e);
    }
}
// A boolean value in a BIGINT field must fail deserialization with a coercion error.
@Test
public void shouldThrowIfFieldCanNotBeCoerced() {
    // Given:
    final Map<String, Object> value = new HashMap<>(AN_ORDER);
    value.put(ORDERTIME, true);
    final byte[] bytes = serializeJson(value);
    // When:
    final Exception e = assertThrows(
            SerializationException.class,
            () -> deserializer.deserialize(SOME_TOPIC, bytes)
    );
    // Then:
    assertThat(e.getCause(), (hasMessage(startsWith(
            "Can't convert type. sourceType: BooleanNode, requiredType: BIGINT"))));
}
/**
 * Starts the worker: flips the running flag and starts each subcomponent in order
 * (config fetcher, memory monitor, harness, sampler, status reporter, work refresher).
 */
@SuppressWarnings("FutureReturnValueIgnored")
public void start() {
    running.set(true);
    configFetcher.start();
    memoryMonitor.start();
    streamingWorkerHarness.start();
    sampler.start();
    workerStatusReporter.start();
    activeWorkRefresher.start();
}
// Verifies the harness processes at most --numberOfWorkerHarnessThreads items
// concurrently: exactly that many permits become available, and no extra permit
// appears within the grace period.
@Test(timeout = 10000)
public void testNumberOfWorkerHarnessThreadsIsHonored() throws Exception {
    int expectedNumberOfThreads = 5;
    List<ParallelInstruction> instructions =
        Arrays.asList(
            makeSourceInstruction(StringUtf8Coder.of()),
            makeDoFnInstruction(blockingFn, 0, StringUtf8Coder.of()),
            makeSinkInstruction(StringUtf8Coder.of(), 0));
    StreamingDataflowWorker worker =
        makeWorker(
            defaultWorkerParams("--numberOfWorkerHarnessThreads=" + expectedNumberOfThreads)
                .setInstructions(instructions)
                .publishCounters()
                .build());
    worker.start();
    for (int i = 0; i < expectedNumberOfThreads * 2; ++i) {
        server.whenGetWorkCalled().thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)));
    }
    // This will fail to complete if the number of threads is less than the amount of work.
    // Forcing this test to timeout.
    BlockingFn.counter.acquire(expectedNumberOfThreads);
    // Attempt to acquire an additional permit, if we were able to then that means
    // too many items were being processed concurrently.
    if (BlockingFn.counter.tryAcquire(500, TimeUnit.MILLISECONDS)) {
        fail(
            "Expected number of threads "
                + expectedNumberOfThreads
                + " does not match actual "
                + "number of work items processed concurrently "
                + BlockingFn.callCounter.get()
                + ".");
    }
    BlockingFn.blocker.countDown();
}
/** Increments the counter by one, delegating to {@code increment(long)}. */
@Override
public void increment() {
    // Use an uppercase long suffix: a lowercase 'l' is easily misread as the digit 1.
    increment(1L);
}
// Incrementing by a negative amount must be rejected.
@Test(expected = IllegalArgumentException.class)
public void incrementByNegativeLongValue() {
    longCounter.increment(Long.valueOf(-100));
}
/**
 * Evaluates the staining rules against the exchange and returns the merged labels of
 * every rule whose conditions all match. Returns an empty map when there is no rule
 * set or the rule list is empty. Later rules overwrite labels from earlier ones.
 */
Map<String, String> execute(ServerWebExchange exchange, StainingRule stainingRule) {
    if (stainingRule == null) {
        return Collections.emptyMap();
    }
    List<StainingRule.Rule> rules = stainingRule.getRules();
    if (CollectionUtils.isEmpty(rules)) {
        return Collections.emptyMap();
    }
    Map<String, String> parsedLabels = new HashMap<>();
    for (StainingRule.Rule rule : rules) {
        List<Condition> conditions = rule.getConditions();
        // Resolve only the label keys the rule's conditions reference.
        Set<String> keys = new HashSet<>();
        conditions.forEach(condition -> keys.add(condition.getKey()));
        Map<String, String> actualValues = SpringWebExpressionLabelUtils.resolve(exchange, keys);
        if (!ConditionUtils.match(actualValues, conditions)) {
            continue;
        }
        parsedLabels.putAll(KVPairUtils.toMap(rule.getLabels()));
    }
    return parsedLabels;
}
// A null rule set or an empty StainingRule both yield an empty label map.
@Test
public void testNoStainingRule() {
    RuleStainingExecutor executor = new RuleStainingExecutor();
    assertThat(executor.execute(null, null)).isEmpty();
    assertThat(executor.execute(null, new StainingRule())).isEmpty();
}
/**
 * Removes and returns the next element, preferring the high-priority deque; falls
 * back to the low-priority deque, returning null when both are empty.
 */
public T poll() {
    final T next = _highPriority.pollFirst();
    if (next != null) {
        return next;
    }
    return _lowPriority.pollFirst();
}
// poll() on an empty queue returns null rather than throwing.
@Test
public void testPollOnEmpty() {
    final LIFOBiPriorityQueue<?> queue = new LIFOBiPriorityQueue<>();
    assertNull(queue.poll());
}