focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@SuppressWarnings("MethodMayBeStatic") // Non-static to allow DI/mocking public QueryContext.Stacker buildNodeContext(final String context) { return new QueryContext.Stacker() .push(context); }
@Test
public void shouldBuildNodeContext() {
  // When:
  final Stacker result = runtimeBuildContext.buildNodeContext("some-id");

  // Then: the context id is the single entry pushed onto the stacker.
  assertThat(result, is(new Stacker().push("some-id")));
}
/**
 * Upper-cases the first character of {@code string} using English casing rules,
 * leaving the remainder untouched. Empty input is returned as-is.
 */
static String capitalizeFirst(String string) {
  if (string.isEmpty()) {
    return string;
  }
  // Upper-case via substring so locale-specific multi-char expansions are preserved.
  return new StringBuilder(string.length())
      .append(string.substring(0, 1).toUpperCase(Locale.ENGLISH))
      .append(string, 1, string.length())
      .toString();
}
@Test
public void testCapitalizeFirst() {
  // Empty input is returned unchanged.
  assertEquals("", MessageGenerator.capitalizeFirst(""));
  // Only the first character is upper-cased; the rest is preserved verbatim.
  assertEquals("AbC", MessageGenerator.capitalizeFirst("abC"));
  // Single-character and already-capitalized inputs are stable.
  assertEquals("A", MessageGenerator.capitalizeFirst("a"));
  assertEquals("Abc", MessageGenerator.capitalizeFirst("Abc"));
}
@Override
public COSBase getCOSObject() {
  // Serialize as a two-element COS array: [ [dash values...] phase ].
  final COSArray result = new COSArray();
  result.add(COSArray.of(array));
  result.add(COSInteger.get(phase));
  return result;
}
@Test
void testGetCOSObject() {
  COSArray ar = new COSArray();
  ar.add(COSInteger.ONE);
  ar.add(COSInteger.TWO);
  PDLineDashPattern dash = new PDLineDashPattern(ar, 3);
  // COS form is [ [dashes...] phase ]; dash values come back as COSFloat.
  COSArray dashBase = (COSArray) dash.getCOSObject();
  COSArray dashArray = (COSArray) dashBase.getObject(0);
  assertEquals(2, dashBase.size());
  assertEquals(2, dashArray.size());
  assertEquals(COSFloat.ONE, dashArray.get(0));
  assertEquals(new COSFloat(2), dashArray.get(1));
  assertEquals(COSInteger.THREE, dashBase.get(1));
}
/**
 * Fetches the metadata row for the given partition token, or null when no row exists.
 * The query is built per database dialect and tagged for request tracing.
 */
public @Nullable Struct getPartition(String partitionToken) {
  Statement statement;
  if (this.isPostgres()) {
    // PostgreSQL dialect uses the positional parameter $1; Spanner's API binds it as "p1".
    statement =
        Statement.newBuilder(
                "SELECT * FROM \"" + metadataTableName + "\" WHERE \"" + COLUMN_PARTITION_TOKEN + "\" = $1")
            .bind("p1")
            .to(partitionToken)
            .build();
  } else {
    // GoogleSQL dialect uses a named parameter.
    statement =
        Statement.newBuilder(
                "SELECT * FROM " + metadataTableName + " WHERE " + COLUMN_PARTITION_TOKEN + " = @partition")
            .bind("partition")
            .to(partitionToken)
            .build();
  }
  // Single-use read; the try-with-resources closes the result set in all paths.
  try (ResultSet resultSet =
      databaseClient.singleUse().executeQuery(statement, Options.tag("query=getPartition"))) {
    if (resultSet.next()) {
      return resultSet.getCurrentRowAsStruct();
    }
    return null;
  }
}
@Test
public void testInTransactionContextGetPartitionWithPartitions() {
  // A matching row makes getPartition return that row as a Struct.
  ResultSet resultSet = mock(ResultSet.class);
  // any() replaces the deprecated Mockito anyObject() matcher.
  when(transaction.executeQuery(any(), any())).thenReturn(resultSet);
  when(resultSet.next()).thenReturn(true);
  when(resultSet.getCurrentRowAsStruct()).thenReturn(Struct.newBuilder().build());
  assertNotNull(inTransactionContext.getPartition(PARTITION_TOKEN));
}
/**
 * Converts a {@code ClassFilter} into the ordered map shape expected by the YAML
 * generator: keys "package", "class" and "prefix", each mapped to a value list.
 * Returns an empty map for a null or empty filter.
 */
static Map<String, Object> classFilterGenerator(ClassFilter classFilter) {
  if (classFilter == null || classFilter.isEmpty()) {
    return Collections.emptyMap();
  }
  // LinkedHashMap keeps the package/class/prefix key order stable in the YAML output.
  Map<String, Object> classFilterMap = new LinkedHashMap<>();
  // Copy via the collection constructor instead of element-by-element loops.
  // NOTE(review): assumes the getters return Collections — confirm against ClassFilter.
  classFilterMap.put("package", new ArrayList<>(classFilter.getPackages()));
  classFilterMap.put("class", new ArrayList<>(classFilter.getClasses()));
  classFilterMap.put("prefix", new ArrayList<>(classFilter.getPrefixes()));
  return classFilterMap;
}
@Test
public void testClassFilterConfig() {
  // Round-trip a ClassFilter through the YAML generator and verify the map shape.
  ClassFilter filter = createClassFilter();
  Map<String, Object> filterAsMap = DynamicConfigYamlGenerator.classFilterGenerator(filter);
  assertClassFilterAsMap(filterAsMap);
}
public static BuildInfo getBuildInfo() { if (Overrides.isEnabled()) { // never use cache when override is enabled -> we need to re-parse everything Overrides overrides = Overrides.fromProperties(); return getBuildInfoInternalVersion(overrides); } return BUILD_INFO_CACHE; }
@Test
public void testOverrideBuildNumber() {
  // NOTE(review): sets a system property without restoring it — presumably a
  // tearDown elsewhere clears "hazelcast.build"; verify to avoid test pollution.
  System.setProperty("hazelcast.build", "2");
  BuildInfo buildInfo = BuildInfoProvider.getBuildInfo();
  String version = buildInfo.getVersion();
  String build = buildInfo.getBuild();
  int buildNumber = buildInfo.getBuildNumber();
  // Overridden build number must flow into both the string and numeric accessors.
  assertTrue(buildInfo.toString(), VERSION_PATTERN.matcher(version).matches());
  assertEquals("2", build);
  assertEquals(2, buildNumber);
  assertFalse(buildInfo.toString(), buildInfo.isEnterprise());
}
public Set<EntityDescriptor> resolveEntities(Collection<EntityDescriptor> unresolvedEntities) { final MutableGraph<EntityDescriptor> dependencyGraph = GraphBuilder.directed() .allowsSelfLoops(false) .nodeOrder(ElementOrder.insertion()) .build(); unresolvedEntities.forEach(dependencyGraph::addNode); final HashSet<EntityDescriptor> resolvedEntities = new HashSet<>(); final MutableGraph<EntityDescriptor> finalDependencyGraph = resolveDependencyGraph(dependencyGraph, resolvedEntities); LOG.debug("Final dependency graph: {}", finalDependencyGraph); return finalDependencyGraph.nodes(); }
@Test
public void resolveEntitiesWithEmptyInput() {
  // An empty request must resolve to an empty entity set (no spurious dependencies).
  final Set<EntityDescriptor> resolvedEntities = contentPackService.resolveEntities(Collections.emptySet());
  assertThat(resolvedEntities).isEmpty();
}
/**
 * Resolves the current cluster endpoints from the (possibly stale-bounded)
 * applications snapshot, keeping only UP instances of the configured VIP.
 */
@Override
public List<AwsEndpoint> getClusterEndpoints() {
  final List<AwsEndpoint> result = new ArrayList<>();
  final Applications applications = applicationsSource.getApplications(
      transportConfig.getApplicationsResolverDataStalenessThresholdSeconds(), TimeUnit.SECONDS);
  if (applications != null && vipAddress != null) {
    for (final InstanceInfo candidate : applications.getInstancesByVirtualHostName(vipAddress)) {
      if (candidate.getStatus() != InstanceInfo.InstanceStatus.UP) {
        continue; // only routable instances become endpoints
      }
      final AwsEndpoint endpoint =
          ResolverUtils.instanceInfoToEndpoint(clientConfig, transportConfig, candidate);
      if (endpoint != null) {
        result.add(endpoint);
      }
    }
  }
  logger.debug("Retrieved endpoint list {}", result);
  return result;
}
@Test
public void testVipDoesNotExist() {
  vipAddress = "doNotExist";
  when(transportConfig.getReadClusterVip()).thenReturn(vipAddress);
  resolver = new ApplicationsResolver( // recreate the resolver as desired config behaviour has changed
      clientConfig,
      transportConfig,
      applicationsSource,
      transportConfig.getReadClusterVip()
  );
  when(applicationsSource.getApplications(anyInt(), eq(TimeUnit.SECONDS))).thenReturn(applications);
  // No instances match the unknown VIP, so the endpoint list must be empty.
  List<AwsEndpoint> endpoints = resolver.getClusterEndpoints();
  assertThat(endpoints.isEmpty(), is(true));
}
// Hot-path lookup: resolves the encrypt table by name or fails loudly so
// callers never have to handle a null table.
@HighFrequencyInvocation
public EncryptTable getEncryptTable(final String tableName) {
  return findEncryptTable(tableName).orElseThrow(() -> new EncryptTableNotFoundException(tableName));
}
@Test
void assertGetNotExistedEncryptTable() {
  // Unknown table names must raise EncryptTableNotFoundException rather than return null.
  assertThrows(EncryptTableNotFoundException.class, () -> new EncryptRule("foo_db", createEncryptRuleConfiguration()).getEncryptTable("not_existed_tbl"));
}
/**
 * Future-based overload: adapts the callback-based {@code sendRequest} into a
 * {@link ResponseFuture} via a {@code FutureCallback} bridge.
 */
@Override
public <T> ResponseFuture<T> sendRequest(Request<T> request, RequestContext requestContext) {
  final FutureCallback<Response<T>> futureCallback = new FutureCallback<>();
  sendRequest(request, requestContext, futureCallback);
  return new ResponseFutureImpl<>(futureCallback);
}
@SuppressWarnings("deprecation")
@Test(dataProvider = TestConstants.RESTLI_PROTOCOL_1_2_PREFIX + "sendRequestAndGetResponseOptions")
public void testRestLiResponseFuture(SendRequestOption sendRequestOption,
    GetResponseOption getResponseOption,
    TimeoutOption timeoutOption,
    ProtocolVersionOption versionOption,
    ProtocolVersion protocolVersion,
    String errorResponseHeaderName,
    ContentType contentType)
    throws ExecutionException, RemoteInvocationException, TimeoutException, InterruptedException, IOException {
  // Error fixture served by the mocked client (HTTP 200 with an ErrorResponse entity).
  final String ERR_KEY = "someErr";
  final String ERR_VALUE = "WHOOPS!";
  final String ERR_MSG = "whoops2";
  final int HTTP_CODE = 200;
  final int APP_CODE = 666;
  final String CODE = "INVALID_INPUT";
  final String DOC_URL = "https://example.com/errors/invalid-input";
  final String REQUEST_ID = "abc123";
  RestClient client = mockClient(ERR_KEY, ERR_VALUE, ERR_MSG, HTTP_CODE, APP_CODE, CODE, DOC_URL, REQUEST_ID, protocolVersion, errorResponseHeaderName);
  Request<ErrorResponse> request = mockRequest(ErrorResponse.class, versionOption, contentType);
  RequestBuilder<Request<ErrorResponse>> requestBuilder = mockRequestBuilder(request);
  ResponseFuture<ErrorResponse> future = sendRequest(sendRequestOption, determineErrorHandlingBehavior(getResponseOption), client, request, requestBuilder);
  Response<ErrorResponse> response = getOkResponse(getResponseOption, future, timeoutOption);
  ErrorResponse e = response.getEntity();
  // A 200 response is a success even though the entity is an ErrorResponse,
  // so no error state is expected on the Response itself.
  Assert.assertNull(response.getError());
  Assert.assertFalse(response.hasError());
  Assert.assertEquals(HTTP_CODE, response.getStatus());
  // Every error-payload field must round-trip through the mocked wire format.
  Assert.assertEquals(ERR_VALUE, e.getErrorDetails().data().getString(ERR_KEY));
  Assert.assertEquals(APP_CODE, e.getServiceErrorCode().intValue());
  Assert.assertEquals(ERR_MSG, e.getMessage());
  Assert.assertEquals(CODE, e.getCode());
  Assert.assertEquals(DOC_URL, e.getDocUrl());
  Assert.assertEquals(REQUEST_ID, e.getRequestId());
  Assert.assertEquals(EmptyRecord.class.getCanonicalName(), e.getErrorDetailType());
}
/**
 * Factory for a failure {@code Response} carrying the given error code and message.
 */
public static Response build(int errorCode, String msg) {
  final ErrorResponse errorResponse = new ErrorResponse();
  errorResponse.setErrorInfo(errorCode, msg);
  return errorResponse;
}
@Test
void testBuildWithErrorCode() {
  int errorCode = 500;
  String msg = "err msg";
  Response response = ErrorResponse.build(errorCode, msg);
  // On failure responses the error code doubles as the result code.
  assertEquals(errorCode, response.getErrorCode());
  assertEquals(errorCode, response.getResultCode());
  assertEquals(msg, response.getMessage());
}
/**
 * Creates (fresh run) or rebuilds (restart) a batch of foreach iteration instances
 * and launches them. Returns error details from the DAO, or empty when there is
 * nothing to run or everything succeeded.
 */
public Optional<Details> runForeachBatch(
    Workflow workflow,
    Long internalId,
    long workflowVersionId,
    RunProperties runProperties,
    String foreachStepId,
    ForeachArtifact artifact,
    List<RunRequest> requests,
    List<Long> instanceIds,
    int batchSize) {
  // Nothing requested: nothing to run and no errors to report.
  if (ObjectHelper.isCollectionEmptyOrNull(requests)) {
    return Optional.empty();
  }
  // Each run request must pair with exactly one iteration instance id.
  Checks.checkTrue(
      requests.size() == instanceIds.size(),
      "Run request list size [%s] must match instance id list size [%s]",
      requests.size(),
      instanceIds.size());
  List<WorkflowInstance> instances;
  if (artifact.isFreshRun()) {
    // First run of this foreach loop: create brand-new iteration instances.
    instances = createStartForeachInstances(workflow, internalId, workflowVersionId, artifact.getForeachRunId(), runProperties, requests, instanceIds);
  } else {
    // Restart: rebuild iteration instances from the prior run's state.
    instances = createRestartForeachInstances(workflow, internalId, workflowVersionId, runProperties, foreachStepId, artifact, requests, instanceIds);
  }
  if (ObjectHelper.isCollectionEmptyOrNull(instances)) {
    return Optional.empty();
  }
  // Persist and launch in batches; any failure details propagate to the caller.
  return instanceDao.runWorkflowInstances(workflow.getId(), instances, batchSize);
}
@Test
public void testCreateRestartForeachInstancesUpstreamModeFromBeginningWithoutStepParamOverride() {
  doNothing().when(workflowHelper).updateWorkflowInstance(any(), any());
  when(instanceDao.getLatestWorkflowInstanceRun(anyString(), anyLong()))
      .thenReturn(new WorkflowInstance());
  // Foreach artifact describing a restart (run id 3) with one failed iteration.
  ForeachArtifact artifact = new ForeachArtifact();
  artifact.setRunPolicy(RunPolicy.RESTART_FROM_BEGINNING);
  artifact.setTotalLoopCount(10);
  artifact.setForeachWorkflowId("maestro_foreach_x");
  artifact.setAncestorIterationCount(null);
  artifact.setForeachRunId(3L);
  artifact.setForeachOverview(new ForeachStepOverview());
  artifact.getForeachOverview().addOne(2, WorkflowInstance.Status.FAILED, null);
  // Upstream restart config targets this foreach workflow from the beginning.
  RestartConfig restartConfig = RestartConfig.builder()
      .restartPolicy(RunPolicy.RESTART_FROM_BEGINNING)
      .downstreamPolicy(RunPolicy.RESTART_FROM_INCOMPLETE)
      .addRestartNode("maestro_foreach_x", 1, null)
      .build();
  ForeachInitiator initiator = new ForeachInitiator();
  UpstreamInitiator.Info parent = new UpstreamInitiator.Info();
  parent.setWorkflowId("maestro_foreach_x");
  parent.setInstanceId(1);
  parent.setRunId(1);
  parent.setStepId("foreach-step");
  parent.setStepAttemptId(1);
  initiator.setAncestors(Collections.singletonList(parent));
  RunRequest runRequest = RunRequest.builder()
      .initiator(initiator)
      .currentPolicy(RunPolicy.RESTART_FROM_INCOMPLETE)
      .restartConfig(restartConfig)
      .build();
  Optional<Details> errors = actionHandler.runForeachBatch(
      definition.getWorkflow(),
      123L,
      10L,
      new RunProperties(),
      "foreach-step",
      artifact,
      Collections.singletonList(runRequest),
      Collections.singletonList(2L),
      3);
  assertFalse(errors.isPresent());
  verify(instanceDao, times(1)).runWorkflowInstances(any(), any(), anyInt());
  // The restart node forces the iteration's policy to RESTART_FROM_BEGINNING
  // and strips the restart config before the instance update.
  ArgumentCaptor<RunRequest> captor = ArgumentCaptor.forClass(RunRequest.class);
  verify(workflowHelper, times(1)).updateWorkflowInstance(any(), captor.capture());
  RunRequest res = captor.getValue();
  assertEquals(RunPolicy.RESTART_FROM_BEGINNING, res.getCurrentPolicy());
  assertNull(res.getRestartConfig());
}
/**
 * Validates {@code value} as a URL: it must parse as a URI and pass the
 * scheme, authority, path, query and fragment checks. Null is invalid.
 */
public boolean isValid(String value) {
  if (value == null) {
    return false;
  }
  URI uri; // ensure value is a valid URI
  try {
    uri = new URI(value);
  } catch (URISyntaxException e) {
    return false;
  }
  // OK, perform additional validation
  String scheme = uri.getScheme();
  if (!isValidScheme(scheme)) {
    return false;
  }
  String authority = uri.getRawAuthority();
  if ("file".equals(scheme) && (authority == null || "".equals(authority))) {
    // Special case - file: allows an empty authority
    return true; // this is a local file - nothing more to do here
  } else if ("file".equals(scheme) && authority != null && authority.contains(":")) {
    // file: URIs must not carry a colon (port-like) authority component
    return false;
  } else {
    // Validate the authority
    if (!isValidAuthority(authority)) {
      return false;
    }
  }
  if (!isValidPath(uri.getRawPath())) {
    return false;
  }
  if (!isValidQuery(uri.getRawQuery())) {
    return false;
  }
  if (!isValidFragment(uri.getRawFragment())) {
    return false;
  }
  return true;
}
@Test
public void testValidator342() {
  UrlValidator urlValidator = new UrlValidator();
  // Regression test: hosts with newer gTLDs (".rocks") must validate with and without a trailing slash.
  assertTrue(urlValidator.isValid("http://example.rocks/"));
  assertTrue(urlValidator.isValid("http://example.rocks"));
}
/**
 * Formats {@code value} using the given {@link DecimalFormat} pattern.
 * NaN and infinite values are rejected with an assertion failure.
 */
public static String decimalFormat(String pattern, double value) {
  // NaN/Infinity cannot be formatted meaningfully; fail fast with a clear message.
  Assert.isTrue(isValid(value), "value is NaN or Infinite!");
  final DecimalFormat formatter = new DecimalFormat(pattern);
  return formatter.format(value);
}
@Test
public void decimalFormatNaNTest2() {
  // 0/0 on doubles produces NaN, which decimalFormat must reject with IllegalArgumentException.
  assertThrows(IllegalArgumentException.class, ()->{
    final Double a = 0D;
    final Double b = 0D;
    Console.log(NumberUtil.decimalFormat("#%", a / b));
  });
}
public static <T> T checkNotNull(T reference, String errorMessageTemplate, Object... errorMessageArgs) { if (reference == null) { // If either of these parameters is null, the right thing happens anyway throw new NullPointerException( format(errorMessageTemplate, errorMessageArgs)); } return reference; }
// A null reference must raise NullPointerException even with null message args.
@Test
void checkNotNullInputNullNotNullNullOutputNullPointerException() {
  assertThatExceptionOfType(NullPointerException.class).isThrownBy(() -> {
    // Arrange
    final Object reference = null;
    final String errorMessageTemplate = "";
    final Object[] errorMessageArgs = null;
    Util.checkNotNull(reference, errorMessageTemplate, errorMessageArgs);
    // Method is not expected to return due to exception thrown
  });
  // Method is not expected to return due to exception thrown
}
/**
 * Completes a login for the OIDC session identified by the path id.
 * Unknown session ids surface as {@link ChangeSetPersister.NotFoundException}.
 */
@Operation(summary = "OICD-session")
@PostMapping(value = "/{id}", consumes = "application/json")
@ResponseBody
public StatusResponse oicdLoginSession(@PathVariable("id") String oidcSessionId, @Valid @RequestBody LoginRequest request) throws ChangeSetPersister.NotFoundException {
  // orElseThrow replaces the isPresent()/get() pair — same behavior, idiomatic Optional use.
  OpenIdSession session = repository.findById(oidcSessionId)
      .orElseThrow(ChangeSetPersister.NotFoundException::new);
  return service.userLogin(session, request.getAccountId(), request.getAuthenticationLevel(), request.getAuthenticationStatus());
}
@Test
void oicdLoginSessionNotFound() {
  // An unknown session id must raise NotFoundException before any login attempt.
  assertThrows(ChangeSetPersister.NotFoundException.class, () -> controller.oicdLoginSession(SESSION_ID, loginRequest()));
  // 1L instead of 1l: a lowercase 'l' literal is easily misread as the digit 1.
  verify(service, times(0)).userLogin(session, 1L, "20", "success");
}
// Fluent setter: stores the photo URL list and returns this Pet for chaining.
public Pet photoUrls(List<String> photoUrls) {
  this.photoUrls = photoUrls;
  return this;
}
@Test
public void photoUrlsTest() {
  // TODO: test photoUrls
  // NOTE(review): generated placeholder — add assertions covering the fluent
  // setter (returned instance identity and stored list).
}
// UDF entry point: delegates to the shared serializer to render the input as JSON text.
@Udf
public <T> String toJsonString(@UdfParameter final T input) {
  return toJson(input);
}
@Test
public void shouldSerializeLong() {
  // When:
  final String result = udf.toJsonString(123L);

  // Then: longs serialize as bare JSON numbers (no quotes).
  assertEquals("123", result);
}
/**
 * Extracts the {@link PCollectionView} embedded in a CreatePCollectionView
 * transform's portable payload. Throws if the transform has a different URN.
 */
@Deprecated
public static <ElemT, ViewT> PCollectionView<ViewT> getView(
    AppliedPTransform<
            PCollection<ElemT>,
            PCollection<ElemT>,
            PTransform<PCollection<ElemT>, PCollection<ElemT>>>
        application)
    throws IOException {
  // Translate to the portable proto form so the URN and payload can be inspected.
  RunnerApi.PTransform transformProto =
      PTransformTranslation.toProto(
          application,
          Collections.emptyList(),
          SdkComponents.create(application.getPipeline().getOptions()));
  // Only CreatePCollectionView transforms carry a serialized view in their payload.
  checkArgument(
      PTransformTranslation.CREATE_VIEW_TRANSFORM_URN.equals(transformProto.getSpec().getUrn()),
      "Illegal attempt to extract %s from transform %s with name \"%s\" and URN \"%s\"",
      PCollectionView.class.getSimpleName(),
      application.getTransform(),
      application.getFullName(),
      transformProto.getSpec().getUrn());
  // Deserialize the Java view object embedded in the payload bytes.
  return (PCollectionView<ViewT>)
      SerializableUtils.deserializeFromByteArray(
          transformProto.getSpec().getPayload().toByteArray(),
          PCollectionView.class.getSimpleName());
}
@Test
public void testExtractionDirectFromTransform() throws Exception {
  SdkComponents components = SdkComponents.create();
  components.registerEnvironment(Environments.createDockerEnvironment("java"));
  components.registerPCollection(testPCollection);
  AppliedPTransform<?, ?, ?> appliedPTransform =
      AppliedPTransform.of(
          "foo",
          PValues.expandInput(testPCollection),
          PValues.expandOutput(createViewTransform.getView()),
          createViewTransform,
          ResourceHints.create(),
          p);
  // Exercise getView directly on the applied transform (return value not needed here).
  CreatePCollectionViewTranslation.getView((AppliedPTransform) appliedPTransform);
  FunctionSpec payload = PTransformTranslation.toProto(appliedPTransform, components).getSpec();
  // Checks that the payload is what it should be
  PCollectionView<?> deserializedView =
      (PCollectionView<?>)
          SerializableUtils.deserializeFromByteArray(
              payload.getPayload().toByteArray(), PCollectionView.class.getSimpleName());
  assertThat(deserializedView, Matchers.equalTo(createViewTransform.getView()));
}
/**
 * Removes every stored Grok pattern, broadcasts a deletion event with the
 * removed names, and returns how many documents were deleted.
 */
@Override
public int deleteAll() {
  // Capture the names before removal so the event reflects what was deleted.
  final Set<String> patternNames = loadAll().stream()
      .map(GrokPattern::name)
      .collect(Collectors.toSet());
  final int deletedPatterns = dbCollection.remove(DBQuery.empty()).getN();
  clusterBus.post(GrokPatternsDeletedEvent.create(patternNames));
  return deletedPatterns;
}
@Test
@MongoDBFixtures("MongoDbGrokPatternServiceTest.json")
public void deleteAll() {
  // Fixture seeds three patterns.
  assertThat(collection.countDocuments()).isEqualTo(3);
  final int deletedRecords = service.deleteAll();
  assertThat(deletedRecords).isEqualTo(3);
  assertThat(collection.countDocuments()).isEqualTo(0);
  // A single deletion event should be broadcast for the whole batch.
  verify(clusterEventBus, times(1)).post(any(GrokPatternsDeletedEvent.class));
}
// SQL INTEGER division operator.
@ScalarOperator(DIVIDE)
@SqlType(StandardTypes.INTEGER)
public static long divide(@SqlType(StandardTypes.INTEGER) long left, @SqlType(StandardTypes.INTEGER) long right) {
  try {
    return left / right;
  } catch (ArithmeticException e) {
    // Map Java's division-by-zero to the engine's DIVISION_BY_ZERO error, preserving the cause.
    throw new PrestoException(DIVISION_BY_ZERO, e);
  }
}
@Test
public void testDivide() {
  // Integer division truncates toward zero; zero divisor raises DIVISION_BY_ZERO.
  assertFunction("INTEGER'37' / INTEGER'37'", INTEGER, 1);
  assertFunction("INTEGER'37' / INTEGER'17'", INTEGER, 37 / 17);
  assertFunction("INTEGER'17' / INTEGER'37'", INTEGER, 17 / 37);
  assertFunction("INTEGER'17' / INTEGER'17'", INTEGER, 1);
  assertInvalidFunction("INTEGER'17' / INTEGER'0'", DIVISION_BY_ZERO);
}
/**
 * Returns an immutable snapshot of the currently registered index specs.
 */
@Override
public List<IndexSpec> getIndexSpecs() {
  return this.indexSpecs.values().stream().toList();
}
@Test
void getIndexSpecs() {
  var specs = new DefaultIndexSpecs();
  specs.add(primaryKeyIndexSpec(FakeExtension.class));
  // The returned list exposes exactly the one registered spec.
  assertThat(specs.getIndexSpecs()).hasSize(1);
}
/**
 * Inserts {@code modelToInsert} directly after {@code modelToInsertAfter}.
 *
 * @throws IllegalStateException if the anchor model is not in the adapter
 */
protected void insertModelAfter(EpoxyModel<?> modelToInsert, EpoxyModel<?> modelToInsertAfter) {
  final int anchorIndex = getModelPosition(modelToInsertAfter);
  if (anchorIndex == -1) {
    throw new IllegalStateException("Model is not added: " + modelToInsertAfter);
  }
  final int insertionIndex = anchorIndex + 1;
  // Suppress list-observer callbacks during the mutation, then emit one insert event.
  pauseModelListNotifications();
  models.add(insertionIndex, modelToInsert);
  resumeModelListNotifications();
  notifyItemInserted(insertionIndex);
}
@Test()
public void testInsertModelAfter() {
  TestModel firstModel = new TestModel();
  testAdapter.addModels(firstModel);
  testAdapter.insertModelAfter(new TestModel(), firstModel);
  // The new model lands directly after the anchor (position 1), anchor stays at 0.
  verify(observer).onItemRangeInserted(1, 1);
  assertEquals(2, testAdapter.models.size());
  assertEquals(firstModel, testAdapter.models.get(0));
  checkDifferState();
}
public static boolean isServletRequestAuthenticatorInstanceOf(Class<? extends ServletRequestAuthenticator> clazz) { final AuthCheckFilter instance = getInstance(); if (instance == null) { // We've not yet been instantiated return false; } return servletRequestAuthenticator != null && clazz.isAssignableFrom(servletRequestAuthenticator.getClass()); }
@Test
public void willReturnFalseIfTheWrongServletRequestAuthenticatorIsConfigured() {
  AuthCheckFilter.SERVLET_REQUEST_AUTHENTICATOR.setValue(NormalUserServletAuthenticatorClass.class);
  new AuthCheckFilter(adminManager, loginLimitManager);
  // The configured authenticator differs from the queried class, so the check must fail.
  assertThat(AuthCheckFilter.isServletRequestAuthenticatorInstanceOf(AdminUserServletAuthenticatorClass.class), is(false));
}
/**
 * Returns the Spring-facing wrapper for the named native cache, creating and
 * memoizing it on first access. Returns null when no native cache exists.
 */
@Override
public SpringCache getCache(final String name) {
  final RemoteCache<Object, Object> nativeCache = this.nativeCacheManager.getCache(name);
  if (nativeCache == null) {
    // Native cache is gone: drop any stale wrapper so it is not served later.
    springCaches.remove(name);
    return null;
  }
  // Lazily create and cache one wrapper per cache name.
  return springCaches.computeIfAbsent(name, n -> new SpringCache(nativeCache, reactive, readTimeout, writeTimeout));
}
@Test
public final void getCacheShouldReturnDifferentInstancesForDifferentNames() {
  // When
  final SpringCache firstObtainedSpringCache = objectUnderTest.getCache(TEST_CACHE_NAME);
  final SpringCache secondObtainedSpringCache = objectUnderTest.getCache(OTHER_TEST_CACHE_NAME);
  // Then: distinct cache names must map to distinct wrapper instances.
  assertNotSame(
      "getCache() should have returned different SpringCache instances for different names",
      firstObtainedSpringCache,
      secondObtainedSpringCache);
}
// Convenience overload: hex-encoded SHA-256 of the entire buffer.
public static String calculateSha256Hex(@Nonnull byte[] data) throws NoSuchAlgorithmException {
  return calculateSha256Hex(data, data.length);
}
@Test
public void testCalculateSha256Hex() throws Exception {
  // Known vector: SHA-256 of the single zero byte, hex-encoded.
  byte[] data = {(byte) 0};
  String result = Sha256Util.calculateSha256Hex(data);
  assertEquals("6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d", result);
}
// SQL '<' operator for varchar: lexicographic comparison of the underlying slices.
@LiteralParameters("x")
@ScalarOperator(LESS_THAN)
@SqlType(StandardTypes.BOOLEAN)
public static boolean lessThan(@SqlType("varchar(x)") Slice left, @SqlType("varchar(x)") Slice right) {
  final int comparison = left.compareTo(right);
  return comparison < 0;
}
@Test
public void testLessThan() {
  // Strictly-less semantics: equal strings compare false.
  assertFunction("'foo' < 'foo'", BOOLEAN, false);
  assertFunction("'foo' < 'bar'", BOOLEAN, false);
  assertFunction("'bar' < 'foo'", BOOLEAN, true);
  assertFunction("'bar' < 'bar'", BOOLEAN, false);
}
/**
 * Determines the ecosystem for a CVE by scanning its reference URLs and
 * returning the first match, or null when the CVE, its references, or a match
 * is absent.
 */
public String getEcosystem(DefCveItem cve) {
  final List<Reference> references = Optional.ofNullable(cve)
      .map(DefCveItem::getCve)
      .map(CveItem::getReferences)
      .orElse(null);
  if (references == null) {
    return null;
  }
  for (final Reference reference : references) {
    final Hit<String> hit = search.findFirst(reference.getUrl());
    if (hit != null) {
      return hit.value;
    }
  }
  return null;
}
@Test
public void testGetEcosystemMustHandleNullCveItem() {
  // Given
  UrlEcosystemMapper mapper = new UrlEcosystemMapper();
  // When
  String output = mapper.getEcosystem(null);
  // Then: a null CVE yields no ecosystem rather than an NPE.
  assertNull(output);
}
@Override
public void closeDiscountActivity(Long id) {
  // Validate the activity exists.
  DiscountActivityDO activity = validateDiscountActivityExists(id);
  if (activity.getStatus().equals(CommonStatusEnum.DISABLE.getStatus())) {
    // An already-closed activity cannot be closed again.
    // NOTE(review): this compares against CommonStatusEnum.DISABLE while the
    // close path writes PromotionActivityStatusEnum.CLOSE — confirm the
    // intended status enum is being checked.
    throw exception(DISCOUNT_ACTIVITY_CLOSE_FAIL_STATUS_CLOSED);
  }
  // Update the activity status to CLOSE.
  DiscountActivityDO updateObj = new DiscountActivityDO().setId(id).setStatus(PromotionActivityStatusEnum.CLOSE.getStatus());
  discountActivityMapper.updateById(updateObj);
}
@Test
public void testCloseDiscountActivity() {
  // mock data
  DiscountActivityDO dbDiscountActivity = randomPojo(DiscountActivityDO.class, o -> o.setStatus(PromotionActivityStatusEnum.WAIT.getStatus()));
  discountActivityMapper.insert(dbDiscountActivity);// @Sql: insert an existing row first
  // prepare parameters
  Long id = dbDiscountActivity.getId();
  // invoke
  discountActivityService.closeDiscountActivity(id);
  // verify the status transitioned to CLOSE
  DiscountActivityDO discountActivity = discountActivityMapper.selectById(id);
  assertEquals(discountActivity.getStatus(), PromotionActivityStatusEnum.CLOSE.getStatus());
}
/**
 * Builds the slot manager configuration from Flink options plus the default
 * worker spec. The derived min/max CPU and memory totals are validated against
 * the worker spec and may raise ConfigurationException.
 */
public static SlotManagerConfiguration fromConfiguration(
    Configuration configuration, WorkerResourceSpec defaultWorkerResourceSpec)
    throws ConfigurationException {
  final Time rpcTimeout = Time.fromDuration(configuration.get(RpcOptions.ASK_TIMEOUT_DURATION));
  final Time taskManagerTimeout = Time.fromDuration(configuration.get(ResourceManagerOptions.TASK_MANAGER_TIMEOUT));
  final Duration requirementCheckDelay = configuration.get(ResourceManagerOptions.REQUIREMENTS_CHECK_DELAY);
  final Duration declareNeededResourceDelay = configuration.get(ResourceManagerOptions.DECLARE_NEEDED_RESOURCE_DELAY);
  boolean waitResultConsumedBeforeRelease = configuration.get(ResourceManagerOptions.TASK_MANAGER_RELEASE_WHEN_RESULT_CONSUMED);
  TaskManagerLoadBalanceMode taskManagerLoadBalanceMode = TaskManagerLoadBalanceMode.loadFromConfiguration(configuration);
  int numSlotsPerWorker = configuration.get(TaskManagerOptions.NUM_TASK_SLOTS);
  int minSlotNum = configuration.get(ResourceManagerOptions.MIN_SLOT_NUM);
  int maxSlotNum = configuration.get(ResourceManagerOptions.MAX_SLOT_NUM);
  int redundantTaskManagerNum = configuration.get(ResourceManagerOptions.REDUNDANT_TASK_MANAGER_NUM);
  // The getMin/Max helpers cross-check the totals against the slot bounds.
  return new SlotManagerConfiguration(
      rpcTimeout,
      taskManagerTimeout,
      requirementCheckDelay,
      declareNeededResourceDelay,
      waitResultConsumedBeforeRelease,
      taskManagerLoadBalanceMode,
      defaultWorkerResourceSpec,
      numSlotsPerWorker,
      minSlotNum,
      maxSlotNum,
      getMinTotalCpu(configuration, defaultWorkerResourceSpec, minSlotNum),
      getMaxTotalCpu(configuration, defaultWorkerResourceSpec, maxSlotNum),
      getMinTotalMem(configuration, defaultWorkerResourceSpec, minSlotNum),
      getMaxTotalMem(configuration, defaultWorkerResourceSpec, maxSlotNum),
      redundantTaskManagerNum);
}
@Test
void testComputeMinMaxCpuIsInvalid() {
  final Configuration configuration = new Configuration();
  final double minTotalCpu = 10.0;
  final double maxTotalCpu = 11.0;
  final int numSlots = 3;
  final double cpuCores = 3;
  configuration.set(ResourceManagerOptions.MIN_TOTAL_CPU, minTotalCpu);
  configuration.set(ResourceManagerOptions.MAX_TOTAL_CPU, maxTotalCpu);
  // This min/max CPU combination is inconsistent for 3-core workers and must be rejected.
  assertThatIllegalStateException()
      .isThrownBy(
          () -> SlotManagerConfiguration.fromConfiguration(
              configuration,
              new WorkerResourceSpec.Builder()
                  .setNumSlots(numSlots)
                  .setCpuCores(cpuCores)
                  .build()));
}
/**
 * Streams the entries of a Slime array inspector, mapping each child
 * inspector to a value via {@code mapper}. Order follows array index.
 */
public static <T> Stream<T> fromArray(Inspector array, Function<Inspector, T> mapper) {
  return IntStream.range(0, array.entries())
      .mapToObj(index -> mapper.apply(array.entry(index)));
}
@Test
public void test_some_elements() {
  var inspector = new Slime().setArray();
  inspector.addString("foo");
  inspector.addString("bar");
  // The stream must preserve array order.
  var items = SlimeStream.fromArray(inspector, Inspector::asString).toList();
  assertEquals(List.of("foo", "bar"), items);
}
/**
 * Encodes a log event as one JSON object per line with "level", "message"
 * and optional "stacktrace" members, UTF-8 encoded.
 */
@Override
public byte[] encode(ILoggingEvent event) {
  StringBuilder sb = new StringBuilder();
  sb.append(OPEN_OBJ);
  // "level" is optional: omitted entirely when the event carries none.
  var level = event.getLevel();
  if (level != null) {
    appenderMember(sb, "level", level.levelStr);
    sb.append(VALUE_SEPARATOR);
  }
  appenderMember(sb, "message", StringEscapeUtils.escapeJson(event.getFormattedMessage()));
  // "stacktrace" is appended only when a throwable is attached.
  IThrowableProxy tp = event.getThrowableProxy();
  String stackTrace = null;
  if (tp != null) {
    sb.append(VALUE_SEPARATOR);
    stackTrace = tpc.convert(event);
    appenderMember(sb, "stacktrace", StringEscapeUtils.escapeJson(stackTrace));
  }
  sb.append(CLOSE_OBJ);
  sb.append(CoreConstants.JSON_LINE_SEPARATOR);
  return sb.toString().getBytes(UTF_8_CHARSET);
}
@Test
void should_encode_when_no_stacktrace() {
  var logEvent = mock(ILoggingEvent.class);
  when(logEvent.getLevel()).thenReturn(Level.DEBUG);
  when(logEvent.getFormattedMessage()).thenReturn("message");
  var bytes = underTest.encode(logEvent);
  // Without a throwable, only the level and message members are emitted.
  assertThat(new String(bytes, StandardCharsets.UTF_8)).isEqualTo("{\"level\":\"DEBUG\",\"message\":\"message\"}\n");
}
/**
 * Dispatches non-multipart requests to the underlying RestLi server.
 */
@Override
public void handleRequest(RestRequest request, RequestContext requestContext, Callback<RestResponse> callback) {
  //This code path cannot accept content types or accept types that contain
  //multipart/related. This is because these types of requests will usually have very large payloads and therefore
  //would degrade server performance since RestRequest reads everything into memory.
  if (!isMultipart(request, requestContext, callback)) {
    _restRestLiServer.handleRequest(request, requestContext, callback);
  }
  // NOTE(review): when the request IS multipart, isMultipart(...) presumably
  // invokes the callback with an error itself — verify, since nothing else
  // completes the callback on that path.
}
@Test
public void testRestRequestAttachmentsPresent() throws Exception {
  //This test verifies that a RestRequest sent to the RestLiServer throws an exception if the content type is multipart/related
  RestRequest contentTypeMultiPartRelated = new RestRequestBuilder(new URI("/statuses/abcd"))
      .setHeader(RestConstants.HEADER_CONTENT_TYPE, RestConstants.HEADER_VALUE_MULTIPART_RELATED).build();
  Callback<RestResponse> callback = new Callback<RestResponse>() {
    @Override
    public void onSuccess(RestResponse restResponse) {
      // Multipart requests must never succeed on this path.
      fail();
    }

    @Override
    public void onError(Throwable e) {
      // Expect an HTTP 415 RestException with an explanatory body.
      assertTrue(e instanceof RestException);
      RestException restException = (RestException)e;
      RestResponse restResponse = restException.getResponse();
      assertEquals(restResponse.getStatus(), 415);
      assertTrue(restResponse.getEntity().length() > 0);
      assertEquals(restResponse.getEntity().asString(Charset.defaultCharset()), "This server cannot handle requests with a content type of multipart/related");
    }
  };
  _server.handleRequest(contentTypeMultiPartRelated, new RequestContext(), callback);
}
// Convenience overload: delegates with a null formatter, i.e. the default parsing rules.
public static LocalDateTime parse(CharSequence text) {
  return parse(text, (DateTimeFormatter) null);
}
@Test
public void parseBlankTest(){
  // Blank input parses to null rather than throwing.
  final LocalDateTime parse = LocalDateTimeUtil.parse("");
  assertNull(parse);
}
/**
 * Merges every aspect of the rule definition into the stored rule and reports
 * whether anything changed.
 */
RuleChange findChangesAndUpdateRule(RulesDefinition.Rule ruleDef, RuleDto ruleDto) {
  RuleChange ruleChange = new RuleChange(ruleDto);
  // Each merge is evaluated into its own variable on purpose: combining them
  // directly with `||` would short-circuit and skip later merges' side effects.
  boolean ruleMerged = mergeRule(ruleDef, ruleDto, ruleChange);
  boolean debtDefinitionsMerged = mergeDebtDefinitions(ruleDef, ruleDto);
  boolean tagsMerged = mergeTags(ruleDef, ruleDto);
  boolean securityStandardsMerged = mergeSecurityStandards(ruleDef, ruleDto);
  boolean educationPrinciplesMerged = mergeEducationPrinciples(ruleDef, ruleDto);
  ruleChange.ruleDefinitionChanged = ruleMerged || debtDefinitionsMerged || tagsMerged || securityStandardsMerged || educationPrinciplesMerged;
  return ruleChange;
}
@Test
public void findChangesAndUpdateRule_whenImpactsChanged_thenDontIncludeUnchangedImpacts() {
  RulesDefinition.Rule ruleDef = getDefaultRuleDef();
  when(ruleDef.cleanCodeAttribute()).thenReturn(CleanCodeAttribute.CLEAR);
  Map<SoftwareQuality, Severity> newImpacts = Map.of(SoftwareQuality.MAINTAINABILITY, Severity.LOW, SoftwareQuality.SECURITY, Severity.HIGH);
  when(ruleDef.defaultImpacts()).thenReturn(newImpacts);
  RuleDto rule = getDefaultRuleDto();
  when(rule.getCleanCodeAttribute()).thenReturn(CleanCodeAttribute.COMPLETE);
  Set<ImpactDto> oldImpacts = Set.of(new ImpactDto(SoftwareQuality.RELIABILITY, Severity.LOW), new ImpactDto(SoftwareQuality.SECURITY, Severity.HIGH));
  when(rule.getDefaultImpacts()).thenReturn(oldImpacts);
  StartupRuleUpdater.RuleChange changesAndUpdateRule = underTest.findChangesAndUpdateRule(ruleDef, rule);
  assertTrue(changesAndUpdateRule.hasRuleDefinitionChanged());
  // SECURITY/HIGH exists on both sides, so only the changed impacts are reported.
  assertThat(changesAndUpdateRule.getPluginRuleUpdate().getNewImpacts()).containsOnly(Map.entry(SoftwareQuality.MAINTAINABILITY, Severity.LOW));
  assertThat(changesAndUpdateRule.getPluginRuleUpdate().getOldImpacts()).containsOnly(Map.entry(SoftwareQuality.RELIABILITY, Severity.LOW));
}
/**
 * Moves the file pointer to {@code position}.
 *
 * @throws IllegalArgumentException if the position is outside [0, length()]
 */
@Override
public void moveTo(long position) {
  if (position < 0 || position > length()) {
    throw new IllegalArgumentException("Position out of the bounds of the file!");
  }
  fp = position;
}
@Test
public void moveTo() throws IOException {
  int readPosition = (int) len - 10;
  byte[] buff = new byte[1];
  // NOTE(review): moveTo is invoked before open(); verify the stream honors a
  // position set prior to opening.
  cs.moveTo(readPosition);
  cs.open();
  int n = cs.read(buff);
  // The byte read must come from the moved-to offset, and exactly one byte is read.
  assertEquals(text[readPosition], buff[0]);
  assertEquals(buff.length, n);
}
/**
 * Returns the timestamp of the most recent reset status message, or -1 when
 * no status list exists or no reset marker is found.
 */
public long timeOfLastReset() {
  final List<Status> statusList = sm.getCopyOfStatusList();
  if (statusList == null)
    return -1;
  // Scan newest-to-oldest for the most recent reset marker.
  for (int i = statusList.size() - 1; i >= 0; i--) {
    final Status status = statusList.get(i);
    if (CoreConstants.RESET_MSG_PREFIX.equals(status.getMessage())) {
      return status.getDate();
    }
  }
  return -1;
}
@Test public void emptyStatusListShouldResultInNotFound() { assertEquals(-1, statusUtil.timeOfLastReset()); }
public static <T> T retryOnException(Supplier<T> supplier, int maxRetries) { return retryOnException(supplier, maxRetries, 20L); }
@Test void retryOnExceptionForSupplier() { assertThatCode(() -> Exceptions.retryOnException(() -> "my string from supplier", 5)).doesNotThrowAnyException(); assertThatCode(() -> Exceptions.retryOnException(new SupplierThrowingRuntimeException(), 5)).isInstanceOf(StorageException.class); }
public <T> T fromXmlPartial(String partial, Class<T> o) throws Exception { return fromXmlPartial(toInputStream(partial, UTF_8), o); }
@Test void shouldLoadAllowOnlySuccessOnManualApprovalType() throws Exception { Approval approval = xmlLoader.fromXmlPartial("<approval type=\"manual\" allowOnlyOnSuccess=\"true\" />", Approval.class); assertThat(approval.getType()).isEqualTo("manual"); assertThat(approval.isAllowOnlyOnSuccess()).isEqualTo(true); }
public void addHeaders(Map<String, String> headersToAdd) { headers.putAll(headersToAdd); }
@Test public void testAddHeaders() { String headerName1 = "customized_header1"; String headerValue1 = "customized_value1"; String headerName2 = "customized_header2"; String headerValue2 = "customized_value2"; HashMap<String, String> headersToAdd = new HashMap<>(); headersToAdd.put(headerName1, headerValue1); headersToAdd.put(headerName2, headerValue2); httpService.addHeaders(headersToAdd); assertTrue(httpService.getHeaders().get(headerName1).equals(headerValue1)); assertTrue(httpService.getHeaders().get(headerName2).equals(headerValue2)); }
@Override public void recordPartitionInfo(int partitionIndex, ResultPartitionBytes partitionBytes) { // Once all partitions are finished, we can convert the subpartition bytes to aggregated // value to reduce the space usage, because the distribution of source splits does not // affect the distribution of data consumed by downstream tasks of ALL_TO_ALL edges(Hashing // or Rebalancing, we do not consider rare cases such as custom partitions here). if (aggregatedSubpartitionBytes == null) { super.recordPartitionInfo(partitionIndex, partitionBytes); if (subpartitionBytesByPartitionIndex.size() == numOfPartitions) { long[] aggregatedBytes = new long[numOfSubpartitions]; subpartitionBytesByPartitionIndex .values() .forEach( subpartitionBytes -> { checkState(subpartitionBytes.length == numOfSubpartitions); for (int i = 0; i < subpartitionBytes.length; ++i) { aggregatedBytes[i] += subpartitionBytes[i]; } }); this.aggregatedSubpartitionBytes = Arrays.stream(aggregatedBytes).boxed().collect(Collectors.toList()); this.subpartitionBytesByPartitionIndex.clear(); } } }
@Test void testGetBytesWithPartialPartitionInfos() { AllToAllBlockingResultInfo resultInfo = new AllToAllBlockingResultInfo(new IntermediateDataSetID(), 2, 2, false); resultInfo.recordPartitionInfo(0, new ResultPartitionBytes(new long[] {32L, 64L})); assertThatThrownBy(resultInfo::getNumBytesProduced) .isInstanceOf(IllegalStateException.class); assertThatThrownBy(resultInfo::getAggregatedSubpartitionBytes) .isInstanceOf(IllegalStateException.class); }
@ConstantFunction(name = "add", argTypes = {SMALLINT, SMALLINT}, returnType = SMALLINT, isMonotonic = true) public static ConstantOperator addSmallInt(ConstantOperator first, ConstantOperator second) { return ConstantOperator.createSmallInt((short) Math.addExact(first.getSmallint(), second.getSmallint())); }
@Test public void addSmallInt() { assertEquals(20, ScalarOperatorFunctions.addSmallInt(O_SI_10, O_SI_10).getSmallint()); }
@VisibleForTesting AdminUserDO validateUserExists(Long id) { if (id == null) { return null; } AdminUserDO user = userMapper.selectById(id); if (user == null) { throw exception(USER_NOT_EXISTS); } return user; }
@Test public void testValidateUserExists_notExists() { assertServiceException(() -> userService.validateUserExists(randomLongId()), USER_NOT_EXISTS); }
protected PackageModel getPackageModel(PackageDescr packageDescr, PackageRegistry pkgRegistry, String pkgName) { return packageModels.getPackageModel(packageDescr, pkgRegistry, pkgName); }
@Test public void getPackageModelWithPkgUUID() { String pkgUUID = generateUUID(); PackageDescr packageDescr = getPackageDescr(pkgUUID); PackageModel retrieved = modelBuilder.getPackageModel(packageDescr, packageRegistry, internalKnowledgePackage.getName()); assertThat(retrieved).isNotNull(); assertThat(retrieved.getPackageUUID()).isEqualTo(pkgUUID); }
public boolean shouldKeepReport() { return configuration.getBoolean(KEEP_REPORT_PROP_KEY).orElse(false) || configuration.getBoolean(VERBOSE_KEY).orElse(false); }
@Test public void should_define_keep_report() { settings.setProperty("sonar.scanner.keepReport", "true"); assertThat(underTest.shouldKeepReport()).isTrue(); }
@Override public List<SysMenuVO> menuList(SysMenuListDTO dto) { QueryWrapper wrapper = QueryWrapper.create() .eq(SysMenu::getDelFlag, "F") /* .orderBy(SYS_MENU.DEEP.asc())*/ .orderBy(SysMenuTableDef.SYS_MENU.SORT.asc()); if (!dto.isShowButton()) { wrapper.ne(SysMenu::getMenuTypeCd, "1002003"); } // 菜单全部数据 List<SysMenu> list = list(wrapper); List<SysMenuVO> treeList = new ArrayList<>(); // 构建树形 for (SysMenuVO rootNode : getRootNodes(list)) { SysMenuVO menuVO = BeanCopyUtils.springCopy(rootNode, SysMenuVO.class); SysMenuVO.Meta meta = BeanCopyUtils.springCopy(rootNode, SysMenuVO.Meta.class); menuVO.setMeta(meta); SysMenuVO childrenNode = getChildrenNode(menuVO, list); treeList.add(childrenNode); } return treeList; }
@Test void menuList() { SysMenuListDTO dto = new SysMenuListDTO(); List<SysMenuVO> sysMenuVOS = sysMenuService.menuList(dto); System.out.println("sysMenuVOS ==" + JsonUtils.toJsonString(sysMenuVOS)); }
@Deprecated public static boolean isNullOrEmpty(String str) { return str == null || str.isEmpty(); }
@Test public void isNullOrEmpty() { String string = "null"; Assert.assertFalse(StringUtil.isEmpty(string)); }
@Override public Object read(final MySQLPacketPayload payload, final boolean unsigned) throws SQLException { int length = payload.readInt1(); switch (length) { case 0: throw new SQLFeatureNotSupportedException("Can not support date format if year, month, day is absent."); case 4: return getTimestampForDate(payload); case 7: return getTimestampForDatetime(payload); case 11: Timestamp result = getTimestampForDatetime(payload); result.setNanos(payload.readInt4() * 1000); return result; default: throw new SQLFeatureNotSupportedException(String.format("Wrong length `%d` of MYSQL_TYPE_TIME", length)); } }
@Test void assertReadWithZeroByte() { assertThrows(SQLFeatureNotSupportedException.class, () -> new MySQLDateBinaryProtocolValue().read(payload, false)); }
public static List<?> convertToList(Schema schema, Object value) { return convertToArray(ARRAY_SELECTOR_SCHEMA, value); }
@Test public void shouldFailToConvertToListFromStringWithExtraDelimiters() { assertThrows(DataException.class, () -> Values.convertToList(Schema.STRING_SCHEMA, "[1, 2, 3,,,]")); }
@Override public void add(T item) { final int sizeAtTimeOfAdd; synchronized (items) { items.add(item); sizeAtTimeOfAdd = items.size(); } /* WARNING: It is possible that the item that was just added to the list has been processed by an existing idle task at this point. By rescheduling the following timers, it is possible that a superfluous maxTask is generated now OR that the idle task and max task are scheduled at their specified delays. This could result in calls to processItems sooner than expected. */ // Did we hit the max item threshold? if (sizeAtTimeOfAdd >= maxItems) { if (maxIdleMillis < maxBatchMillis) { cancelTask(idleTask); } rescheduleTask(maxTask, 0 /* now! */); } else { // Otherwise, schedule idle task and if this is a first item // also schedule the max batch age task. if (maxIdleMillis < maxBatchMillis) { rescheduleTask(idleTask, maxIdleMillis); } if (sizeAtTimeOfAdd == 1) { rescheduleTask(maxTask, maxBatchMillis); } } }
@Test public void timeTrigger() { TestAccumulator accumulator = new TestAccumulator(); accumulator.add(new TestItem("a")); timer.advanceTimeMillis(30, SHORT_REAL_TIME_DELAY); assertTrue("should not have fired yet", accumulator.batch.isEmpty()); accumulator.add(new TestItem("b")); timer.advanceTimeMillis(30, SHORT_REAL_TIME_DELAY); assertTrue("should not have fired yet", accumulator.batch.isEmpty()); accumulator.add(new TestItem("c")); timer.advanceTimeMillis(30, SHORT_REAL_TIME_DELAY); assertTrue("should not have fired yet", accumulator.batch.isEmpty()); accumulator.add(new TestItem("d")); timer.advanceTimeMillis(10, LONG_REAL_TIME_DELAY); assertFalse("should have fired", accumulator.batch.isEmpty()); assertEquals("incorrect batch", "abcd", accumulator.batch); }
public Entry<Object, Object> project(JetSqlRow row) { keyTarget.init(); valueTarget.init(); for (int i = 0; i < row.getFieldCount(); i++) { Object value = getToConverter(types[i]).convert(row.get(i)); injectors[i].set(value); } Object key = keyTarget.conclude(); if (key == null && failOnNulls) { throw QueryException.error("Cannot write NULL to '__key' field. " + "Note that NULL is used also if your INSERT/SINK command doesn't write to '__key' at all."); } Object value = valueTarget.conclude(); if (value == null && failOnNulls) { throw QueryException.error("Cannot write NULL to 'this' field. " + "Note that NULL is used also if your INSERT/SINK command doesn't write to 'this' at all."); } return entry(key, value); }
@Test public void test_projectValueNullNotAllowed() { KvProjector projector = new KvProjector( new QueryPath[]{QueryPath.KEY_PATH, QueryPath.VALUE_PATH}, new QueryDataType[]{QueryDataType.INT, QueryDataType.INT}, new MultiplyingTarget(), new NullTarget(), true ); assertThatThrownBy(() -> projector.project(new JetSqlRow(TEST_SS, new Object[]{1, 2}))) .isInstanceOf(QueryException.class) .hasMessageContaining("Cannot write NULL to 'this' field"); }
public static BigDecimal cast(final Integer value, final int precision, final int scale) { if (value == null) { return null; } return cast(value.longValue(), precision, scale); }
@Test public void shouldNotCastStringTooNegative() { // When: final Exception e = assertThrows( ArithmeticException.class, () -> cast("-10", 2, 1) ); // Then: assertThat(e.getMessage(), containsString("Numeric field overflow")); }
@Override public boolean match(Message msg, StreamRule rule) { Double msgVal = getDouble(msg.getField(rule.getField())); if (msgVal == null) { return false; } Double ruleVal = getDouble(rule.getValue()); if (ruleVal == null) { return false; } return rule.getInverted() ^ (msgVal > ruleVal); }
@Test public void testSuccessfullInvertedMatch() { StreamRule rule = getSampleRule(); rule.setValue("10"); rule.setInverted(true); Message msg = getSampleMessage(); msg.addField("something", "4"); StreamRuleMatcher matcher = getMatcher(rule); assertTrue(matcher.match(msg, rule)); }
public static void toJson(PartitionSpec spec, JsonGenerator generator) throws IOException { toJson(spec.toUnbound(), generator); }
@TestTemplate public void testToJsonForV1Table() { String expected = "{\n" + " \"spec-id\" : 0,\n" + " \"fields\" : [ {\n" + " \"name\" : \"data_bucket\",\n" + " \"transform\" : \"bucket[16]\",\n" + " \"source-id\" : 2,\n" + " \"field-id\" : 1000\n" + " } ]\n" + "}"; assertThat(PartitionSpecParser.toJson(table.spec(), true)).isEqualTo(expected); PartitionSpec spec = PartitionSpec.builderFor(table.schema()).bucket("id", 8).bucket("data", 16).build(); table.ops().commit(table.ops().current(), table.ops().current().updatePartitionSpec(spec)); expected = "{\n" + " \"spec-id\" : 1,\n" + " \"fields\" : [ {\n" + " \"name\" : \"id_bucket\",\n" + " \"transform\" : \"bucket[8]\",\n" + " \"source-id\" : 1,\n" + " \"field-id\" : 1000\n" + " }, {\n" + " \"name\" : \"data_bucket\",\n" + " \"transform\" : \"bucket[16]\",\n" + " \"source-id\" : 2,\n" + " \"field-id\" : 1001\n" + " } ]\n" + "}"; assertThat(PartitionSpecParser.toJson(table.spec(), true)).isEqualTo(expected); }
@Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj instanceof DefaultDisjointPath) { final DefaultDisjointPath other = (DefaultDisjointPath) obj; return (Objects.equals(this.path1, other.path1) && Objects.equals(this.path2, other.path2)) || (Objects.equals(this.path1, other.path2) && Objects.equals(this.path2, other.path1)); } return false; }
@Test public void testEquals() { new EqualsTester() .addEqualityGroup(disjointPath1, sameAsDisjointPath1, disjointPath2) .addEqualityGroup(disjointPath3) .addEqualityGroup(disjointPath4) .testEquals(); }
@Override public boolean checkExists(String path) { try { if (client.checkExists().forPath(path) != null) { return true; } } catch (Exception ignored) { } return false; }
@Test void testCheckExists() { String path = "/dubbo/org.apache.dubbo.demo.DemoService/providers"; curatorClient.create(path, false, true); assertThat(curatorClient.checkExists(path), is(true)); assertThat(curatorClient.checkExists(path + "/noneexits"), is(false)); }
@Override public void processElement2(StreamRecord<IN2> element) throws Exception { collector.setTimestamp(element); rwContext.setElement(element); userFunction.processBroadcastElement(element.getValue(), rwContext, collector); rwContext.setElement(null); }
@Test void testNoKeyedStateOnBroadcastSide() throws Exception { try (TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness = getInitializedTestHarness( BasicTypeInfo.STRING_TYPE_INFO, new IdentityKeySelector<>(), new KeyedBroadcastProcessFunction<String, String, Integer, String>() { private static final long serialVersionUID = -1725365436500098384L; private final ValueStateDescriptor<String> valueState = new ValueStateDescriptor<>( "any", BasicTypeInfo.STRING_TYPE_INFO); @Override public void processBroadcastElement( Integer value, Context ctx, Collector<String> out) throws Exception { assertThatThrownBy( () -> getRuntimeContext() .getState(valueState) .value()) .isInstanceOf(NullPointerException.class) .hasMessage( "No key set. This method should not be called outside of a keyed context."); } @Override public void processElement( String value, ReadOnlyContext ctx, Collector<String> out) throws Exception { // do nothing } })) { testHarness.processWatermark1(new Watermark(10L)); testHarness.processWatermark2(new Watermark(10L)); testHarness.processElement2(new StreamRecord<>(5, 12L)); } }
public static RestartBackoffTimeStrategy.Factory createRestartBackoffTimeStrategyFactory( final RestartStrategies.RestartStrategyConfiguration jobRestartStrategyConfiguration, final Configuration jobConfiguration, final Configuration clusterConfiguration, final boolean isCheckpointingEnabled) { checkNotNull(jobRestartStrategyConfiguration); checkNotNull(jobConfiguration); checkNotNull(clusterConfiguration); return getJobRestartStrategyFactory(jobRestartStrategyConfiguration) .orElse( getRestartStrategyFactoryFromConfig(jobConfiguration) .orElse( (getRestartStrategyFactoryFromConfig(clusterConfiguration) .orElse( getDefaultRestartStrategyFactory( isCheckpointingEnabled))))); }
@Test void testFixedDelayStrategySpecifiedInJobConfig() { final Configuration jobConf = new Configuration(); jobConf.set(RestartStrategyOptions.RESTART_STRATEGY, FIXED_DELAY.getMainValue()); final Configuration clusterConf = new Configuration(); clusterConf.set(RestartStrategyOptions.RESTART_STRATEGY, EXPONENTIAL_DELAY.getMainValue()); final RestartBackoffTimeStrategy.Factory factory = RestartBackoffTimeStrategyFactoryLoader.createRestartBackoffTimeStrategyFactory( DEFAULT_JOB_LEVEL_RESTART_CONFIGURATION, jobConf, clusterConf, false); assertThat(factory) .isInstanceOf( FixedDelayRestartBackoffTimeStrategy .FixedDelayRestartBackoffTimeStrategyFactory.class); }
public void parse(DataByteArrayInputStream input, int readSize) throws Exception { if (currentParser == null) { currentParser = initializeHeaderParser(); } // Parser stack will run until current incoming data has all been consumed. currentParser.parse(input, readSize); }
@Test public void testConnectWithCredentialsBackToBack() throws Exception { CONNECT connect = new CONNECT(); connect.cleanSession(false); connect.clientId(new UTF8Buffer("test")); connect.userName(new UTF8Buffer("user")); connect.password(new UTF8Buffer("pass")); DataByteArrayOutputStream output = new DataByteArrayOutputStream(); wireFormat.marshal(connect.encode(), output); wireFormat.marshal(connect.encode(), output); Buffer marshalled = output.toBuffer(); DataByteArrayInputStream input = new DataByteArrayInputStream(marshalled); codec.parse(input, marshalled.length()); assertTrue(!frames.isEmpty()); assertEquals(2, frames.size()); for (MQTTFrame frame : frames) { connect = new CONNECT().decode(frame); LOG.info("Unmarshalled: {}", connect); assertFalse(connect.cleanSession()); assertEquals("user", connect.userName().toString()); assertEquals("pass", connect.password().toString()); assertEquals("test", connect.clientId().toString()); } }
public static Subject.Factory<Re2jStringSubject, String> re2jString() { return Re2jStringSubject.FACTORY; }
@Test public void containsMatch_string_succeeds() { assertAbout(re2jString()).that("this is a hello world").containsMatch(PATTERN_STR); }
@Override protected void validateDataImpl(TenantId tenantId, Tenant tenant) { validateString("Tenant title", tenant.getTitle()); if (!StringUtils.isEmpty(tenant.getEmail())) { validateEmail(tenant.getEmail()); } }
@Test void testValidateNameInvocation() { Tenant tenant = new Tenant(); tenant.setTitle("Monster corporation ©"); tenant.setEmail("support@thingsboard.io"); validator.validateDataImpl(tenantId, tenant); verify(validator).validateString("Tenant title", tenant.getTitle()); }
@Override public boolean contains(Object objectToCheck) { return contains(objectToCheck, objectToCheck.hashCode()); }
@Test public void testContains() { final OAHashSet<Integer> set = new OAHashSet<>(8); populateSet(set, 10); for (int i = 0; i < 10; i++) { final boolean contained = set.contains(i); assertTrue("Element " + i + " should be contained", contained); } }
public static ByteArrayClassLoader compile( ClassLoader parentClassLoader, CompileUnit... compileUnits) { final Map<String, byte[]> classes = toBytecode(parentClassLoader, compileUnits); // Set up a class loader that finds and defined the generated classes. return new ByteArrayClassLoader(classes, parentClassLoader); }
@Test public void benchmark() { CompileUnit unit = new CompileUnit( "demo.pkg1", "A", ("" + "package demo.pkg1;\n" + "public class A {\n" + " public static String hello() { return \"HELLO\"; }\n" + "}")); // Since janino is not called frequently, we test only 50 times. int iterNums = 50; for (int i = 0; i < iterNums; i++) { JaninoUtils.compile(Thread.currentThread().getContextClassLoader(), unit); } long startTime = System.nanoTime(); for (int i = 0; i < iterNums; i++) { JaninoUtils.compile(Thread.currentThread().getContextClassLoader(), unit); } long duration = System.nanoTime() - startTime; System.out.printf( "Total cost %f ms, average time is %f ms", (double) duration / 1000_000, (double) duration / iterNums / 1000_000); }
@Override public Double getValue() { return getRatio().getValue(); }
@Test public void handlesNaNDenominators() { final RatioGauge nan = new RatioGauge() { @Override protected Ratio getRatio() { return Ratio.of(10, Double.NaN); } }; assertThat(nan.getValue()) .isNaN(); }
public static byte[] intToByteArray(int value, int len) { if (len == 2 || len == 4) { if (len == 2 && (value < Short.MIN_VALUE || value > Short.MAX_VALUE)) { throw new IllegalArgumentException("Value outside range for signed short int."); } else { byte[] b = new byte[len]; for (int i = 0; i < len; i++) { int offset = (b.length - 1 - i) * 8; b[i] = (byte) ((value >>> offset) & 0xFF); } return b; } } else { throw new IllegalArgumentException( "Length must be specified as either 2 or 4."); } }
@Test public void testIntToByteArray() throws Exception { byte[] ba; int len = 2; ba = TCPClientDecorator.intToByteArray(0, len); assertEquals(len, ba.length); assertEquals(0, ba[0]); assertEquals(0, ba[1]); ba = TCPClientDecorator.intToByteArray(15, len); assertEquals(len, ba.length); assertEquals(0, ba[0]); assertEquals(15, ba[1]); ba = TCPClientDecorator.intToByteArray(255, len); assertEquals(len, ba.length); assertEquals(0, ba[0]); assertEquals(-1, ba[1]); ba = TCPClientDecorator.intToByteArray(256, len); assertEquals(len, ba.length); assertEquals(1, ba[0]); assertEquals(0, ba[1]); ba = TCPClientDecorator.intToByteArray(-1, len); assertEquals(len, ba.length); assertEquals(-1, ba[0]); assertEquals(-1, ba[1]); ba = TCPClientDecorator.intToByteArray(Short.MAX_VALUE, len); assertEquals(len, ba.length); assertEquals(127, ba[0]); assertEquals(-1, ba[1]); ba = TCPClientDecorator.intToByteArray(Short.MIN_VALUE, len); assertEquals(len, ba.length); assertEquals(-128, ba[0]); assertEquals(0, ba[1]); try { ba = TCPClientDecorator.intToByteArray(Short.MIN_VALUE-1, len); fail(); } catch (IllegalArgumentException iae) { } try { ba = TCPClientDecorator.intToByteArray(Short.MAX_VALUE+1, len); fail(); } catch (IllegalArgumentException iae) { } len = 4; ba = TCPClientDecorator.intToByteArray(0, len); assertEquals(len, ba.length); assertEquals(0, ba[0]); assertEquals(0, ba[1]); assertEquals(0, ba[2]); assertEquals(0, ba[3]); ba = TCPClientDecorator.intToByteArray(15, len); assertEquals(len, ba.length); assertEquals(0, ba[0]); assertEquals(0, ba[1]); assertEquals(0, ba[2]); assertEquals(15, ba[3]); ba = TCPClientDecorator.intToByteArray(255, len); assertEquals(len, ba.length); assertEquals(0, ba[0]); assertEquals(0, ba[1]); assertEquals(0, ba[2]); assertEquals(-1, ba[3]); ba = TCPClientDecorator.intToByteArray(-1, len); assertEquals(len, ba.length); assertEquals(-1, ba[0]); assertEquals(-1, ba[1]); assertEquals(-1, ba[2]); assertEquals(-1, ba[3]); ba = 
TCPClientDecorator.intToByteArray(256, len); assertEquals(len, ba.length); assertEquals(0, ba[0]); assertEquals(0, ba[1]); assertEquals(1, ba[2]); assertEquals(0, ba[3]); ba = TCPClientDecorator.intToByteArray(65535, len); assertEquals(len, ba.length); assertEquals(0, ba[0]); assertEquals(0, ba[1]); assertEquals(-1, ba[2]); assertEquals(-1, ba[3]); ba = TCPClientDecorator.intToByteArray(65536, len); assertEquals(len, ba.length); assertEquals(0, ba[0]); assertEquals(1, ba[1]); assertEquals(0, ba[2]); assertEquals(0, ba[3]); ba = TCPClientDecorator.intToByteArray(Integer.MIN_VALUE, len); assertEquals(len, ba.length); assertEquals(-128, ba[0]); assertEquals(0, ba[1]); assertEquals(0, ba[2]); assertEquals(0, ba[3]); ba = TCPClientDecorator.intToByteArray(Integer.MAX_VALUE, len); assertEquals(len, ba.length); assertEquals(127, ba[0]); assertEquals(-1, ba[1]); assertEquals(-1, ba[2]); assertEquals(-1, ba[3]); // Check illegal array lengths try { ba = TCPClientDecorator.intToByteArray(0, 0); fail(); } catch (IllegalArgumentException iae) { } try { ba = TCPClientDecorator.intToByteArray(0, 1); fail(); } catch (IllegalArgumentException iae) { } try { ba = TCPClientDecorator.intToByteArray(0, 3); fail(); } catch (IllegalArgumentException iae) { } try { TCPClientDecorator.intToByteArray(0, 5); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException iae) { } }
public static TopicMessageType getMessageType(SendMessageRequestHeader requestHeader) { Map<String, String> properties = MessageDecoder.string2messageProperties(requestHeader.getProperties()); String traFlag = properties.get(MessageConst.PROPERTY_TRANSACTION_PREPARED); TopicMessageType topicMessageType = TopicMessageType.NORMAL; if (Boolean.parseBoolean(traFlag)) { topicMessageType = TopicMessageType.TRANSACTION; } else if (properties.containsKey(MessageConst.PROPERTY_SHARDING_KEY)) { topicMessageType = TopicMessageType.FIFO; } else if (properties.get("__STARTDELIVERTIME") != null || properties.get(MessageConst.PROPERTY_DELAY_TIME_LEVEL) != null || properties.get(MessageConst.PROPERTY_TIMER_DELIVER_MS) != null || properties.get(MessageConst.PROPERTY_TIMER_DELAY_SEC) != null || properties.get(MessageConst.PROPERTY_TIMER_DELAY_MS) != null) { topicMessageType = TopicMessageType.DELAY; } return topicMessageType; }
@Test public void testGetMessageTypeWithTransactionFlagButOtherPropertiesPresent() { SendMessageRequestHeader requestHeader = new SendMessageRequestHeader(); Map<String, String> map = new HashMap<>(); map.put(MessageConst.PROPERTY_TRANSACTION_PREPARED, "true"); map.put(MessageConst.PROPERTY_SHARDING_KEY, "shardingKey"); requestHeader.setProperties(MessageDecoder.messageProperties2String(map)); TopicMessageType result = BrokerMetricsManager.getMessageType(requestHeader); assertThat(TopicMessageType.TRANSACTION).isEqualTo(result); }
@Override public TreeEntry<K, V> get(Object key) { return nodes.get(key); }
@Test public void putAllNodeTest() { final ForestMap<String, Map<String, String>> map = new LinkedForestMap<>(false); final Map<String, String> aMap = MapBuilder.<String, String> create() .put("pid", null) .put("id", "a") .build(); final Map<String, String> bMap = MapBuilder.<String, String> create() .put("pid", "a") .put("id", "b") .build(); final Map<String, String> cMap = MapBuilder.<String, String> create() .put("pid", "b") .put("id", "c") .build(); map.putAllNode(Arrays.asList(aMap, bMap, cMap), m -> m.get("id"), m -> m.get("pid"), true); final TreeEntry<String, Map<String, String>> a = map.get("a"); assertNotNull(a); final TreeEntry<String, Map<String, String>> b = map.get("b"); assertNotNull(b); final TreeEntry<String, Map<String, String>> c = map.get("c"); assertNotNull(c); assertNull(a.getDeclaredParent()); assertEquals(a, b.getDeclaredParent()); assertEquals(b, c.getDeclaredParent()); assertEquals(aMap, a.getValue()); assertEquals(bMap, b.getValue()); assertEquals(cMap, c.getValue()); }
public static <T> RestResponse<T> toRestResponse( final ResponseWithBody resp, final String path, final Function<ResponseWithBody, T> mapper ) { final int statusCode = resp.getResponse().statusCode(); return statusCode == OK.code() ? RestResponse.successful(statusCode, mapper.apply(resp)) : createErrorResponse(path, resp); }
@Test public void shouldCreateRestResponseFromSuccessfulResponse() { // Given: when(httpClientResponse.statusCode()).thenReturn(OK.code()); // When: final RestResponse<KsqlEntityList> restResponse = KsqlClientUtil.toRestResponse(response, PATH, mapper); // Then: assertThat("is successful", restResponse.isSuccessful()); assertThat(restResponse.getStatusCode(), is(OK.code())); assertThat(restResponse.getResponse(), sameInstance(entities)); }
public Collection<DevConsole> loadDevConsoles() { return loadDevConsoles(false); }
@Test public void testLoaderForce() { DefaultDevConsolesLoader loader = new DefaultDevConsolesLoader(context); Collection<DevConsole> col = loader.loadDevConsoles(true); Assertions.assertTrue(col.size() > 3); }
@Override public boolean exists( ValueMetaInterface meta ) { return ( meta != null ) && searchValueMeta( meta.getName() ) != null; }
@Test public void testExists() { assertTrue( rowMeta.exists( string ) ); assertTrue( rowMeta.exists( date ) ); assertTrue( rowMeta.exists( integer ) ); }
synchronized public Value addFirst(Transaction tx, Key key, Value value) throws IOException { assertLoaded(); getHead(tx).addFirst(tx, key, value); size.incrementAndGet(); flushCache(); return null; }
@Test(timeout=60000) public void testAddFirst() throws Exception { createPageFileAndIndex(100); ListIndex<String, Long> listIndex = ((ListIndex<String, Long>) this.index); this.index.load(tx); tx.commit(); tx = pf.tx(); // put is add last doInsert(10); listIndex.addFirst(tx, key(10), (long) 10); listIndex.addFirst(tx, key(11), (long) 11); tx.commit(); tx = pf.tx(); int counter = 11; Iterator<Map.Entry<String, Long>> iterator = index.iterator(tx); assertEquals(key(counter), iterator.next().getKey()); counter--; assertEquals(key(counter), iterator.next().getKey()); counter--; int count = 0; while (iterator.hasNext() && count < counter) { Map.Entry<String, Long> entry = iterator.next(); assertEquals(key(count), entry.getKey()); assertEquals(count, (long) entry.getValue()); count++; } tx.commit(); }
@Override public void open(InitializationContext context) throws IOException { try { PartitionTempFileManager fileManager = new PartitionTempFileManager( fsFactory, stagingPath, context.getTaskNumber(), context.getAttemptNumber(), outputFileConfig); PartitionWriter.Context<T> writerContext = new PartitionWriter.Context<>(parameters, formatFactory); writer = PartitionWriterFactory.<T>get( partitionColumns.length - staticPartitions.size() > 0, dynamicGrouped, staticPartitions) .create( writerContext, fileManager, computer, new PartitionWriter.DefaultPartitionWriterListener()); } catch (Exception e) { throw new TableException("Exception in open", e); } }
@Test void testClosingWithoutInput() throws Exception { try (OneInputStreamOperatorTestHarness<Row, Object> testHarness = createTestHarness(createSinkFormat(false, false, false, new LinkedHashMap<>()))) { testHarness.setup(); testHarness.open(); } }
public List<Stream> match(Message message) { final Set<Stream> result = Sets.newHashSet(); final Set<String> blackList = Sets.newHashSet(); for (final Rule rule : rulesList) { if (blackList.contains(rule.getStreamId())) { continue; } final StreamRule streamRule = rule.getStreamRule(); final StreamRuleType streamRuleType = streamRule.getType(); final Stream.MatchingType matchingType = rule.getMatchingType(); if (!ruleTypesNotNeedingFieldPresence.contains(streamRuleType) && !message.hasField(streamRule.getField())) { if (matchingType == Stream.MatchingType.AND) { result.remove(rule.getStream()); // blacklist stream because it can't match anymore blackList.add(rule.getStreamId()); } continue; } final Stream stream; if (streamRuleType != StreamRuleType.REGEX) { stream = rule.match(message); } else { stream = rule.matchWithTimeOut(message, streamProcessingTimeout, TimeUnit.MILLISECONDS); } if (stream == null) { if (matchingType == Stream.MatchingType.AND) { result.remove(rule.getStream()); // blacklist stream because it can't match anymore blackList.add(rule.getStreamId()); } } else { result.add(stream); if (matchingType == Stream.MatchingType.OR) { // blacklist stream because it is already matched blackList.add(rule.getStreamId()); } } } final Stream defaultStream = defaultStreamProvider.get(); boolean alreadyRemovedDefaultStream = false; for (Stream stream : result) { if (stream.getRemoveMatchesFromDefaultStream()) { if (alreadyRemovedDefaultStream || message.removeStream(defaultStream)) { alreadyRemovedDefaultStream = true; if (LOG.isTraceEnabled()) { LOG.trace("Successfully removed default stream <{}> from message <{}>", defaultStream.getId(), message.getId()); } } else { // A previously executed message processor (or Illuminate) has likely already removed the // default stream from the message. Now, the message has matched a stream in the Graylog // MessageFilterChain, and the matching stream is also set to remove the default stream. 
// This is usually from user-defined stream rules, and is generally not a problem. cannotRemoveDefaultMeter.inc(); if (LOG.isTraceEnabled()) { LOG.trace("Couldn't remove default stream <{}> from message <{}>", defaultStream.getId(), message.getId()); } } } } return ImmutableList.copyOf(result); }
@Test public void testOrMatching() { final String dummyField = "dummyField"; final String dummyValue = "dummyValue"; final Stream stream = mock(Stream.class); when(stream.getMatchingType()).thenReturn(Stream.MatchingType.OR); final StreamRule streamRule1 = getStreamRuleMock("StreamRule1Id", StreamRuleType.EXACT, dummyField, dummyValue); final StreamRule streamRule2 = getStreamRuleMock("StreamRule2Id", StreamRuleType.EXACT, dummyField, "not" + dummyValue); when(stream.getStreamRules()).thenReturn(Lists.newArrayList(streamRule1, streamRule2)); final Message message = mock(Message.class); when(message.getField(eq(dummyField))).thenReturn(dummyValue); final StreamRouterEngine engine = newEngine(Lists.newArrayList(stream)); final List<Stream> result = engine.match(message); assertThat(result).hasSize(1); assertThat(result).contains(stream); }
/**
 * Acquires the state lock exclusively with the given options.
 *
 * <p>Convenience overload that delegates to {@code lockExclusive(lockOptions, null)}, i.e. with
 * no additional action supplied.
 *
 * @param lockOptions options controlling grace-cycle/timeout behavior
 * @return a resource that releases the lock when closed
 * @throws TimeoutException if the lock could not be acquired in time
 * @throws InterruptedException if the caller was interrupted while waiting
 * @throws IOException if the delegate acquisition fails with an I/O error
 */
public LockResource lockExclusive(StateLockOptions lockOptions)
        throws TimeoutException, InterruptedException, IOException {
    return lockExclusive(lockOptions, null);
}
// Verifies that exclusive acquisition with GraceMode.TIMEOUT times out while another thread
// holds the state lock.
// NOTE(review): with a JUnit ExpectedException rule (mExpected), the test passes as soon as the
// first lockExclusive call throws TimeoutException; the statements after that call appear to be
// unreachable — confirm whether the exclusive-holder and success phases were meant to run.
@Test
public void testGraceMode_Timeout() throws Throwable {
    configureInterruptCycle(false);
    // The state-lock instance.
    StateLockManager stateLockManager = new StateLockManager();
    // Start a thread that owns the state-lock in shared mode.
    StateLockingThread sharedHolderThread = new StateLockingThread(stateLockManager, false);
    sharedHolderThread.start();
    sharedHolderThread.waitUntilStateLockAcquired();
    // Expect timeout when the lock is held in shared mode.
    mExpected.expect(TimeoutException.class);
    stateLockManager
        .lockExclusive(new StateLockOptions(GraceMode.TIMEOUT, 10, 0, 100));
    // Exit the shared holder.
    sharedHolderThread.unlockExit();
    sharedHolderThread.join();
    // Create an exclusive owner of the state-lock.
    StateLockingThread exclusiveHolderThread = new StateLockingThread(stateLockManager, true);
    exclusiveHolderThread.start();
    exclusiveHolderThread.waitUntilStateLockAcquired();
    // Expect timeout when the lock is held in exclusive mode.
    mExpected.expect(TimeoutException.class);
    stateLockManager
        .lockExclusive(new StateLockOptions(GraceMode.TIMEOUT, 10, 0, 100));
    // Exit the exclusive holder.
    exclusiveHolderThread.unlockExit();
    exclusiveHolderThread.join();
    // Now the lock can be acquired within the grace-cycle.
    try (LockResource lr = stateLockManager
        .lockExclusive(new StateLockOptions(GraceMode.TIMEOUT, 10, 0, 100))) {
        // Acquired within the grace-cycle with no active holder.
    }
}
/**
 * SQL scalar function {@code is_subnet_of(prefix, address)}: returns true when the IP address
 * lies within the given IP prefix (inclusive of the subnet's min and max addresses).
 */
@Description("Is the IP address in the subnet of IP prefix")
@ScalarFunction("is_subnet_of")
@SqlType(StandardTypes.BOOLEAN)
public static boolean isSubnetOf(@SqlType(StandardTypes.IPPREFIX) Slice ipPrefix, @SqlType(StandardTypes.IPADDRESS) Slice ipAddress) {
    // Result is discarded — the call is made only for its validation of the address encoding
    // (presumably throws on malformed input; confirm against toInetAddress).
    toInetAddress(ipAddress);
    return between(ipAddress, ipSubnetMin(ipPrefix), ipSubnetMax(ipPrefix));
}
// Exercises IS_SUBNET_OF for IPv4 and IPv6 addresses and prefixes, including /0 and /32
// boundary prefixes and prefix-in-prefix containment in both directions.
@Test
public void testIsSubnetOf() {
    assertFunction("IS_SUBNET_OF(IPPREFIX '1.2.3.128/26', IPADDRESS '1.2.3.129')", BOOLEAN, true);
    assertFunction("IS_SUBNET_OF(IPPREFIX '1.2.3.128/26', IPADDRESS '1.2.5.1')", BOOLEAN, false);
    assertFunction("IS_SUBNET_OF(IPPREFIX '1.2.3.128/32', IPADDRESS '1.2.3.128')", BOOLEAN, true);
    assertFunction("IS_SUBNET_OF(IPPREFIX '1.2.3.128/0', IPADDRESS '192.168.5.1')", BOOLEAN, true);
    assertFunction("IS_SUBNET_OF(IPPREFIX '64:ff9b::17/64', IPADDRESS '64:ff9b::ffff:ff')", BOOLEAN, true);
    assertFunction("IS_SUBNET_OF(IPPREFIX '64:ff9b::17/64', IPADDRESS '64:ffff::17')", BOOLEAN, false);
    assertFunction("IS_SUBNET_OF(IPPREFIX '192.168.3.131/26', IPPREFIX '192.168.3.144/30')", BOOLEAN, true);
    assertFunction("IS_SUBNET_OF(IPPREFIX '1.2.3.128/26', IPPREFIX '1.2.5.1/30')", BOOLEAN, false);
    assertFunction("IS_SUBNET_OF(IPPREFIX '1.2.3.128/26', IPPREFIX '1.2.3.128/26')", BOOLEAN, true);
    assertFunction("IS_SUBNET_OF(IPPREFIX '64:ff9b::17/64', IPPREFIX '64:ff9b::ff:25/80')", BOOLEAN, true);
    assertFunction("IS_SUBNET_OF(IPPREFIX '64:ff9b::17/64', IPPREFIX '64:ffff::17/64')", BOOLEAN, false);
    assertFunction("IS_SUBNET_OF(IPPREFIX '2804:431:b000::/37', IPPREFIX '2804:431:b000::/38')", BOOLEAN, true);
    assertFunction("IS_SUBNET_OF(IPPREFIX '2804:431:b000::/38', IPPREFIX '2804:431:b000::/37')", BOOLEAN, false);
    assertFunction("IS_SUBNET_OF(IPPREFIX '170.0.52.0/22', IPPREFIX '170.0.52.0/24')", BOOLEAN, true);
    assertFunction("IS_SUBNET_OF(IPPREFIX '170.0.52.0/24', IPPREFIX '170.0.52.0/22')", BOOLEAN, false);
}
/**
 * Reads the CPD text blocks for the given component, initializing the delegate lazily on first
 * use. Each call returns a fresh iterator (results are not cached).
 */
@Override
public CloseableIterator<ScannerReport.CpdTextBlock> readCpdTextBlocks(int componentRef) {
    ensureInitialized();
    return delegate.readCpdTextBlocks(componentRef);
}
// Verifies that consecutive reads of the same component's CPD text blocks return distinct
// iterator instances, i.e. results are not cached.
@Test
public void readComponentDuplicationBlocks_is_not_cached() {
    writer.writeCpdTextBlocks(COMPONENT_REF, of(DUPLICATION_BLOCK));
    assertThat(underTest.readCpdTextBlocks(COMPONENT_REF)).isNotSameAs(underTest.readCpdTextBlocks(COMPONENT_REF));
}
/**
 * Assigns {@code obj} into the element at {@code idx} via the element's own ASSIGN operator
 * (mutates the existing element in place rather than replacing the array slot).
 *
 * @param idx zero-based index into the backing array (out-of-range throws
 *     ArrayIndexOutOfBoundsException)
 * @param obj the value to assign
 */
public void set(int idx, SelType obj) {
    val[idx].assignOps(SelOp.ASSIGN, obj);
}
// Verifies that set() replaces the element at index 1 and the change is visible in toString().
@Test
public void testSet() {
    assertEquals("STRING_ARRAY: [foo, bar]", one.type() + ": " + one);
    one.set(1, SelString.of("baz"));
    assertEquals("STRING_ARRAY: [foo, baz]", one.type() + ": " + one);
}
/**
 * Routes a unicast statement to a single data source.
 *
 * <p>No logic tables: route to the chosen data source with no table mappers. Exactly one logic
 * table: route to that table's first data node (falling back to the plain data source when the
 * table is not a sharding table). Multiple tables: delegate to
 * {@code routeWithMultipleTables}.
 *
 * @param shardingRule the sharding rule providing data sources and table metadata
 * @return route context containing exactly the computed route units
 */
@Override
public RouteContext route(final ShardingRule shardingRule) {
    RouteContext result = new RouteContext();
    String dataSourceName = getDataSourceName(shardingRule.getDataSourceNames());
    RouteMapper dataSourceMapper = new RouteMapper(dataSourceName, dataSourceName);
    if (logicTables.isEmpty()) {
        result.getRouteUnits().add(new RouteUnit(dataSourceMapper, Collections.emptyList()));
    } else if (1 == logicTables.size()) {
        String logicTableName = logicTables.iterator().next();
        // Non-sharding table: any single data source works, no table rewrite needed.
        if (!shardingRule.findShardingTable(logicTableName).isPresent()) {
            result.getRouteUnits().add(new RouteUnit(dataSourceMapper, Collections.emptyList()));
            return result;
        }
        DataNode dataNode = shardingRule.getDataNode(logicTableName);
        result.getRouteUnits().add(new RouteUnit(new RouteMapper(dataNode.getDataSourceName(), dataNode.getDataSourceName()),
                Collections.singletonList(new RouteMapper(logicTableName, dataNode.getTableName()))));
    } else {
        routeWithMultipleTables(result, shardingRule);
    }
    return result;
}
// Verifies that a cursor statement over a single non-sharding table routes to exactly one
// unit on data source "ds_0".
@Test
void assertRoutingForBroadcastTableWithCursorStatement() {
    RouteContext actual = new ShardingUnicastRoutingEngine(
            mock(CursorStatementContext.class), Collections.singleton("t_config"), new ConnectionContext(Collections::emptySet)).route(shardingRule);
    assertThat(actual.getRouteUnits().size(), is(1));
    assertThat(actual.getRouteUnits().iterator().next().getDataSourceMapper().getActualName(), is("ds_0"));
}
/**
 * Fully decompresses a GZIP stream into memory.
 *
 * <p>Both the wrapping GZIP stream (and therefore {@code raw}) and the output buffer are closed
 * by try-with-resources.
 *
 * @param raw an input stream positioned at GZIP-compressed data
 * @return the decompressed bytes
 * @throws IOException if {@code raw} is not valid GZIP data or reading fails
 */
public static byte[] tryDecompress(InputStream raw) throws IOException {
    try (GZIPInputStream gis = new GZIPInputStream(raw);
            ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        // Use the JDK's stream copy (Java 9+) instead of the hand-rolled copy() helper.
        gis.transferTo(out);
        return out.toByteArray();
    }
}
// Verifies that non-GZIP input is returned unchanged by the byte[] overload of tryDecompress.
// NOTE(review): assertEquals on byte[] compares object identity, so this only passes if the
// overload returns the very same array instance for non-gzip input — confirm that contract.
@Test
void testTryDecompressForNotGzip() throws Exception {
    byte[] testCase = "123".getBytes(Charsets.toCharset("UTF-8"));
    assertEquals(testCase, IoUtils.tryDecompress(testCase));
}
/**
 * Returns every installed VulnDetector plugin that matches the given reconnaissance report,
 * paired with the network services it matched.
 *
 * @param reconnaissanceReport the scan results to match plugins against
 * @return matching results in plugin iteration order
 */
public ImmutableList<PluginMatchingResult<VulnDetector>> getVulnDetectors(
    ReconnaissanceReport reconnaissanceReport) {
  return tsunamiPlugins.entrySet().stream()
      // Only consider plugins registered as vulnerability detectors.
      .filter(entry -> isVulnDetector(entry.getKey()))
      // matchAllVulnDetectors yields an optional-like container; flatten absent results away.
      .map(entry -> matchAllVulnDetectors(entry.getKey(), entry.getValue(), reconnaissanceReport))
      .flatMap(Streams::stream)
      .collect(toImmutableList());
}
// Verifies OS/service filtering: with three services (WordPress, Jenkins, and one without
// software info), the OS-service-filtering detector is selected once and matches only the
// Jenkins service plus the unnamed service (no software info means the name filter passes).
@Test
public void getVulnDetectors_whenOsServiceFilterHasMatchingClass_returnsMatches() {
    NetworkService wordPressService =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 80))
            .setTransportProtocol(TransportProtocol.TCP)
            .setServiceName("http")
            .setSoftware(Software.newBuilder().setName("WordPress"))
            .build();
    NetworkService jenkinsService =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 443))
            .setTransportProtocol(TransportProtocol.TCP)
            .setServiceName("https")
            .setSoftware(Software.newBuilder().setName("Jenkins"))
            .build();
    NetworkService noNameService =
        NetworkService.newBuilder()
            .setNetworkEndpoint(NetworkEndpointUtils.forIpAndPort("1.1.1.1", 12345))
            .setTransportProtocol(TransportProtocol.TCP)
            .build();
    ReconnaissanceReport fakeReconnaissanceReport =
        ReconnaissanceReport.newBuilder()
            .setTargetInfo(
                TargetInfo.newBuilder()
                    .addOperatingSystemClasses(
                        OperatingSystemClass.newBuilder()
                            .setVendor("Vendor")
                            .setOsFamily("FakeOS")
                            .setAccuracy(99)))
            .addNetworkServices(wordPressService)
            .addNetworkServices(jenkinsService)
            .addNetworkServices(noNameService)
            .build();
    PluginManager pluginManager =
        Guice.createInjector(
                new FakePortScannerBootstrapModule(),
                new FakeServiceFingerprinterBootstrapModule(),
                FakeOsServiceFilteringDetector.getModule())
            .getInstance(PluginManager.class);
    ImmutableList<PluginMatchingResult<VulnDetector>> vulnDetectors =
        pluginManager.getVulnDetectors(fakeReconnaissanceReport);
    assertThat(vulnDetectors).hasSize(1);
    assertThat(vulnDetectors.get(0).tsunamiPlugin().getClass())
        .isEqualTo(FakeOsServiceFilteringDetector.class);
    // And matches the ones with the Jenkins software (and noname as well, as no software info is
    // present there; see hasMatchingServiceName)
    assertThat(vulnDetectors.get(0).matchedServices())
        .containsExactly(jenkinsService, noNameService);
}
/**
 * Equality is based on {@code waitTime}, {@code watchDelay} and {@code url}; the exact runtime
 * class must match (subclasses are never equal to ConsulConfig).
 */
@Override
public boolean equals(final Object o) {
    if (o == this) {
        return true;
    }
    // getClass() comparison also rejects null and any subclass.
    if (o == null || o.getClass() != getClass()) {
        return false;
    }
    final ConsulConfig other = (ConsulConfig) o;
    return this.waitTime == other.waitTime
            && this.watchDelay == other.watchDelay
            && Objects.equals(this.url, other.url);
}
// Verifies the equals() contract: reflexivity, equality with an identical config, and
// inequality against null and an unrelated type.
@Test
public void testEquals() {
    assertEquals(consulConfig, consulConfig);
    assertEquals(consulConfig, that);
    assertNotEquals(consulConfig, null);
    assertNotEquals(consulConfig, new Object());
}
/**
 * Stub implementation: always returns {@code null} regardless of the requested service or
 * group. The parameters are ignored.
 */
@Override
public Service queryService(String serviceName, String groupName) throws NacosException {
    return null;
}
// Verifies that the delegate's queryService stub returns null for any service/group pair.
@Test
void testQueryService() throws NacosException {
    Service service = delegate.queryService("a", "b");
    assertNull(service);
}
/**
 * Selects the home sub-cluster for an application by delegating to the router policy
 * configured for the application's queue (or the default queue when none is set).
 *
 * @param appSubmissionContext the application submission context (must not be null)
 * @param blackListSubClusters sub-clusters to exclude from selection
 * @return the chosen home sub-cluster
 * @throws YarnException if the context is null or no policy (including default) exists
 */
public SubClusterId getHomeSubcluster(
    ApplicationSubmissionContext appSubmissionContext,
    List<SubClusterId> blackListSubClusters) throws YarnException {
  // the maps are concurrent, but we need to protect from reset()
  // reinitialization mid-execution by creating a new reference local to this
  // method.
  Map<String, SubClusterPolicyConfiguration> cachedConfs = globalConfMap;
  Map<String, FederationRouterPolicy> policyMap = globalPolicyMap;
  if (appSubmissionContext == null) {
    throw new FederationPolicyException(
        "The ApplicationSubmissionContext cannot be null.");
  }
  String queue = appSubmissionContext.getQueue();
  // respecting YARN behavior we assume default queue if the queue is not
  // specified. This also ensures that "null" can be used as a key to get the
  // default behavior.
  if (queue == null) {
    queue = YarnConfiguration.DEFAULT_QUEUE_NAME;
  }
  FederationRouterPolicy policy =
      getFederationRouterPolicy(cachedConfs, policyMap, queue);
  if (policy == null) {
    // this should never happen, as the to maps are updated together
    throw new FederationPolicyException("No FederationRouterPolicy found "
        + "for queue: " + appSubmissionContext.getQueue() + " (for "
        + "application: " + appSubmissionContext.getApplicationId() + ") "
        + "and no default specified.");
  }
  return policy.getHomeSubcluster(appSubmissionContext, blackListSubClusters);
}
// Verifies policy caching: the facade starts with only the fallback entry, caches a
// UniformRandomRouterPolicy for queue1 after first use, and returns to fallback-only after
// reset().
@Test
public void testGetHomeSubcluster() throws YarnException {
    ApplicationSubmissionContext applicationSubmissionContext =
        mock(ApplicationSubmissionContext.class);
    when(applicationSubmissionContext.getQueue()).thenReturn(queue1);
    // the facade only contains the fallback behavior
    Assert.assertTrue(routerFacade.globalPolicyMap.containsKey(defQueueKey)
        && routerFacade.globalPolicyMap.size() == 1);
    // when invoked it returns the expected SubClusterId.
    SubClusterId chosen =
        routerFacade.getHomeSubcluster(applicationSubmissionContext, null);
    Assert.assertTrue(subClusterIds.contains(chosen));
    // now the caching of policies must have added an entry for this queue
    Assert.assertTrue(routerFacade.globalPolicyMap.size() == 2);
    // after the facade is used the policyMap contains the expected policy type.
    Assert.assertTrue(routerFacade.globalPolicyMap
        .get(queue1) instanceof UniformRandomRouterPolicy);
    // the facade is again empty after reset
    routerFacade.reset();
    // the facade only contains the fallback behavior
    Assert.assertTrue(routerFacade.globalPolicyMap.containsKey(defQueueKey)
        && routerFacade.globalPolicyMap.size() == 1);
}
/**
 * Renders the dialect-specific SQL type for this varchar column: NVARCHAR on MS SQL, VARCHAR2
 * (with an optional " CHAR" length unit) on Oracle, plain VARCHAR elsewhere.
 */
@Override
public String generateSqlType(Dialect dialect) {
    final String dialectId = dialect.getId();
    if (dialectId.equals(MsSql.ID)) {
        return format("NVARCHAR (%d)", columnSize);
    }
    if (dialectId.equals(Oracle.ID)) {
        // Legacy mode (ignoreOracleUnit) omits the " CHAR" length-semantics suffix.
        return format("VARCHAR2 (%d%s)", columnSize, ignoreOracleUnit ? "" : " CHAR");
    }
    return format("VARCHAR (%d)", columnSize);
}
// Verifies that with ignoreOracleUnit=true the Oracle type omits the " CHAR" suffix.
@Test
public void generateSqlType_does_not_set_unit_on_oracle_if_legacy_mode() {
    VarcharColumnDef def = new VarcharColumnDef.Builder()
        .setColumnName("issues")
        .setLimit(10)
        .setIsNullable(true)
        .setIgnoreOracleUnit(true)
        .build();
    assertThat(def.generateSqlType(new Oracle())).isEqualTo("VARCHAR2 (10)");
}
/**
 * Loads the access token matching the given plaintext token, or {@code null} when none exists.
 *
 * <p>The token is encrypted before lookup because tokens are stored encrypted. More than one
 * match indicates a missing unique index and fails hard.
 *
 * @param token plaintext token value
 * @return the stored token, or null if not found
 * @throws IllegalStateException if multiple tokens match (broken unique index)
 */
@Override
@SuppressWarnings("unchecked")
public AccessToken load(String token) {
    DBObject query = new BasicDBObject();
    query.put(AccessTokenImpl.TOKEN, cipher.encrypt(token));
    final List<DBObject> objects = query(AccessTokenImpl.class, query);
    if (objects.isEmpty()) {
        return null;
    }
    if (objects.size() > 1) {
        LOG.error("Multiple access tokens found, this is a serious bug.");
        throw new IllegalStateException("Access tokens collection has no unique index!");
    }
    return fromDBObject(objects.get(0));
}
// Verifies that loading a token that was never stored returns null rather than throwing.
@Test
public void testLoadNoToken() throws Exception {
    final AccessToken accessToken = accessTokenService.load("foobar");
    assertNull("No token should have been returned", accessToken);
}
/**
 * Builds an aggregated KTable from a grouped stream.
 *
 * <p>Convenience overload that delegates to the full {@code build} with a freshly created
 * {@link AggregateParamsFactory}.
 */
public static KTableHolder<GenericKey> build(
    final KGroupedStreamHolder groupedStream,
    final StreamAggregate aggregate,
    final RuntimeBuildContext buildContext,
    final MaterializedFactory materializedFactory) {
  return build(
      groupedStream,
      aggregate,
      buildContext,
      materializedFactory,
      new AggregateParamsFactory()
  );
}
// Verifies that a session-windowed aggregate is built in the expected order: windowedBy with
// inactivity gap + grace, aggregate with initializer/aggregator/merger, then the two
// transformValues passes, and that the final table is the window-bounds-enriched one.
@Test
@SuppressFBWarnings("RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT")
public void shouldBuildSessionWindowedAggregateCorrectly() {
    // Given:
    givenSessionWindowedAggregate();
    // When:
    final KTableHolder<Windowed<GenericKey>> result = windowedAggregate.build(planBuilder, planInfo);
    // Then:
    assertThat(result.getTable(), is(windowedWithWindowBounds));
    verify(gracePeriodClause).toDuration();
    verify(retentionClause).toDuration();
    final InOrder inOrder = Mockito.inOrder(
        groupedStream,
        sessionWindowedStream,
        windowed,
        windowedWithResults,
        windowedWithWindowBounds
    );
    inOrder.verify(groupedStream).windowedBy(SessionWindows.ofInactivityGapAndGrace(WINDOW, gracePeriodClause.toDuration())
    );
    inOrder.verify(sessionWindowedStream).aggregate(
        initializer,
        aggregator,
        merger,
        sessionWindowMaterialized
    );
    inOrder.verify(windowed).transformValues(any(), any(Named.class));
    inOrder.verify(windowedWithResults).transformValues(any(), any(Named.class));
    inOrder.verifyNoMoreInteractions();
    assertThat(result.getTable(), is(windowedWithWindowBounds));
}
/**
 * Resolves a username link handle to its encrypted username.
 *
 * <p>The request is rate limited by remote address before the account lookup. A handle that is
 * not a valid UUID yields INVALID_ARGUMENT; a handle with no account or an account without an
 * encrypted username yields NOT_FOUND.
 *
 * @param request contains the link handle as UUID bytes
 * @return a Mono emitting the response with the username ciphertext
 */
@Override
public Mono<LookupUsernameLinkResponse> lookupUsernameLink(final LookupUsernameLinkRequest request) {
    final UUID linkHandle;
    try {
        linkHandle = UUIDUtil.fromByteString(request.getUsernameLinkHandle());
    } catch (final IllegalArgumentException e) {
        throw Status.INVALID_ARGUMENT
            .withDescription("Could not interpret link handle as UUID")
            .withCause(e)
            .asRuntimeException();
    }
    return RateLimitUtil.rateLimitByRemoteAddress(rateLimiters.getUsernameLinkLookupLimiter())
        .then(Mono.fromFuture(() -> accountsManager.getByUsernameLinkHandle(linkHandle)))
        // Missing account or missing encrypted username both surface as NOT_FOUND.
        .map(maybeAccount -> maybeAccount
            .flatMap(Account::getEncryptedUsername)
            .orElseThrow(Status.NOT_FOUND::asRuntimeException))
        .map(usernameCiphertext -> LookupUsernameLinkResponse.newBuilder()
            .setUsernameCiphertext(ByteString.copyFrom(usernameCiphertext))
            .build());
}
// Verifies the happy path (ciphertext returned for a known handle) and the two NOT_FOUND
// cases: account present but without an encrypted username, and an unknown handle.
@Test
void lookupUsernameLink() {
    final UUID linkHandle = UUID.randomUUID();
    final byte[] usernameCiphertext = TestRandomUtil.nextBytes(32);
    final Account account = mock(Account.class);
    when(account.getEncryptedUsername()).thenReturn(Optional.of(usernameCiphertext));
    when(accountsManager.getByUsernameLinkHandle(linkHandle))
        .thenReturn(CompletableFuture.completedFuture(Optional.of(account)));
    assertEquals(ByteString.copyFrom(usernameCiphertext),
        unauthenticatedServiceStub().lookupUsernameLink(LookupUsernameLinkRequest.newBuilder()
                .setUsernameLinkHandle(UUIDUtil.toByteString(linkHandle))
                .build())
            .getUsernameCiphertext());
    when(account.getEncryptedUsername()).thenReturn(Optional.empty());
    //noinspection ResultOfMethodCallIgnored
    GrpcTestUtils.assertStatusException(Status.NOT_FOUND,
        () -> unauthenticatedServiceStub().lookupUsernameLink(LookupUsernameLinkRequest.newBuilder()
            .setUsernameLinkHandle(UUIDUtil.toByteString(linkHandle))
            .build()));
    //noinspection ResultOfMethodCallIgnored
    GrpcTestUtils.assertStatusException(Status.NOT_FOUND,
        () -> unauthenticatedServiceStub().lookupUsernameLink(LookupUsernameLinkRequest.newBuilder()
            .setUsernameLinkHandle(UUIDUtil.toByteString(UUID.randomUUID()))
            .build()));
}
/**
 * Looks up a property under its relaxed name and joins multiple values into a single
 * comma-separated string; returns {@code null} when the property has no values.
 */
private static String getProperty(String name, Configuration configuration) {
    final String[] values = configuration.getStringArray(relaxPropertyName(name));
    if (values.length == 0) {
        return null;
    }
    return String.join(",", values);
}
// Verifies relaxed-key resolution when several environment variables collapse to the same
// relaxed name: the raw (non-relaxed) value wins, and config.paths sorts ahead of
// config_paths so the real properties file is loaded.
@Test
public void assertHandlesDuplicateRelaxedKeys() throws IOException {
    Map<String, Object> baseProperties = new HashMap<>();
    Map<String, String> mockedEnvironmentVariables = new HashMap<>();
    String configFile = File.createTempFile("pinot-configuration-test-4", ".properties").getAbsolutePath();
    baseProperties.put("server.host", "ENV_SERVER_HOST");
    baseProperties.put("dynamic.env.config", "server.host");
    // Four spellings that all relax to the same key; only the raw one should be used.
    mockedEnvironmentVariables.put("ENV_SERVER_HOST", "test-server-host-1");
    mockedEnvironmentVariables.put("ENV.SERVER.HOST", "test-server-host-2");
    mockedEnvironmentVariables.put("ENV.SERVER_HOST", "test-server-host-3");
    mockedEnvironmentVariables.put("ENV_SERVER.HOST", "test-server-host-4");
    mockedEnvironmentVariables.put("ENV_VAR_HOST", "test-host-1");
    mockedEnvironmentVariables.put("ENV.VAR_HOST", "test-host-2");
    mockedEnvironmentVariables.put("env_var_host", "test-host-3");
    mockedEnvironmentVariables.put("env_var.host", "test-host-4");
    // config.paths sorts before config_paths, so we should get the right config
    mockedEnvironmentVariables.put("config.paths", "classpath:/pinot-configuration-4.properties");
    mockedEnvironmentVariables.put("config_paths", "classpath:/does-not-exist-configuration.properties");
    copyClasspathResource("/pinot-configuration-4.properties", configFile);
    PinotConfiguration configuration = new PinotConfiguration(baseProperties, mockedEnvironmentVariables);
    // These are taken from raw and not relaxed values
    Assert.assertEquals(configuration.getProperty("server.host"), "test-server-host-1");
    Assert.assertEquals(configuration.getProperty("pinot.controller.host"), "test-host-1");
}
/**
 * Returns a new duration equal to this duration minus {@code with}, computed in minutes and
 * keeping this instance's hours-per-day setting. A {@code null} argument is treated as zero,
 * so this instance is returned unchanged.
 */
public WorkDuration subtract(@Nullable WorkDuration with) {
    if (with == null) {
        return this;
    }
    final long remainingMinutes = this.toMinutes() - with.toMinutes();
    return WorkDuration.createFromMinutes(remainingMinutes, this.hoursInDay);
}
// Verifies subtraction across unit boundaries (days/hours, hours/minutes, minutes only) and
// that subtracting null leaves the duration unchanged.
@Test
public void subtract() {
    // 1d 1h - 5h = 4h
    WorkDuration result = WorkDuration.create(1, 1, 0, HOURS_IN_DAY).subtract(WorkDuration.createFromValueAndUnit(5, WorkDuration.UNIT.HOURS, HOURS_IN_DAY));
    assertThat(result.days()).isZero();
    assertThat(result.hours()).isEqualTo(4);
    assertThat(result.minutes()).isZero();
    assertThat(result.hoursInDay()).isEqualTo(HOURS_IN_DAY);
    // 1h 10m - 30m = 40m
    result = WorkDuration.create(0, 1, 10, HOURS_IN_DAY).subtract(WorkDuration.createFromValueAndUnit(30, WorkDuration.UNIT.MINUTES, HOURS_IN_DAY));
    assertThat(result.days()).isZero();
    assertThat(result.hours()).isZero();
    assertThat(result.minutes()).isEqualTo(40);
    assertThat(result.hoursInDay()).isEqualTo(HOURS_IN_DAY);
    // 30m - 20m = 10m
    assertThat(
        WorkDuration.createFromValueAndUnit(30, WorkDuration.UNIT.MINUTES, HOURS_IN_DAY).subtract(WorkDuration.createFromValueAndUnit(20, WorkDuration.UNIT.MINUTES, HOURS_IN_DAY))
            .minutes()).isEqualTo(10);
    assertThat(WorkDuration.createFromValueAndUnit(10, WorkDuration.UNIT.MINUTES, HOURS_IN_DAY).subtract(null).minutes()).isEqualTo(10);
}
/** Returns the declared maximum length of this varchar column. */
public int getColumnSize() {
    return columnSize;
}
// Verifies that the builder carries name, size limit, nullability and default value through
// to the built column definition.
@Test
public void build_string_column_def() {
    VarcharColumnDef def = new VarcharColumnDef.Builder()
        .setColumnName("issues")
        .setLimit(10)
        .setIsNullable(true)
        .setDefaultValue("foo")
        .build();
    assertThat(def.getName()).isEqualTo("issues");
    assertThat(def.getColumnSize()).isEqualTo(10);
    assertThat(def.isNullable()).isTrue();
    assertThat(def.getDefaultValue()).isEqualTo("foo");
}
/**
 * Loads a partial configuration by parsing every matching file in the checkout directory and
 * merging the resulting fragments into a single PartialConfig.
 *
 * @param configRepoCheckoutDirectory root of the checked-out config repository
 * @param context load context (currently used only for file discovery)
 * @return the merged partial configuration
 */
@Override
public PartialConfig load(File configRepoCheckoutDirectory, PartialConfigLoadContext context) {
    File[] allFiles = getFiles(configRepoCheckoutDirectory, context);
    // if context had changed files list then we could parse only new content
    PartialConfig[] allFragments = parseFiles(allFiles);
    PartialConfig partialConfig = new PartialConfig();
    collectFragments(allFragments, partialConfig);
    return partialConfig;
}
// Verifies that loading a directory containing the same pipeline in two files fails with a
// duplicate-pipeline error message.
// NOTE(review): the local `part` is intentionally unused — only the thrown exception matters.
@Test
public void shouldFailToLoadDirectoryWithDuplicatedPipeline() throws Exception {
    GoConfigMother mother = new GoConfigMother();
    PipelineConfig pipe1 = mother.cruiseConfigWithOnePipelineGroup().getAllPipelineConfigs().get(0);
    helper.addFileWithPipeline("pipe1.gocd.xml", pipe1);
    helper.addFileWithPipeline("pipedup.gocd.xml", pipe1);
    try {
        PartialConfig part = xmlPartialProvider.load(tmpFolder, mock(PartialConfigLoadContext.class));
    } catch (Exception ex) {
        assertThat(ex.getMessage(), is("You have defined multiple pipelines called 'pipeline1'. Pipeline names must be unique."));
        return;
    }
    fail("should have thrown");
}
/**
 * Merges new double-column statistics into the running aggregate in place.
 *
 * <p>Low/high bounds, null counts, the NDV estimate (via the distinct-value estimators) and the
 * KLL histogram estimator are each merged; the combined data is written back into
 * {@code aggregateColStats}.
 *
 * @param aggregateColStats the aggregate to update (mutated)
 * @param newColStats the incoming statistics to fold in
 */
@Override
public void merge(ColumnStatisticsObj aggregateColStats, ColumnStatisticsObj newColStats) {
    LOG.debug("Merging statistics: [aggregateColStats:{}, newColStats: {}]", aggregateColStats, newColStats);
    DoubleColumnStatsDataInspector aggregateData = doubleInspectorFromStats(aggregateColStats);
    DoubleColumnStatsDataInspector newData = doubleInspectorFromStats(newColStats);
    // Bounds are only written when the merge produced a defined value.
    Double lowValue = mergeLowValue(getLowValue(aggregateData), getLowValue(newData));
    if (lowValue != null) {
        aggregateData.setLowValue(lowValue);
    }
    Double highValue = mergeHighValue(getHighValue(aggregateData), getHighValue(newData));
    if (highValue != null) {
        aggregateData.setHighValue(highValue);
    }
    aggregateData.setNumNulls(mergeNumNulls(aggregateData.getNumNulls(), newData.getNumNulls()));
    NumDistinctValueEstimator oldNDVEst = aggregateData.getNdvEstimator();
    NumDistinctValueEstimator newNDVEst = newData.getNdvEstimator();
    List<NumDistinctValueEstimator> ndvEstimatorsList = Arrays.asList(oldNDVEst, newNDVEst);
    aggregateData.setNumDVs(mergeNumDistinctValueEstimator(aggregateColStats.getColName(),
        ndvEstimatorsList, aggregateData.getNumDVs(), newData.getNumDVs()));
    // The merged estimator is folded into slot 0 of the list.
    aggregateData.setNdvEstimator(ndvEstimatorsList.get(0));
    KllHistogramEstimator oldKllEst = aggregateData.getHistogramEstimator();
    KllHistogramEstimator newKllEst = newData.getHistogramEstimator();
    aggregateData.setHistogramEstimator(mergeHistogramEstimator(aggregateColStats.getColName(), oldKllEst, newKllEst));
    aggregateColStats.getStatsData().setDoubleStats(aggregateData);
}
// Verifies that merging an empty/null-bounded aggregate with populated stats adopts the new
// side's bounds, counts, HLL and KLL sketches wholesale.
@Test
public void testMergeNullWithNonNullValues() {
    ColumnStatisticsObj aggrObj = createColumnStatisticsObj(new ColStatsBuilder<>(double.class)
        .low(null)
        .high(null)
        .numNulls(0)
        .numDVs(0)
        .build());
    ColumnStatisticsObj newObj = createColumnStatisticsObj(new ColStatsBuilder<>(double.class)
        .low(1d)
        .high(3d)
        .numNulls(4)
        .numDVs(2)
        .hll(1d, 3d, 3d)
        .kll(1d, 3d, 3d)
        .build());
    merger.merge(aggrObj, newObj);
    ColumnStatisticsData expectedColumnStatisticsData = new ColStatsBuilder<>(double.class)
        .low(1d)
        .high(3d)
        .numNulls(4)
        .numDVs(2)
        .hll(1d, 3d, 3d)
        .kll(1d, 3d, 3d)
        .build();
    assertEquals(expectedColumnStatisticsData, aggrObj.getStatsData());
}
/** Returns the progress tracker for the sort phase. */
public Progress getSortPhase() {
    return sortPhase;
}
// Verifies that with a restrictive umask (077) the map output and index files are still
// created with 0640 permissions so the shuffle service can read them.
@Test
public void testShufflePermissions() throws Exception {
    JobConf conf = new JobConf();
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
    conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
    MapOutputFile mof = new MROutputFiles();
    mof.setConf(conf);
    TaskAttemptID attemptId = new TaskAttemptID("12345", 1, TaskType.MAP, 1, 1);
    MapTask mockTask = mock(MapTask.class);
    doReturn(mof).when(mockTask).getMapOutputFile();
    doReturn(attemptId).when(mockTask).getTaskID();
    doReturn(new Progress()).when(mockTask).getSortPhase();
    TaskReporter mockReporter = mock(TaskReporter.class);
    doReturn(new Counter()).when(mockReporter).getCounter(
        any(TaskCounter.class));
    MapOutputCollector.Context ctx = new MapOutputCollector.Context(mockTask, conf, mockReporter);
    MapOutputBuffer<Object, Object> mob = new MapOutputBuffer<>();
    mob.init(ctx);
    mob.flush();
    mob.close();
    Path outputFile = mof.getOutputFile();
    FileSystem lfs = FileSystem.getLocal(conf);
    FsPermission perms = lfs.getFileStatus(outputFile).getPermission();
    Assert.assertEquals("Incorrect output file perms",
        (short)0640, perms.toShort());
    Path indexFile = mof.getOutputIndexFile();
    perms = lfs.getFileStatus(indexFile).getPermission();
    Assert.assertEquals("Incorrect index file perms",
        (short)0640, perms.toShort());
}
/**
 * Runs the containerization build, writes the requested output files (image digest, id, JSON
 * metadata), and maps every known failure mode to a BuildStepsExecutionException with a
 * helpful suggestion.
 *
 * @return the built container
 * @throws BuildStepsExecutionException on any mapped build failure
 * @throws IOException if writing an output file fails
 * @throws CacheDirectoryCreationException if the build cache cannot be created
 */
public JibContainer runBuild()
    throws BuildStepsExecutionException, IOException, CacheDirectoryCreationException {
  try {
    logger.accept(LogEvent.lifecycle(""));
    logger.accept(LogEvent.lifecycle(startupMessage));
    JibContainer jibContainer = jibContainerBuilder.containerize(containerizer);
    logger.accept(LogEvent.lifecycle(""));
    logger.accept(LogEvent.lifecycle(successMessage));
    // when an image is built, write out the digest and id
    if (imageDigestOutputPath != null) {
      String imageDigest = jibContainer.getDigest().toString();
      Files.write(imageDigestOutputPath, imageDigest.getBytes(StandardCharsets.UTF_8));
    }
    if (imageIdOutputPath != null) {
      String imageId = jibContainer.getImageId().toString();
      Files.write(imageIdOutputPath, imageId.getBytes(StandardCharsets.UTF_8));
    }
    if (imageJsonOutputPath != null) {
      ImageMetadataOutput metadataOutput = ImageMetadataOutput.fromJibContainer(jibContainer);
      String imageJson = metadataOutput.toJson();
      Files.write(imageJsonOutputPath, imageJson.getBytes(StandardCharsets.UTF_8));
    }
    return jibContainer;
  } catch (HttpHostConnectException ex) {
    // Failed to connect to registry.
    throw new BuildStepsExecutionException(helpfulSuggestions.forHttpHostConnect(), ex);
  } catch (RegistryUnauthorizedException ex) {
    handleRegistryUnauthorizedException(ex, helpfulSuggestions);
  } catch (RegistryCredentialsNotSentException ex) {
    throw new BuildStepsExecutionException(helpfulSuggestions.forCredentialsNotSent(), ex);
  } catch (RegistryAuthenticationFailedException ex) {
    if (ex.getCause() instanceof ResponseException) {
      // Auth failure backed by an HTTP response: re-wrap as an unauthorized error so the
      // common handler produces the credential-oriented suggestion.
      handleRegistryUnauthorizedException(
          new RegistryUnauthorizedException(
              ex.getServerUrl(), ex.getImageName(), (ResponseException) ex.getCause()),
          helpfulSuggestions);
    } else {
      // Unknown cause
      throw new BuildStepsExecutionException(helpfulSuggestions.none(), ex);
    }
  } catch (UnknownHostException ex) {
    throw new BuildStepsExecutionException(helpfulSuggestions.forUnknownHost(), ex);
  } catch (InsecureRegistryException ex) {
    throw new BuildStepsExecutionException(helpfulSuggestions.forInsecureRegistry(), ex);
  } catch (RegistryException ex) {
    String message = Verify.verifyNotNull(ex.getMessage()); // keep null-away happy
    throw new BuildStepsExecutionException(message, ex);
  } catch (ExecutionException ex) {
    String message = ex.getCause().getMessage();
    throw new BuildStepsExecutionException(
        message == null ? "(null exception message)" : message, ex.getCause());
  } catch (InterruptedException ex) {
    Thread.currentThread().interrupt();
    throw new BuildStepsExecutionException(helpfulSuggestions.none(), ex);
  }
  // The unauthorized handlers above always throw, so this line is never reached.
  throw new IllegalStateException("unreachable");
}
// Verifies that a RegistryCredentialsNotSentException from containerize() is translated into a
// BuildStepsExecutionException carrying the credentials-not-sent suggestion.
@Test
public void testBuildImage_registryCredentialsNotSentException()
    throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,
        ExecutionException {
    Mockito.doThrow(mockRegistryCredentialsNotSentException)
        .when(mockJibContainerBuilder)
        .containerize(mockContainerizer);
    try {
        testJibBuildRunner.runBuild();
        Assert.fail();
    } catch (BuildStepsExecutionException ex) {
        Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.forCredentialsNotSent(), ex.getMessage());
    }
}
/**
 * Converts a satoshi amount to its BTC value by shifting the decimal point left by the
 * smallest-unit exponent. Exact for the full {@code long} range.
 */
public static BigDecimal satoshiToBtc(long satoshis) {
    return BigDecimal.valueOf(satoshis).movePointLeft(SMALLEST_UNIT_EXPONENT);
}
// Verifies satoshi-to-BTC conversion at the extremes of long, +/- one satoshi, zero, one coin
// and fifty coins, using value comparison (comparesEqualTo) rather than equals to ignore scale.
@Test
public void testSatoshiToBtc() {
    assertThat(new BigDecimal("-92233720368.54775808"), Matchers.comparesEqualTo(satoshiToBtc(Long.MIN_VALUE)));
    assertThat(new BigDecimal("-0.00000001"), Matchers.comparesEqualTo(satoshiToBtc(NEGATIVE_SATOSHI.value)));
    assertThat(BigDecimal.ZERO, Matchers.comparesEqualTo(satoshiToBtc(0L)));
    assertThat(new BigDecimal("0.00000001"), Matchers.comparesEqualTo(satoshiToBtc(SATOSHI.value)));
    assertThat(BigDecimal.ONE, Matchers.comparesEqualTo(satoshiToBtc(COIN.value)));
    assertThat(new BigDecimal(50), Matchers.comparesEqualTo(satoshiToBtc(FIFTY_COINS.value)));
    assertThat(new BigDecimal("92233720368.54775807"), Matchers.comparesEqualTo(satoshiToBtc(Long.MAX_VALUE)));
}
/**
 * Constant-folding implementation of SQL {@code year()}: extracts the year from a DATE or
 * DATETIME constant as a SMALLINT. Registered as monotonic for both argument types.
 */
@ConstantFunction.List(list = {
        @ConstantFunction(name = "year", argTypes = {DATETIME}, returnType = SMALLINT, isMonotonic = true),
        @ConstantFunction(name = "year", argTypes = {DATE}, returnType = SMALLINT, isMonotonic = true)
})
public static ConstantOperator year(ConstantOperator arg) {
    // Narrowing cast is safe for supported SQL year ranges (fits in a short).
    return ConstantOperator.createSmallInt((short) arg.getDatetime().getYear());
}
// Verifies that year() on a DATETIME constant returns SMALLINT 2000.
@Test
public void year() {
    ConstantOperator date = ConstantOperator.createDatetime(LocalDateTime.of(2000, 10, 21, 12, 0));
    ConstantOperator result = ScalarOperatorFunctions.year(date);
    assertEquals(Type.SMALLINT, result.getType());
    assertEquals(2000, result.getSmallint());
}
/** Sets the payload carried by this drag-and-drop container. */
public void setData( String data ) {
    this.data = data;
}
// Verifies that setData() overwrites the value supplied at construction time.
@Test
public void setData() {
    DragAndDropContainer dnd = new DragAndDropContainer( DragAndDropContainer.TYPE_BASE_STEP_TYPE, "Step Name" );
    dnd.setData( "Another Step" );
    assertEquals( "Another Step", dnd.getData() );
}
/**
 * Processes an incoming GBA supply message ("verstrekking aan afnemer").
 *
 * <p>The message is first correlated with a previously sent request via its reference id; a
 * mismatch is remote-logged and processing stops. Otherwise the message is dispatched on its
 * event type (Null, Ag01, Ag31, Af01, Af11, Gv01, Ng01, Wa11) to the matching handler, and a
 * processed-event is remote-logged where applicable.
 *
 * @param verstrekkingAanAfnemer the incoming message to process
 */
public void processVerstrekkingAanAfnemer(VerstrekkingAanAfnemer verstrekkingAanAfnemer){
    if (logger.isDebugEnabled()) logger.debug("Processing verstrekkingAanAfnemer: {}", marshallElement(verstrekkingAanAfnemer));
    // Correlate with the originally sent message by our own reference id.
    Afnemersbericht afnemersbericht = afnemersberichtRepository.findByOnzeReferentie(verstrekkingAanAfnemer.getReferentieId());
    if(mismatch(verstrekkingAanAfnemer, afnemersbericht)){
        digidXClient.remoteLogBericht(Log.NO_RELATION_TO_SENT_MESSAGE, verstrekkingAanAfnemer, afnemersbericht);
        return;
    }
    switch (verstrekkingAanAfnemer.getGebeurtenissoort().getNaam()) {
        case "Null" -> {
            logger.info("Start processing Null message");
            dglResponseService.processNullMessage(verstrekkingAanAfnemer.getGebeurtenisinhoud().getNull(), afnemersbericht);
            digidXClient.remoteLogWithoutRelatingToAccount(Log.MESSAGE_PROCESSED, "Null");
        }
        case "Ag01" -> {
            logger.info("Start processing Ag01 message");
            dglResponseService.processAg01(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAg01(), afnemersbericht);
            digidXClient.remoteLogBericht(Log.MESSAGE_PROCESSED, verstrekkingAanAfnemer, afnemersbericht);
        }
        case "Ag31" -> {
            logger.info("Start processing Ag31 message");
            dglResponseService.processAg31(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAg31(), afnemersbericht);
            digidXClient.remoteLogBericht(Log.MESSAGE_PROCESSED, verstrekkingAanAfnemer, afnemersbericht);
        }
        case "Af01" -> {
            logger.info("Start processing Af01 message");
            dglResponseService.processAf01(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAf01(), afnemersbericht);
            digidXClient.remoteLogBericht(Log.MESSAGE_PROCESSED, verstrekkingAanAfnemer, afnemersbericht);
        }
        case "Af11" -> {
            logger.info("Start processing Af11 message");
            dglResponseService.processAf11(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAf11(), afnemersbericht);
            digidXClient.remoteLogWithoutRelatingToAccount(Log.MESSAGE_PROCESSED, "Af11");
        }
        case "Gv01" -> {
            logger.info("Start processing Gv01 message");
            Gv01 gv01 = verstrekkingAanAfnemer.getGebeurtenisinhoud().getGv01();
            dglResponseService.processGv01(gv01);
            // Prefer the "old value" BSN from the category data; fall back to the current BSN.
            String bsn = CategorieUtil.findBsnOudeWaarde(gv01.getCategorie());
            if (bsn == null) {
                bsn = CategorieUtil.findBsn(gv01.getCategorie());
            }
            digidXClient.remoteLogSpontaneVerstrekking(Log.MESSAGE_PROCESSED, "Gv01", gv01.getANummer(), bsn);
        }
        case "Ng01" -> {
            logger.info("Start processing Ng01 message");
            Ng01 ng01 = verstrekkingAanAfnemer.getGebeurtenisinhoud().getNg01();
            dglResponseService.processNg01(ng01);
            digidXClient.remoteLogSpontaneVerstrekking(Log.MESSAGE_PROCESSED, "Ng01", CategorieUtil.findANummer(ng01.getCategorie()), "");
        }
        case "Wa11" -> {
            logger.info("Start processing Wa11 message");
            // Wa11 is processed without any remote logging (unlike the other event types).
            dglResponseService.processWa11(verstrekkingAanAfnemer.getGebeurtenisinhoud().getWa11());
        }
    }
}
/** Verifies that a delivery typed "Wa11" is routed to {@code DglResponseService#processWa11}. */
@Test
public void testProcessWa11() {
    // Arrange: build a Wa11 payload and stub the delivery to carry it.
    String aNummer = "SSSSSSSSSS";
    String nieuwANummer = "SSSSSSSSSS";
    String datumGeldigheid = "SSSSSSSS";
    Wa11 wa11 = TestDglMessagesUtil.createTestWa11(aNummer, nieuwANummer, datumGeldigheid);

    VerstrekkingInhoudType inhoud = new VerstrekkingInhoudType();
    inhoud.setWa11(wa11);
    GeversioneerdType gebeurtenissoort = new GeversioneerdType();
    gebeurtenissoort.setNaam("Wa11");

    when(verstrekkingAanAfnemer.getReferentieId()).thenReturn(null);
    when(verstrekkingAanAfnemer.getGebeurtenissoort()).thenReturn(gebeurtenissoort);
    when(verstrekkingAanAfnemer.getGebeurtenisinhoud()).thenReturn(inhoud);

    // Act
    classUnderTest.processVerstrekkingAanAfnemer(verstrekkingAanAfnemer);

    // Assert: exactly one dispatch to the Wa11 handler with the stubbed payload.
    verify(dglResponseService, times(1)).processWa11(wa11);
}
/**
 * Returns the cardinality of the intersection of this collection with {@code bitmaps},
 * honoring the inversion flag on either side:
 * <ul>
 *   <li>A ∧ B        → andCardinality</li>
 *   <li>A ∧ ¬B       → andNotCardinality(A, B)</li>
 *   <li>¬A ∧ B       → andNotCardinality(B, A)</li>
 *   <li>¬A ∧ ¬B      → numDocs − |A ∨ B|  (De Morgan)</li>
 * </ul>
 */
public int andCardinality(BitmapCollection bitmaps) {
  ImmutableRoaringBitmap mine = reduceInternal();
  ImmutableRoaringBitmap theirs = bitmaps.reduceInternal();
  if (_inverted && bitmaps._inverted) {
    // ¬A ∧ ¬B == ¬(A ∨ B)
    return _numDocs - ImmutableRoaringBitmap.orCardinality(mine, theirs);
  }
  if (_inverted) {
    // ¬A ∧ B == B \ A
    return ImmutableRoaringBitmap.andNotCardinality(theirs, mine);
  }
  if (bitmaps._inverted) {
    // A ∧ ¬B == A \ B
    return ImmutableRoaringBitmap.andNotCardinality(mine, theirs);
  }
  return ImmutableRoaringBitmap.andCardinality(mine, theirs);
}
/**
 * Checks that andCardinality yields the expected count for every combination of
 * whole vs. split operand bitmaps — splitting must never change the result.
 */
@Test(dataProvider = "andCardinalityTestCases")
public void testAndCardinality(int numDocs, ImmutableRoaringBitmap left, boolean leftInverted,
    ImmutableRoaringBitmap right, boolean rightInverted, int expected) {
  var splitLeft = split(left);
  var splitRight = split(right);
  assertEquals(new BitmapCollection(numDocs, leftInverted, left)
      .andCardinality(new BitmapCollection(numDocs, rightInverted, right)), expected);
  assertEquals(new BitmapCollection(numDocs, leftInverted, splitLeft)
      .andCardinality(new BitmapCollection(numDocs, rightInverted, right)), expected);
  assertEquals(new BitmapCollection(numDocs, leftInverted, left)
      .andCardinality(new BitmapCollection(numDocs, rightInverted, splitRight)), expected);
  assertEquals(new BitmapCollection(numDocs, leftInverted, splitLeft)
      .andCardinality(new BitmapCollection(numDocs, rightInverted, splitRight)), expected);
}
/**
 * Builds VERIFY-typed distro data for every ephemeral client this node is responsible for.
 * Each entry carries the client id and its current revision so peers can verify freshness.
 *
 * @return the verify payloads, or {@code null} when there is nothing to verify
 *         (NOTE(review): callers appear to rely on the null-when-empty contract — kept as-is)
 */
@Override
public List<DistroData> getVerifyData() {
    List<DistroData> result = null;
    // The serializer bean is loop-invariant; resolve it once instead of per client.
    Serializer serializer = ApplicationUtils.getBean(Serializer.class);
    for (String each : clientManager.allClientId()) {
        Client client = clientManager.getClient(each);
        // Only ephemeral clients participate in distro verification.
        if (null == client || !client.isEphemeral()) {
            continue;
        }
        if (clientManager.isResponsibleClient(client)) {
            DistroClientVerifyInfo verifyData =
                    new DistroClientVerifyInfo(client.getClientId(), client.getRevision());
            DistroKey distroKey = new DistroKey(client.getClientId(), TYPE);
            DistroData data = new DistroData(distroKey, serializer.serialize(verifyData));
            data.setType(DataOperation.VERIFY);
            if (result == null) {
                result = new LinkedList<>();
            }
            result.add(data);
        }
    }
    return result;
}
/**
 * Verifies that getVerifyData produces exactly one VERIFY entry keyed by the
 * responsible client's id and the processor's resource type.
 */
@Test
void testGetVerifyData() {
    client.setRevision(10L);
    when(clientManager.allClientId()).thenReturn(Collections.singletonList(CLIENT_ID));
    List<DistroData> list = distroClientDataProcessor.getVerifyData();
    assertEquals(1, list.size());
    // Fetch the single element once instead of creating a fresh iterator per assertion.
    DistroData data = list.iterator().next();
    assertEquals(DataOperation.VERIFY, data.getType());
    assertEquals(CLIENT_ID, data.getDistroKey().getResourceKey());
    assertEquals(DistroClientDataProcessor.TYPE, data.getDistroKey().getResourceType());
}