focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Returns metadata for the named table in the given data source, or null when no
// such table exists (CollUtil.getFirst yields null for an empty list).
@Override
public TableInfo getTable(Long dataSourceConfigId, String name) {
    return CollUtil.getFirst(getTableList0(dataSourceConfigId, name));
}
@Test
public void testGetTable() {
    // prepare parameters
    Long dataSourceConfigId = randomLongId();
    // mock collaborators: an in-memory H2 data source config returned by the config service
    DataSourceConfigDO dataSourceConfig = new DataSourceConfigDO().setUsername("sa").setPassword("")
            .setUrl("jdbc:h2:mem:testdb");
    when(dataSourceConfigService.getDataSourceConfig(eq(dataSourceConfigId)))
            .thenReturn(dataSourceConfig);
    // invoke
    TableInfo tableInfo = databaseTableService.getTable(dataSourceConfigId, "infra_config");
    // assert
    assertTableInfo(tableInfo);
}
/**
 * Asserts that the actual string contains no match of {@code regex}.
 * Fails when the subject is null, or when a match is found (reporting the first
 * matched substring and the full string).
 */
@GwtIncompatible("java.util.regex.Pattern")
public void doesNotContainMatch(@Nullable Pattern regex) {
    checkNotNull(regex);
    if (actual == null) {
        // a null subject cannot satisfy the assertion; fail with context
        failWithActual("expected a string that does not contain a match for", regex);
        return;
    }
    Matcher matcher = regex.matcher(actual);
    if (matcher.find()) {
        failWithoutActual(
            fact("expected not to contain a match for", regex),
            fact("but contained", matcher.group()),
            fact("full string", actualCustomStringRepresentationForPackageMembersToCall()));
    }
}
// Verifies the happy path (no match) and the failure message when a match exists.
@Test
public void stringDoesNotContainMatchString() {
    assertThat("aaa").doesNotContainMatch(".*b.*");
    expectFailureWhenTestingThat("aba").doesNotContainMatch(".*b.*");
    assertFailureValue("expected not to contain a match for", ".*b.*");
}
// Coerces a JSON node to a SQL TIME value. Numeric nodes are passed through as a long;
// text nodes must parse as a long. Any other node type raises an invalid-conversion
// error, and an unparseable string raises a string-coercion error. Range checking is
// delegated to returnTimeOrThrow.
static Time toTime(final JsonNode object) {
    if (object instanceof NumericNode) {
        return returnTimeOrThrow(object.asLong());
    }
    if (object instanceof TextNode) {
        try {
            return returnTimeOrThrow(Long.parseLong(object.textValue()));
        } catch (final NumberFormatException e) {
            throw failedStringCoercionException(SqlBaseType.TIME);
        }
    }
    throw invalidConversionException(object, SqlBaseType.TIME);
}
// Verifies that a value exceeding the maximum millis-of-day (86,400,000) is rejected.
// Fix: the original try/catch passed silently when NO exception was thrown; a fail()
// after the call makes the absence of the expected exception a test failure.
@Test
public void shouldNotConvertOverflowNumberToTime() {
    try {
        JsonSerdeUtils.toTime(JsonNodeFactory.instance.numberNode(3000000000L));
        fail("Expected toTime to throw for an out-of-range TIME value");
    } catch (Exception e) {
        assertThat(e.getMessage(),
            equalTo("Time values must use number of milliseconds greater than 0 and less than 86400000."));
    }
}
/**
 * Builds the response data for a BATCH_CREATE request.
 *
 * When the resource returned a BatchCreateKVResult AND the client asked for the created
 * entities back, each element is projected and wrapped in a CreateIdEntityStatus;
 * otherwise only ids/statuses are returned via CreateIdStatus. A null results list, or a
 * null element inside it, is a resource-implementation error and maps to HTTP 500.
 */
@Override
public RestLiResponseData<BatchCreateResponseEnvelope> buildRestLiResponseData(Request request,
    RoutingResult routingResult, Object result, Map<String, String> headers, List<HttpCookie> cookies) {
  // Translate canonical keys to alternative keys when the request used an alt-key format.
  Object altKey = null;
  if (routingResult.getContext().hasParameter(RestConstants.ALT_KEY_PARAM)) {
    altKey = routingResult.getContext().getParameter(RestConstants.ALT_KEY_PARAM);
  }
  final ProtocolVersion protocolVersion = ProtocolVersionUtil.extractProtocolVersion(headers);
  final ResourceContext resourceContext = routingResult.getContext();
  if (result instanceof BatchCreateKVResult && resourceContext.isReturnEntityRequested()) {
    // "return entity" branch: entities are echoed back (possibly projected).
    BatchCreateKVResult<?, ?> list = (BatchCreateKVResult<?, ?>) result;
    if (list.getResults() == null) {
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Unexpected null encountered. Null List inside of a BatchCreateKVResult returned by the resource method: "
              + routingResult.getResourceMethod());
    }
    List<BatchCreateResponseEnvelope.CollectionCreateResponseItem> collectionCreateList =
        new ArrayList<>(list.getResults().size());
    // Time the projection work for framework metrics.
    TimingContextUtil.beginTiming(routingResult.getContext().getRawRequestContext(),
        FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key());
    for (CreateKVResponse<?, ?> createKVResponse : list.getResults()) {
      if (createKVResponse == null) {
        throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
            "Unexpected null encountered. Null element inside of List inside of a BatchCreateKVResult returned by the resource method: "
                + routingResult.getResourceMethod());
      } else {
        Object id = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(createKVResponse.getId(), routingResult);
        if (createKVResponse.getError() == null) {
          // Apply field projection to the returned entity (entity may legitimately be null).
          DataMap entityData = createKVResponse.getEntity() != null
              ? createKVResponse.getEntity().data() : null;
          final DataMap data = RestUtils.projectFields(entityData, resourceContext);
          CreateIdEntityStatus<Object, RecordTemplate> entry = new CreateIdEntityStatus<>(
              createKVResponse.getStatus().getCode(),
              id,
              new AnyRecord(data),
              getLocationUri(request, id, altKey, protocolVersion), // location uri
              null,
              protocolVersion);
          collectionCreateList.add(new BatchCreateResponseEnvelope.CollectionCreateResponseItem(entry));
        } else {
          // Per-element error: carried through as an error item, not a whole-request failure.
          collectionCreateList.add(new BatchCreateResponseEnvelope.CollectionCreateResponseItem(createKVResponse.getError()));
        }
      }
    }
    TimingContextUtil.endTiming(routingResult.getContext().getRawRequestContext(),
        FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key());
    // 'true' marks this envelope as a get-after-create (entities included).
    return new RestLiResponseDataImpl<>(
        new BatchCreateResponseEnvelope(HttpStatus.S_200_OK, collectionCreateList, true), headers, cookies);
  } else {
    // id-only branch: no entities echoed back.
    List<? extends CreateResponse> createResponses = extractCreateResponseList(result);
    //Verify that a null list was not passed into the BatchCreateResult. If so, this is a developer error.
    if (createResponses == null) {
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Unexpected null encountered. Null List inside of a BatchCreateResult returned by the resource method: "
              + routingResult.getResourceMethod());
    }
    List<BatchCreateResponseEnvelope.CollectionCreateResponseItem> collectionCreateList =
        new ArrayList<>(createResponses.size());
    for (CreateResponse createResponse : createResponses) {
      //Verify that a null element was not passed into the BatchCreateResult list. If so, this is a developer error.
      if (createResponse == null) {
        throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
            "Unexpected null encountered. Null element inside of List inside of a BatchCreateResult returned by the resource method: "
                + routingResult.getResourceMethod());
      } else {
        Object id = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(createResponse.getId(), routingResult);
        if (createResponse.getError() == null) {
          CreateIdStatus<Object> entry = new CreateIdStatus<>(
              createResponse.getStatus().getCode(),
              id,
              getLocationUri(request, id, altKey, protocolVersion), // location uri
              null,
              protocolVersion);
          collectionCreateList.add(new BatchCreateResponseEnvelope.CollectionCreateResponseItem(entry));
        } else {
          collectionCreateList.add(new BatchCreateResponseEnvelope.CollectionCreateResponseItem(createResponse.getError()));
        }
      }
    }
    // 'false': entities not included in this envelope.
    return new RestLiResponseDataImpl<>(
        new BatchCreateResponseEnvelope(HttpStatus.S_200_OK, collectionCreateList, false), headers, cookies);
  }
}
// Data-driven test covering both the entity-returning and id-only BATCH_CREATE paths:
// builds a mocked routing context, invokes the response builder, and checks that the
// envelope's get-after-create flag, per-item statuses, locations, and (optionally)
// echoed entities match the provided create responses.
@Test(dataProvider = "returnEntityData")
@SuppressWarnings({"Duplicates", "unchecked"})
public void testReturnEntityInBuildRestLiResponseData(Object batchCreateResult,
    List<CreateResponse> createResponses, boolean isReturnEntityRequested,
    boolean expectEntityReturned) throws URISyntaxException {
  ServerResourceContext mockContext = EasyMock.createMock(ServerResourceContext.class);
  EasyMock.expect(mockContext.hasParameter(RestConstants.ALT_KEY_PARAM)).andReturn(false).atLeastOnce();
  EasyMock.expect(mockContext.isReturnEntityRequested()).andReturn(isReturnEntityRequested);
  EasyMock.expect(mockContext.getProjectionMode()).andReturn(ProjectionMode.AUTOMATIC);
  EasyMock.expect(mockContext.getProjectionMask()).andReturn(null);
  EasyMock.expect(mockContext.getRawRequestContext()).andReturn(new RequestContext()).anyTimes();
  EasyMock.expect(mockContext.getAlwaysProjectedFields()).andReturn(Collections.emptySet()).anyTimes();
  EasyMock.replay(mockContext);
  ResourceMethodDescriptor mockDescriptor = getMockResourceMethodDescriptor(null);
  RoutingResult routingResult = new RoutingResult(mockContext, mockDescriptor);
  BatchCreateResponseBuilder responseBuilder = new BatchCreateResponseBuilder(new ErrorResponseBuilder());
  RestRequest request = new RestRequestBuilder(new URI("/foo")).build();
  RestLiResponseData<BatchCreateResponseEnvelope> responseData =
      responseBuilder.buildRestLiResponseData(request, routingResult, batchCreateResult,
          Collections.emptyMap(), Collections.emptyList());
  BatchCreateResponseEnvelope responseEnvelope = responseData.getResponseEnvelope();
  Assert.assertEquals(responseEnvelope.isGetAfterCreate(), expectEntityReturned);
  Assert.assertEquals(responseEnvelope.getCreateResponses().size(), createResponses.size());
  for (int i = 0; i < createResponses.size(); i++) {
    CreateIdStatus<Long> createIdStatus =
        (CreateIdStatus<Long>) responseEnvelope.getCreateResponses().get(i).getRecord();
    CreateResponse createResponse = createResponses.get(i);
    Assert.assertEquals(createIdStatus.getStatus().intValue(), HttpStatus.S_201_CREATED.getCode());
    Assert.assertEquals(createIdStatus.getLocation(), "/foo/" + createResponse.getId());
    if (expectEntityReturned) {
      // Entity path returns the richer CreateIdEntityStatus subtype.
      CreateIdEntityStatus<Long, Foo> createIdEntityStatus = (CreateIdEntityStatus<Long, Foo>) createIdStatus;
      Assert.assertEquals(createIdEntityStatus.getEntity(), ((CreateKVResponse) createResponse).getEntity());
    }
  }
}
protected static String resolveEnvVars(String input) { Preconditions.checkNotNull(input); // match ${ENV_VAR_NAME} Pattern p = Pattern.compile("\\$\\{(\\w+)\\}"); Matcher m = p.matcher(input); StringBuffer sb = new StringBuffer(); while (m.find()) { String envVarName = m.group(1); String envVarValue = System.getenv(envVarName); m.appendReplacement(sb, null == envVarValue ? "" : envVarValue); } m.appendTail(sb); return sb.toString(); }
// Sets a temporary environment variable (via SystemLambda) and checks that its
// placeholder is substituted while surrounding text is preserved.
@Test
public void resolveEnvVar() throws Exception {
    SystemLambda.withEnvironmentVariable("VARNAME", "varvalue").execute(() -> {
        String resolved = EnvVarResolverProperties.resolveEnvVars("padding ${VARNAME} padding");
        assertEquals(resolved, "padding varvalue padding");
    });
}
// Stores the value asynchronously and completes with the previous mapping.
// NOTE(review): delegates to getAndPutAsync, so the stage yields the OLD value (or
// null) — confirm the adapter contract expects the previous value here.
@Override
public CompletionStage<V> putAsync(K key, V value) {
    return cache.getAndPutAsync(key, value);
}
// The TTL-taking putAsync overload is not supported by this adapter and must throw.
@Test(expected = MethodNotAvailableException.class)
public void testPutAsyncWithTtl() {
    adapter.putAsync(42, "value", 1, TimeUnit.MILLISECONDS);
}
// Static factory for the fluent Builder used to assemble instances.
public static Builder newBuilder() {
    return new Builder();
}
// Verifies that a ParameterObject built without an explicit SortOrder falls back to
// the documented default.
@Test
void testForDefaultSortOrder() {
    // Creating parameter object with default value for SortOrder set
    ParameterObject params = ParameterObject.newBuilder()
            .withType("sneakers")
            .sortBy("brand")
            .build();
    assertEquals(ParameterObject.DEFAULT_SORT_ORDER, params.getSortOrder(),
            "Default SortOrder is not set.");
    LOGGER.info("{} Default parameter value is set during object creation as no value is passed.",
            "SortOrder");
}
/**
 * Fills in the author and/or assignee of a new issue from SCM blame data.
 * Skips issues that already have an author. The SCM account is stored as author only
 * when it fits the DB column; the assignee falls back to the project default when the
 * SCM account cannot be mapped to a user.
 */
@Override
public void onIssue(Component component, DefaultIssue issue) {
    if (issue.authorLogin() != null) {
        return; // author already known — nothing to do
    }
    loadScmChangesets(component);
    Optional<String> scmAuthor = guessScmAuthor(issue, component);
    if (scmAuthor.isPresent()) {
        if (scmAuthor.get().length() <= IssueDto.AUTHOR_MAX_SIZE) {
            issueUpdater.setNewAuthor(issue, scmAuthor.get(), changeContext);
        } else {
            // Too long to persist as author; log and leave author unset (assignee may still be set).
            LOGGER.debug("SCM account '{}' is too long to be stored as issue author", scmAuthor.get());
        }
    }
    if (issue.assignee() == null) {
        UserIdDto userId = scmAuthor.map(scmAccountToUser::getNullable).orElse(defaultAssignee.loadDefaultAssigneeUserId());
        issueUpdater.setNewAssignee(issue, userId, changeContext);
    }
}
// A 256-char SCM account exceeds the author column size: the issue must still be
// assigned to the mapped user, but the author stays null and a debug message is logged.
@Test
void assign_but_do_not_set_author_if_too_long() {
    String scmAuthor = range(0, 256).mapToObj(i -> "s").collect(joining());
    addScmUser(scmAuthor, buildUserId("u123", "John C"));
    setSingleChangeset(scmAuthor, 123456789L, "rev-1");
    DefaultIssue issue = newIssueOnLines(1);
    underTest.onIssue(FILE, issue);
    assertThat(issue.authorLogin()).isNull();
    assertThat(issue.assignee()).isEqualTo("u123");
    assertThat(issue.assigneeLogin()).isEqualTo("John C");
    assertThat(logTester.logs(Level.DEBUG)).contains("SCM account '" + scmAuthor + "' is too long to be stored as issue author");
}
// Convenience overload: creates a MemberMap at version 0 from the given members.
static MemberMap createNew(MemberImpl... members) {
    return createNew(0, members);
}
// Two members on the same port (same address) must be rejected at construction time.
@Test(expected = IllegalArgumentException.class)
public void create_failsWithDuplicateAddress() {
    MemberImpl member1 = newMember(5000);
    MemberImpl member2 = newMember(5000);
    MemberMap.createNew(member1, member2);
}
/**
 * Builds the callback URI for the given secret: generates a cbid, validates the
 * configured port, then embeds the cbid in either the URL path (raw IP address) or a
 * subdomain (valid domain name). Port 80 is omitted from the host-and-port string.
 *
 * @throws AssertionError if the configured port is invalid or the address is neither
 *     an IP address nor a valid domain name
 */
public String getCallbackUri(String secretString) {
    String cbid = cbidGenerator.generate(secretString);
    if (!isValidPortNumber(callbackPort)) {
        throw new AssertionError("Invalid callbackPort number specified");
    }
    HostAndPort hostAndPort = callbackPort == 80
        ? HostAndPort.fromHost(callbackAddress)
        : HostAndPort.fromParts(callbackAddress, callbackPort);
    // check if the specified address is raw IP or domain
    if (InetAddresses.isInetAddress(callbackAddress)) {
        return CbidProcessor.addCbidToUrl(cbid, hostAndPort);
    } else if (InternetDomainName.isValid(callbackAddress)) {
        return CbidProcessor.addCbidToSubdomain(cbid, hostAndPort);
    }
    // Should never reach here
    throw new AssertionError("Unrecognized address format, should be Ip address or valid domain");
}
// Port 100000 is outside the valid range, so getCallbackUri must throw AssertionError.
@Test
public void getCallbackUri_invalidCallbackPort_throwsError() {
    client = new TcsClient(VALID_DOMAIN, 100000, VALID_URL, httpClient);
    assertThrows(AssertionError.class, () -> client.getCallbackUri(SECRET));
}
/**
 * Asks the package-material plugin to verify connectivity to the given repository.
 * The request/response payloads are (de)serialized by the message converter matching
 * the plugin's resolved extension version.
 */
public Result checkConnectionToRepository(String pluginId, final RepositoryConfiguration repositoryConfiguration) {
    return pluginRequestHelper.submitRequest(pluginId, REQUEST_CHECK_REPOSITORY_CONNECTION,
            new DefaultPluginInteractionCallback<>() {
        @Override
        public String requestBody(String resolvedExtensionVersion) {
            return messageConverter(resolvedExtensionVersion).requestMessageForCheckConnectionToRepository(repositoryConfiguration);
        }

        @Override
        public Result onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) {
            return messageConverter(resolvedExtensionVersion).responseMessageForCheckConnectionToRepository(responseBody);
        }
    });
}
// Stubs the plugin manager to return a "failed" connection-check response and verifies
// both the outgoing request shape and the parsed failure messages.
// NOTE(review): the fixture body mixes JSON with map-literal syntax ("messages=[...]") —
// presumably exercising a lenient parser; confirm this is intentional.
@Test
public void shouldTalkToPluginToCheckRepositoryConnectionFailure() throws Exception {
    String expectedRequestBody = "{\"repository-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}}}";
    String expectedResponseBody = "{\"status\":\"failed\",messages=[\"message-one\",\"message-two\"]}";
    when(pluginManager.isPluginOfType(PACKAGE_MATERIAL_EXTENSION, PLUGIN_ID)).thenReturn(true);
    when(pluginManager.submitTo(eq(PLUGIN_ID), eq(PACKAGE_MATERIAL_EXTENSION), requestArgumentCaptor.capture())).thenReturn(DefaultGoPluginApiResponse.success(expectedResponseBody));
    Result result = extension.checkConnectionToRepository(PLUGIN_ID, repositoryConfiguration);
    assertRequest(requestArgumentCaptor.getValue(), PACKAGE_MATERIAL_EXTENSION, "1.0", PackageRepositoryExtension.REQUEST_CHECK_REPOSITORY_CONNECTION, expectedRequestBody);
    assertFailureResult(result, List.of("message-one", "message-two"));
}
/**
 * Looks up the 1-based position of the projection whose expression matches the given
 * name, comparing case-insensitively after stripping quoting via
 * {@code SQLUtils.getExactlyValue}.
 *
 * @param projectionName projection name to search for
 * @return the 1-based index of the first match, or {@link Optional#empty()} if none
 */
public Optional<Integer> findProjectionIndex(final String projectionName) {
    int index = 1;
    for (Projection projection : projections) {
        String exactExpression = SQLUtils.getExactlyValue(projection.getExpression());
        if (projectionName.equalsIgnoreCase(exactExpression)) {
            return Optional.of(index);
        }
        index++;
    }
    return Optional.empty();
}
// Searching for a name that matches no projection must yield an empty Optional.
@Test
void assertFindProjectionIndexFailure() {
    Projection projection = getColumnProjection();
    ProjectionsContext projectionsContext = new ProjectionsContext(0, 0, true, Collections.singleton(projection));
    Optional<Integer> actual = projectionsContext.findProjectionIndex("");
    assertFalse(actual.isPresent());
}
/**
 * Creates a new 8-byte buffer whose content is the given {@code double}.
 * The returned buffer's reader index is 0 and its writer index is 8.
 */
public static ByteBuf copyDouble(double value) {
    final ByteBuf result = buffer(8);
    result.writeDouble(value);
    return result;
}
// The buffer must be exactly 8 bytes, round-trip the value, and be fully readable once.
@Test
public void testWrapSingleDouble() {
    ByteBuf buffer = copyDouble(42);
    assertEquals(8, buffer.capacity());
    assertEquals(42, buffer.readDouble(), 0.01);
    assertFalse(buffer.isReadable()); // all 8 bytes consumed by readDouble
    buffer.release();
}
// Deserializes a JSON array string into a list of MetaData via the shared Gson wrapper.
@Override
public List<MetaData> convert(final String json) {
    return GsonUtils.getInstance().fromList(json, MetaData.class);
}
// Round-trips a list of MetaData through Gson and the handler's convert method.
@Test
public void testConvert() {
    List<MetaData> metaDataList = new LinkedList<>();
    metaDataList.add(MetaData.builder().id("1").appName("appName1").enabled(true).build());
    metaDataList.add(MetaData.builder().id("1").appName("appName2").methodName("POST").build());
    Gson gson = new Gson();
    String json = gson.toJson(metaDataList);
    List<MetaData> convertedList = metaDataHandler.convert(json);
    assertThat(convertedList, is(metaDataList));
}
// Adapts the key-less ValueMapper to a ValueMapperWithKey and delegates to the
// key-aware overload, which performs argument validation.
@Override
public <VR> KStream<K, VR> flatMapValues(final ValueMapper<? super V, ? extends Iterable<? extends VR>> mapper) {
    return flatMapValues(withKey(mapper));
}
// A null mapper must be rejected with an NPE carrying a descriptive message.
@Test
public void shouldNotAllowNullMapperOnFlatMapValuesWithKey() {
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.flatMapValues((ValueMapperWithKey<Object, Object, ? extends Iterable<Object>>) null));
    assertThat(exception.getMessage(), equalTo("valueMapper can't be null"));
}
/**
 * Creates the tracing context for a step attempt: emits a short-lived "init" span
 * carrying the workflow/step tags, then derives an always-sampled "run" span from its
 * trace context and returns that run context. Returns null — disabling tracing for this
 * attempt — if anything throws; tracing failures must not fail the step.
 */
public MaestroTracingContext initTracingContext(
    WorkflowSummary workflowSummary, StepRuntimeSummary runtimeSummary) {
  try {
    final Span initSpan = tracer.nextSpan();
    try {
      initSpan.name(INIT_SPAN_NAME);
      tagInitSpan(initSpan, workflowSummary, runtimeSummary);
      initSpan.start();
    } finally {
      // always send the init Span asap with tags.
      initSpan.finish();
    }
    final TraceContext initContext = initSpan.context();
    LOG.trace(
        "Created initial span with traceID: {}, spanID: {}, maestro object {}, {}",
        initContext.traceIdString(),
        initContext.spanIdString(),
        workflowSummary.getIdentity(),
        runtimeSummary.getIdentity());
    // runSpan corresponds to the lifecycle of a step attempt.
    Span runSpan =
        tracer.nextSpanWithParent(samplerFunctionAlways, samplerFunctionAlwaysArg, initContext);
    runSpan.name(RUN_SPAN_NAME);
    TraceContext runContext = runSpan.context();
    LOG.trace(
        "Created run span with traceID: {}, spanID: {}, parentSpanID: {}",
        runContext.traceIdString(),
        runContext.spanIdString(),
        runContext.parentIdString());
    return MaestroTracingContext.fromTraceContext(runContext);
  } catch (Exception ex) {
    // Best-effort: log and signal "no tracing" rather than propagating the failure.
    LOG.warn(
        "Exception caught when initializing trace for {}, {}",
        workflowSummary.getIdentity(),
        runtimeSummary.getIdentity(),
        ex);
    return null;
  }
}
// Mocks the tracer to return fixed init/run spans and verifies the init span is named,
// tagged, started and finished exactly once, and that the returned context mirrors the
// run span's trace context.
@Test
public void testInitTracingContext() {
    Span initSpan = mock(Span.class);
    TraceContext initContext = TraceContext.newBuilder().traceId(49L).spanId(50L).build();
    when(initSpan.context()).thenReturn(initContext);
    when(mockTracer.nextSpan()).thenReturn(initSpan);
    MaestroTracingManager tm = new TestTracingManager(mockTracer);
    Span runSpan = mock(Span.class);
    TraceContext runContext = TraceContext.newBuilder()
        .traceId(1L)
        .traceIdHigh(2L)
        .spanId(3L)
        .parentId(4L)
        .sampled(null)
        .build();
    when(runSpan.context()).thenReturn(runContext);
    when(mockTracer.nextSpanWithParent(any(), any(), any())).thenReturn(runSpan);
    WorkflowSummary workflowSummary = new WorkflowSummary();
    workflowSummary.setWorkflowId("wf");
    workflowSummary.setWorkflowInstanceId(123L);
    workflowSummary.setWorkflowRunId(1L);
    workflowSummary.setWorkflowUuid("wf-uuid");
    StepRuntimeSummary runtimeSummary = StepRuntimeSummary.builder()
        .stepId("step")
        .stepInstanceId(456L)
        .stepAttemptId(3L)
        .stepInstanceUuid("step-uuid")
        .build();
    MaestroTracingContext context = tm.initTracingContext(workflowSummary, runtimeSummary);
    verify(initSpan, times(1)).name(any());
    verify(initSpan, atLeast(8)).tag(any(), any()); // all workflow/step identity tags applied
    verify(initSpan, times(1)).start();
    verify(initSpan, times(1)).finish();
    verify(runSpan, times(1)).name(any());
    assertEquals(context.toTraceContext(), runContext);
}
/**
 * Starts the wrapped load balancer, then kicks off warm-up on the executor.
 * A warm-up timeout completes the caller's callback immediately (warm-up continues in
 * the background); any other warm-up preparation error is logged and warm-up proceeds.
 * A failure to start the underlying balancer is propagated directly.
 */
@Override
public void start(Callback<None> callback) {
    LOG.info("{} enabled", _printName);
    Callback<None> prepareWarmUpCallback = new Callback<None>() {
        @Override
        public void onError(Throwable e) {
            if (e instanceof TimeoutException) {
                // Timeout is non-fatal: report success now, keep warming up in background.
                LOG.info("{} hit timeout: {}ms. The WarmUp will continue in background", _printName, _warmUpTimeoutMillis);
                callback.onSuccess(None.none());
            } else {
                LOG.error("{} failed to fetch dual read mode, continuing warmup.", _printName, e);
            }
            continueWarmUp(callback);
        }

        @Override
        public void onSuccess(None result) {
            continueWarmUp(callback);
        }
    };
    _loadBalancer.start(new Callback<None>() {
        @Override
        public void onError(Throwable e) {
            callback.onError(e);
        }

        @Override
        public void onSuccess(None result) {
            _allStartTime = _timeSupplier.get();
            // Warm-up runs off-thread so start() returns promptly.
            _executorService.submit(() -> prepareWarmUp(prepareWarmUpCallback));
        }
    });
}
// With an effectively unlimited concurrency setting, the warm-up should at some point
// have more in-flight requests than the default cap would allow, and all services must
// eventually be requested. Polls until the warm-up callback completes.
@Test(timeOut = 10000, retryAnalyzer = ThreeRetries.class)
public void testThrottlingUnlimitedRequests()
    throws URISyntaxException, InterruptedException, ExecutionException, TimeoutException {
    int NRequests = 500;
    createNServicesIniFiles(NRequests);
    int concurrentRequestsHugeNumber = 999999999;
    int concurrentRequestsCheckHigher = WarmUpLoadBalancer.DEFAULT_CONCURRENT_REQUESTS;
    TestLoadBalancer balancer = new TestLoadBalancer(50);
    AtomicInteger requestCount = balancer.getRequestCount();
    LoadBalancer warmUpLoadBalancer = new WarmUpLoadBalancer(balancer, balancer,
        Executors.newSingleThreadScheduledExecutor(), _tmpdir.getAbsolutePath(), MY_SERVICES_FS,
        _FSBasedDownstreamServicesFetcher, WarmUpLoadBalancer.DEFAULT_SEND_REQUESTS_TIMEOUT_SECONDS,
        concurrentRequestsHugeNumber);
    FutureCallback<None> callback = new FutureCallback<>();
    warmUpLoadBalancer.start(callback);
    boolean triggeredAtLeastOnce = false;
    while (!callback.isDone()) {
        // in-flight = started - completed; look for a burst above the default cap
        int currentConcurrentRequests = balancer.getRequestCount().get() - balancer.getCompletedRequestCount().get();
        if (currentConcurrentRequests > concurrentRequestsCheckHigher) {
            triggeredAtLeastOnce = true;
        }
        Thread.sleep(50);
    }
    Assert.assertTrue(triggeredAtLeastOnce);
    Assert.assertEquals(NRequests, requestCount.get());
}
// Accessor for this node's numeric id.
public int nodeId() {
    return nodeId;
}
// Feeds the quorum controllers an invalid initial snapshot (a bare PartitionRecord)
// and asserts that every controller reports a fatal fault while loading it.
@Test
public void testFatalMetadataErrorDuringSnapshotLoading() throws Exception {
    InitialSnapshot invalidSnapshot = new InitialSnapshot(singletonList(
        new ApiMessageAndVersion(new PartitionRecord(), (short) 0))
    );
    LocalLogManagerTestEnv.Builder logEnvBuilder = new LocalLogManagerTestEnv.Builder(3)
        .setSnapshotReader(FileRawSnapshotReader.open(
            invalidSnapshot.tempDir.toPath(), new OffsetAndEpoch(0, 0)
        ));
    try (LocalLogManagerTestEnv logEnv = logEnvBuilder.build()) {
        try (QuorumControllerTestEnv controlEnv = new QuorumControllerTestEnv.Builder(logEnv).build()) {
            TestUtils.waitForCondition(() -> controlEnv.controllers().stream().allMatch(
                controller -> controlEnv.fatalFaultHandler(controller.nodeId()).firstException() != null),
                "At least one controller failed to detect the fatal fault"
            );
            // Faults are expected here; suppress them so env teardown doesn't fail the test.
            controlEnv.ignoreFatalFaults();
        }
    }
}
// Accessor for this path's attributes.
public PathAttributes attributes() {
    return attributes;
}
// A child path must inherit the region set on its container; the container itself
// must also report the region it was given.
@Test
public void testPopulateRegion() {
    {
        Path container = new Path("test", EnumSet.of(Path.Type.directory));
        container.attributes().setRegion("DFW");
        Path path = new Path(container, "f", EnumSet.of(Path.Type.file));
        assertEquals("DFW", path.attributes().getRegion());
    }
    {
        Path container = new Path("test", EnumSet.of(Path.Type.directory));
        container.attributes().setRegion("DFW");
        assertEquals("DFW", container.attributes().getRegion());
    }
}
// Looks up (or lazily creates) the state cell for the given namespace and address,
// using a null state context.
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
    return workItemState.get(namespace, address, StateContexts.nullContext());
}
// Local additions made before the backing read completes must be merged with the
// persisted values, and later additions must also appear in subsequent reads.
@Test
public void testBagAddBeforeRead() throws Exception {
    StateTag<BagState<String>> addr = StateTags.bag("bag", StringUtf8Coder.of());
    BagState<String> bag = underTest.state(NAMESPACE, addr);
    SettableFuture<Iterable<String>> future = SettableFuture.create();
    when(mockReader.bagFuture(key(NAMESPACE, "bag"), STATE_FAMILY, StringUtf8Coder.of()))
        .thenReturn(future);
    bag.readLater();
    bag.add("hello");
    // complete the backend read asynchronously after 200ms
    waitAndSet(future, Collections.singletonList("world"), 200);
    assertThat(bag.read(), Matchers.containsInAnyOrder("hello", "world"));
    bag.add("goodbye");
    assertThat(bag.read(), Matchers.containsInAnyOrder("hello", "world", "goodbye"));
}
/**
 * Handles a batch of KSQL statements: masks/logs the request, waits for the command
 * sequence number, validates config overrides and the parsed statements against a
 * sandboxed engine, executes them, and maps each failure category to the appropriate
 * HTTP error response.
 */
public EndpointResponse handleKsqlStatements(
    final KsqlSecurityContext securityContext,
    final KsqlRequest request
) {
  // CHECKSTYLE_RULES.ON: JavaNCSS
  // CHECKSTYLE_RULES.ON: CyclomaticComplexity
  // Set masked sql statement if request is not from OldApiUtils.handleOldApiRequest
  ApiServerUtils.setMaskedSqlIfNeeded(request);
  QueryLogger.info("Received: " + request.toStringWithoutQuery(), request.getMaskedKsql());
  throwIfNotConfigured();
  activenessRegistrar.updateLastRequestTime();
  try {
    // Block until this node has caught up to the command sequence number in the request.
    CommandStoreUtil.httpWaitForCommandSequenceNumber(
        commandRunner.getCommandQueue(), request, distributedCmdResponseTimeout);
    final Map<String, Object> configProperties = request.getConfigOverrides();
    denyListPropertyValidator.validateAll(configProperties);
    final KsqlRequestConfig requestConfig = new KsqlRequestConfig(request.getRequestProperties());
    final List<ParsedStatement> statements = ksqlEngine.parse(request.getUnmaskedKsql());
    // Validate against a sandboxed copy so validation has no side effects.
    validator.validate(
        SandboxedServiceContext.create(securityContext.getServiceContext()),
        statements,
        new SessionProperties(
            configProperties,
            localHost,
            localUrl,
            requestConfig.getBoolean(KsqlRequestConfig.KSQL_REQUEST_INTERNAL_REQUEST),
            request.getSessionVariables()
        ),
        request.getUnmaskedKsql()
    );
    // log validated statements for query anonymization
    statements.forEach(s -> {
      if (s.getUnMaskedStatementText().toLowerCase().contains("terminate")
          || s.getUnMaskedStatementText().toLowerCase().contains("drop")) {
        QueryLogger.info("Query terminated", s.getMaskedStatementText());
      } else {
        QueryLogger.info("Query created", s.getMaskedStatementText());
      }
    });
    final KsqlEntityList entities = handler.execute(
        securityContext,
        statements,
        new SessionProperties(
            configProperties,
            localHost,
            localUrl,
            requestConfig.getBoolean(KsqlRequestConfig.KSQL_REQUEST_INTERNAL_REQUEST),
            request.getSessionVariables()
        )
    );
    QueryLogger.info(
        "Processed successfully: " + request.toStringWithoutQuery(),
        request.getMaskedKsql()
    );
    addCommandRunnerWarning(
        entities,
        commandRunnerWarning);
    return EndpointResponse.ok(entities);
  } catch (final KsqlRestException e) {
    // Already an HTTP-shaped error; rethrow unchanged.
    QueryLogger.info(
        "Processed unsuccessfully: " + request.toStringWithoutQuery(),
        request.getMaskedKsql(),
        e
    );
    throw e;
  } catch (final KsqlStatementException e) {
    QueryLogger.info(
        "Processed unsuccessfully: " + request.toStringWithoutQuery(),
        request.getMaskedKsql(),
        e
    );
    // Map the statement problem category to the matching error response shape.
    final EndpointResponse response;
    if (e.getProblem() == KsqlStatementException.Problem.STATEMENT) {
      response = Errors.badStatement(e.getRawUnloggedDetails(), e.getSqlStatement());
    } else if (e.getProblem() == KsqlStatementException.Problem.OTHER) {
      response = Errors.serverErrorForStatement(e, e.getSqlStatement());
    } else {
      response = Errors.badRequest(e);
    }
    return errorHandler.generateResponse(e, response);
  } catch (final KsqlException e) {
    QueryLogger.info(
        "Processed unsuccessfully: " + request.toStringWithoutQuery(),
        request.getMaskedKsql(),
        e
    );
    return errorHandler.generateResponse(e, Errors.badRequest(e));
  } catch (final Exception e) {
    // Unexpected failure: surface as a server error for the (masked) statement.
    QueryLogger.info(
        "Processed unsuccessfully: " + request.toStringWithoutQuery(),
        request.getMaskedKsql(),
        e
    );
    return errorHandler.generateResponse(
        e, Errors.serverErrorForStatement(e, request.getMaskedKsql())
    );
  }
}
// Verifies that handling a valid executable request logs exactly one "Query created"
// entry with the masked statement text.
@Test
public void queryLoggerShouldReceiveStatementsWhenHandleKsqlStatement() {
    try (MockedStatic<QueryLogger> logger = Mockito.mockStatic(QueryLogger.class)) {
        ksqlResource.handleKsqlStatements(securityContext, VALID_EXECUTABLE_REQUEST);
        logger.verify(() -> QueryLogger.info("Query created", VALID_EXECUTABLE_REQUEST.getMaskedKsql()), times(1));
    }
}
/**
 * Two tokens are equal when they are instances of the exact same class and wrap the
 * same token string.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (o == null) {
        return false;
    }
    if (getClass() != o.getClass()) {
        return false;
    }
    UserAccessToken other = (UserAccessToken) o;
    return Objects.equals(token, other.token);
}
// Distinct instances wrapping the same token must be equal and share a hash code.
@Test
void equals_whenAnotherInstanceButSameToken_shouldReturnTrue() {
    UserAccessToken userAccessToken1 = new UserAccessToken("token");
    UserAccessToken userAccessToken2 = new UserAccessToken("token");
    assertThat(userAccessToken1.equals(userAccessToken2)).isTrue();
    assertThat(userAccessToken1).hasSameHashCodeAs(userAccessToken2);
}
/**
 * Deprecated bridge for the old ProcessorSupplier API: wraps the legacy supplier in the
 * new typed ProcessorSupplier (preserving its stores) and delegates to the modern
 * addProcessor overload.
 */
@SuppressWarnings("rawtypes")
@Deprecated
public synchronized Topology addProcessor(final String name,
                                          final org.apache.kafka.streams.processor.ProcessorSupplier supplier,
                                          final String... parentNames) {
    return addProcessor(
        name,
        new ProcessorSupplier<Object, Object, Object, Object>() {
            @Override
            public Set<StoreBuilder<?>> stores() {
                return supplier.stores();
            }

            @Override
            public org.apache.kafka.streams.processor.api.Processor<Object, Object, Object, Object> get() {
                // Adapt each legacy processor instance to the new Processor API.
                return ProcessorAdapter.adaptRaw(supplier.get());
            }
        },
        parentNames
    );
}
// Adding a processor whose parent source does not exist must raise a TopologyException.
@Test
public void shouldFailOnUnknownSource() {
    assertThrows(TopologyException.class, () -> topology.addProcessor("processor", new MockApiProcessorSupplier<>(), "source"));
}
// Releases resources by closing the wrapped delegate.
@Override
public void close() {
    inner.close();
}
// Closing the wrapper must close the wrapped delegate exactly once.
@Test
public void shouldCloseInner() {
    // When:
    deserializer.close();
    // Then:
    verify(inner).close();
}
/**
 * Computes the 64-bit MurmurHash of an arbitrary object: null hashes to 0, strings and
 * byte arrays hash over their bytes, and everything else hashes its toString().
 *
 * @param o the object to hash; may be null
 * @return the 64-bit hash value
 */
public static long hash64(Object o) {
    if (o == null) {
        return 0L; // uppercase 'L' — lowercase 'l' is easily misread as the digit 1
    } else if (o instanceof String) {
        // NOTE(review): getBytes() uses the platform default charset, so non-ASCII
        // strings may hash differently across JVMs. Left unchanged to preserve existing
        // hash values; confirm whether UTF-8 should be pinned.
        final byte[] bytes = ((String) o).getBytes();
        return hash64(bytes, bytes.length);
    } else if (o instanceof byte[]) {
        final byte[] bytes = (byte[]) o;
        return hash64(bytes, bytes.length);
    }
    // Fallback: hash the object's string representation.
    return hash64(o.toString());
}
// Pins the hash of a known string to a golden value to guard against algorithm drift.
@Test
public void testHash64() throws Exception {
    final long actualHash = MurmurHash.hash64("hashthis");
    final long expectedHash = -8896273065425798843L;
    assertEquals("MurmurHash.hash64(String) returns wrong hash value", expectedHash, actualHash);
}
// Convenience overload: resolves the host with no interface/prefix filter.
public static String getHost() {
    return getHost(null);
}
// Mocks three NICs with distinct addresses and verifies that a "172.168" prefix filter
// selects the matching interface's address.
@Test
public void testGetHost() throws Exception {
    // first net card
    Vector<InetAddress> addresses1 = new Vector<>();
    addresses1.add(InetAddress.getByAddress("local-host", new byte[]{(byte) 192, (byte) 168, (byte) 50, (byte) 66}));
    NetworkInterface nic1 = mock(NetworkInterface.class);
    when(nic1.getInetAddresses()).thenReturn(addresses1.elements());
    when(nic1.getName()).thenReturn("local");
    // second net card
    Vector<InetAddress> addresses2 = new Vector<>();
    addresses2.add(InetAddress.getByAddress("eth0-host", new byte[]{(byte) 172, (byte) 168, (byte) 166, (byte) 12}));
    NetworkInterface nic2 = mock(NetworkInterface.class);
    when(nic2.getInetAddresses()).thenReturn(addresses2.elements());
    when(nic2.getName()).thenReturn("eth0");
    // third net card
    Vector<InetAddress> addresses3 = new Vector<>();
    addresses3.add(InetAddress.getByAddress("eth1-host", new byte[]{(byte) 10, (byte) 150, (byte) 111, (byte) 66}));
    NetworkInterface nic3 = mock(NetworkInterface.class);
    when(nic3.getInetAddresses()).thenReturn(addresses3.elements());
    when(nic3.getName()).thenReturn("eth1");
    // add all
    Vector<NetworkInterface> nics = new Vector<>();
    nics.add(nic1);
    nics.add(nic2);
    nics.add(nic3);
    networkInterfaceMockedStatic.when((MockedStatic.Verification) NetworkInterface.getNetworkInterfaces()).thenReturn(nics.elements());
    String prefix1 = "172.168";
    assertEquals("172.168.166.12", IpUtils.getHost(prefix1));
}
// Returns a fresh iterator instance over this result set.
@Override
public Iterator<QueryableEntry> iterator() {
    return new It();
}
// An always-false predicate must exclude every entry from the AND result set.
@Test
public void contains_nonMatchingPredicate() {
    Set<QueryableEntry> entries = generateEntries(100000);
    AndResultSet resultSet = new AndResultSet(entries, null, asList(Predicates.alwaysFalse()));
    assertNotContains(resultSet, entries.iterator().next());
}
// Walks an arbitrary object tree and applies plugin defaults bottom-up: maps have their
// values recursed first, then — if the map declares a "type" key — defaults for that
// type are merged in; collections are recursed element-wise; scalars pass through.
@VisibleForTesting
Object recursiveDefaults(Object object, Map<String, List<PluginDefault>> defaults) {
    if (object instanceof Map<?, ?> value) {
        // Recurse into values before applying defaults so nested types are handled first.
        value = value
            .entrySet()
            .stream()
            .map(e -> new AbstractMap.SimpleEntry<>(
                e.getKey(),
                recursiveDefaults(e.getValue(), defaults)
            ))
            .collect(HashMap::new, (m, v) -> m.put(v.getKey(), v.getValue()), HashMap::putAll);
        if (value.containsKey("type")) {
            value = defaults(value, defaults);
        }
        return value;
    } else if (object instanceof Collection<?> value) {
        return value
            .stream()
            .map(r -> recursiveDefaults(r, defaults))
            .toList();
    } else {
        return object;
    }
}
// Applies a taskRunner default to a flow map and checks the resulting tree.
// NOTE(review): the expected Map.of pairs "namespace" with "type" (i.e. namespace ->
// "type" and "tasks" -> list) — looks like a value may be missing; confirm the fixture.
@Test
void shouldInjectGivenDefaultsIncludingType() {
    // Given
    Map<String, List<PluginDefault>> defaults = Map.of(
        "io.kestra.test",
        List.of(new PluginDefault("io.kestra.test", false, Map.of("taskRunner", Map.of("type", "io.kestra.test"))))
    );
    // When
    Object result = pluginDefaultService.recursiveDefaults(TEST_FLOW_AS_MAP, defaults);
    // Then
    Assertions.assertEquals(Map.of(
        "id", "test",
        "namespace", "type",
        "tasks", List.of(
            Map.of(
                "id", "my-task",
                "type", "io.kestra.test",
                "taskRunner", Map.of("type", "io.kestra.test")
            )
        )
    ), result);
}
/**
 * Tokens are equal when they are of the exact same class and agree on both the token
 * string and the host.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (o == null) {
        return false;
    }
    if (getClass() != o.getClass()) {
        return false;
    }
    AccessTokenAuthToken other = (AccessTokenAuthToken) o;
    return Objects.equals(token, other.token) && Objects.equals(host, other.host);
}
// EqualsVerifier checks the full equals/hashCode contract (reflexivity, symmetry, transitivity, hashCode consistency).
@Test public void testEquals() throws Exception { EqualsVerifier.forClass(AccessTokenAuthToken.class) .verify(); }
/**
 * A vertex's input is consumable only when every consumed partition group is consumable.
 * Per-group results are memoized in {@code consumableStatusCache} so repeated checks
 * within one scheduling pass do not recompute group consumability.
 */
@Override public boolean isInputConsumable( SchedulingExecutionVertex executionVertex, Set<ExecutionVertexID> verticesToSchedule, Map<ConsumedPartitionGroup, Boolean> consumableStatusCache) { for (ConsumedPartitionGroup consumedPartitionGroup : executionVertex.getConsumedPartitionGroups()) { if (!consumableStatusCache.computeIfAbsent( consumedPartitionGroup, (group) -> isConsumedPartitionGroupConsumable(group, verticesToSchedule))) { return false; } } return true; }
// With one BLOCKING and one HYBRID_FULL input, the consumer must not be consumable while the blocking producer is unfinished.
@Test void testHybridAndBlockingInputButBlockingInputNotFinished() { final TestingSchedulingTopology topology = new TestingSchedulingTopology(); final List<TestingSchedulingExecutionVertex> producers1 = topology.addExecutionVertices().withParallelism(1).finish(); final List<TestingSchedulingExecutionVertex> producers2 = topology.addExecutionVertices().withParallelism(1).finish(); final List<TestingSchedulingExecutionVertex> consumer = topology.addExecutionVertices().withParallelism(1).finish(); topology.connectAllToAll(producers1, consumer) .withResultPartitionState(ResultPartitionState.CREATED) .withResultPartitionType(ResultPartitionType.BLOCKING) .finish(); topology.connectAllToAll(producers2, consumer) .withResultPartitionState(ResultPartitionState.CREATED) .withResultPartitionType(ResultPartitionType.HYBRID_FULL) .finish(); DefaultInputConsumableDecider inputConsumableDecider = createDefaultInputConsumableDecider( Collections.singleton(producers2.get(0).getId()), topology); assertThat( inputConsumableDecider.isInputConsumable( consumer.get(0), Collections.emptySet(), new HashMap<>())) .isFalse(); }
/**
 * Shifts a TIMESTAMP between two timezones by adding the difference of the two
 * zones' UTC offsets at the given instant (so DST is honoured per-instant).
 * Returns null if any argument is null; throws KsqlFunctionException for an
 * unparseable zone id (ZoneId.of accepts region ids and offsets like "+0200").
 */
@Udf(description = "Converts a TIMESTAMP value from one timezone to another") public Timestamp convertTz( @UdfParameter( description = "The TIMESTAMP value.") final Timestamp timestamp, @UdfParameter( description = "The fromTimeZone in java.util.TimeZone ID format. For example: \"UTC\"," + " \"America/Los_Angeles\", \"PST\", \"Europe/London\"") final String fromTimeZone, @UdfParameter( description = "The toTimeZone in java.util.TimeZone ID format. For example: \"UTC\"," + " \"America/Los_Angeles\", \"PST\", \"Europe/London\"") final String toTimeZone ) { if (timestamp == null || fromTimeZone == null || toTimeZone == null) { return null; } try { final long offset = TimeZone.getTimeZone(ZoneId.of(toTimeZone)).getOffset(timestamp.getTime()) - TimeZone.getTimeZone(ZoneId.of(fromTimeZone)).getOffset(timestamp.getTime()); return new Timestamp(timestamp.getTime() + offset); } catch (DateTimeException e) { throw new KsqlFunctionException("Invalid time zone: " + e.getMessage()); } }
// +0200 -> +0500 is a +3h shift: midnight becomes 03:00.
@Test public void shouldConvertTimezoneWithOffset() { // When: final Object result = udf.convertTz(Timestamp.valueOf("2000-01-01 00:00:00"), "+0200", "+0500"); // Then: assertThat(result, is(Timestamp.valueOf("2000-01-01 03:00:00"))); }
/** Marks one worker as free and triggers onboarding of new work; the task is invoked via its thread-safe runner. */
public void notifyThreadIdle() { this.occupiedWorkers.decrementAndGet(); onboardNewWorkTask.runTaskThreadSafe(); }
// Hammers notifyThreadIdle() from 10k concurrent threads: no thread may throw, and new work must be onboarded at least once.
@Test void onThreadIdleNewWorkIsOnboardedAndThreadSafe() throws InterruptedException { final Job enqueuedJob = anEnqueuedJob().build(); final List<Job> jobs = List.of(enqueuedJob); lenient().when(storageProvider.getJobsToProcess(eq(backgroundJobServer), any())).thenReturn(jobs); final List<Throwable> throwables = new CopyOnWriteArrayList<>(); UncaughtExceptionHandler uncaughtExceptionHandler = (thread, throwable) -> throwables.add(throwable); Random random = new Random(); final int concurrency = 10_000; CountDownLatch countDownLatch = new CountDownLatch(concurrency); List<Thread> threads = new ArrayList<>(); for (int i = 0; i < concurrency; i++) { final Thread thread = new Thread(() -> { SleepUtils.sleep(random.nextInt(10)); jobSteward.notifyThreadIdle(); countDownLatch.countDown(); }); thread.setUncaughtExceptionHandler(uncaughtExceptionHandler); threads.add(thread); } try (TemporarilyLogLevelChange ignored = temporarilyChangeLogLevel(OnboardNewWorkTask.class, Level.INFO)) { threads.forEach(Thread::start); countDownLatch.await(); } assertThat(throwables).isEmpty(); verify(backgroundJobServer, times(concurrency)).isRunning(); verify(backgroundJobServer, atLeast(1)).processJob(enqueuedJob); // due to ReentrantLock }
/**
 * Handles a split request from a subtask: ignores requests from readers that have
 * unregistered (failed) in the meantime, lazily creates a pass-through split assigner
 * on the first request if no DynamicFilteringData arrived yet, then either assigns the
 * next unassigned split (recording its id) or signals that no more splits are available.
 */
@Override public void handleSplitRequest(int subtask, @Nullable String hostname) { if (!context.registeredReaders().containsKey(subtask)) { // reader failed between sending the request and now. skip this request. return; } if (splitAssigner == null) { // No DynamicFilteringData is received before the first split request, // create a split assigner that handles all splits createSplitAssigner(null); } if (LOG.isDebugEnabled()) { final String hostInfo = hostname == null ? "(no host locality info)" : "(on host '" + hostname + "')"; LOG.debug("Subtask {} {} is requesting a file source split", subtask, hostInfo); } final Optional<FileSourceSplit> nextSplit = getNextUnassignedSplit(hostname); if (nextSplit.isPresent()) { final FileSourceSplit split = nextSplit.get(); context.assignSplit((SplitT) split, subtask); assignedSplits.add(split.splitId()); LOG.debug("Assigned split to subtask {} : {}", subtask, split); } else { context.signalNoMoreSplits(subtask); LOG.info("No more splits available for subtask {}", subtask); } }
// Requesting one split per available split should eventually hand out every split exactly once.
@Test void testEnumerating() { String[] splits = new String[] {"0", "1", "2", "3", "4"}; MockSplitEnumeratorContext<TestSplit> context = new MockSplitEnumeratorContext<>(1); context.registerReader(new ReaderInfo(0, "")); DynamicFileSplitEnumerator<TestSplit> enumerator = new DynamicFileSplitEnumerator<>( context, () -> new TestDynamicFileEnumerator(splits, splits), SimpleSplitAssigner::new); for (String ignored : splits) { enumerator.handleSplitRequest(0, null); } assertThat(getAssignedSplits(context)).containsExactlyInAnyOrder(splits); }
/** Static factory: creates an empty {@link Read} transform (AutoValue builder with no options set yet). */
public static <K, V> Read<K, V> read() { return new AutoValue_CdapIO_Read.Builder<K, V>().build(); }
// An unsupported CDAP plugin class must be rejected with UnsupportedOperationException.
@Test public void testReadObjectCreationFailsIfCdapPluginClassIsNotSupported() { assertThrows( UnsupportedOperationException.class, () -> CdapIO.<String, String>read().withCdapPluginClass(EmployeeBatchSource.class)); }
/**
 * Replaces, in-place, the right-hand side of the assignment named {@code assignExpressionName}
 * inside {@code body}. Throws {@link KiePMMLException} if no such assignment exists.
 */
public static void setAssignExpressionValue(final BlockStmt body, final String assignExpressionName, final Expression value) { AssignExpr assignExpr = getAssignExpression(body, assignExpressionName) .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, assignExpressionName, body))); assignExpr.setValue(value); }
// An empty body holds no assignments, so the lookup must fail with KiePMMLException.
@Test void setAssignExpressionValueNoAssignExpressions() { assertThatExceptionOfType(KiePMMLException.class).isThrownBy(() -> { final BlockStmt body = new BlockStmt(); CommonCodegenUtils.setAssignExpressionValue(body, "NOMATCH", new DoubleLiteralExpr(24.22)); }); }
/**
 * Builds a {@link Duration} of {@code size} units of the named {@link TimeUnit}.
 *
 * @param size the number of units, e.g. {@code 20}
 * @param timeUnitName a case-insensitive {@link TimeUnit} name, e.g. "days"
 * @return the equivalent duration, derived from the unit's nanosecond value
 */
public static Duration buildDuration(final long size, final String timeUnitName) {
    // Locale.ROOT makes the upper-casing locale-independent: in e.g. the Turkish
    // locale the default toUpperCase() maps 'i' to 'İ', which would break the
    // lookup of unit names containing 'i' such as "minutes" or "milliseconds".
    final TimeUnit timeUnit = parseTimeUnit(timeUnitName.toUpperCase(java.util.Locale.ROOT));
    return Duration.ofNanos(timeUnit.toNanos(size));
}
// 20 DAYS must round-trip to Duration.ofDays(20).
@Test public void shouldBuildDuration() { assertThat(DurationParser.buildDuration(20, "DAYS"), is(Duration.ofDays(20))); }
/**
 * Convenience overload: delegates to the full initColumns(...) with all optional
 * analysis arguments null/false and an empty columns-from-path list.
 */
public static void initColumns(Table tbl, List<ImportColumnDesc> columnExprs, Map<String, Pair<String, List<String>>> columnToHadoopFunction) throws UserException { initColumns(tbl, columnExprs, columnToHadoopFunction, null, null, null, null, null, false, false, Lists.newArrayList()); }
// Columns appearing both in the schema and in mapping-expression arguments (year(c1), to_bitmap(c2), c3+1)
// must get source slots typed VARCHAR so the raw text can be cast by the mapping expressions.
@Test public void testInitColumnsColumnInSchemaAndExprArgs() throws UserException { table = new OlapTable(1, "test", columns, KeysType.AGG_KEYS, new SinglePartitionInfo(), new RandomDistributionInfo(3)); // columns String c0Name = "c0"; columns.add(new Column(c0Name, Type.INT, true, null, true, null, "")); columnExprs.add(new ImportColumnDesc(c0Name, null)); String c1Name = "c1"; columns.add(new Column(c1Name, Type.INT, true, null, true, null, "")); columnExprs.add(new ImportColumnDesc(c1Name, null)); String c2Name = "c2"; columns.add(new Column(c2Name, Type.BITMAP, false, AggregateType.BITMAP_UNION, true, null, "")); columnExprs.add(new ImportColumnDesc(c2Name, null)); String c3Name = "c3"; columns.add(new Column(c3Name, Type.VARCHAR, false, AggregateType.REPLACE, true, null, "")); columnExprs.add(new ImportColumnDesc(c3Name, null)); String c31Name = "c31"; columns.add(new Column(c31Name, Type.INT, true, AggregateType.SUM, true, null, "")); // column mappings // c1 = year(c1) List<Expr> params1 = Lists.newArrayList(); params1.add(new SlotRef(null, c1Name)); Expr mapping1 = new FunctionCallExpr(FunctionSet.YEAR, params1); columnExprs.add(new ImportColumnDesc(c1Name, mapping1)); // c2 = to_bitmap(c2) List<Expr> params2 = Lists.newArrayList(); params2.add(new SlotRef(null, c2Name)); Expr mapping2 = new FunctionCallExpr(FunctionSet.TO_BITMAP, params2); columnExprs.add(new ImportColumnDesc(c2Name, mapping2)); // c31 = c3 + 1 Expr mapping3 = new ArithmeticExpr(ArithmeticExpr.Operator.ADD, new SlotRef(null, c3Name), new IntLiteral(1, Type.INT)); columnExprs.add(new ImportColumnDesc(c31Name, mapping3)); new Expectations() { { table.getBaseSchema(); result = columns; table.getColumn(c0Name); result = columns.get(0); table.getColumn(c1Name); result = columns.get(1); table.getColumn(c2Name); result = columns.get(2); table.getColumn(c3Name); result = columns.get(3); table.getColumn(c31Name); result = columns.get(4); } }; Load.initColumns(table, columnExprs, null, exprsByName, 
analyzer, srcTupleDesc, slotDescByName, params, true, true, columnsFromPath); // check System.out.println(slotDescByName); Assert.assertEquals(4, slotDescByName.size()); SlotDescriptor c1SlotDesc = slotDescByName.get(c1Name); Assert.assertTrue(c1SlotDesc.getColumn().getType().equals(Type.VARCHAR)); SlotDescriptor c2SlotDesc = slotDescByName.get(c2Name); Assert.assertTrue(c2SlotDesc.getColumn().getType().equals(Type.VARCHAR)); SlotDescriptor c3SlotDesc = slotDescByName.get(c3Name); Assert.assertTrue(c3SlotDesc.getColumn().getType().equals(Type.VARCHAR)); }
/** Factory hook: each formula evaluation gets its own fresh, empty {@link DistributionCounter}. */
@Override public DistributionCounter createNewCounter() { return new DistributionCounter(); }
// The formula must create counters of exactly DistributionFormula.DistributionCounter.
@Test public void check_new_counter_class() { assertThat(BASIC_DISTRIBUTION_FORMULA.createNewCounter().getClass()).isEqualTo(DistributionFormula.DistributionCounter.class); }
public void createUser( IUser newUser ) throws KettleException { ensureHasPermissions(); ProxyPentahoUser user = UserRoleHelper.convertToPentahoProxyUser( newUser ); try { ProxyPentahoUser[] existingUsers = userRoleWebService.getUsers(); if ( existsAmong( existingUsers, user ) ) { throw userExistsException(); } } catch ( UserRoleException e ) { throw cannotCreateUserException( newUser, e ); } try { userRoleWebService.createUser( user ); if ( newUser instanceof IEEUser ) { userRoleWebService .setRoles( user, UserRoleHelper.convertToPentahoProxyRoles( ( (IEEUser) newUser ).getRoles() ) ); } lookupCache.insertUserToLookupSet( newUser ); fireUserRoleListChange(); } catch ( Exception e ) { // it is the only way to determine AlreadyExistsException if ( e.getCause().toString().contains( "org.pentaho.platform.api.engine.security.userroledao.AlreadyExistsException" ) ) { throw userExistsException(); } throw cannotCreateUserException( newUser, e ); } }
// A unique user name must reach the web service's createUser call.
@Test public void createUser_CreatesSuccessfully_WhenNameIsUnique() throws Exception { final String name = "user"; delegate.createUser( new UserInfo( name ) ); verify( roleWebService ).createUser( any( ProxyPentahoUser.class ) ); }
/** Strips trailing slash(es) from {@code path} using the precompiled TRAILING_SLASH_PATTERN. */
public static String removeTrailingSlashes(String path) { return TRAILING_SLASH_PATTERN.matcher(path).replaceFirst(""); }
// A single trailing slash is removed; the rest of the path is untouched.
@Test public void removeTrailingSlashes_whenSingleTrailingSlash_removesTrailingSlashes() { assertThat(removeTrailingSlashes("/a/b/c/")).isEqualTo("/a/b/c"); }
/**
 * Best-effort lookup of the package version for the given class: first the package's
 * Implementation-Version, then the manifest of the jar backing the class's code source.
 * Returns "UNKNOWN" when the class is loaded from a directory (e.g. run from an IDE)
 * or when any error occurs (e.g. Windows permission errors — swallowed deliberately to
 * avoid endless retries).
 * NOTE(review): the no-code-source branch returns null while every other fallback
 * returns "UNKNOWN" — confirm callers handle a null return.
 */
public static String determinePackageVersion(Class<?> clz) { try { String implementationVersion = clz.getPackage().getImplementationVersion(); if (implementationVersion != null) { return implementationVersion; } CodeSource codeSource = clz.getProtectionDomain().getCodeSource(); if (codeSource == null) { return null; } URL codeSourceLocation = codeSource.getLocation(); URLConnection connection = codeSourceLocation.openConnection(); if (connection instanceof JarURLConnection) { return getImplementationVersion(((JarURLConnection) connection).getJarFile()); } final File file = new File(codeSourceLocation.toURI()); // idea 场景,查找版本失败 if (!file.exists() || file.isDirectory()) { return "UNKNOWN"; } try (JarFile jarFile = new JarFile(file)) { return getImplementationVersion(jarFile); } } catch (Throwable t) { log.warn("[JavaUtils] determinePackageVersion for clz[{}] failed, msg: {}", clz.getSimpleName(), t.toString()); // windows 下无权限访问会一直报错一直重试,需要在此兼容 return "UNKNOWN"; } }
// Smoke test: resolving LoggerFactory's package version must not throw; the value is only logged.
@Test void determinePackageVersion() { String packageVersion = JavaUtils.determinePackageVersion(LoggerFactory.class); log.info("[determinePackageVersion] LoggerFactory's package version: {}", packageVersion); }
/**
 * Validates an external-issue report in either format: the new CCT format (rules +
 * issues, issues checked against the declared rule ids) or the deprecated format
 * (issues only — accepted with a deprecation warning pointing to the docs). A report
 * with no issues at all (or rules without issues) is rejected as invalid.
 */
public void validate(ExternalIssueReport report, Path reportPath) { if (report.rules != null && report.issues != null) { Set<String> ruleIds = validateRules(report.rules, reportPath); validateIssuesCctFormat(report.issues, ruleIds, reportPath); } else if (report.rules == null && report.issues != null) { String documentationLink = documentationLinkGenerator.getDocumentationLink(DOCUMENTATION_SUFFIX); LOGGER.warn("External issues were imported with a deprecated format which will be removed soon. " + "Please switch to the newest format to fully benefit from Clean Code: {}", documentationLink); validateIssuesDeprecatedFormat(report.issues, reportPath); } else { throw new IllegalStateException(String.format("Failed to parse report '%s': invalid report detected.", reportPath)); } }
// A rule without the mandatory 'name' field must fail validation with a descriptive message.
@Test public void validate_whenMissingNameField_shouldThrowException() throws IOException { ExternalIssueReport report = read(REPORTS_LOCATION); report.rules[0].name = null; assertThatThrownBy(() -> validator.validate(report, reportPath)) .isInstanceOf(IllegalStateException.class) .hasMessage("Failed to parse report 'report-path': missing mandatory field 'name'."); }
public static InetSocketAddress getInetSocketAddressFromRpcURL(String rpcURL) throws Exception { // Pekko URLs have the form schema://systemName@host:port/.... if it's a remote Pekko URL try { final Address address = getAddressFromRpcURL(rpcURL); if (address.host().isDefined() && address.port().isDefined()) { return new InetSocketAddress(address.host().get(), (int) address.port().get()); } else { throw new MalformedURLException(); } } catch (MalformedURLException e) { throw new Exception("Could not retrieve InetSocketAddress from Pekko URL " + rpcURL); } }
// A local URL (no host:port part) cannot yield an InetSocketAddress and must throw.
@Test void getHostFromRpcURLThrowsExceptionIfAddressCannotBeRetrieved() throws Exception { final String localRpcURL = "pekko://flink/user/actor"; assertThatThrownBy(() -> PekkoUtils.getInetSocketAddressFromRpcURL(localRpcURL)) .isInstanceOf(Exception.class); }
/**
 * Resolves the display name for a test element: when the replace-underscores
 * property is enabled, underscores become spaces; otherwise the element name
 * is returned verbatim.
 */
static String determineDisplayName(String elementName) {
    if (replaceUnderscoresBySpaces()) {
        return underscoresReplacedBySpaces(elementName);
    }
    return elementName;
}
// With the replace-underscores property disabled, the element name must pass through unchanged.
@Test public void returns_original_name_if_property_is_set_to_false() { // Given String elementName = "some_element_Name"; ArchConfiguration.get().setProperty(JUNIT_DISPLAYNAME_REPLACE_UNDERSCORES_BY_SPACES_PROPERTY_NAME, "false"); // When String displayName = DisplayNameResolver.determineDisplayName(elementName); // Then assertThat(displayName).isEqualTo("some_element_Name"); }
public void error(String errorStatus, String errorScope, String errorOpenIDConfiguration) { if (Objects.requireNonNull(errorStatus).isEmpty()) throw new IllegalArgumentException("error status must not be empty"); this.errorStatus = errorStatus; this.errorScope = errorScope; this.errorOpenIDConfiguration = errorOpenIDConfiguration; this.token = null; }
// error(...) must store all three error fields and null out the previously held token.
@Test public void testError() { String errorStatus = "errorStatus"; String errorScope = "errorScope"; String errorOpenIDConfiguration = "errorOpenIDConfiguration"; OAuthBearerValidatorCallback callback = new OAuthBearerValidatorCallback(TOKEN.value()); callback.error(errorStatus, errorScope, errorOpenIDConfiguration); assertEquals(errorStatus, callback.errorStatus()); assertEquals(errorScope, callback.errorScope()); assertEquals(errorOpenIDConfiguration, callback.errorOpenIDConfiguration()); assertNull(callback.token()); }
/**
 * Runs a whitespace-insensitive git blame on {@code filename} and converts the result
 * to a list of BlameLine (date from the committer, revision and author email from the
 * source commit). Returns an empty list when blame yields nothing (e.g. symlinks) or
 * when any line has no blame info (file not committed yet); throws IllegalStateException
 * when the blame call itself fails.
 */
public List<BlameLine> blame(Git git, String filename) { BlameResult blameResult; try { blameResult = git.blame() // Equivalent to -w command line option .setTextComparator(RawTextComparator.WS_IGNORE_ALL) .setFilePath(filename).call(); } catch (Exception e) { throw new IllegalStateException("Unable to blame file " + filename, e); } List<BlameLine> lines = new ArrayList<>(); if (blameResult == null) { LOG.debug("Unable to blame file {}. It is probably a symlink.", filename); return emptyList(); } for (int i = 0; i < blameResult.getResultContents().size(); i++) { if (blameResult.getSourceAuthor(i) == null || blameResult.getSourceCommit(i) == null) { LOG.debug("Unable to blame file {}. No blame info at line {}. Is file committed? [Author: {} Source commit: {}]", filename, i + 1, blameResult.getSourceAuthor(i), blameResult.getSourceCommit(i)); return emptyList(); } lines.add(new BlameLine() .date(blameResult.getSourceCommitter(i).getWhen()) .revision(blameResult.getSourceCommit(i).getName()) .author(blameResult.getSourceAuthor(i).getEmailAddress())); } return lines; }
// An uncommitted copy of a tracked file has no blame info and must yield an empty result.
@Test public void new_file_returns_no_blame() throws IOException { String relativePath2 = "src/main/java/org/dummy/Dummy2.java"; // Emulate a new file FileUtils.copyFile(new File(baseDir.toFile(), DUMMY_JAVA), new File(baseDir.toFile(), relativePath2)); try (Git git = loadRepository(baseDir)) { assertThat(jGitBlameCommand.blame(git, DUMMY_JAVA)).hasSize(29); assertThat(jGitBlameCommand.blame(git, relativePath2)).isEmpty(); } }
/**
 * Closes both factories via an empty try-with-resources block: each resource's close()
 * runs even if the other's throws (secondary failures become suppressed exceptions),
 * and resources are closed in reverse declaration order.
 */
@Teardown public void teardown() throws Exception { try (AutoCloseable c1 = committerFactory; AutoCloseable c2 = backlogReaderFactory) {} }
// teardown() must close the backlog reader factory.
@Test public void tearDownClosesBacklogReaderFactory() throws Exception { sdf.teardown(); verify(backlogReaderFactory).close(); }
/**
 * Builds a KAFKA-format serde for the given persistence schema: zero columns yields a
 * void serde, exactly one column delegates to the typed serde for its Java type, and
 * multiple columns are rejected (the KAFKA format supports a single field only).
 */
static Serde<List<?>> createSerde(final PersistenceSchema schema) { final List<SimpleColumn> columns = schema.columns(); if (columns.isEmpty()) { // No columns: return new KsqlVoidSerde<>(); } if (columns.size() != 1) { throw new KsqlException("The '" + FormatFactory.KAFKA.name() + "' format only supports a single field. Got: " + columns); } final SimpleColumn singleColumn = columns.get(0); final Class<?> javaType = SchemaConverters.sqlToJavaConverter() .toJavaType(singleColumn.type()); return createSerde(singleColumn, javaType); }
// Serializing a null value through the single-column serde must produce null bytes.
@Test public void shouldSerializeNullAsNull() { // Given: final PersistenceSchema schema = schemaWithFieldOfType(SqlTypes.INTEGER); final Serde<List<?>> serde = KafkaSerdeFactory.createSerde(schema); // When: final byte[] result = serde.serializer().serialize("topic", null); // Then: assertThat(result, is(nullValue())); }
/**
 * Returns whether any Eureka instance is registered under {@code key} (queried as the
 * app name, secure=true). Any client failure is wrapped in a ShenyuException.
 */
@Override public Boolean exists(final String key) { try { List<InstanceInfo> instances = eurekaClient.getInstancesByVipAddressAndAppName(null, key, true); return !instances.isEmpty(); } catch (Exception e) { throw new ShenyuException(e); } }
// Covers the three exists() outcomes: instance present, no instances, and client failure wrapped as ShenyuException.
@Test void testExists() { final String key = "testService"; final List<InstanceInfo> instances = new ArrayList<>(); instances.add(mock(InstanceInfo.class)); // Mock this service exists when(eurekaClient.getInstancesByVipAddressAndAppName(null, key, true)).thenReturn(instances); assertTrue(eurekaDiscoveryServiceUnderTest.exists(key)); // Mock the service does not exist when(eurekaClient.getInstancesByVipAddressAndAppName(null, key, true)).thenReturn(Collections.emptyList()); assertFalse(eurekaDiscoveryServiceUnderTest.exists(key)); // Mock the throwing of Exception when(eurekaClient.getInstancesByVipAddressAndAppName(null, key, true)).thenThrow(new ShenyuException("test")); assertThrows(ShenyuException.class, () -> eurekaDiscoveryServiceUnderTest.exists(key)); }
/**
 * Two pluggable tasks share a type when they are the same concrete class AND are
 * backed by the same plugin configuration (id/version); the cast is safe because
 * the class check short-circuits first.
 */
@Override
public boolean hasSameTypeAs(Task task) {
    return getClass().equals(task.getClass())
            && this.pluginConfiguration.equals(((PluggableTask) task).pluginConfiguration);
}
// Same class but different plugin ids must not be considered the same type.
@Test public void shouldReturnFalseWhenPluginConfigurationForTwoPluggableTasksIsDifferent() { PluginConfiguration pluginConfiguration1 = new PluginConfiguration("test-plugin-1", "1.0"); PluginConfiguration pluginConfiguration2 = new PluginConfiguration("test-plugin-2", "1.0"); PluggableTask pluggableTask1 = new PluggableTask(pluginConfiguration1, new Configuration()); PluggableTask pluggableTask2 = new PluggableTask(pluginConfiguration2, new Configuration()); assertFalse(pluggableTask1.hasSameTypeAs(pluggableTask2)); }
/**
 * Writes the analysis log file (UTF-8) for the scanner report: plugins, bundled
 * analyzers, then global, project and per-module settings, in that order. Any I/O
 * failure is rethrown as IllegalStateException.
 */
public void init(ScannerReportWriter writer) { File analysisLog = writer.getFileStructure().analysisLog(); try (BufferedWriter fileWriter = Files.newBufferedWriter(analysisLog.toPath(), StandardCharsets.UTF_8)) { writePlugins(fileWriter); writeBundledAnalyzers(fileWriter); writeGlobalSettings(fileWriter); writeProjectSettings(fileWriter); writeModulesSettings(fileWriter); } catch (IOException e) { throw new IllegalStateException("Unable to write analysis log", e); } }
// Very long module property values must be truncated in the analysis log (to ~997 chars plus "..."), while short values are printed in full.
@Test public void shouldShortenModuleProperties() throws Exception { File baseDir = temp.newFolder(); DefaultInputModule rootModule = new DefaultInputModule(ProjectDefinition.create() .setBaseDir(baseDir) .setWorkDir(temp.newFolder()) .setProperty("sonar.projectKey", "foo") .setProperty("sonar.projectBaseDir", baseDir.toString()) .setProperty("sonar.aVeryLongProp", StringUtils.repeat("abcde", 1000))); when(store.allModules()).thenReturn(singletonList(rootModule)); when(hierarchy.root()).thenReturn(rootModule); publisher.init(writer); assertThat(writer.getFileStructure().analysisLog()).exists(); assertThat(FileUtils.readFileToString(writer.getFileStructure().analysisLog(), StandardCharsets.UTF_8)).containsSubsequence( "sonar.aVeryLongProp=" + StringUtils.repeat("abcde", 199) + "ab...", "sonar.projectBaseDir=" + baseDir, "sonar.projectKey=foo"); }
/**
 * Discovers cluster members via the AWS API: for each (private, public) address pair
 * one node is emitted per port in the configured port range. All failures degrade to
 * standalone mode (empty result); missing credentials and missing the
 * 'ec2:DescribeInstances' IAM action are logged once only (via the
 * isKnownExceptionAlreadyLogged flag), other errors are logged every time.
 */
@Override public Iterable<DiscoveryNode> discoverNodes() { try { Map<String, String> addresses = awsClient.getAddresses(); logResult(addresses); List<DiscoveryNode> result = new ArrayList<>(); for (Map.Entry<String, String> entry : addresses.entrySet()) { for (int port = portRange.getFromPort(); port <= portRange.getToPort(); port++) { Address privateAddress = new Address(entry.getKey(), port); Address publicAddress = new Address(entry.getValue(), port); result.add(new SimpleDiscoveryNode(privateAddress, publicAddress)); } } return result; } catch (NoCredentialsException e) { if (!isKnownExceptionAlreadyLogged) { LOGGER.warning("No AWS credentials found! Starting standalone. To use Hazelcast AWS discovery, configure" + " properties (access-key, secret-key) or assign the required IAM Role to your EC2 instance"); LOGGER.finest(e); isKnownExceptionAlreadyLogged = true; } } catch (RestClientException e) { if (e.getHttpErrorCode() == HTTP_FORBIDDEN) { if (!isKnownExceptionAlreadyLogged) { LOGGER.warning("AWS IAM Role Policy missing 'ec2:DescribeInstances' Action! Starting standalone."); isKnownExceptionAlreadyLogged = true; } LOGGER.finest(e); } else { LOGGER.warning("Cannot discover nodes. Starting standalone.", e); } } catch (Exception e) { LOGGER.warning("Cannot discover nodes. Starting standalone.", e); } return Collections.emptyList(); }
// One (private, public) address pair must expand into one node per port in the configured range.
@Test public void discoverNodes() { // given String privateIp = "192.168.1.15"; String publicIp = "38.146.24.2"; given(awsClient.getAddresses()).willReturn(Map.of(privateIp, publicIp)); // when Iterable<DiscoveryNode> nodes = awsDiscoveryStrategy.discoverNodes(); // then List<DiscoveryNode> nodeList = toList(nodes); DiscoveryNode node1 = nodeList.get(0); assertEquals(privateIp, node1.getPrivateAddress().getHost()); assertEquals(PORT1, node1.getPrivateAddress().getPort()); assertEquals(publicIp, node1.getPublicAddress().getHost()); DiscoveryNode node2 = nodeList.get(1); assertEquals(privateIp, node2.getPrivateAddress().getHost()); assertEquals(PORT2, node2.getPrivateAddress().getPort()); assertEquals(publicIp, node2.getPublicAddress().getHost()); }
/**
 * Builds a Connector model from raw connector properties: always a KTABLE source named
 * after the "name" property, with the key field taken from the ExtractField SMT config
 * when present (null otherwise).
 */
@VisibleForTesting Optional<Connector> fromConfigs(final Map<String, String> properties) { final String name = properties.get("name"); return Optional.of(new Connector( name, DataSourceType.KTABLE, extractKeyNameFromSmt(properties).orElse(null) )); }
// A JDBC source with a createKey ExtractField SMT must yield a KTABLE connector keyed on the SMT's field.
@Test public void shouldCreateJdbcConnectorWithValidConfigsAndSMT() { // Given: final Map<String, String> config = ImmutableMap.of( Connectors.CONNECTOR_CLASS, JdbcSource.JDBC_SOURCE_CLASS, "name", "foo", "transforms", "foobar,createKey", "transforms.createKey.type", "org.apache.kafka.connect.transforms.ExtractField$Key", "transforms.createKey.field", "key" ); // When: final Optional<Connector> maybeConnector = jdbcSource.fromConfigs(config); // Then: final Connector expected = new Connector( "foo", DataSourceType.KTABLE, "key"); assertThat(maybeConnector, OptionalMatchers.of(is(expected))); }
/**
 * Dispatches a multi-stage query plan to all involved servers under a single deadline.
 * Phase 1: serializes each stage's plan fragment and custom properties in parallel on
 * the executor (stage 0, the reduce stage, is skipped), cancelling outstanding
 * serialization futures if the deadline is hit. Phase 2: builds per-server requests
 * (only the stages/workers hosted on that server) and submits them asynchronously;
 * each outcome — success, server-reported error, or local failure — lands on a
 * blocking queue. Phase 3: drains the queue until every server acked or the deadline
 * expires, rethrowing the first dispatch error or server-side error, and finally
 * raising TimeoutException on deadline expiry.
 */
@VisibleForTesting void submit(long requestId, DispatchableSubPlan dispatchableSubPlan, long timeoutMs, Map<String, String> queryOptions) throws Exception { Deadline deadline = Deadline.after(timeoutMs, TimeUnit.MILLISECONDS); // Serialize the stage plans in parallel List<DispatchablePlanFragment> stagePlans = dispatchableSubPlan.getQueryStageList(); Set<QueryServerInstance> serverInstances = new HashSet<>(); // Ignore the reduce stage (stage 0) int numStages = stagePlans.size() - 1; List<CompletableFuture<StageInfo>> stageInfoFutures = new ArrayList<>(numStages); for (int i = 0; i < numStages; i++) { DispatchablePlanFragment stagePlan = stagePlans.get(i + 1); serverInstances.addAll(stagePlan.getServerInstanceToWorkerIdMap().keySet()); stageInfoFutures.add(CompletableFuture.supplyAsync(() -> { ByteString rootNode = PlanNodeSerializer.process(stagePlan.getPlanFragment().getFragmentRoot()).toByteString(); ByteString customProperty = QueryPlanSerDeUtils.toProtoProperties(stagePlan.getCustomProperties()); return new StageInfo(rootNode, customProperty); }, _executorService)); } List<StageInfo> stageInfos = new ArrayList<>(numStages); try { for (CompletableFuture<StageInfo> future : stageInfoFutures) { stageInfos.add(future.get(deadline.timeRemaining(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS)); } } finally { for (CompletableFuture<?> future : stageInfoFutures) { if (!future.isDone()) { future.cancel(true); } } } Map<String, String> requestMetadata = new HashMap<>(); requestMetadata.put(CommonConstants.Query.Request.MetadataKeys.REQUEST_ID, Long.toString(requestId)); requestMetadata.put(CommonConstants.Broker.Request.QueryOptionKey.TIMEOUT_MS, Long.toString(deadline.timeRemaining(TimeUnit.MILLISECONDS))); requestMetadata.putAll(queryOptions); ByteString protoRequestMetadata = QueryPlanSerDeUtils.toProtoProperties(requestMetadata); // Submit the query plan to all servers in parallel int numServers = serverInstances.size(); BlockingQueue<AsyncQueryDispatchResponse> 
dispatchCallbacks = new ArrayBlockingQueue<>(numServers); for (QueryServerInstance serverInstance : serverInstances) { _executorService.submit(() -> { try { Worker.QueryRequest.Builder requestBuilder = Worker.QueryRequest.newBuilder(); requestBuilder.setVersion(CommonConstants.MultiStageQueryRunner.PlanVersions.V1); for (int i = 0; i < numStages; i++) { int stageId = i + 1; DispatchablePlanFragment stagePlan = stagePlans.get(stageId); List<Integer> workerIds = stagePlan.getServerInstanceToWorkerIdMap().get(serverInstance); if (workerIds != null) { List<WorkerMetadata> stageWorkerMetadataList = stagePlan.getWorkerMetadataList(); List<WorkerMetadata> workerMetadataList = new ArrayList<>(workerIds.size()); for (int workerId : workerIds) { workerMetadataList.add(stageWorkerMetadataList.get(workerId)); } List<Worker.WorkerMetadata> protoWorkerMetadataList = QueryPlanSerDeUtils.toProtoWorkerMetadataList(workerMetadataList); StageInfo stageInfo = stageInfos.get(i); Worker.StageMetadata stageMetadata = Worker.StageMetadata.newBuilder().setStageId(stageId).addAllWorkerMetadata(protoWorkerMetadataList) .setCustomProperty(stageInfo._customProperty).build(); requestBuilder.addStagePlan( Worker.StagePlan.newBuilder().setRootNode(stageInfo._rootNode).setStageMetadata(stageMetadata) .build()); } } requestBuilder.setMetadata(protoRequestMetadata); getOrCreateDispatchClient(serverInstance).submit(requestBuilder.build(), serverInstance, deadline, dispatchCallbacks::offer); } catch (Throwable t) { LOGGER.warn("Caught exception while dispatching query: {} to server: {}", requestId, serverInstance, t); dispatchCallbacks.offer(new AsyncQueryDispatchResponse(serverInstance, null, t)); } }); } int numSuccessCalls = 0; // TODO: Cancel all dispatched requests if one of the dispatch errors out or deadline is breached. 
while (!deadline.isExpired() && numSuccessCalls < numServers) { AsyncQueryDispatchResponse resp = dispatchCallbacks.poll(deadline.timeRemaining(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); if (resp != null) { if (resp.getThrowable() != null) { throw new RuntimeException( String.format("Error dispatching query: %d to server: %s", requestId, resp.getServerInstance()), resp.getThrowable()); } else { Worker.QueryResponse response = resp.getQueryResponse(); assert response != null; if (response.containsMetadata(CommonConstants.Query.Response.ServerResponseStatus.STATUS_ERROR)) { throw new RuntimeException( String.format("Unable to execute query plan for request: %d on server: %s, ERROR: %s", requestId, resp.getServerInstance(), response.getMetadataOrDefault(CommonConstants.Query.Response.ServerResponseStatus.STATUS_ERROR, "null"))); } numSuccessCalls++; } } } if (deadline.isExpired()) { throw new TimeoutException("Timed out waiting for response of async query-dispatch"); } }
// If a query server fails the gRPC stream (onError), submit() must surface a dispatch error.
@Test public void testQueryDispatcherThrowsWhenQueryServerCallsOnError() { String sql = "SELECT * FROM a WHERE col1 = 'foo'"; QueryServer failingQueryServer = _queryServerMap.values().iterator().next(); Mockito.doAnswer(invocationOnMock -> { StreamObserver<Worker.QueryResponse> observer = invocationOnMock.getArgument(1); observer.onError(new RuntimeException("foo")); return null; }).when(failingQueryServer).submit(Mockito.any(), Mockito.any()); DispatchableSubPlan dispatchableSubPlan = _queryEnvironment.planQuery(sql); try { _queryDispatcher.submit(REQUEST_ID_GEN.getAndIncrement(), dispatchableSubPlan, 10_000L, Collections.emptyMap()); Assert.fail("Method call above should have failed"); } catch (Exception e) { Assert.assertTrue(e.getMessage().contains("Error dispatching query")); } Mockito.reset(failingQueryServer); }
/** Returns true when the given path is selected by the projection filter (delegates to getPresentPaths with a singleton set). */
public static boolean isPathPresent(MaskTree filter, PathSpec path) { return !getPresentPaths(filter, Collections.singleton(path)).isEmpty(); }
// A positive mask on foo/*/baz must make foo, its wildcard children, baz, and baz's descendants present — but not sibling leaves like foo/bar/xyz.
@Test public void testPositiveWithWildcardSinglePath() { final MaskTree filter = new MaskTree(); filter.addOperation(new PathSpec("foo", PathSpec.WILDCARD, "baz"), MaskOperation.POSITIVE_MASK_OP); Assert.assertTrue(ProjectionUtil.isPathPresent(filter, new PathSpec("foo"))); Assert.assertTrue(ProjectionUtil.isPathPresent(filter, new PathSpec("foo", "bar"))); Assert.assertTrue(ProjectionUtil.isPathPresent(filter, new PathSpec("foo", "bar", "baz"))); Assert.assertTrue(ProjectionUtil.isPathPresent(filter, new PathSpec("foo", "bar", "baz", "xyz"))); Assert.assertTrue(ProjectionUtil.isPathPresent(filter, new PathSpec("foo", "bar", "baz", "abc", "xyz"))); Assert.assertFalse(ProjectionUtil.isPathPresent(filter, new PathSpec("foo", "bar", "xyz"))); }
/**
 * Captures output files from the working directory: every file matching one of the
 * glob patterns is uploaded to internal storage under a unique name, and the result
 * maps each file's working-dir-relative path to its storage URI. Logs the capture
 * count at TRACE level.
 */
public static Map<String, URI> outputFiles(RunContext runContext, List<String> outputs) throws Exception { List<Path> allFilesMatching = runContext.workingDir().findAllFilesMatching(outputs); var outputFiles = allFilesMatching.stream() .map(throwFunction(path -> new AbstractMap.SimpleEntry<>( runContext.workingDir().path().relativize(path).toString(), runContext.storage().putFile(path.toFile(), resolveUniqueNameForFile(path)) ))) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); if (runContext.logger().isTraceEnabled()) { runContext.logger().trace("Captured {} output(s).", allFilesMatching.size()); } return outputFiles; }
// Round-trip check: a single input file written via FilesService.inputFiles must be
// captured by outputFiles and yield exactly one storage URI.
@Test
void outputFiles() throws Exception {
    RunContext runContext = runContextFactory.of();
    Map<String, String> files = FilesService.inputFiles(runContext, Map.of("file.txt", "content"));
    Map<String, URI> outputs = FilesService.outputFiles(runContext, files.keySet().stream().toList());
    assertThat(outputs.size(), is(1));
}
/**
 * Updates the currently selected layout: saves the outgoing layout's properties,
 * re-injects the graph for the new selection, restores the incoming layout's
 * stored properties, and fires a SELECTED_LAYOUT property-change event.
 *
 * @param selectedLayout the layout to select, or {@code null} to clear the selection
 */
protected void setSelectedLayout(Layout selectedLayout) {
    Layout oldValue = this.selectedLayout;
    this.selectedLayout = selectedLayout;
    this.selectedBuilder = selectedLayout != null ? selectedLayout.getBuilder() : null;
    if (oldValue != null) {
        // Persist the outgoing layout's property values before switching away.
        saveProperties(oldValue);
    }
    injectGraph();
    if (selectedLayout != null) {
        // Restore previously saved property values for the incoming layout.
        loadProperties(selectedLayout);
    }
    firePropertyChangeEvent(SELECTED_LAYOUT, oldValue, selectedLayout);
}
// Verifies that selecting a layout loads its saved properties: the mock layout's
// local property must become 42.0 only after it is made the selected layout.
@Test
public void testLocalPropertyReset() throws Exception {
    LayoutModelImpl layoutModel = Utils.newLayoutModel();
    MockLayout layout = new MockLayoutBuilder().buildLayout();
    // Before selection the local property is not yet 42.0.
    Assert.assertNotEquals(42.0, layout.getLocalProperty(), 0.0);
    layoutModel.setSelectedLayout(layout);
    // Selection triggers loadProperties, restoring the stored value.
    Assert.assertEquals(42.0, layout.getLocalProperty(), 0.0);
}
public static List<String> finalDestination(List<String> elements) { if (isMagicPath(elements)) { List<String> destDir = magicPathParents(elements); List<String> children = magicPathChildren(elements); checkArgument(!children.isEmpty(), "No path found under the prefix " + MAGIC_PATH_PREFIX); ArrayList<String> dest = new ArrayList<>(destDir); if (containsBasePath(children)) { // there's a base marker in the path List<String> baseChildren = basePathChildren(children); checkArgument(!baseChildren.isEmpty(), "No path found under " + BASE); dest.addAll(baseChildren); } else { dest.add(filename(children)); } return dest; } else { return elements; } }
// A base element appearing BEFORE the magic directory is part of the parent path
// and must be preserved verbatim; only markers under the magic dir get special
// treatment.
@Test
public void testFinalDestinationIgnoresBaseBeforeMagic() {
    assertEquals(l(BASE, "home", "3.txt"),
        finalDestination(l(BASE, "home", MAGIC_PATH_PREFIX, "2", "3.txt")));
}
/**
 * Applies Polaris rate limiting to the incoming request before continuing the
 * filter chain. When the quota check rejects the call, the response is answered
 * with the configured fallback (or default) status/content and flow-control
 * headers, and the chain is NOT continued. A positive waitMs (unirate) delays
 * the request; any quota-API failure is logged and the request proceeds.
 */
@Override
protected void doFilterInternal(@NonNull HttpServletRequest request, @NonNull HttpServletResponse response,
        @NonNull FilterChain filterChain) throws ServletException, IOException {
    String localNamespace = MetadataContext.LOCAL_NAMESPACE;
    String localService = MetadataContext.LOCAL_SERVICE;
    Set<Argument> arguments = rateLimitRuleArgumentResolver.getArguments(request, localNamespace, localService);
    try {
        QuotaResponse quotaResponse = QuotaCheckUtils.getQuota(limitAPI, localNamespace, localService, 1,
                arguments, request.getRequestURI());
        if (quotaResponse.getCode() == QuotaResultCode.QuotaResultLimited) {
            // Rejected: answer with the custom fallback if configured, else the defaults.
            if (!Objects.isNull(polarisRateLimiterLimitedFallback)) {
                response.setStatus(polarisRateLimiterLimitedFallback.rejectHttpCode());
                String contentType = new MediaType(polarisRateLimiterLimitedFallback.mediaType(),
                        polarisRateLimiterLimitedFallback.charset()).toString();
                response.setContentType(contentType);
                response.getWriter().write(polarisRateLimiterLimitedFallback.rejectTips());
            }
            else {
                response.setStatus(polarisRateLimitProperties.getRejectHttpCode());
                response.setContentType("text/html;charset=UTF-8");
                response.getWriter().write(rejectTips);
            }
            response.addHeader(HeaderConstant.INTERNAL_CALLEE_RET_STATUS, RetStatus.RetFlowControl.getDesc());
            if (Objects.nonNull(quotaResponse.getActiveRule())) {
                try {
                    // Header values must be ASCII-safe, so URL-encode the active rule name.
                    String encodedActiveRuleName = URLEncoder.encode(
                            quotaResponse.getActiveRuleName(), UTF_8);
                    response.addHeader(HeaderConstant.INTERNAL_ACTIVE_RULE_NAME, encodedActiveRuleName);
                }
                catch (UnsupportedEncodingException e) {
                    LOG.error("Cannot encode {} for header internal-callee-activerule.",
                            quotaResponse.getActiveRuleName(), e);
                }
            }
            return;
        }
        // Unirate: smooth the request rate by sleeping for the advised wait time.
        if (quotaResponse.getCode() == QuotaResultCode.QuotaResultOk && quotaResponse.getWaitMs() > 0) {
            LOG.debug("The request of [{}] will waiting for {}ms.", request.getRequestURI(), quotaResponse.getWaitMs());
            Thread.sleep(quotaResponse.getWaitMs());
        }
    }
    catch (Throwable t) {
        // An exception occurs in the rate limiting API call,
        // which should not affect the call of the business process.
        LOG.error("fail to invoke getQuota, service is " + localService, t);
    }
    filterChain.doFilter(request, response);
}
// End-to-end check of the quota filter: pass-through, unirate waiting (>= 1000ms),
// rejection with custom and HTML fallback tips, and resilience to quota-API errors.
@Test
public void testDoFilterInternal() {
    // Create mock FilterChain
    FilterChain filterChain = (servletRequest, servletResponse) -> {
    };
    // Mock request
    MockHttpServletRequest request = new MockHttpServletRequest();
    quotaCheckServletFilter.init();
    quotaCheckWithHtmlRejectTipsServletFilter.init();
    try {
        // Pass
        MetadataContext.LOCAL_SERVICE = "TestApp1";
        MockHttpServletResponse testApp1Response = new MockHttpServletResponse();
        quotaCheckServletFilter.doFilterInternal(request, testApp1Response, filterChain);
        // Unirate waiting 1000ms
        MetadataContext.LOCAL_SERVICE = "TestApp2";
        MockHttpServletResponse testApp2Response = new MockHttpServletResponse();
        long startTimestamp = System.currentTimeMillis();
        quotaCheckServletFilter.doFilterInternal(request, testApp2Response, filterChain);
        assertThat(System.currentTimeMillis() - startTimestamp).isGreaterThanOrEqualTo(1000L);
        // Rate limited: default filter answers 419 with the plain tips and the rule-name header.
        MetadataContext.LOCAL_SERVICE = "TestApp3";
        MockHttpServletResponse testApp3Response = new MockHttpServletResponse();
        quotaCheckServletFilter.doFilterInternal(request, testApp3Response, filterChain);
        assertThat(testApp3Response.getStatus()).isEqualTo(419);
        assertThat(testApp3Response.getContentAsString()).isEqualTo("RejectRequestTips提示消息");
        assertThat(testApp3Response.getHeader(HeaderConstant.INTERNAL_ACTIVE_RULE_NAME)).isEqualTo("MOCK_RULE");
        // HTML-tips filter wraps the same message in <h1>.
        MockHttpServletResponse testApp3Response2 = new MockHttpServletResponse();
        quotaCheckWithHtmlRejectTipsServletFilter.doFilterInternal(request, testApp3Response2, filterChain);
        assertThat(testApp3Response2.getStatus()).isEqualTo(419);
        assertThat(testApp3Response2.getContentAsString()).isEqualTo("<h1>RejectRequestTips提示消息</h1>");
        // Exception from the quota API must not fail the request.
        MockHttpServletResponse testApp4Response = new MockHttpServletResponse();
        MetadataContext.LOCAL_SERVICE = "TestApp4";
        quotaCheckServletFilter.doFilterInternal(request, testApp4Response, filterChain);
    } catch (ServletException | IOException e) {
        fail("Exception encountered.", e);
    }
}
/**
 * Parses the major JDK version from a {@code java.version}-style string.
 * Leading digits are accumulated; a legacy "1." prefix (e.g. "1.8") is dropped
 * so the second component is returned. Parsing stops at the first non-digit
 * once a non-"1" value has been read.
 *
 * @param javaVersionStr the raw version string, e.g. "1.8.0_292" or "17.0.1"
 * @return the major version number, e.g. 8 or 17 (0 if no digits found)
 */
static public int getJDKVersion(String javaVersionStr) {
    int version = 0;
    for (int i = 0; i < javaVersionStr.length(); i++) {
        char ch = javaVersionStr.charAt(i);
        if (!Character.isDigit(ch)) {
            if (version == 1) {
                // Legacy "1.x" scheme: discard the leading 1 and keep parsing.
                version = 0;
                continue;
            }
            break;
        }
        version = version * 10 + (ch - '0');
    }
    return version;
}
// Exercises EnvUtil.getJDKVersion across legacy ("1.x") and modern version strings.
@Test
public void jdkVersion() {
    Assertions.assertEquals(4, EnvUtil.getJDKVersion("1.4.xx"));
    Assertions.assertEquals(5, EnvUtil.getJDKVersion("1.5"));
    Assertions.assertEquals(5, EnvUtil.getJDKVersion("1.5.xx"));
    Assertions.assertEquals(5, EnvUtil.getJDKVersion("1.5AA"));
    Assertions.assertEquals(9, EnvUtil.getJDKVersion("9EA"));
    Assertions.assertEquals(9, EnvUtil.getJDKVersion("9.0.1"));
    Assertions.assertEquals(18, EnvUtil.getJDKVersion("18.3+xx"));
    Assertions.assertEquals(21, EnvUtil.getJDKVersion("21.0.1"));
}
/**
 * Overwrites the value of a static (possibly final) field via reflection.
 * The {@code final} modifier is cleared by mutating Field's internal
 * "modifiers" field before assignment.
 * <p>
 * NOTE(review): the "modifiers" trick is blocked from Java 12 onward and the
 * field is not retrievable on Java 17+ (see inline comment) — callers gate
 * usage to Java 8/11.
 *
 * @param staticField the static field to modify; must not be null
 * @param newValue    the value to assign
 * @throws IllegalArgumentException if the field is null or not static
 */
public static void modifyStaticFinalField(Field staticField, Object newValue)
        throws NoSuchFieldException, IllegalAccessException {
    if (staticField == null) {
        throw new IllegalArgumentException("staticField must be not null");
    }
    // check is static field
    if (!Modifier.isStatic(staticField.getModifiers())) {
        throw new IllegalArgumentException("the `" + fieldToString(staticField) + "` is not a static field, cannot modify value.");
    }
    // remove the `final` keyword from the field
    if (Modifier.isFinal(staticField.getModifiers())) {
        // In java17, can't get the field `modifiers` from class `java.lang.reflect.Field`.
        Field modifiers = staticField.getClass().getDeclaredField("modifiers");
        modifiers.setAccessible(true);
        modifiers.setInt(staticField, staticField.getModifiers() & ~Modifier.FINAL);
    }
    // set new value
    staticField.setAccessible(true);
    staticField.set(staticField.getDeclaringClass(), newValue);
}
@Test @EnabledOnJre({JRE.JAVA_8, JRE.JAVA_11}) // `ReflectionUtil.modifyStaticFinalField` does not supported java17 and above versions public void testModifyStaticFinalField() throws NoSuchFieldException, IllegalAccessException { Assertions.assertEquals("hello", testValue); ReflectionUtil.modifyStaticFinalField(ReflectionUtilTest.class, "testValue", "hello world"); Assertions.assertEquals("hello world", testValue); // case: not a static field Assertions.assertThrows(IllegalArgumentException.class, () -> { ReflectionUtil.modifyStaticFinalField(ReflectionUtilTest.class, "testValue2", "hello"); }); }
/**
 * Returns all metrics registered in the backing registry, keyed by metric name.
 *
 * @return the registry's metric map
 */
@Override
public Map<String, Metric> getMetrics() {
    // Pure delegation to the underlying registry.
    final Map<String, Metric> registered = metricRegistry.getMetrics();
    return registered;
}
// When all 5 async attempts fail and failAfterMaxAttempts is set, the decorated
// CompletionStage completes exceptionally and the metrics report 5 total calls.
@Test
public void shouldReturnTotalNumberOfRequestsAs5ForFailAsync() {
    AsyncHelloWorldService helloWorldService = mock(AsyncHelloWorldService.class);
    ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    // Always-failing future so every retry attempt fails.
    CompletableFuture<String> failedFuture = new CompletableFuture<>();
    failedFuture.completeExceptionally(new HelloWorldException());
    given(helloWorldService.returnHelloWorld()).willReturn(failedFuture);
    Retry retry = Retry.of("metrics", RetryConfig.<String>custom()
        .retryExceptions(Exception.class)
        .maxAttempts(5)
        .failAfterMaxAttempts(true)
        .build());
    Supplier<CompletionStage<String>> supplier = Retry.decorateCompletionStage(retry, scheduler, helloWorldService::returnHelloWorld);
    assertThat(supplier.get())
        .failsWithin(5, TimeUnit.SECONDS)
        .withThrowableOfType(ExecutionException.class)
        .havingCause();
    assertThat(retry.getMetrics().getNumberOfTotalCalls()).isEqualTo(5);
}
/**
 * Produces a human-readable dump of every configured circuit-breaker setting,
 * mainly for logging and diagnostics.
 *
 * @return a string of the form {@code CircuitBreakerConfig {key=value, ...}}
 */
@Override
public String toString() {
    return new StringBuilder("CircuitBreakerConfig {")
        .append("recordExceptionPredicate=").append(recordExceptionPredicate)
        .append(", ignoreExceptionPredicate=").append(ignoreExceptionPredicate)
        .append(", recordExceptions=").append(Arrays.toString(recordExceptions))
        .append(", ignoreExceptions=").append(Arrays.toString(ignoreExceptions))
        .append(", failureRateThreshold=").append(failureRateThreshold)
        .append(", permittedNumberOfCallsInHalfOpenState=").append(permittedNumberOfCallsInHalfOpenState)
        .append(", slidingWindowSize=").append(slidingWindowSize)
        .append(", slidingWindowType=").append(slidingWindowType)
        .append(", minimumNumberOfCalls=").append(minimumNumberOfCalls)
        .append(", writableStackTraceEnabled=").append(writableStackTraceEnabled)
        .append(", automaticTransitionFromOpenToHalfOpenEnabled=").append(automaticTransitionFromOpenToHalfOpenEnabled)
        .append(", waitIntervalFunctionInOpenState=").append(waitIntervalFunctionInOpenState)
        .append(", slowCallRateThreshold=").append(slowCallRateThreshold)
        .append(", slowCallDurationThreshold=").append(slowCallDurationThreshold)
        .append("}")
        .toString();
}
// Sanity-checks the toString format: correct prefix/suffix and that configured
// values appear as key=value fragments.
@Test
public void testToString() {
    CircuitBreakerConfig config = custom()
        .slidingWindowSize(5)
        .recordExceptions(RuntimeException.class)
        .automaticTransitionFromOpenToHalfOpenEnabled(true)
        .slidingWindowType(SlidingWindowType.TIME_BASED).build();
    String result = config.toString();
    assertThat(result).startsWith("CircuitBreakerConfig {");
    assertThat(result).contains("slidingWindowSize=5");
    assertThat(result).contains("recordExceptions=[class java.lang.RuntimeException]");
    assertThat(result).contains("automaticTransitionFromOpenToHalfOpenEnabled=true");
    assertThat(result).contains("slidingWindowType=TIME_BASED");
    assertThat(result).endsWith("}");
}
/**
 * Renders the component's text, honouring inline colour tags of the form
 * {@code <col=RRGGBB>}: each tag switches the colour used for the text that
 * FOLLOWS it, starting from the component's base colour.
 *
 * @param graphics the graphics context to draw into
 * @return the rendered dimension (total text width, font height)
 */
@Override
public Dimension render(Graphics2D graphics) {
    // Temporarily switch fonts if a custom font was configured.
    Font originalFont = null;
    if (font != null) {
        originalFont = graphics.getFont();
        graphics.setFont(font);
    }
    final FontMetrics fontMetrics = graphics.getFontMetrics();
    Matcher matcher = COL_TAG_PATTERN.matcher(text);
    Color textColor = color;
    int idx = 0;
    int width = 0;
    // Draw each segment preceding a colour tag in the current colour, then
    // adopt the tag's colour for the next segment.
    while (matcher.find()) {
        String color = matcher.group(1);
        String s = text.substring(idx, matcher.start());
        idx = matcher.end();
        renderText(graphics, textColor, position.x + width, position.y, s);
        width += fontMetrics.stringWidth(s);
        textColor = Color.decode("#" + color);
    }
    {
        // Trailing segment after the last tag (or the whole text if no tags).
        String s = text.substring(idx);
        renderText(graphics, textColor, position.x + width, position.y, s);
        width += fontMetrics.stringWidth(s);
    }
    int height = fontMetrics.getHeight();
    if (originalFont != null) {
        graphics.setFont(originalFont);
    }
    return new Dimension(width, height);
}
// A <col=0000ff> tag mid-string must draw "test" in the base colour and "test2"
// in blue. Each renderText apparently issues a shadow pass then the coloured
// pass — hence the paired setColor/drawString verifications per segment.
@Test
public void testRender4() {
    TextComponent textComponent = new TextComponent();
    textComponent.setText("test<col=0000ff>test2");
    textComponent.render(graphics);
    InOrder g = inOrder(graphics);
    g.verify(graphics).setColor(Color.BLACK);
    g.verify(graphics).drawString(eq("test"), anyInt(), anyInt());
    g.verify(graphics).setColor(Color.WHITE);
    g.verify(graphics).drawString(eq("test"), anyInt(), anyInt());
    g.verify(graphics).setColor(Color.BLACK);
    g.verify(graphics).drawString(eq("test2"), anyInt(), anyInt());
    g.verify(graphics).setColor(Color.BLUE);
    g.verify(graphics).drawString(eq("test2"), anyInt(), anyInt());
}
/**
 * Lexicographically compares two byte ranges, treating bytes as unsigned.
 * Compares 8 bytes at a time via Unsafe for speed, then falls back to a
 * byte-by-byte tail; when one range is a prefix of the other, the shorter
 * range compares lower.
 *
 * @return a negative, zero, or positive integer per the usual compare contract
 */
public static int compareRanges(byte[] left, int leftOffset, int leftLength, byte[] right, int rightOffset, int rightLength) {
    long leftAddress = ARRAY_BYTE_BASE_OFFSET + leftOffset;
    long rightAddress = ARRAY_BYTE_BASE_OFFSET + rightOffset;
    int lengthToCompare = min(leftLength, rightLength);
    // Fast path: one 8-byte word per iteration.
    while (lengthToCompare >= SIZE_OF_LONG) {
        long leftLong = unsafe.getLong(left, leftAddress);
        long rightLong = unsafe.getLong(right, rightAddress);
        if (leftLong != rightLong) {
            // longBytesToLong presumably normalizes byte order so the signed
            // comparison matches lexicographic order — TODO confirm against its definition.
            return longBytesToLong(leftLong) < longBytesToLong(rightLong) ? -1 : 1;
        }
        leftAddress += SIZE_OF_LONG;
        rightAddress += SIZE_OF_LONG;
        lengthToCompare -= SIZE_OF_LONG;
    }
    // Tail: remaining bytes compared individually as unsigned values.
    while (lengthToCompare > 0) {
        int compareResult = compareUnsignedBytes(unsafe.getByte(left, leftAddress), unsafe.getByte(right, rightAddress));
        if (compareResult != 0) {
            return compareResult;
        }
        leftAddress++;
        rightAddress++;
        lengthToCompare--;
    }
    // Equal common prefix: shorter range sorts first.
    return Integer.compare(leftLength, rightLength);
}
// Equal content compares as 0, regardless of the offset into the source array.
@Test
public void testCompareRanges() {
    assertEquals(compareRanges("abcdefghij".getBytes(), 0, 10, "abcdefghij".getBytes(), 0, 10), 0);
    assertEquals(compareRanges("123abcdefghij".getBytes(), 3, 10, "abcdefghij".getBytes(), 0, 10), 0);
}
/**
 * Registers a file store with the StarOS service.
 *
 * @param fsInfo the file store to add
 * @return the key of the newly added file store
 * @throws DdlException if the underlying star client call fails
 */
public String addFileStore(FileStoreInfo fsInfo) throws DdlException {
    try {
        return client.addFileStore(fsInfo, serviceId);
    } catch (StarClientException clientError) {
        // Translate the client-level failure into a DDL-layer exception.
        throw new DdlException("Failed to add file store, error: " + clientError.getMessage());
    }
}
// Success path returns the file store key; a StarClientException from the client
// is wrapped into a DdlException with the client's message. JMockit Expectations
// key the mocked result off the serviceId field injected via Deencapsulation.
@Test
public void testAddFileStore() throws StarClientException, DdlException {
    S3FileStoreInfo s3FsInfo = S3FileStoreInfo.newBuilder()
        .setRegion("region").setEndpoint("endpoint").build();
    FileStoreInfo fsInfo = FileStoreInfo.newBuilder().setFsKey("test-fskey")
        .setFsName("test-fsname").setFsType(FileStoreType.S3).setS3FsInfo(s3FsInfo).build();
    new Expectations() {
        {
            client.addFileStore(fsInfo, "1");
            result = fsInfo.getFsKey();
            minTimes = 0;
            client.addFileStore(fsInfo, "2");
            result = new StarClientException(StatusCode.INVALID_ARGUMENT, "mocked exception");
        }
    };
    Deencapsulation.setField(starosAgent, "serviceId", "1");
    Assert.assertEquals("test-fskey", starosAgent.addFileStore(fsInfo));
    Deencapsulation.setField(starosAgent, "serviceId", "2");
    ExceptionChecker.expectThrowsWithMsg(DdlException.class,
        "Failed to add file store, error: INVALID_ARGUMENT:mocked exception",
        () -> starosAgent.addFileStore(fsInfo));
}
@Override public void validateRoleList(Collection<Long> ids) { if (CollUtil.isEmpty(ids)) { return; } // 获得角色信息 List<RoleDO> roles = roleMapper.selectBatchIds(ids); Map<Long, RoleDO> roleMap = convertMap(roles, RoleDO::getId); // 校验 ids.forEach(id -> { RoleDO role = roleMap.get(id); if (role == null) { throw exception(ROLE_NOT_EXISTS); } if (!CommonStatusEnum.ENABLE.getStatus().equals(role.getStatus())) { throw exception(ROLE_IS_DISABLE, role.getName()); } }); }
// A role whose status is DISABLE must cause validateRoleList to raise
// ROLE_IS_DISABLE carrying the role's name.
@Test
public void testValidateRoleList_notEnable() {
    // mock data
    RoleDO RoleDO = randomPojo(RoleDO.class, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus()));
    roleMapper.insert(RoleDO);
    // prepare arguments
    List<Long> ids = singletonList(RoleDO.getId());
    // invoke and assert the expected service exception
    assertServiceException(() -> roleService.validateRoleList(ids), ROLE_IS_DISABLE, RoleDO.getName());
}
/**
 * Creates a trigger that fires when the watermark passes the end of the window.
 *
 * @return a new {@link FromEndOfWindow} trigger instance
 */
public static FromEndOfWindow pastEndOfWindow() {
    final FromEndOfWindow trigger = new FromEndOfWindow();
    return trigger;
}
// The composite trigger's toString must reflect the pastEndOfWindow base plus
// the early/late firing sub-triggers in order.
@Test
public void testEarlyAndLateFiringsToString() {
    Trigger trigger = AfterWatermark.pastEndOfWindow()
        .withEarlyFirings(StubTrigger.named("t1"))
        .withLateFirings(StubTrigger.named("t2"));
    assertEquals(
        "AfterWatermark.pastEndOfWindow().withEarlyFirings(t1).withLateFirings(t2)",
        trigger.toString());
}
/**
 * Runs the statement through the configured injector and then executes it.
 * If the CommandRunner is degraded the statement is rejected up front; if
 * execution fails after injection, the injector's side effects are rolled
 * back before the exception is rethrown.
 *
 * @param statement        the statement to execute
 * @param executionContext the engine execution context
 * @param securityContext  the caller's security context
 * @return the execution response
 */
public StatementExecutorResponse execute(
        final ConfiguredStatement<? extends Statement> statement,
        final KsqlExecutionContext executionContext,
        final KsqlSecurityContext securityContext
) {
    // Refuse new statements while the command runner reports a problem.
    final String commandRunnerWarningString = commandRunnerWarning.get();
    if (!commandRunnerWarningString.equals("")) {
        throw new KsqlServerException("Failed to handle Ksql Statement."
            + System.lineSeparator()
            + commandRunnerWarningString);
    }
    final InjectorWithSideEffects injector = InjectorWithSideEffects.wrap(
        injectorFactory.apply(executionContext, securityContext.getServiceContext()));
    final ConfiguredStatementWithSideEffects<?> injectedWithSideEffects = injector.injectWithSideEffects(statement);
    try {
        return executeInjected(
            injectedWithSideEffects.getStatement(),
            statement,
            executionContext,
            securityContext);
    } catch (Exception e) {
        // Undo anything the injector changed on failure, then propagate.
        injector.revertSideEffects(injectedWithSideEffects);
        throw e;
    }
}
// If enqueueing the command fails, the distributor must abort the producer
// transaction and surface a KsqlStatementException that preserves the cause,
// the SQL text, and both the logged and unlogged messages.
@Test
public void shouldThrowExceptionOnFailureToEnqueue() {
    // Given:
    final KsqlException cause = new KsqlException("fail");
    when(queue.enqueueCommand(any(), any(), any())).thenThrow(cause);
    // When:
    final KsqlStatementException e = assertThrows(
        KsqlStatementException.class,
        () -> distributor.execute(CONFIGURED_STATEMENT, executionContext, securityContext)
    );
    // Then:
    assertThat(e.getMessage(), containsString("Could not write the statement into the command topic."));
    assertThat(e.getUnloggedMessage(), containsString(
        "Could not write the statement 'statement' into the command topic."));
    assertThat(e.getSqlStatement(), containsString("statement"));
    assertThat(e.getCause(), (is(cause)));
    verify(transactionalProducer, times(1)).abortTransaction();
}
@Override
public void reset() {
    // Drop all ringbuffer containers; they are lazily recreated on next access.
    containers.clear();
}
// After creating two ringbuffer containers, reset() must leave the service with
// zero containers.
@Test
public void reset() {
    final String foo = "foo";
    final String bar = "bar";
    service.getOrCreateContainer(
        service.getRingbufferPartitionId(foo),
        RingbufferService.getRingbufferNamespace(foo),
        service.getRingbufferConfig(foo));
    service.getOrCreateContainer(
        service.getRingbufferPartitionId(bar),
        RingbufferService.getRingbufferNamespace(bar),
        service.getRingbufferConfig(bar));
    service.reset();
    assertEquals(0, service.getContainers().size());
}
/**
 * Produces a join order for the given join graph via a priority-driven
 * traversal: each node's priority is its original index, and a priority queue
 * always expands the lowest-index reachable node next, so the original order
 * is preserved wherever edges allow. Disconnected components are restarted
 * from the first unvisited node.
 *
 * @param graph the join graph to order
 * @return the visit order expressed as original node indices
 */
public static List<Integer> getJoinOrder(JoinGraph graph) {
    ImmutableList.Builder<PlanNode> joinOrder = ImmutableList.builder();
    // Priority of each node = its original position in the graph.
    Map<PlanNodeId, Integer> priorities = new HashMap<>();
    for (int i = 0; i < graph.size(); i++) {
        priorities.put(graph.getNode(i).getId(), i);
    }
    PriorityQueue<PlanNode> nodesToVisit = new PriorityQueue<>(
        graph.size(),
        comparing(node -> priorities.get(node.getId())));
    Set<PlanNode> visited = new HashSet<>();
    nodesToVisit.add(graph.getNode(0));
    while (!nodesToVisit.isEmpty()) {
        PlanNode node = nodesToVisit.poll();
        // The queue may contain duplicates; only process a node once.
        if (!visited.contains(node)) {
            visited.add(node);
            joinOrder.add(node);
            for (JoinGraph.Edge edge : graph.getEdges(node)) {
                nodesToVisit.add(edge.getTargetNode());
            }
        }
        if (nodesToVisit.isEmpty() && visited.size() < graph.size()) {
            // disconnected graph, find new starting point
            Optional<PlanNode> firstNotVisitedNode = graph.getNodes().stream()
                .filter(graphNode -> !visited.contains(graphNode))
                .findFirst();
            if (firstNotVisitedNode.isPresent()) {
                nodesToVisit.add(firstNotVisitedNode.get());
            }
        }
    }
    checkState(visited.size() == graph.size());
    // Map the visit order back to original node indices.
    return joinOrder.build().stream()
        .map(node -> priorities.get(node.getId()))
        .collect(toImmutableList());
}
// A plan whose joins all have equi-conditions (no cross join) must keep the
// original node order (0, 1, 2).
@Test
public void testDonNotChangeOrderWithoutCrossJoin() {
    PlanNode plan = joinNode(
        joinNode(
            values(variable("a")),
            values(variable("b")),
            variable("a"), variable("b")),
        values(variable("c")),
        variable("c"), variable("b"));
    JoinGraph joinGraph = getOnlyElement(JoinGraph.buildFrom(plan));
    assertEquals(
        getJoinOrder(joinGraph),
        ImmutableList.of(0, 1, 2));
}
@Description("Returns a Geometry type Point object with the given coordinate values")
@ScalarFunction("ST_Point")
@SqlType(GEOMETRY_TYPE_NAME)
public static Slice stPoint(@SqlType(DOUBLE) double x, @SqlType(DOUBLE) double y) {
    // Build a JTS point from the raw coordinates, then serialize it into the
    // engine's binary geometry representation.
    final Slice serialized = serialize(createJtsPoint(x, y));
    return serialized;
}
// A geometry written into a block must read back as its WKT representation.
@Test
public void testGeometryGetObjectValue() {
    BlockBuilder builder = GEOMETRY.createBlockBuilder(null, 1);
    GEOMETRY.writeSlice(builder, GeoFunctions.stPoint(1.2, 3.4));
    Block block = builder.build();
    assertEquals("POINT (1.2 3.4)", GEOMETRY.getObjectValue(null, block, 0));
}
/**
 * Pages through API error logs matching the given query criteria.
 *
 * @param pageReqVO paging and filter parameters
 * @return one page of matching error-log rows
 */
@Override
public PageResult<ApiErrorLogDO> getApiErrorLogPage(ApiErrorLogPageReqVO pageReqVO) {
    // Pure delegation: all filtering is implemented by the mapper's page query.
    PageResult<ApiErrorLogDO> page = apiErrorLogMapper.selectPage(pageReqVO);
    return page;
}
// Paging query must match only the row satisfying ALL criteria; each inserted
// variant differs from the target row in exactly one field.
@Test
public void testGetApiErrorLogPage() {
    // mock data
    ApiErrorLogDO apiErrorLogDO = randomPojo(ApiErrorLogDO.class, o -> {
        o.setUserId(2233L);
        o.setUserType(UserTypeEnum.ADMIN.getValue());
        o.setApplicationName("yudao-test");
        o.setRequestUrl("foo");
        o.setExceptionTime(buildTime(2021, 3, 13));
        o.setProcessStatus(ApiErrorLogProcessStatusEnum.INIT.getStatus());
    });
    apiErrorLogMapper.insert(apiErrorLogDO);
    // userId mismatch
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, o -> o.setUserId(3344L)));
    // userType mismatch
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, o -> o.setUserType(UserTypeEnum.MEMBER.getValue())));
    // applicationName mismatch
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, o -> o.setApplicationName("test")));
    // requestUrl mismatch
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, o -> o.setRequestUrl("bar")));
    // exceptionTime mismatch: an earlier time, 2021-02-06 00:00:00
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, o -> o.setExceptionTime(buildTime(2021, 2, 6))));
    // processStatus mismatch
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, logDO -> logDO.setProcessStatus(ApiErrorLogProcessStatusEnum.DONE.getStatus())));
    // prepare query parameters matching only the first row
    ApiErrorLogPageReqVO reqVO = new ApiErrorLogPageReqVO();
    reqVO.setUserId(2233L);
    reqVO.setUserType(UserTypeEnum.ADMIN.getValue());
    reqVO.setApplicationName("yudao-test");
    reqVO.setRequestUrl("foo");
    reqVO.setExceptionTime(buildBetweenTime(2021, 3, 1, 2021, 3, 31));
    reqVO.setProcessStatus(ApiErrorLogProcessStatusEnum.INIT.getStatus());
    // invoke
    PageResult<ApiErrorLogDO> pageResult = apiErrorLogService.getApiErrorLogPage(reqVO);
    // assert: exactly one matching row
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(apiErrorLogDO, pageResult.getList().get(0));
}
/**
 * Negotiates the permessage-deflate WebSocket extension (RFC 7692) from the
 * client's offered parameters. Returns null if the offer is not
 * permessage-deflate, contains an unknown parameter, or requests a
 * server-side capability this handshaker does not allow.
 */
@Override
public WebSocketServerExtension handshakeExtension(WebSocketExtensionData extensionData) {
    if (!PERMESSAGE_DEFLATE_EXTENSION.equals(extensionData.name())) {
        return null;
    }
    boolean deflateEnabled = true;
    int clientWindowSize = MAX_WINDOW_SIZE;
    int serverWindowSize = MAX_WINDOW_SIZE;
    boolean serverNoContext = false;
    boolean clientNoContext = false;
    Iterator<Entry<String, String>> parametersIterator = extensionData.parameters().entrySet().iterator();
    // Stop processing as soon as any parameter disables the extension.
    while (deflateEnabled && parametersIterator.hasNext()) {
        Entry<String, String> parameter = parametersIterator.next();
        if (CLIENT_MAX_WINDOW.equalsIgnoreCase(parameter.getKey())) {
            // use preferred clientWindowSize because client is compatible with customization
            clientWindowSize = preferredClientWindowSize;
        } else if (SERVER_MAX_WINDOW.equalsIgnoreCase(parameter.getKey())) {
            // use provided windowSize if it is allowed
            if (allowServerWindowSize) {
                serverWindowSize = Integer.parseInt(parameter.getValue());
                if (serverWindowSize > MAX_WINDOW_SIZE || serverWindowSize < MIN_WINDOW_SIZE) {
                    deflateEnabled = false;
                }
            } else {
                deflateEnabled = false;
            }
        } else if (CLIENT_NO_CONTEXT.equalsIgnoreCase(parameter.getKey())) {
            // use preferred clientNoContext because client is compatible with customization
            clientNoContext = preferredClientNoContext;
        } else if (SERVER_NO_CONTEXT.equalsIgnoreCase(parameter.getKey())) {
            // use server no context if allowed
            if (allowServerNoContext) {
                serverNoContext = true;
            } else {
                deflateEnabled = false;
            }
        } else {
            // unknown parameter
            deflateEnabled = false;
        }
    }
    if (deflateEnabled) {
        return new PermessageDeflateExtension(compressionLevel, serverNoContext, serverWindowSize,
            clientNoContext, clientWindowSize, extensionFilterProvider);
    } else {
        return null;
    }
}
// Handshake negotiation for permessage-deflate: (1) no parameters, (2) client-side
// parameters, (3) disallowed server-side parameters (must be refused).
// NOTE(review): the second handshakeExtension call passes an empty map even though
// `parameters` (client_max_window/client_no_context) was just built — the local map
// is unused there, likely `parameters` was intended. Verify before fixing, since the
// response-data assertions could change once real parameters are negotiated.
@Test
public void testNormalHandshake() {
    WebSocketServerExtension extension;
    WebSocketExtensionData data;
    Map<String, String> parameters;
    // initialize
    PerMessageDeflateServerExtensionHandshaker handshaker = new PerMessageDeflateServerExtensionHandshaker();
    // execute
    extension = handshaker.handshakeExtension(
        new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, Collections.<String, String>emptyMap()));
    // test
    assertNotNull(extension);
    assertEquals(WebSocketServerExtension.RSV1, extension.rsv());
    assertTrue(extension.newExtensionDecoder() instanceof PerMessageDeflateDecoder);
    assertTrue(extension.newExtensionEncoder() instanceof PerMessageDeflateEncoder);
    // execute
    data = extension.newReponseData();
    assertEquals(PERMESSAGE_DEFLATE_EXTENSION, data.name());
    assertTrue(data.parameters().isEmpty());
    // initialize
    parameters = new HashMap<String, String>();
    parameters.put(CLIENT_MAX_WINDOW, null);
    parameters.put(CLIENT_NO_CONTEXT, null);
    // execute (see NOTE above: `parameters` is not actually passed here)
    extension = handshaker.handshakeExtension(
        new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, Collections.<String, String>emptyMap()));
    // test
    assertNotNull(extension);
    assertEquals(WebSocketServerExtension.RSV1, extension.rsv());
    assertTrue(extension.newExtensionDecoder() instanceof PerMessageDeflateDecoder);
    assertTrue(extension.newExtensionEncoder() instanceof PerMessageDeflateEncoder);
    // execute
    data = extension.newReponseData();
    // test
    assertEquals(PERMESSAGE_DEFLATE_EXTENSION, data.name());
    assertTrue(data.parameters().isEmpty());
    // initialize: server-side parameters are not allowed by the default handshaker
    parameters = new HashMap<String, String>();
    parameters.put(SERVER_MAX_WINDOW, "12");
    parameters.put(SERVER_NO_CONTEXT, null);
    // execute
    extension = handshaker.handshakeExtension(
        new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, parameters));
    // test: negotiation must fail
    assertNull(extension);
}
@Override public UserIdentity getUserIdentity(HttpServletRequest request, String name) { // ConfigurableSpnegoAuthenticator may pass names in servicename/host format but we only store the servicename int nameHostSeparatorIndex = name.indexOf('/'); String serviceName = nameHostSeparatorIndex > 0 ? name.substring(0, nameHostSeparatorIndex) : name; UserIdentity serviceIdentity = _serviceUserStore.getUserIdentity(serviceName); if (_trustedProxyIpPattern != null) { return _trustedProxyIpPattern.matcher(request.getRemoteAddr()).matches() ? serviceIdentity : null; } else { return serviceIdentity; } }
// With IP filtering configured, a request from an allowed address must resolve
// the service identity for the requested service name.
@Test
public void testSuccessfulLoginWithIpFiltering() throws Exception {
    TrustedProxyAuthorizationService srv =
        new TrustedProxyAuthorizationService(Collections.singletonList(AUTH_SERVICE_NAME), IP_FILTER);
    HttpServletRequest mockRequest = mock(HttpServletRequest.class);
    expect(mockRequest.getRemoteAddr()).andReturn("192.168.0.1");
    replay(mockRequest);
    srv.start();
    try {
        UserIdentity result = srv.getUserIdentity(mockRequest, AUTH_SERVICE_NAME);
        assertNotNull(result);
        assertEquals(AUTH_SERVICE_NAME, result.getUserPrincipal().getName());
        verify(mockRequest);
    } finally {
        // Always stop the service, even if assertions fail.
        srv.stop();
    }
}
/**
 * Serializes this path: its type flag, absolute remote path, optional
 * symlink target, and attributes.
 *
 * @param dict the target serializer
 * @param <T>  the serializer's output type
 * @return the serialized representation produced by the serializer
 */
@Override
public <T> T serialize(final Serializer<T> dict) {
    dict.setStringForKey(String.valueOf(type), "Type");
    dict.setStringForKey(this.getAbsolute(), "Remote");
    if (symlink != null) {
        // Only symlinks carry a target entry.
        dict.setObjectForKey(symlink, "Symbolic Link");
    }
    dict.setObjectForKey(attributes, "Attributes");
    return dict.getSerialized();
}
// Round-trip: serializing a file path and deserializing it must yield an equal path.
@Test
public void testDictionaryFile() {
    Path path = new Path("/path", EnumSet.of(Path.Type.file));
    assertEquals(path, new PathDictionary<>().deserialize((path.serialize(SerializerFactory.get()))));
}
/**
 * Derives a valid database name from the base string by delegating to
 * generateResourceId with the database-specific illegal-character set,
 * replacement character, max length, and timestamp format.
 *
 * @param baseString the human-readable base name
 * @return a sanitized, length-limited, timestamp-suffixed database name
 */
static String generateDatabaseName(String baseString) {
    return generateResourceId(
        baseString,
        ILLEGAL_DATABASE_NAME_CHARS,
        REPLACE_DATABASE_NAME_CHAR,
        MAX_DATABASE_NAME_LENGTH,
        TIME_FORMAT);
}
// Dollar signs are illegal in database names and must be replaced; the result is
// lower-cased and suffixed with a date-time-microseconds stamp.
@Test
public void testGenerateDatabaseNameShouldReplaceDollarSign() {
    String testBaseString = "Test$DB$Name";
    String actual = generateDatabaseName(testBaseString);
    assertThat(actual).matches("test-db-name-\\d{8}-\\d{6}-\\d{6}");
}
/**
 * Resolves a {@code Format} from the given format info (case-insensitively)
 * and validates its properties before returning it.
 * <p>
 * NOTE(review): {@code toUpperCase()} is locale-sensitive; under e.g. the
 * Turkish default locale "info" would not upper-case as expected. Consider
 * {@code toUpperCase(Locale.ROOT)} — TODO confirm and fix alongside the
 * file's import block.
 *
 * @param formatInfo the format name and properties
 * @return the resolved format
 */
public static Format of(final FormatInfo formatInfo) {
    final Format format = fromName(formatInfo.getFormat().toUpperCase());
    format.validateProperties(formatInfo.getProperties());
    return format;
}
// An unrecognised format name must raise a KsqlException naming the (upper-cased)
// offending format.
@Test
public void shouldThrowOnUnknownFormat() {
    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> of(FormatInfo.of("bob"))
    );
    // Then:
    assertThat(e.getMessage(), containsString("Unknown format: BOB"));
}
/**
 * Applies plugin defaults to the flow in the context of an execution.
 * Best-effort: any failure during injection is logged against the execution
 * and the original flow is returned unchanged.
 *
 * @param flow      the flow to enrich with defaults
 * @param execution the execution whose log receives any injection error
 * @return the flow with defaults applied, or the original flow on failure
 */
public Flow injectDefaults(Flow flow, Execution execution) {
    try {
        return this.injectDefaults(flow);
    } catch (Exception e) {
        // Surface the failure in the execution's logs instead of failing the run.
        RunContextLogger
            .logEntries(
                Execution.loggingEventFromException(e),
                LogEntry.of(execution)
            )
            .forEach(logQueue::emitAsync);
        return flow;
    }
}
// Forced plugin defaults must override a value set on the task itself, and the
// FIRST matching forced default wins over later ones (123, not 789 or 456).
@Test
public void forced() {
    DefaultTester task = DefaultTester.builder()
        .id("test")
        .type(DefaultTester.class.getName())
        .set(666)
        .build();
    Flow flow = Flow.builder()
        .tasks(Collections.singletonList(task))
        .pluginDefaults(List.of(
            new PluginDefault(DefaultTester.class.getName(), true, ImmutableMap.of(
                "set", 123
            )),
            new PluginDefault(DefaultTester.class.getName(), true, ImmutableMap.of(
                "set", 789
            )),
            new PluginDefault(DefaultTester.class.getName(), false, ImmutableMap.of(
                "value", 1,
                "set", 456,
                "arrays", Collections.singletonList(1)
            ))
        ))
        .build();
    Flow injected = pluginDefaultService.injectDefaults(flow);
    assertThat(((DefaultTester) injected.getTasks().getFirst()).getSet(), is(123));
}
/**
 * Decorates the span for a SQL endpoint: applies the base decoration, tags
 * the database system as "sql", and records the SQL statement when the
 * CamelSqlQuery header is present.
 */
@Override
public void pre(SpanAdapter span, Exchange exchange, Endpoint endpoint) {
    // Generic decoration first, then the SQL-specific tags.
    super.pre(span, exchange, endpoint);
    span.setTag(TagConstants.DB_SYSTEM, "sql");
    final String sqlQuery = exchange.getIn().getHeader(CAMEL_SQL_QUERY, String.class);
    if (sqlQuery == null) {
        return;
    }
    span.setTag(TagConstants.DB_STATEMENT, sqlQuery);
}
// With a CamelSqlQuery header present, pre() must tag the span with
// db.system=sql and db.statement set to the query text.
@Test
public void testPre() {
    Endpoint endpoint = Mockito.mock(Endpoint.class);
    Exchange exchange = Mockito.mock(Exchange.class);
    Message message = Mockito.mock(Message.class);
    Mockito.when(endpoint.getEndpointUri()).thenReturn("test");
    Mockito.when(exchange.getIn()).thenReturn(message);
    Mockito.when(message.getHeader(SqlSpanDecorator.CAMEL_SQL_QUERY, String.class)).thenReturn(SQL_STATEMENT);
    SpanDecorator decorator = new SqlSpanDecorator();
    MockSpanAdapter span = new MockSpanAdapter();
    decorator.pre(span, exchange, endpoint);
    assertEquals("sql", span.tags().get(TagConstants.DB_SYSTEM));
    assertEquals(SQL_STATEMENT, span.tags().get(TagConstants.DB_STATEMENT));
}
/**
 * Validates that the given header value contains no illegal characters.
 *
 * @param value the header value to check
 * @return -1 if the value is valid (including empty); otherwise the index of
 *         the first offending character
 */
public static int validateValidHeaderValue(CharSequence value) {
    // Empty values trivially contain no illegal characters.
    if (value.length() == 0) {
        return -1;
    }
    // AsciiString has a faster byte-backed scan path.
    return value instanceof AsciiString
        ? verifyValidHeaderValueAsciiString((AsciiString) value)
        : verifyValidHeaderValueCharSequence(value);
}
// An empty AsciiString header value is valid (result -1).
@Test
void emptyValuesHaveNoIllegalCharsAsciiString() {
    assertEquals(-1, validateValidHeaderValue(AsciiString.EMPTY_STRING));
}
/**
 * Handles (re)configuration of this component. A null context resets every
 * setting to its default. Otherwise each property is read and validated;
 * any invalid timeout aborts the whole update (previous values are kept).
 *
 * @param context the OSGi component context carrying the new properties, or null
 */
@Modified
public void modified(ComponentContext context) {
    if (context == null) {
        // No configuration supplied: restore all defaults.
        netconfReplyTimeout = NETCONF_REPLY_TIMEOUT_DEFAULT;
        netconfConnectTimeout = NETCONF_CONNECT_TIMEOUT_DEFAULT;
        netconfIdleTimeout = NETCONF_IDLE_TIMEOUT_DEFAULT;
        sshLibrary = SSH_LIBRARY_DEFAULT;
        sshKeyPath = SSH_KEY_PATH_DEFAULT;
        sshClientLib = NetconfSshClientLib.APACHE_MINA;
        log.info("No component configuration");
        return;
    }
    Dictionary<?, ?> properties = context.getProperties();
    String newSshLibrary;
    String newSshKeyPath;
    int newNetconfReplyTimeout = getIntegerProperty(
        properties, NETCONF_REPLY_TIMEOUT, netconfReplyTimeout);
    int newNetconfConnectTimeout = getIntegerProperty(
        properties, NETCONF_CONNECT_TIMEOUT, netconfConnectTimeout);
    int newNetconfIdleTimeout = getIntegerProperty(
        properties, NETCONF_IDLE_TIMEOUT, netconfIdleTimeout);
    newSshLibrary = get(properties, SSH_LIBRARY);
    newSshKeyPath = get(properties, SSH_KEY_PATH);
    // Validate before applying anything; an invalid value leaves all settings untouched.
    if (newNetconfConnectTimeout < 0) {
        log.warn("netconfConnectTimeout is invalid - less than 0");
        return;
    } else if (newNetconfReplyTimeout <= 0) {
        log.warn("netconfReplyTimeout is invalid - 0 or less.");
        return;
    } else if (newNetconfIdleTimeout <= 0) {
        log.warn("netconfIdleTimeout is invalid - 0 or less.");
        return;
    }
    netconfReplyTimeout = newNetconfReplyTimeout;
    netconfConnectTimeout = newNetconfConnectTimeout;
    netconfIdleTimeout = newNetconfIdleTimeout;
    // Optional properties: only apply when explicitly present.
    if (newSshLibrary != null) {
        sshLibrary = newSshLibrary;
        sshClientLib = NetconfSshClientLib.getEnum(newSshLibrary);
    }
    if (newSshKeyPath != null) {
        sshKeyPath = newSshKeyPath;
    }
    log.info("Settings: {} = {}, {} = {}, {} = {}, {} = {}, {} = {}",
        NETCONF_REPLY_TIMEOUT, netconfReplyTimeout,
        NETCONF_CONNECT_TIMEOUT, netconfConnectTimeout,
        NETCONF_IDLE_TIMEOUT, netconfIdleTimeout,
        SSH_LIBRARY, sshLibrary,
        SSH_KEY_PATH, sshKeyPath);
}
@Test
public void testModified() {
    // Before modified() runs, the controller carries the default timeouts.
    assertEquals("Incorrect NetConf connect timeout, should be default",
            5, ctrl.netconfConnectTimeout);
    assertEquals("Incorrect NetConf session timeout, should be default",
            5, ctrl.netconfReplyTimeout);
    ctrl.modified(context);
    // After modified(), the values supplied by the mocked context must apply.
    // NOTE(review): the "should be default" wording in the next message looks
    // copy-pasted — the value 2 here is the configured one, not the default.
    assertEquals("Incorrect NetConf connect timeout, should be default",
            2, ctrl.netconfConnectTimeout);
    assertEquals("Incorrect NetConf session timeout", 1, ctrl.netconfReplyTimeout);
    assertEquals(NetconfSshClientLib.APACHE_MINA.toString(), ctrl.sshLibrary.toString());
}
/**
 * Evaluates the wrapped expression against the provided input data.
 *
 * @param defineFunctions PMML define-functions visible during evaluation
 * @param derivedFields derived fields visible during evaluation
 * @param outputFields output fields visible during evaluation
 * @param inputData raw input values keyed by field name
 * @return the evaluated result cast to {@link Number}, or {@code null}
 *         when the expression evaluates to null
 */
public Number evaluate(final List<KiePMMLDefineFunction> defineFunctions,
                       final List<KiePMMLDerivedField> derivedFields,
                       final List<KiePMMLOutputField> outputFields,
                       final Map<String, Object> inputData) {
    final List<KiePMMLNameValue> nameValues = getKiePMMLNameValuesFromInputDataMap(inputData);
    final ProcessingDTO processingDTO = new ProcessingDTO(defineFunctions, derivedFields,
            outputFields, Collections.emptyList(), nameValues,
            Collections.emptyList(), Collections.emptyList());
    final Object evaluated = expression.evaluate(processingDTO);
    if (evaluated == null) {
        return null;
    }
    return (Number) evaluated;
}
@Test
void evaluateFromApplyWithComplexPartialScores() {
    // Verifies that a ComplexPartialScore wrapping an Apply that divides two
    // field references evaluates to value1 / value2.
    // <ComplexPartialScore>
    //     <Apply function="/">
    //         <FieldRef>PARAM_1</FieldRef>
    //         <FieldRef>PARAM_2</FieldRef>
    //     </Apply>
    // </ComplexPartialScore>
    final KiePMMLFieldRef kiePMMLFieldRef1 = new KiePMMLFieldRef(PARAM_1, Collections.emptyList(), null);
    final KiePMMLFieldRef kiePMMLFieldRef2 = new KiePMMLFieldRef(PARAM_2, Collections.emptyList(), null);
    final KiePMMLApply kiePMMLApply = KiePMMLApply.builder("NAME", Collections.emptyList(), "/")
            .withKiePMMLExpressions(Arrays.asList(kiePMMLFieldRef1, kiePMMLFieldRef2))
            .build();
    final KiePMMLComplexPartialScore complexPartialScore = new KiePMMLComplexPartialScore(CUSTOM_FIELD,
            Collections.emptyList(), kiePMMLApply);
    // The derived fields supply PARAM_1/PARAM_2; input data itself is empty.
    Object retrieved = complexPartialScore.evaluate(Collections.emptyList(), getDerivedFields(),
            Collections.emptyList(), Collections.emptyMap());
    Object expected = value1 / value2;
    assertThat(retrieved).isEqualTo(expected);
}
public int getEvaluationMethodByDesc( String evaluationMethodDesc ) { if ( null != evaluationMethodDesc ) { for ( int i = 0; i < EVALUATION_METHOD_DESCS.length; ++i ) { if ( EVALUATION_METHOD_DESCS[ i ].equalsIgnoreCase( evaluationMethodDesc ) ) { return i; } } // If this fails, try to match using the code. return getEvaluationMethodByCode( evaluationMethodDesc ); } return DEFAULT_EVALUATION_METHOD; }
@Test
public void testGetEvaluationMethodByDesc() throws Exception {
    CheckSumMeta checkSumMeta = new CheckSumMeta();
    // Passing 'null' must yield the default method.
    int evaluationMethod = checkSumMeta.getEvaluationMethodByDesc( null );
    assertEquals( CheckSumMeta.DEFAULT_EVALUATION_METHOD, evaluationMethod );
    // Passing an unknown description also falls back to the default.
    evaluationMethod = checkSumMeta.getEvaluationMethodByDesc( "$#%#&$/(%&%#$%($/)" );
    assertEquals( CheckSumMeta.DEFAULT_EVALUATION_METHOD, evaluationMethod );
    // The descriptions resolve to their own index.
    evaluationMethod = checkSumMeta
        .getEvaluationMethodByDesc( CheckSumMeta.EVALUATION_METHOD_DESCS[ CheckSumMeta.EVALUATION_METHOD_BYTES ] );
    assertEquals( CheckSumMeta.EVALUATION_METHOD_BYTES, evaluationMethod );
    evaluationMethod = checkSumMeta.getEvaluationMethodByDesc( Const.KETTLE_CHECKSUM_EVALUATION_METHOD_PENTAHO_STRINGS );
    assertEquals( CheckSumMeta.EVALUATION_METHOD_PENTAHO_STRINGS, evaluationMethod );
    evaluationMethod = checkSumMeta
        .getEvaluationMethodByDesc( CheckSumMeta.EVALUATION_METHOD_DESCS[ CheckSumMeta.EVALUATION_METHOD_NATIVE_STRINGS ] );
    assertEquals( CheckSumMeta.EVALUATION_METHOD_NATIVE_STRINGS, evaluationMethod );
    // Passing the Code instead of the description must also resolve (fallback path).
    evaluationMethod = checkSumMeta.getEvaluationMethodByDesc( Const.KETTLE_CHECKSUM_EVALUATION_METHOD_BYTES );
    assertEquals( CheckSumMeta.EVALUATION_METHOD_BYTES, evaluationMethod );
    evaluationMethod = checkSumMeta.getEvaluationMethodByDesc( Const.KETTLE_CHECKSUM_EVALUATION_METHOD_PENTAHO_STRINGS );
    assertEquals( CheckSumMeta.EVALUATION_METHOD_PENTAHO_STRINGS, evaluationMethod );
    evaluationMethod = checkSumMeta.getEvaluationMethodByDesc( Const.KETTLE_CHECKSUM_EVALUATION_METHOD_NATIVE_STRINGS );
    assertEquals( CheckSumMeta.EVALUATION_METHOD_NATIVE_STRINGS, evaluationMethod );
}
/**
 * Returns the plugin's registered name, taken from {@code PluginEnum.CONTEXT_PATH}.
 */
@Override
public String named() {
    return PluginEnum.CONTEXT_PATH.getName();
}
@Test
public void namedTest() {
    // The plugin must report exactly the name registered in PluginEnum.
    final String expectedName = PluginEnum.CONTEXT_PATH.getName();
    assertEquals(expectedName, contextPathPlugin.named());
}
/**
 * Copies the validated, metadata-resolved storage configuration into the job
 * properties map, excluding every password-related key so that credentials
 * never leak into the job configuration.
 *
 * @param props source configuration properties (validated and resolved here)
 * @param jobProps destination map receiving the non-secret entries
 * @throws HiveException propagated from validation/resolution
 * @throws IOException propagated from validation/resolution
 */
public static void copyConfigurationToJob(Properties props, Map<String, String> jobProps)
    throws HiveException, IOException {
  checkRequiredPropertiesAreDefined(props);
  resolveMetadata(props);
  for (Entry<Object, Object> entry : props.entrySet()) {
    String key = String.valueOf(entry.getKey());
    // Credentials (plain, keystore, key and URI forms) must never be copied.
    boolean isSecret = key.equals(CONFIG_PWD)
        || key.equals(CONFIG_PWD_KEYSTORE)
        || key.equals(CONFIG_PWD_KEY)
        || key.equals(CONFIG_PWD_URI);
    if (!isSecret) {
      jobProps.put(key, String.valueOf(entry.getValue()));
    }
  }
}
@Test(expected = IllegalArgumentException.class)
public void testWithUnknownDatabaseType() throws Exception {
    // "Postgres" is not a recognised database type constant, so copying the
    // configuration must be rejected with IllegalArgumentException.
    Properties properties = new Properties();
    properties.put(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), "Postgres");
    properties.put(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc://localhost:3306/hive");
    properties.put(JdbcStorageConfig.QUERY.getPropertyName(), "SELECT col1,col2,col3 FROM sometable");
    Map<String, String> jobProperties = new HashMap<>();
    JdbcStorageConfigManager.copyConfigurationToJob(properties, jobProperties);
}
/**
 * Validates the machine-readable zone (MRZ) of a Dutch driving licence.
 * Only the BAP configuration ("D1") for Dutch ("NLD") licences is accepted.
 *
 * @param mrz the 30-character MRZ string to validate
 * @throws VerificationException when any structural check or the check digit fails
 */
public static void checkDrivingLicenceMrz(String mrz) {
    // Validate the length FIRST: the charAt/substring checks below would
    // otherwise throw StringIndexOutOfBoundsException for short input
    // instead of the documented VerificationException.
    if (mrz.length() != 30) {
        throw new VerificationException("Dutch MRZ should have length of 30");
    }
    if (mrz.charAt(0) != 'D') {
        throw new VerificationException("MRZ should start with D");
    }
    if (mrz.charAt(1) != '1') {
        throw new VerificationException("Only BAP configuration is supported (1)");
    }
    if (!mrz.substring(2, 5).equals("NLD")) {
        throw new VerificationException("Only Dutch driving licence supported");
    }
    checkMrzCheckDigit(mrz);
}
@Test
public void checkDrivingLicenceMrzLengthWrong() {
    // An MRZ that is not 30 characters long must be rejected with a
    // VerificationException, never a raw runtime exception.
    final String invalidMrz = "PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP";
    assertThrows(VerificationException.class, () -> MrzUtils.checkDrivingLicenceMrz(invalidMrz));
}
/**
 * Creates a {@code HeaderTemplate} for the given header name and values.
 *
 * @param name the header name; must be non-null and non-empty
 * @param values the header values; must be non-null
 * @return a new template using UTF-8 encoding
 * @throws IllegalArgumentException when name or values are missing
 */
public static HeaderTemplate create(String name, Iterable<String> values) {
    boolean nameMissing = name == null || name.isEmpty();
    if (nameMissing) {
        throw new IllegalArgumentException("name is required.");
    }
    if (values == null) {
        throw new IllegalArgumentException("values are required");
    }
    // Header templates are always encoded as UTF-8.
    return new HeaderTemplate(name, values, Util.UTF_8);
}
@Test
void it_should_throw_exception_when_name_is_null() {
    // A null header name must be rejected with the documented message.
    IllegalArgumentException thrown = assertThrows(
        IllegalArgumentException.class,
        () -> HeaderTemplate.create(null, Collections.singletonList("test")));
    assertThat(thrown.getMessage()).isEqualTo("name is required.");
}
/**
 * Returns an {@code Encoder} for the type handled by the given coder,
 * preferring a registered default encoder and falling back to a
 * coder-backed binary encoder when none exists.
 *
 * @param coder the Beam coder describing the element type
 * @return a suitable encoder, never null
 */
public static <T> Encoder<T> encoderFor(Coder<T> coder) {
    Encoder<T> defaultEncoder =
        getOrCreateDefaultEncoder(coder.getEncodedTypeDescriptor().getRawType());
    return defaultEncoder == null ? binaryEncoder(coder, true) : defaultEncoder;
}
@Test
public void testBeamEncoderOfPrivateType() {
    // Verify concrete types are not used in coder generation.
    // In case of private types this would cause an IllegalAccessError.
    List<PrivateString> data = asList(new PrivateString("1"), new PrivateString("2"));
    Dataset<PrivateString> dataset = createDataset(data, encoderFor(PrivateString.CODER));
    // Round-tripping through the dataset must preserve every element.
    assertThat(dataset.collect(), equalTo(data.toArray()));
}
/**
 * Decorates a Reactor return value ({@link Mono} or {@link Flux}) with the
 * given circuit breaker. Any other return type — including {@code null} —
 * is rejected.
 *
 * @param proceedingJoinPoint the intercepted method invocation
 * @param circuitBreaker the circuit breaker to apply
 * @param methodName the intercepted method's name (used by callers for context)
 * @return the decorated Mono or Flux
 * @throws IllegalArgumentException when the return value is not a supported Reactor type
 * @throws Throwable anything thrown by the intercepted method itself
 */
@Override
public Object handle(ProceedingJoinPoint proceedingJoinPoint, CircuitBreaker circuitBreaker,
    String methodName) throws Throwable {
    Object returnValue = proceedingJoinPoint.proceed();
    // Guard against a null return: the original code would NPE on
    // returnValue.getClass() instead of reporting the unsupported type.
    if (returnValue == null) {
        logger.error("Unsupported type for Reactor circuit breaker: null return value");
        throw new IllegalArgumentException(
            "Not Supported type for the circuit breaker in Reactor: null");
    }
    if (Flux.class.isAssignableFrom(returnValue.getClass())) {
        Flux<?> fluxReturnValue = (Flux<?>) returnValue;
        return fluxReturnValue.transformDeferred(
            io.github.resilience4j.reactor.circuitbreaker.operator.CircuitBreakerOperator
                .of(circuitBreaker));
    } else if (Mono.class.isAssignableFrom(returnValue.getClass())) {
        Mono<?> monoReturnValue = (Mono<?>) returnValue;
        return monoReturnValue.transformDeferred(CircuitBreakerOperator.of(circuitBreaker));
    } else {
        logger.error("Unsupported type for Reactor circuit breaker {}",
            returnValue.getClass().getTypeName());
        throw new IllegalArgumentException(
            "Not Supported type for the circuit breaker in Reactor:" + returnValue.getClass()
                .getName());
    }
}
@Test
public void testReactorTypes() throws Throwable {
    CircuitBreaker circuitBreaker = CircuitBreaker.ofDefaults("test");
    // A Mono return value must be decorated and returned non-null.
    when(proceedingJoinPoint.proceed()).thenReturn(Mono.just("Test"));
    assertThat(reactorCircuitBreakerAspectExt
        .handle(proceedingJoinPoint, circuitBreaker, "testMethod")).isNotNull();
    // A Flux return value must be decorated and returned non-null.
    when(proceedingJoinPoint.proceed()).thenReturn(Flux.just("Test"));
    assertThat(reactorCircuitBreakerAspectExt
        .handle(proceedingJoinPoint, circuitBreaker, "testMethod")).isNotNull();
}
/**
 * Copies node ports from the current (live) Service into the desired Service
 * for every port that does not already request an explicit node port, so the
 * later comparison does not see a spurious difference for ports whose node
 * port was assigned by the cluster.
 *
 * @param current the Service as it exists in the cluster
 * @param desired the Service definition being reconciled (mutated in place)
 */
protected void patchNodePorts(Service current, Service desired) {
    for (ServicePort desiredPort : desired.getSpec().getPorts()) {
        String desiredName = desiredPort.getName();
        for (ServicePort currentPort : current.getSpec().getPorts()) {
            // Re-check getNodePort() each iteration: once a value has been
            // copied, later same-named ports must not overwrite it.
            boolean copyable = desiredPort.getNodePort() == null
                    && desiredName.equals(currentPort.getName())
                    && currentPort.getNodePort() != null;
            if (copyable) {
                desiredPort.setNodePort(currentPort.getNodePort());
            }
        }
    }
}
@Test
public void testNodePortPatching() {
    KubernetesClient client = mock(KubernetesClient.class);
    // "current" mimics the live Service whose node ports were assigned by the cluster.
    Service current = new ServiceBuilder()
            .withNewMetadata()
            .withNamespace(NAMESPACE)
            .withName(RESOURCE_NAME)
            .endMetadata()
            .withNewSpec()
            .withType("NodePort")
            .withPorts(
                    new ServicePortBuilder()
                            .withName("port1")
                            .withPort(1234)
                            .withTargetPort(new IntOrString(1234))
                            .withNodePort(31234)
                            .build(),
                    new ServicePortBuilder()
                            .withName("port2")
                            .withPort(5678)
                            .withTargetPort(new IntOrString(5678))
                            .withNodePort(35678)
                            .build()
            )
            .endSpec()
            .build();
    // "desired" lists the same ports WITHOUT node ports and in reversed order,
    // exercising the by-name matching in patchNodePorts.
    Service desired = new ServiceBuilder()
            .withNewMetadata()
            .withNamespace(NAMESPACE)
            .withName(RESOURCE_NAME)
            .endMetadata()
            .withNewSpec()
            .withType("NodePort")
            .withPorts(
                    new ServicePortBuilder()
                            .withName("port2")
                            .withPort(5678)
                            .withTargetPort(new IntOrString(5678))
                            .build(),
                    new ServicePortBuilder()
                            .withName("port1")
                            .withPort(1234)
                            .withTargetPort(new IntOrString(1234))
                            .build()
            )
            .endSpec()
            .build();
    ServiceOperator op = new ServiceOperator(vertx, client);
    op.patchNodePorts(current, desired);
    // Indices are crossed because the desired port list is in reversed order.
    assertThat(current.getSpec().getPorts().get(0).getNodePort(), is(desired.getSpec().getPorts().get(1).getNodePort()));
    assertThat(current.getSpec().getPorts().get(1).getNodePort(), is(desired.getSpec().getPorts().get(0).getNodePort()));
}
/**
 * Returns the shared SQL-to-Connect type converter instance.
 */
public static SqlToConnectTypeConverter sqlToConnectConverter() {
    return SQL_TO_CONNECT_CONVERTER;
}
@Test
public void shouldConvertNestedComplexFromSql() {
    // Converting the nested SQL type must produce the matching Connect schema.
    assertThat(SchemaConverters.sqlToConnectConverter().toConnectSchema(NESTED_SQL_TYPE),
        is(NESTED_LOGICAL_TYPE));
}
/**
 * Removes {@code suffix} from the end of {@code str}.
 *
 * @param str the input string
 * @param suffix the suffix that must terminate {@code str}
 * @return {@code str} with the suffix removed
 * @throws RuntimeException when {@code str} does not end with {@code suffix}
 */
static String stripSuffix(String str, String suffix) {
    // Fail loudly rather than returning the string unchanged: callers rely
    // on the suffix actually being present.
    if (!str.endsWith(suffix)) {
        throw new RuntimeException("String " + str + " does not end with the "
            + "expected suffix " + suffix);
    }
    return str.substring(0, str.length() - suffix.length());
}
@Test
public void stripSuffixTest() {
    // A suffix equal to the whole string leaves nothing behind.
    assertEquals("", MessageGenerator.stripSuffix("FooBar", "FooBar"));
    // Ordinary suffixes are removed.
    assertEquals("FooBa", MessageGenerator.stripSuffix("FooBar", "r"));
    assertEquals("Foo", MessageGenerator.stripSuffix("FooBar", "Bar"));
    // A suffix that is not present is an error.
    assertThrows(RuntimeException.class, () -> MessageGenerator.stripSuffix("FooBar", "Baz"));
}
/**
 * Formats the given date via the three-argument overload, without
 * millisecond precision and using the UTC time zone.
 *
 * @param date the date to format
 * @return the formatted date string
 */
public static String format(Date date) {
    return format(date, false, TIMEZONE_UTC);
}
@Test
@SuppressWarnings("JavaUtilDate")
public void testDateFormatWithTimezone() {
    // A fixed instant: 2018-06-28T18:06:16.870Z.
    long time = 1530209176870L;
    Date date = new Date(time);
    // With millis enabled and Brazil/East (UTC-3) the offset must appear in the output.
    String dateStr = ISO8601Utils.format(date, true, TimeZone.getTimeZone("Brazil/East"));
    String expectedDate = "2018-06-28T15:06:16.870-03:00";
    assertThat(dateStr).isEqualTo(expectedDate);
}
public static long parseRelativeTimeInSeconds(String relativeTime) { if (relativeTime.isEmpty()) { throw new IllegalArgumentException("expiry time cannot be empty"); } int lastIndex = relativeTime.length() - 1; char lastChar = relativeTime.charAt(lastIndex); final char timeUnit; if (!Character.isAlphabetic(lastChar)) { // No unit specified, assume seconds timeUnit = 's'; lastIndex = relativeTime.length(); } else { timeUnit = Character.toLowerCase(lastChar); } long duration = Long.parseLong(relativeTime.substring(0, lastIndex)); switch (timeUnit) { case 's': return duration; case 'm': return TimeUnit.MINUTES.toSeconds(duration); case 'h': return TimeUnit.HOURS.toSeconds(duration); case 'd': return TimeUnit.DAYS.toSeconds(duration); case 'w': return 7 * TimeUnit.DAYS.toSeconds(duration); // No unit for months case 'y': return 365 * TimeUnit.DAYS.toSeconds(duration); default: throw new IllegalArgumentException("Invalid time unit '" + lastChar + "'"); } }
@Test
public void testParseRelativeTime() {
    // Plain numbers (no unit) are seconds; negative values pass through.
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("-1"), -1);
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("7"), 7);
    // Unit suffixes are case-insensitive.
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("3s"), 3);
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("3S"), 3);
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("5m"), TimeUnit.MINUTES.toSeconds(5));
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("5M"), TimeUnit.MINUTES.toSeconds(5));
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("7h"), TimeUnit.HOURS.toSeconds(7));
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("7H"), TimeUnit.HOURS.toSeconds(7));
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("9d"), TimeUnit.DAYS.toSeconds(9));
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("9D"), TimeUnit.DAYS.toSeconds(9));
    // Weeks and years are derived from days (no month unit exists).
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("3w"), 7 * TimeUnit.DAYS.toSeconds(3));
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("11y"), 365 * TimeUnit.DAYS.toSeconds(11));
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("11Y"), 365 * TimeUnit.DAYS.toSeconds(11));
    // Negative interval
    assertEquals(RelativeTimeUtil.parseRelativeTimeInSeconds("-5m"), -TimeUnit.MINUTES.toSeconds(5));
    try {
        RelativeTimeUtil.parseRelativeTimeInSeconds("");
        fail("should have failed");
    } catch (IllegalArgumentException e) {
        // expected
    }
    try {
        // Invalid time unit specified
        RelativeTimeUtil.parseRelativeTimeInSeconds("1234x");
        fail("should have failed");
    } catch (IllegalArgumentException e) {
        // expected
    }
}
/**
 * Returns the peak memory usage as tracked by the memory manager.
 */
@Override
public long getPeakMemoryUsage() {
    return memoryManager.getPeakMemoryUsage();
}
@Test
public void testBufferPeakMemoryUsage() {
    PartitionedOutputBuffer buffer = createPartitionedBuffer(
            createInitialEmptyOutputBuffers(PARTITIONED)
                    .withBuffer(FIRST, 0)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    Page page = createPage(1);
    // Peak usage is measured in serialized-page bytes, so compute that once.
    long serializePageSize = PAGES_SERDE.serialize(page).getRetainedSizeInBytes();
    // Each added page must raise the peak by exactly one serialized page.
    for (int i = 0; i < 5; i++) {
        addPage(buffer, page, 0);
        assertEquals(buffer.getPeakMemoryUsage(), (i + 1) * serializePageSize);
    }
}
/**
 * Creates a CORS response handler backed by {@code MocoCorsHandler}.
 *
 * @param configs zero or more CORS configuration items
 * @return the handler wrapping the given configurations
 */
public static ResponseHandler cors(final CorsConfig... configs) {
    return new MocoCorsHandler(configs);
}
@Test
public void should_support_cors_without_origin() {
    server.response(cors());
    running(server, () -> {
        // With no CORS configuration, no Allow-Origin header should be emitted.
        ClassicHttpResponse response = helper.getResponse(root());
        assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue());
    });
}
/**
 * Translates a {@code SearchArgument} tree into a filter {@code Expression}
 * by delegating to {@code translate} with the argument's expression and leaves.
 *
 * @param sarg the search argument to translate
 * @return the translated filter expression
 */
public static Expression generateFilterExpression(SearchArgument sarg) {
    return translate(sarg.getExpression(), sarg.getLeaves());
}
@Test
public void testLessThanOperand() {
    // Build a SearchArgument for "salary < 3000".
    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
    SearchArgument arg = builder.startAnd().lessThan("salary", PredicateLeaf.Type.LONG, 3000L).end().build();
    UnboundPredicate expected = Expressions.lessThan("salary", 3000L);
    UnboundPredicate actual = (UnboundPredicate) HiveIcebergFilterFactory.generateFilterExpression(arg);
    // Operator, literal and referenced column must all carry over.
    assertThat(expected.op()).isEqualTo(actual.op());
    assertThat(expected.literal()).isEqualTo(actual.literal());
    assertThat(expected.ref().name()).isEqualTo(actual.ref().name());
}
/**
 * Converts a single digit character to its Chinese numeral.
 * Non-digit characters are returned unchanged as a string.
 *
 * @param c the character to convert
 * @param isUseTraditional whether to use the traditional (financial) numerals
 * @return the Chinese numeral for '0'-'9', otherwise the character itself
 */
public static String numberCharToChinese(char c, boolean isUseTraditional) {
    final boolean isDigit = c >= '0' && c <= '9';
    if (!isDigit) {
        // Pass anything that is not a decimal digit straight through.
        return String.valueOf(c);
    }
    return String.valueOf(numberToChinese(c - '0', isUseTraditional));
}
@Test
public void numberCharToChineseTest() {
    // Single digits map to their simplified Chinese numerals.
    String s = NumberChineseFormatter.numberCharToChinese('1', false);
    assertEquals("一", s);
    s = NumberChineseFormatter.numberCharToChinese('2', false);
    assertEquals("二", s);
    s = NumberChineseFormatter.numberCharToChinese('0', false);
    assertEquals("零", s);
    // 非数字字符原样返回 (non-digit characters are returned unchanged)
    s = NumberChineseFormatter.numberCharToChinese('A', false);
    assertEquals("A", s);
}
/**
 * Creates a {@code GroupIntoBatches} transform that emits batches of at
 * most {@code batchSize} elements per key.
 *
 * @param batchSize maximum number of elements per batch; must be below Long.MAX_VALUE
 * @return the configured transform
 */
public static <K, InputT> GroupIntoBatches<K, InputT> ofSize(long batchSize) {
    // NOTE(review): checkState is normally for object state, not arguments —
    // checkArgument would be the usual idiom here. Changing it would alter the
    // exception type (IllegalStateException -> IllegalArgumentException) seen
    // by callers, so it is left as-is; confirm before changing.
    Preconditions.checkState(batchSize < Long.MAX_VALUE);
    return new GroupIntoBatches<K, InputT>(BatchingParams.createDefault()).withSize(batchSize);
}
@Test
@Category({
    ValidatesRunner.class,
    NeedsRunner.class,
    UsesTimersInParDo.class,
    UsesStatefulParDo.class,
    UsesOnWindowExpiration.class
})
public void testInGlobalWindowBatchSizeCount() {
    PCollection<KV<String, Iterable<String>>> collection =
        pipeline
            .apply("Input data", Create.of(data))
            .apply(GroupIntoBatches.ofSize(BATCH_SIZE))
            // set output coder
            .setCoder(KvCoder.of(StringUtf8Coder.of(), IterableCoder.of(StringUtf8Coder.of())));
    // Every emitted batch must respect the configured size.
    PAssert.that("Incorrect batch size in one or more elements", collection)
        .satisfies(
            new SerializableFunction<Iterable<KV<String, Iterable<String>>>, Void>() {
                @Override
                public Void apply(Iterable<KV<String, Iterable<String>>> input) {
                    assertTrue(checkBatchSizes(input));
                    return null;
                }
            });
    // The number of batches must be exactly the element count over the batch size.
    PAssert.thatSingleton("Incorrect collection size", collection.apply("Count", Count.globally()))
        .isEqualTo(EVEN_NUM_ELEMENTS / BATCH_SIZE);
    pipeline.run();
}