focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Returns the driver's port list as-is. NOTE(review): this exposes the internal
// mutable list; callers can mutate driver state — confirm whether that is intended.
@Override public List<UsbSerialPort> getPorts() { return mPorts; }
// Mocks a single-interface USB device with one interrupt (control) endpoint plus one
// bulk-IN and one bulk-OUT endpoint, then verifies that openInt() wires the CDC-ACM
// port's read/write endpoints to the bulk pair. Also checks the default probe table
// does NOT claim this mocked device (it has no vendor/product id configured).
@Test public void singleInterfaceDevice() throws Exception { UsbDeviceConnection usbDeviceConnection = mock(UsbDeviceConnection.class); UsbDevice usbDevice = mock(UsbDevice.class); UsbInterface usbInterface = mock(UsbInterface.class); UsbEndpoint controlEndpoint = mock(UsbEndpoint.class); UsbEndpoint readEndpoint = mock(UsbEndpoint.class); UsbEndpoint writeEndpoint = mock(UsbEndpoint.class); when(usbDeviceConnection.claimInterface(usbInterface,true)).thenReturn(true); when(usbDevice.getInterfaceCount()).thenReturn(1); when(usbDevice.getInterface(0)).thenReturn(usbInterface); when(usbInterface.getEndpointCount()).thenReturn(3); when(usbInterface.getEndpoint(0)).thenReturn(controlEndpoint); when(usbInterface.getEndpoint(1)).thenReturn(readEndpoint); when(usbInterface.getEndpoint(2)).thenReturn(writeEndpoint); when(controlEndpoint.getDirection()).thenReturn(UsbConstants.USB_DIR_IN); when(controlEndpoint.getType()).thenReturn(UsbConstants.USB_ENDPOINT_XFER_INT); when(readEndpoint.getDirection()).thenReturn(UsbConstants.USB_DIR_IN); when(readEndpoint.getType()).thenReturn(UsbConstants.USB_ENDPOINT_XFER_BULK); when(writeEndpoint.getDirection()).thenReturn(UsbConstants.USB_DIR_OUT); when(writeEndpoint.getType()).thenReturn(UsbConstants.USB_ENDPOINT_XFER_BULK); CdcAcmSerialDriver driver = new CdcAcmSerialDriver(usbDevice); CdcAcmSerialDriver.CdcAcmSerialPort port = (CdcAcmSerialDriver.CdcAcmSerialPort) driver.getPorts().get(0); port.mConnection = usbDeviceConnection; port.openInt(); assertEquals(readEndpoint, port.mReadEndpoint); assertEquals(writeEndpoint, port.mWriteEndpoint); ProbeTable probeTable = UsbSerialProber.getDefaultProbeTable(); Class<? extends UsbSerialDriver> probeDriver = probeTable.findDriver(usbDevice); assertNull(probeDriver); }
// Parses a JSON string into ViewMetadata, delegating node-level parsing to
// ViewMetadataParser and tagging the result with its metadata file location.
public static ViewMetadata fromJson(String metadataLocation, String json) { return JsonUtil.parse(json, node -> ViewMetadataParser.fromJson(metadataLocation, node)); }
// A view-metadata JSON file without a "location" field must fail parsing with a
// precise IllegalArgumentException message (exact-match via hasMessage).
@Test public void failReadingViewMetadataMissingLocation() throws Exception { String json = readViewMetadataInputFile("org/apache/iceberg/view/ViewMetadataMissingLocation.json"); assertThatThrownBy(() -> ViewMetadataParser.fromJson(json)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Cannot parse missing string: location"); }
// Starts an RDA (remote document authentication) session from MRZ document data.
// Rejects any document type other than PASSPORT/ID_CARD, logs start (867), marks the
// session DOCUMENTS_RECEIVED, then asks the RDA client for a session. An empty RDA
// response is logged (873) and rejected; otherwise the session is populated with the
// RDA handshake fields, success is logged (868), and an RdaResponse is returned.
// NOTE(review): remote-log codes 867/873/868 and their ordering appear to be part of
// the audit contract — do not reorder.
@Override public AppResponse process(Flow flow, MrzDocumentRequest params) { if(!(params.getDocumentType().equals("PASSPORT") || params.getDocumentType().equals("ID_CARD"))){ return new NokResponse(); } Map<String, String> travelDocument = Map.of( "documentNumber", params.getDocumentNumber(), "dateOfBirth", params.getDateOfBirth(), "dateOfExpiry", params.getDateOfExpiry()); digidClient.remoteLog("867", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId(), HIDDEN, true)); appSession.setRdaSessionStatus("DOCUMENTS_RECEIVED"); Map<String, String> rdaSession = rdaClient.startSession( returnUrl.concat("/iapi/rda/confirm"), appSession.getId(), params.getIpAddress(), List.of(travelDocument), List.of()); if(rdaSession.isEmpty()){ digidClient.remoteLog("873", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId(), HIDDEN, true)); return new NokResponse(); } appSession.setConfirmSecret(rdaSession.get("confirmSecret")); appSession.setUrl(rdaSession.get("url")); appSession.setRdaSessionId(rdaSession.get("sessionId")); appSession.setRdaSessionTimeoutInSeconds(rdaSession.get("expiration")); appSession.setRdaSessionStatus("SCANNING_FOREIGN"); appSession.setRdaDocumentType(params.getDocumentType()); appSession.setRdaDocumentNumber(params.getDocumentNumber()); digidClient.remoteLog("868", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId(), HIDDEN, true)); return new RdaResponse(appSession.getUrl(), appSession.getRdaSessionId()); }
// Happy path: a PASSPORT request must trigger exactly one "867" remote-log call.
// Only the log side effect is asserted here, not the returned response.
@Test public void processValid() { //given MrzDocumentRequest mrzDocumentRequest = new MrzDocumentRequest(); mrzDocumentRequest.setDocumentType("PASSPORT"); mrzDocumentRequest.setDateOfBirth("test"); mrzDocumentRequest.setDateOfExpiry("test"); mrzDocumentRequest.setDocumentNumber("dfdf"); //when mrzDocumentInitialized.process(mockedFlow, mrzDocumentRequest); //then verify(digidClientMock, times(1)).remoteLog("867", Map.of(lowerUnderscore(ACCOUNT_ID), mockedAppSession.getAccountId(), HIDDEN, true)); }
// Package-private accessor for the stop-with-savepoint operation future (test hook).
CompletableFuture<String> getOperationFuture() { return operationFuture; }
// When the job reaches FAILED and the savepoint future then completes exceptionally,
// the state must transition to Failing (FlinkException cause) and the overall
// stop-with-savepoint operation future must complete exceptionally as well.
@Test void testJobFailedAndSavepointOperationFails() throws Exception { try (MockStopWithSavepointContext ctx = new MockStopWithSavepointContext()) { StateTrackingMockExecutionGraph mockExecutionGraph = new StateTrackingMockExecutionGraph(); CompletableFuture<String> savepointFuture = new CompletableFuture<>(); StopWithSavepoint sws = createStopWithSavepoint(ctx, mockExecutionGraph, savepointFuture); ctx.setStopWithSavepoint(sws); ctx.setHowToHandleFailure(FailureResult::canNotRestart); ctx.setExpectFailing( failingArguments -> { assertThat(failingArguments.getExecutionGraph().getState()) .isEqualTo(JobStatus.FAILED); assertThat(failingArguments.getFailureCause()) .satisfies(FlinkAssertions.anyCauseMatches(FlinkException.class)); }); // fail job: mockExecutionGraph.completeTerminationFuture(JobStatus.FAILED); savepointFuture.completeExceptionally(new RuntimeException()); ctx.triggerExecutors(); assertThat(sws.getOperationFuture()).isCompletedExceptionally(); } }
public static boolean matches(MetricsFilter filter, MetricKey key) { if (filter == null) { return true; } @Nullable String stepName = key.stepName(); if (stepName == null) { if (!filter.steps().isEmpty()) { // The filter specifies steps, but the metric is not associated with a step. return false; } } else if (!matchesScope(stepName, filter.steps())) { // The filter specifies steps that differ from the metric's step return false; } // The filter's steps match the metric's step. return matchesName(key.metricName(), filter.names()); }
// Step filters: a filter whose step matches the key's step matches; a filter naming a
// different step does not, even with an identical class-namespaced name filter.
@Test public void testMatchStepNameFilters() { // MetricsFilter with a Class-namespace + name filter + step filter. // Successful match. assertTrue( MetricFiltering.matches( MetricsFilter.builder() .addNameFilter(MetricNameFilter.named(MetricFilteringTest.class, "myMetricName")) .addStep("myStep") .build(), MetricKey.create( "myStep", MetricName.named(MetricFilteringTest.class, "myMetricName")))); // Unsuccessful match. assertFalse( MetricFiltering.matches( MetricsFilter.builder() .addNameFilter(MetricNameFilter.named(MetricFilteringTest.class, "myMetricName")) .addStep("myOtherStep") .build(), MetricKey.create( "myStep", MetricName.named(MetricFilteringTest.class, "myMetricName")))); }
// Rewrites column identifiers throughout a PinotQuery (select, filter, group-by,
// order-by, having) to their canonical names using columnNameMap. A '*' in the select
// list alongside explicit columns triggers expansion of '*' into actual columns.
// Order-by entries are unwrapped via getFunctionCall().getOperands().get(0) because
// order-by is always a Function wrapping the ordered Expression. Alias handling is
// deliberately skipped for filters (aliases unsupported there). No-op if pinotQuery
// is null.
@VisibleForTesting static void updateColumnNames(String rawTableName, PinotQuery pinotQuery, boolean isCaseInsensitive, Map<String, String> columnNameMap) { if (pinotQuery != null) { boolean hasStar = false; for (Expression expression : pinotQuery.getSelectList()) { fixColumnName(rawTableName, expression, columnNameMap, isCaseInsensitive); //check if the select expression is '*' if (!hasStar && expression.equals(STAR)) { hasStar = true; } } //if query has a '*' selection along with other columns if (hasStar) { expandStarExpressionsToActualColumns(pinotQuery, columnNameMap); } Expression filterExpression = pinotQuery.getFilterExpression(); if (filterExpression != null) { // We don't support alias in filter expression, so we don't need to pass aliasMap fixColumnName(rawTableName, filterExpression, columnNameMap, isCaseInsensitive); } List<Expression> groupByList = pinotQuery.getGroupByList(); if (groupByList != null) { for (Expression expression : groupByList) { fixColumnName(rawTableName, expression, columnNameMap, isCaseInsensitive); } } List<Expression> orderByList = pinotQuery.getOrderByList(); if (orderByList != null) { for (Expression expression : orderByList) { // NOTE: Order-by is always a Function with the ordering of the Expression fixColumnName(rawTableName, expression.getFunctionCall().getOperands().get(0), columnNameMap, isCaseInsensitive); } } Expression havingExpression = pinotQuery.getHavingExpression(); if (havingExpression != null) { fixColumnName(rawTableName, havingExpression, columnNameMap, isCaseInsensitive); } } }
/**
 * Verifies that fully-qualified identifiers ({@code database.my_table.column_name_1st})
 * and bare identifiers are both rewritten to their canonical column names.
 */
@Test
public void testUpdateColumnNames() {
  String query = "SELECT database.my_table.column_name_1st, column_name_2nd from database.my_table";
  PinotQuery pinotQuery = CalciteSqlParser.compileToPinotQuery(query);
  Map<String, String> columnNameMap =
      Map.of("column_name_1st", "column_name_1st", "column_name_2nd", "column_name_2nd");
  BaseSingleStageBrokerRequestHandler.updateColumnNames("database.my_table", pinotQuery, false, columnNameMap);
  Assert.assertEquals(pinotQuery.getSelectList().size(), 2);
  for (Expression expression : pinotQuery.getSelectList()) {
    String columnName = expression.getIdentifier().getName();
    if (columnName.endsWith("column_name_1st")) {
      Assert.assertEquals(columnName, "column_name_1st");
    } else if (columnName.endsWith("column_name_2nd")) {
      Assert.assertEquals(columnName, "column_name_2nd");
    } else {
      // Bug fix: the failure message used to repeat "column_name_1st" twice.
      Assert.fail("rewritten column name should be column_name_1st or column_name_2nd, but is " + columnName);
    }
  }
}
@Override public String authenticateRequest(final HttpServletRequest request) { final String smUser = request.getHeader(SITE_MINDER_HEADER.getValue()); if (smUser == null || smUser.trim().isEmpty()) { // SiteMinder has not authenticated the user return null; } else { return smUser; } }
// With the SM_USER header populated, the authenticator returns the header value verbatim.
@Test public void willAuthenticateAUser() { final String userId = "a-test-user"; doReturn(userId).when(request).getHeader("SM_USER"); final String authenticatedUser = authenticator.authenticateRequest(request); assertThat(authenticatedUser, is(userId)); }
// Convenience overload: records a member access failure with a sentinel NONE_EXCEPTION
// so downstream code never has to null-check the exception argument.
public static void onFail(final ServerMemberManager manager, final Member member) { // To avoid null pointer judgments, pass in one NONE_EXCEPTION onFail(manager, member, ExceptionUtil.NONE_EXCEPTION); }
// A member already DOWN (with an accumulated fail count) must not trigger another
// member-change notification when onFail is recorded again.
@SuppressWarnings("checkstyle:AbbreviationAsWordInName") @Test void testMemberOnFailWhenMemberAlreadyNOUP() { final Member remote = buildMember(); remote.setState(NodeState.DOWN); remote.setFailAccessCnt(4); MemberUtil.onFail(memberManager, remote); verify(memberManager, never()).notifyMemberChange(remote); }
// Builds the StreamGraph, clearing accumulated transformations afterwards
// (delegates with clearTransformations=true).
@Internal public StreamGraph getStreamGraph() { return getStreamGraph(true); }
// getStreamGraph() clears recorded transformations after each call, whereas
// getExecutionPlan() does not — so dataStream3's nodes survive into the final graph.
@Test void testGetStreamGraph() { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); DataStreamSource<Integer> dataStream1 = env.fromData(1, 2, 3); dataStream1.sinkTo(new DiscardingSink<>()); assertThat(env.getStreamGraph().getStreamNodes().size()).isEqualTo(2); DataStreamSource<Integer> dataStream2 = env.fromData(1, 2, 3); dataStream2.sinkTo(new DiscardingSink<>()); // Previous getStreamGraph() call cleaned dataStream1 transformations assertThat(env.getStreamGraph().getStreamNodes().size()).isEqualTo(2); DataStreamSource<Integer> dataStream3 = env.fromData(1, 2, 3); dataStream3.sinkTo(new DiscardingSink<>()); // Does not clear the transformations. env.getExecutionPlan(); DataStreamSource<Integer> dataStream4 = env.fromData(1, 2, 3); dataStream4.sinkTo(new DiscardingSink<>()); // dataStream3 are preserved assertThat(env.getStreamGraph().getStreamNodes().size()).isEqualTo(4); }
// Setup-redirect gate: requests matching redirectMatcher are redirected to the setup
// location unless the system already has an initialized user; everything else passes
// through the chain untouched. userInitialized() emptiness is treated as "not
// initialized" via defaultIfEmpty(false).
@Override @NonNull public Mono<Void> filter(@NonNull ServerWebExchange exchange, @NonNull WebFilterChain chain) { return redirectMatcher.matches(exchange) .flatMap(matched -> { if (!matched.isMatch()) { return chain.filter(exchange); } return initializationStateGetter.userInitialized() .defaultIfEmpty(false) .flatMap(initialized -> { if (initialized) { return chain.filter(exchange); } // Redirect to set up page if system is not initialized. return redirectStrategy.sendRedirect(exchange, location); }); }); }
// Uninitialized system: the filter must redirect (here to "/console") and must NOT
// invoke the downstream chain.
@Test void shouldRedirectWhenSystemNotInitialized() { when(initializationStateGetter.userInitialized()).thenReturn(Mono.just(false)); WebFilterChain chain = mock(WebFilterChain.class); MockServerHttpRequest request = MockServerHttpRequest.get("/").build(); MockServerWebExchange exchange = MockServerWebExchange.from(request); when(serverRedirectStrategy.sendRedirect(any(), any())).thenReturn(Mono.empty().then()); Mono<Void> result = filter.filter(exchange, chain); StepVerifier.create(result) .expectNextCount(0) .expectComplete() .verify(); verify(serverRedirectStrategy).sendRedirect(eq(exchange), eq(URI.create("/console"))); verify(chain, never()).filter(eq(exchange)); }
// Updates a notification template: validates existence and code uniqueness, re-derives
// the template's parameter list from its content, then persists. Evicts ALL cached
// templates because the cache key may be the (possibly changed) code field.
@Override @CacheEvict(cacheNames = RedisKeyConstants.NOTIFY_TEMPLATE, allEntries = true) // allEntries 清空所有缓存,因为可能修改到 code 字段,不好清理 public void updateNotifyTemplate(NotifyTemplateSaveReqVO updateReqVO) { // 校验存在 validateNotifyTemplateExists(updateReqVO.getId()); // 校验站内信编码是否重复 validateNotifyTemplateCodeDuplicate(updateReqVO.getId(), updateReqVO.getCode()); // 更新 NotifyTemplateDO updateObj = BeanUtils.toBean(updateReqVO, NotifyTemplateDO.class); updateObj.setParams(parseTemplateContentParams(updateObj.getContent())); notifyTemplateMapper.updateById(updateObj); }
// Updating a template whose id is not persisted must raise NOTIFY_TEMPLATE_NOT_EXISTS.
@Test public void testUpdateNotifyTemplate_notExists() { // 准备参数 NotifyTemplateSaveReqVO reqVO = randomPojo(NotifyTemplateSaveReqVO.class); // 调用, 并断言异常 assertServiceException(() -> notifyTemplateService.updateNotifyTemplate(reqVO), NOTIFY_TEMPLATE_NOT_EXISTS); }
// Component classes contributed by SVN SCM support for container registration.
// NOTE(review): Arrays.asList returns a fixed-size list backed by the array — kept
// deliberately; switching to List.of would change mutation semantics for callers.
public static List<Object> getObjects() { return Arrays.asList(SvnScmProvider.class, SvnBlameCommand.class, SvnConfiguration.class ); }
// Smoke test: the SVN component list is non-empty (contents not asserted).
@Test public void getObjects_shouldNotBeEmpty() { assertThat(SvnScmSupport.getObjects()).isNotEmpty(); }
/**
 * Estimates the byte size of a mutation.
 *
 * <p>Deletes are measured by the size of the key set they target; writes sum the
 * estimated size of each value. Struct values are rejected outright.
 *
 * @throws IllegalArgumentException if any value is a STRUCT
 */
static long sizeOf(Mutation m) {
  if (m.getOperation() == Mutation.Op.DELETE) {
    return sizeOf(m.getKeySet());
  }
  long total = 0;
  for (Value value : m.getValues()) {
    switch (value.getType().getCode()) {
      case STRUCT:
        throw new IllegalArgumentException("Structs are not supported in mutation.");
      case ARRAY:
        total += estimateArrayValue(value);
        break;
      default:
        total += estimatePrimitiveValue(value);
    }
  }
  return total;
}
// Exercises size estimation for every supported array-typed column value (int64,
// float32/64, bool, NUMERIC, PG numeric, JSON, bytes, JSONB, proto enum, proto
// message) and pins the exact estimated byte counts. The expected constants encode
// the estimator's per-type width rules (e.g. 8 bytes per int64 → 24 for three).
@Test public void primitiveArrays() throws Exception { Mutation int64 = Mutation.newInsertOrUpdateBuilder("test") .set("one") .toInt64Array(new long[] {1L, 2L, 3L}) .build(); Mutation float32 = Mutation.newInsertOrUpdateBuilder("test") .set("one") .toFloat32Array(new float[] {1.0f, 2.0f}) .build(); Mutation float64 = Mutation.newInsertOrUpdateBuilder("test") .set("one") .toFloat64Array(new double[] {1., 2.}) .build(); Mutation bool = Mutation.newInsertOrUpdateBuilder("test") .set("one") .toBoolArray(new boolean[] {true, true, false, true}) .build(); Mutation numeric = Mutation.newInsertOrUpdateBuilder("test") .set("one") .toNumericArray( ImmutableList.of( new BigDecimal("12345678901234567890.123456789"), new BigDecimal("12345678901234567890123.1234567890123"), new BigDecimal("123456789012345678901234.1234567890123456"), new BigDecimal("1234567890123456789012345.1234567890123456789"))) .build(); Mutation pgNumeric = Mutation.newInsertOrUpdateBuilder("test") .set("one") .toPgNumericArray( ImmutableList.of( "12345678901234567890.123456789", "12345678901234567890123.1234567890123", "123456789012345678901234.1234567890123456", "1234567890123456789012345.1234567890123456789", "NaN")) .build(); Mutation json = Mutation.newInsertOrUpdateBuilder("test") .set("one") .toJsonArray( ImmutableList.of( "{\"key1\":\"value1\", \"key2\":\"value2\"}", "{\"key1\":\"value1\", \"key2\":20}")) .build(); Mutation bytes = Mutation.newInsertOrUpdateBuilder("test") .set("bytes") .toBytesArray( ImmutableList.of( ByteArray.copyFrom("some_bytes".getBytes(UTF_8)), ByteArray.copyFrom("some_bytes".getBytes(UTF_8)))) .build(); Mutation jsonb = Mutation.newInsertOrUpdateBuilder("test") .set("one") .toPgJsonbArray( ImmutableList.of( "{\"key123\":\"value123\", \"key321\":\"value321\"}", "{\"key456\":\"value456\", \"key789\":600}")) .build(); Mutation protoEnum = Mutation.newInsertOrUpdateBuilder("test") .set("one") .toProtoEnumArray(ImmutableList.of(1L, 2L, 3L), "customer.app.TestEnum") .build(); 
Mutation protos = Mutation.newInsertOrUpdateBuilder("test") .set("bytes") .toProtoMessageArray( ImmutableList.of( ByteArray.copyFrom("some_bytes".getBytes(UTF_8)), ByteArray.copyFrom("some_bytes".getBytes(UTF_8))), "customer.app.TestMessage") .build(); assertThat(MutationSizeEstimator.sizeOf(int64), is(24L)); assertThat(MutationSizeEstimator.sizeOf(float32), is(8L)); assertThat(MutationSizeEstimator.sizeOf(float64), is(16L)); assertThat(MutationSizeEstimator.sizeOf(bool), is(4L)); assertThat(MutationSizeEstimator.sizeOf(numeric), is(153L)); assertThat(MutationSizeEstimator.sizeOf(pgNumeric), is(156L)); assertThat(MutationSizeEstimator.sizeOf(json), is(62L)); assertThat(MutationSizeEstimator.sizeOf(bytes), is(20L)); assertThat(MutationSizeEstimator.sizeOf(jsonb), is(77L)); assertThat(MutationSizeEstimator.sizeOf(protoEnum), is(24L)); assertThat(MutationSizeEstimator.sizeOf(protos), is(20L)); }
// Zero-argument convenience overload; delegates with null parameter-type and
// argument arrays.
public Object invokeMethod(String methodName) { return invokeMethod(methodName, (Class []) null, (Object []) null); }
// Reflective invocation with argument-type inference: String methods are resolved from
// Object[] args (including primitive widening for int/char) and from List-based args.
@Test void testInvokeMethod_shouldAbleToInvokeMethodWithTypeInference() throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, IllegalAccessException { Resource r = new Resource(null, new ResourceId("pool1", "name1"), "object"); assertEquals("ect", r.invokeMethod("substring", new Object[]{3})); assertEquals("obj", r.invokeMethod("substring", new Object[]{0,3})); assertEquals(true, r.invokeMethod("startsWith", new Object[]{"obj"})); assertEquals(2, r.invokeMethod("indexOf", new Object[]{'j'})); assertEquals(4, r.invokeMethod("indexOf", new Object[]{"ct",3})); assertEquals("ect", r.invokeMethod("substring", new ArrayList<>(Arrays.asList(3)))); assertEquals("ec", r.invokeMethod("substring", new ArrayList<>(Arrays.asList(3,5)))); assertEquals(true, r.invokeMethod("startsWith", new ArrayList<>(Arrays.asList("obj")))); }
// Predicate: true when the creature's size equals the selector's configured size.
// NOTE(review): throws NPE if getSize() returns null — confirm that is acceptable.
@Override public boolean test(Creature t) { return t.getSize().equals(size); }
// A NORMAL-size selector accepts NORMAL creatures and rejects SMALL ones.
// NOTE(review): the method name "testMovement" does not describe what is tested here
// (size selection) — likely a copy-paste from a sibling test.
@Test void testMovement() { final var normalCreature = mock(Creature.class); when(normalCreature.getSize()).thenReturn(Size.NORMAL); final var smallCreature = mock(Creature.class); when(smallCreature.getSize()).thenReturn(Size.SMALL); final var normalSelector = new SizeSelector(Size.NORMAL); assertTrue(normalSelector.test(normalCreature)); assertFalse(normalSelector.test(smallCreature)); }
public int getTailMatchLength(ElementPath p) { if (p == null) { return 0; } int lSize = this.partList.size(); int rSize = p.partList.size(); // no match possible for empty sets if ((lSize == 0) || (rSize == 0)) { return 0; } int minLen = (lSize <= rSize) ? lSize : rSize; int match = 0; // loop from the end to the front for (int i = 1; i <= minLen; i++) { String l = this.partList.get(lSize - i); String r = p.partList.get(rSize - i); if (equalityCheck(l, r)) { match++; } else { break; } } return match; }
// Tail matching: "*" matches nothing (0); "*/x" matches a one-element tail regardless
// of case on either side (the /A vs */a and /a vs */A cases pin case-insensitive
// element comparison); "*/b/c" matches a two-element tail.
@Test public void testTailMatch() { { ElementPath p = new ElementPath("/a/b"); ElementSelector ruleElementSelector = new ElementSelector("*"); assertEquals(0, ruleElementSelector.getTailMatchLength(p)); } { ElementPath p = new ElementPath("/a"); ElementSelector ruleElementSelector = new ElementSelector("*/a"); assertEquals(1, ruleElementSelector.getTailMatchLength(p)); } { ElementPath p = new ElementPath("/A"); ElementSelector ruleElementSelector = new ElementSelector("*/a"); assertEquals(1, ruleElementSelector.getTailMatchLength(p)); } { ElementPath p = new ElementPath("/a"); ElementSelector ruleElementSelector = new ElementSelector("*/A"); assertEquals(1, ruleElementSelector.getTailMatchLength(p)); } { ElementPath p = new ElementPath("/a/b"); ElementSelector ruleElementSelector = new ElementSelector("*/b"); assertEquals(1, ruleElementSelector.getTailMatchLength(p)); } { ElementPath p = new ElementPath("/a/B"); ElementSelector ruleElementSelector = new ElementSelector("*/b"); assertEquals(1, ruleElementSelector.getTailMatchLength(p)); } { ElementPath p = new ElementPath("/a/b/c"); ElementSelector ruleElementSelector = new ElementSelector("*/b/c"); assertEquals(2, ruleElementSelector.getTailMatchLength(p)); } }
// Static factory wrapping a CommandInput in a CloudStringReader; does not consume
// any input on construction.
static @NonNull CloudStringReader of(final @NonNull CommandInput commandInput) { return new CloudStringReader(commandInput); }
// Constructing a reader must not advance or mutate the underlying CommandInput.
@Test void testUnchanged() { // Arrange final CommandInput commandInput = CommandInput.of("hello world"); // Act CloudStringReader.of(commandInput); // Assert assertThat(commandInput.remainingInput()).isEqualTo("hello world"); }
/**
 * Serializes each OpenFlow message into the shared output buffer, preserving
 * iteration order. The context is unused; each message writes its own wire format.
 */
protected final void encode(ChannelHandlerContext ctx, Iterable<OFMessage> msgs, ByteBuf out) {
    for (OFMessage message : msgs) {
        message.writeTo(out);
    }
}
// A single mock message encodes to its marker bytes. NOTE(review): the expected
// string "message1 " carries a trailing space — presumably MockOfMessage writes it;
// confirm against the mock's writeTo implementation.
@Test public void testEncode() throws Exception { OFMessageEncoder encoder = OFMessageEncoder.getInstance(); MockOfMessage message1 = new MockOfMessage(); encoder.encode(null, Collections.singletonList(message1), buf); assertThat(buf.isReadable(), Matchers.is(true)); byte[] channelBytes = new byte[buf.readableBytes()]; buf.readBytes(channelBytes); String expectedListMessage = "message1 "; assertThat(channelBytes, is(expectedListMessage.getBytes())); }
// Binds the authenticated social account to a local user inside one transaction:
// first unbinds any user previously attached to this social account, then unbinds any
// social account of the same type previously attached to this user, and finally
// inserts the new binding. Returns the social account's openid. The two delete calls
// must precede the insert to keep the (userType, socialUserId) pairing unique.
@Override @Transactional(rollbackFor = Exception.class) public String bindSocialUser(SocialUserBindReqDTO reqDTO) { // 获得社交用户 SocialUserDO socialUser = authSocialUser(reqDTO.getSocialType(), reqDTO.getUserType(), reqDTO.getCode(), reqDTO.getState()); Assert.notNull(socialUser, "社交用户不能为空"); // 社交用户可能之前绑定过别的用户,需要进行解绑 socialUserBindMapper.deleteByUserTypeAndSocialUserId(reqDTO.getUserType(), socialUser.getId()); // 用户可能之前已经绑定过该社交类型,需要进行解绑 socialUserBindMapper.deleteByUserTypeAndUserIdAndSocialType(reqDTO.getUserType(), reqDTO.getUserId(), socialUser.getType()); // 绑定当前登录的社交用户 SocialUserBindDO socialUserBind = SocialUserBindDO.builder() .userId(reqDTO.getUserId()).userType(reqDTO.getUserType()) .socialUserId(socialUser.getId()).socialType(socialUser.getType()).build(); socialUserBindMapper.insert(socialUserBind); return socialUser.getOpenid(); }
// Rebinding: with two stale bindings seeded (same user+type, and same social user),
// bindSocialUser must leave exactly one binding row and return the social openid.
@Test public void testBindSocialUser() { // 准备参数 SocialUserBindReqDTO reqDTO = new SocialUserBindReqDTO() .setUserId(1L).setUserType(UserTypeEnum.ADMIN.getValue()) .setSocialType(SocialTypeEnum.GITEE.getType()).setCode("test_code").setState("test_state"); // mock 数据:获得社交用户 SocialUserDO socialUser = randomPojo(SocialUserDO.class).setType(reqDTO.getSocialType()) .setCode(reqDTO.getCode()).setState(reqDTO.getState()); socialUserMapper.insert(socialUser); // mock 数据:用户可能之前已经绑定过该社交类型 socialUserBindMapper.insert(randomPojo(SocialUserBindDO.class).setUserId(1L).setUserType(UserTypeEnum.ADMIN.getValue()) .setSocialType(SocialTypeEnum.GITEE.getType()).setSocialUserId(-1L)); // mock 数据:社交用户可能之前绑定过别的用户 socialUserBindMapper.insert(randomPojo(SocialUserBindDO.class).setUserType(UserTypeEnum.ADMIN.getValue()) .setSocialType(SocialTypeEnum.GITEE.getType()).setSocialUserId(socialUser.getId())); // 调用 String openid = socialUserService.bindSocialUser(reqDTO); // 断言 List<SocialUserBindDO> socialUserBinds = socialUserBindMapper.selectList(); assertEquals(1, socialUserBinds.size()); assertEquals(socialUser.getOpenid(), openid); }
public static Instant windmillToHarnessTimestamp(long timestampUs) { // Windmill should never send us an unknown timestamp. Preconditions.checkArgument(timestampUs != Long.MIN_VALUE); Instant result = new Instant(divideAndRoundDown(timestampUs, 1000)); if (result.isBefore(BoundedWindow.TIMESTAMP_MIN_VALUE)) { return BoundedWindow.TIMESTAMP_MIN_VALUE; } if (result.isAfter(BoundedWindow.TIMESTAMP_MAX_VALUE)) { // End of time. return BoundedWindow.TIMESTAMP_MAX_VALUE; } return result; }
// Pins round-toward-negative-infinity microsecond→millisecond conversion and the
// clamping at both window bounds (including the exact microsecond offsets around
// Long.MIN_VALUE). NOTE(review): the third assertion calls windmillToHarnessWatermark
// rather than ...Timestamp — presumably intentional coverage of the watermark
// variant, but worth confirming.
@Test public void testWindmillToHarnessTimestamp() { assertEquals(BoundedWindow.TIMESTAMP_MAX_VALUE, windmillToHarnessTimestamp(Long.MAX_VALUE)); assertEquals( BoundedWindow.TIMESTAMP_MAX_VALUE, windmillToHarnessTimestamp(Long.MAX_VALUE - 17)); assertEquals(new Instant(16), windmillToHarnessWatermark(16999)); assertEquals(new Instant(17), windmillToHarnessTimestamp(17120)); assertEquals(new Instant(17), windmillToHarnessTimestamp(17000)); assertEquals(new Instant(-17), windmillToHarnessTimestamp(-16987)); assertEquals(new Instant(-17), windmillToHarnessTimestamp(-17000)); assertEquals(new Instant(-18), windmillToHarnessTimestamp(-17001)); assertEquals(BoundedWindow.TIMESTAMP_MIN_VALUE, windmillToHarnessTimestamp(Long.MIN_VALUE + 1)); assertEquals(BoundedWindow.TIMESTAMP_MIN_VALUE, windmillToHarnessTimestamp(Long.MIN_VALUE + 2)); // Long.MIN_VALUE = -9223372036854775808, need to add 1808 microseconds to get to next // millisecond returned by Beam. assertEquals( BoundedWindow.TIMESTAMP_MIN_VALUE.plus(Duration.millis(1)), windmillToHarnessTimestamp(Long.MIN_VALUE + 1808)); assertEquals( BoundedWindow.TIMESTAMP_MIN_VALUE, windmillToHarnessTimestamp(Long.MIN_VALUE + 1807)); }
/**
 * Resolves a queryable store by name and type.
 *
 * <p>Global stores take precedence: when the name resolves to a global store it is served
 * from the global provider; otherwise all local providers are wrapped so the resulting
 * store spans partitions.
 */
public <T> T getStore(final StoreQueryParameters<T> storeQueryParameters) {
    final String storeName = storeQueryParameters.storeName();
    final QueryableStoreType<T> queryableStoreType = storeQueryParameters.queryableStoreType();
    if (globalStoreProvider.stores(storeName, queryableStoreType).isEmpty()) {
        return queryableStoreType.create(
            new WrappingStoreProvider(storeProviders.values(), storeQueryParameters),
            storeName);
    }
    return queryableStoreType.create(globalStoreProvider, storeName);
}
// Querying a window store with an out-of-range partition must raise
// InvalidStateStoreException with a message naming the partition and the store.
@Test public void shouldThrowExceptionWhenWindowStoreWithPartitionDoesntExists() { final int partition = numStateStorePartitions + 1; final InvalidStateStoreException thrown = assertThrows(InvalidStateStoreException.class, () -> storeProvider.getStore( StoreQueryParameters .fromNameAndType(windowStore, QueryableStoreTypes.windowStore()) .withPartition(partition)).fetch("1", System.currentTimeMillis()) ); assertThat(thrown.getMessage(), equalTo(String.format("The specified partition %d for store %s does not exist.", partition, windowStore))); }
// Setter with validation: the capacity must be strictly positive; checkPositive
// rejects zero and negatives.
public void setExternalTaskQueueCapacity(int externalTaskQueueCapacity) { this.externalTaskQueueCapacity = checkPositive(externalTaskQueueCapacity, "externalTaskQueueCapacity"); }
// Zero capacity is rejected with IllegalArgumentException (boundary of checkPositive).
@Test public void test_setExternalTaskQueueCapacity_whenZero() { ReactorBuilder builder = newBuilder(); assertThrows(IllegalArgumentException.class, () -> builder.setExternalTaskQueueCapacity(0)); }
// Matches a template like "{a}:{b}" against a concrete string ("v1:v2") and returns
// {a=v1, b=v2}. Walks the expression with a small state machine (TEXT / VAR /
// END_VAR) while advancing a cursor j through str. Any structural mismatch —
// nested/unclosed braces, a missing delimiter, or literal text that disagrees —
// yields an empty map. A variable's value runs up to the next occurrence of the
// character following its '}' (or to end-of-string when '}' is last).
// NOTE: the missing breaks are deliberate fall-throughs: the '}' case falls into the
// default handler only when it is the final character, and END_VAR falls into TEXT
// so the delimiter character itself is consumed after the replacement is captured.
static Map<String, String> resolveVariables(String expression, String str) { if (expression == null || str == null) return Collections.emptyMap(); Map<String, String> resolvedVariables = new HashMap<>(); StringBuilder variableBuilder = new StringBuilder(); State state = State.TEXT; int j = 0; int expressionLength = expression.length(); for (int i = 0; i < expressionLength; i++) { char e = expression.charAt(i); switch (e) { case '{': if (state == END_VAR) return Collections.emptyMap(); state = VAR; break; case '}': if (state != VAR) return Collections.emptyMap(); state = END_VAR; if (i != expressionLength - 1) break; default: switch (state) { case VAR: variableBuilder.append(e); break; case END_VAR: String replacement; boolean ec = i == expressionLength - 1; if (ec) { replacement = str.substring(j); } else { int k = str.indexOf(e, j); if (k == -1) return Collections.emptyMap(); replacement = str.substring(j, str.indexOf(e, j)); } resolvedVariables.put(variableBuilder.toString(), replacement); j += replacement.length(); if (j == str.length() && ec) return resolvedVariables; variableBuilder.setLength(0); state = TEXT; case TEXT: if (str.charAt(j) != e) return Collections.emptyMap(); j++; } } } return resolvedVariables; }
/**
 * Two variables separated by a literal ':' both resolve to their delimited values.
 */
@Test
public void testDualVariables3() {
    Map<String, String> res = resolveVariables("{a}:{b}", "value1:value2");
    assertEquals(2, res.size());
    // Fixed argument order: JUnit's assertEquals takes (expected, actual); the
    // original passed the actual value first, which garbles failure messages.
    assertEquals("value1", res.get("a"));
    assertEquals("value2", res.get("b"));
}
// Verifies a decoded JWT against the given public key. Selects the auth0 Algorithm
// from the declared key algorithm (RS256/384/512 cast to RSAPublicKey, ES256/384/512
// to ECPublicKey — a ClassCastException means key/alg family mismatch), then builds a
// verifier requiring iat/exp/nbf/sub presence, an allowed audience, and clock leeway.
// The role claim's presence is additionally required when it is not the subject.
// Every failure path increments a metric with a specific failure code before
// translating the library exception into an AuthenticationException.
DecodedJWT verifyJWT(PublicKey publicKey, String publicKeyAlg, DecodedJWT jwt) throws AuthenticationException { if (publicKeyAlg == null) { incrementFailureMetric(AuthenticationExceptionCode.UNSUPPORTED_ALGORITHM); throw new AuthenticationException("PublicKey algorithm cannot be null"); } Algorithm alg; try { switch (publicKeyAlg) { case ALG_RS256: alg = Algorithm.RSA256((RSAPublicKey) publicKey, null); break; case ALG_RS384: alg = Algorithm.RSA384((RSAPublicKey) publicKey, null); break; case ALG_RS512: alg = Algorithm.RSA512((RSAPublicKey) publicKey, null); break; case ALG_ES256: alg = Algorithm.ECDSA256((ECPublicKey) publicKey, null); break; case ALG_ES384: alg = Algorithm.ECDSA384((ECPublicKey) publicKey, null); break; case ALG_ES512: alg = Algorithm.ECDSA512((ECPublicKey) publicKey, null); break; default: incrementFailureMetric(AuthenticationExceptionCode.UNSUPPORTED_ALGORITHM); throw new AuthenticationException("Unsupported algorithm: " + publicKeyAlg); } } catch (ClassCastException e) { incrementFailureMetric(AuthenticationExceptionCode.ALGORITHM_MISMATCH); throw new AuthenticationException("Expected PublicKey alg [" + publicKeyAlg + "] does match actual alg."); } // We verify issuer when retrieving the PublicKey, so it is not verified here. 
// The claim presence requirements are based on https://openid.net/specs/openid-connect-basic-1_0.html#IDToken Verification verifierBuilder = JWT.require(alg) .acceptLeeway(acceptedTimeLeewaySeconds) .withAnyOfAudience(allowedAudiences) .withClaimPresence(RegisteredClaims.ISSUED_AT) .withClaimPresence(RegisteredClaims.EXPIRES_AT) .withClaimPresence(RegisteredClaims.NOT_BEFORE) .withClaimPresence(RegisteredClaims.SUBJECT); if (isRoleClaimNotSubject) { verifierBuilder = verifierBuilder.withClaimPresence(roleClaim); } JWTVerifier verifier = verifierBuilder.build(); try { return verifier.verify(jwt); } catch (TokenExpiredException e) { incrementFailureMetric(AuthenticationExceptionCode.EXPIRED_JWT); throw new AuthenticationException("JWT expired: " + e.getMessage()); } catch (SignatureVerificationException e) { incrementFailureMetric(AuthenticationExceptionCode.ERROR_VERIFYING_JWT_SIGNATURE); throw new AuthenticationException("JWT signature verification exception: " + e.getMessage()); } catch (InvalidClaimException e) { incrementFailureMetric(AuthenticationExceptionCode.INVALID_JWT_CLAIM); throw new AuthenticationException("JWT contains invalid claim: " + e.getMessage()); } catch (AlgorithmMismatchException e) { incrementFailureMetric(AuthenticationExceptionCode.ALGORITHM_MISMATCH); throw new AuthenticationException("JWT algorithm does not match Public Key algorithm: " + e.getMessage()); } catch (JWTDecodeException e) { incrementFailureMetric(AuthenticationExceptionCode.ERROR_DECODING_JWT); throw new AuthenticationException("Error while decoding JWT: " + e.getMessage()); } catch (JWTVerificationException | IllegalArgumentException e) { incrementFailureMetric(AuthenticationExceptionCode.ERROR_VERIFYING_JWT); throw new AuthenticationException("JWT verification failed: " + e.getMessage()); } }
// A token signed with RS256 must fail verification when the caller declares RS512 —
// same RSA family, different digest — raising AuthenticationException.
@Test public void testThatSupportedAlgWithMismatchedPublicKeyFromSameAlgFamilyFails() throws IOException { KeyPair keyPair = Keys.keyPairFor(SignatureAlgorithm.RS256); @Cleanup AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID(); DefaultJwtBuilder defaultJwtBuilder = new DefaultJwtBuilder(); addValidMandatoryClaims(defaultJwtBuilder, basicProviderAudience); defaultJwtBuilder.signWith(keyPair.getPrivate()); DecodedJWT jwt = JWT.decode(defaultJwtBuilder.compact()); // Choose a different algorithm but within the same alg family as above Assert.assertThrows(AuthenticationException.class, () -> provider.verifyJWT(keyPair.getPublic(), SignatureAlgorithm.RS512.getValue(), jwt)); }
// Convenience overload: updates permission checkboxes without the "appropriate"
// flag (delegates with false).
public void updateCheckboxes( EnumSet<RepositoryFilePermission> permissionEnumSet ) { updateCheckboxes( false, permissionEnumSet ); }
// With DELETE+WRITE+READ granted and the flag true: read/write/delete are checked,
// manage is not; read/write become disabled while delete/manage stay enabled.
@Test public void testUpdateCheckboxesDeletePermissionsAppropriateTrue() { permissionsCheckboxHandler.updateCheckboxes( true, EnumSet.of( RepositoryFilePermission.DELETE, RepositoryFilePermission.WRITE, RepositoryFilePermission.READ ) ); verify( readCheckbox, times( 1 ) ).setChecked( true ); verify( writeCheckbox, times( 1 ) ).setChecked( true ); verify( deleteCheckbox, times( 1 ) ).setChecked( true ); verify( manageCheckbox, times( 1 ) ).setChecked( false ); verify( readCheckbox, times( 1 ) ).setDisabled( true ); verify( writeCheckbox, times( 1 ) ).setDisabled( true ); verify( deleteCheckbox, times( 1 ) ).setDisabled( false ); verify( manageCheckbox, times( 1 ) ).setDisabled( false ); }
@Override public boolean putFilter(File file) { if (!filterRegistry.isMutable()) { return false; } try { String sName = file.getAbsolutePath(); if (filterClassLastModified.get(sName) != null && (file.lastModified() != filterClassLastModified.get(sName))) { LOG.debug("reloading filter {}", sName); filterRegistry.remove(sName); } ZuulFilter<?, ?> filter = filterRegistry.get(sName); if (filter == null) { Class<?> clazz = compiler.compile(file); if (!Modifier.isAbstract(clazz.getModifiers())) { filter = filterFactory.newInstance(clazz); putFilter(sName, filter, file.lastModified()); return true; } } } catch (Exception e) { LOG.error("Error loading filter! Continuing. file={}", file, e); return false; } return false; }
@Test void testGetFilterFromFile() throws Exception { assertTrue(loader.putFilter(file)); Collection<ZuulFilter<?, ?>> filters = registry.getAllFilters(); assertEquals(1, filters.size()); }
@Bean public ShenyuPlugin requestPlugin() { return new RequestPlugin(); }
@Test public void testRequestPlugin() { new ApplicationContextRunner() .withConfiguration(AutoConfigurations.of(RequestPluginConfiguration.class)) .withBean(RequestPluginConfigurationTest.class) .withPropertyValues("debug=true") .run(context -> { assertThat(context).hasSingleBean(RequestPlugin.class); ShenyuPlugin plugin = context.getBean("requestPlugin", ShenyuPlugin.class); assertThat(plugin instanceof RequestPlugin).isEqualTo(true); assertThat(plugin.named()).isEqualTo(PluginEnum.REQUEST.getName()); assertNotNull(plugin); }); }
public static void initSSL(Properties consumerProps) { // Check if one-way SSL is enabled. In this scenario, the client validates the server certificate. String trustStoreLocation = consumerProps.getProperty(SSL_TRUSTSTORE_LOCATION); String trustStorePassword = consumerProps.getProperty(SSL_TRUSTSTORE_PASSWORD); String serverCertificate = consumerProps.getProperty(STREAM_KAFKA_SSL_SERVER_CERTIFICATE); if (StringUtils.isAnyEmpty(trustStoreLocation, trustStorePassword, serverCertificate)) { LOGGER.info("Skipping auto SSL server validation since it's not configured."); return; } if (shouldRenewTrustStore(consumerProps)) { initTrustStore(consumerProps); } // Set the security protocol String securityProtocol = consumerProps.getProperty(SECURITY_PROTOCOL, DEFAULT_SECURITY_PROTOCOL); consumerProps.setProperty(SECURITY_PROTOCOL, securityProtocol); // Check if two-way SSL is enabled. In this scenario, the client validates the server's certificate and the server // validates the client's certificate. String keyStoreLocation = consumerProps.getProperty(SSL_KEYSTORE_LOCATION); String keyStorePassword = consumerProps.getProperty(SSL_KEYSTORE_PASSWORD); String keyPassword = consumerProps.getProperty(SSL_KEY_PASSWORD); String clientCertificate = consumerProps.getProperty(STREAM_KAFKA_SSL_CLIENT_CERTIFICATE); if (StringUtils.isAnyEmpty(keyStoreLocation, keyStorePassword, keyPassword, clientCertificate)) { LOGGER.info("Skipping auto SSL client validation since it's not configured."); return; } if (shouldRenewKeyStore(consumerProps)) { initKeyStore(consumerProps); } }
@Test public void testInitSSLAndRenewCertificates() throws CertificateException, NoSuchAlgorithmException, OperatorCreationException, NoSuchProviderException, IOException, KeyStoreException { Properties consumerProps = new Properties(); setTrustStoreProps(consumerProps); setKeyStoreProps(consumerProps); KafkaSSLUtils.initSSL(consumerProps); // renew the truststore and keystore setTrustStoreProps(consumerProps); setKeyStoreProps(consumerProps); KafkaSSLUtils.initSSL(consumerProps); // validate validateTrustStoreCertificateCount(1); validateKeyStoreCertificateCount(1); }
public void expand(String key, long value, RangeHandler rangeHandler, EdgeHandler edgeHandler) { if (value < lowerBound || value > upperBound) { // Value outside bounds -> expand to nothing. return; } int maxLevels = value > 0 ? maxPositiveLevels : maxNegativeLevels; int sign = value > 0 ? 1 : -1; // Append key to feature string builder StringBuilder builder = new StringBuilder(128); builder.append(key).append('='); long levelSize = arity; long edgeInterval = (value / arity) * arity; edgeHandler.handleEdge(createEdgeFeatureHash(builder, edgeInterval), (int) Math.abs(value - edgeInterval)); for (int i = 0; i < maxLevels; ++i) { long start = (value / levelSize) * levelSize; if (Math.abs(start) + levelSize - 1 < 0) { // overflow break; } rangeHandler.handleRange(createRangeFeatureHash(builder, start, start + sign * (levelSize - 1))); levelSize *= arity; if (levelSize <= 0 && levelSize != Long.MIN_VALUE) { //overflow break; } } }
@Test void requireThatSearchCloseToUnevenUpperBoundIsSensible() { PredicateRangeTermExpander expander = new PredicateRangeTermExpander(10, -99, 1234); Iterator<String> expectedLabels = List.of( "key=40-49", "key=0-99", "key=0-999", "key=0-9999").iterator(); expander.expand("key", 42, range -> assertEquals(PredicateHash.hash64(expectedLabels.next()), range), (edge, value) -> { assertEquals(PredicateHash.hash64("key=40"), edge); assertEquals(2, value); }); assertFalse(expectedLabels.hasNext()); }
public ExtPlugin getExtPlugin() { return extPlugin; }
@Test public void testExtPlugin() { ShenyuConfig.ExtPlugin extPlugin = config.getExtPlugin(); extPlugin.setThreads(5); extPlugin.setPath("test"); extPlugin.setEnabled(true); extPlugin.setScheduleDelay(5); String path = extPlugin.getPath(); Integer threads = extPlugin.getThreads(); Boolean enabled = extPlugin.getEnabled(); Integer scheduleDelay = extPlugin.getScheduleDelay(); Integer scheduleTime = extPlugin.getScheduleTime(); notEmptyElements(enabled, path, scheduleTime, scheduleDelay, threads); }
public static List<String> splitPlainTextParagraphs( List<String> lines, int maxTokensPerParagraph) { return internalSplitTextParagraphs( lines, maxTokensPerParagraph, (text) -> internalSplitLines( text, maxTokensPerParagraph, false, s_plaintextSplitOptions)); }
@Test public void canSplitTextParagraphsOnCommas() { List<String> input = Arrays.asList( "This is a test of the emergency broadcast system, This is only a test", "We repeat, this is only a test, A unit test", "A small note, And another, And once again, Seriously, this is the end," + " We're finished, All set, Bye.", "Done."); List<String> expected = Arrays.asList( "This is a test of the emergency broadcast system,", "This is only a test", "We repeat, this is only a test, A unit test", "A small note, And another, And once again, Seriously,", "this is the end, We're finished, All set, Bye." + "\n" + "Done."); List<String> result = TextChunker.splitPlainTextParagraphs(input, 15); Assertions.assertEquals(expected, result); }
public static Builder builder() { return new Builder(); }
@TestTemplate public void rowDeltaWithDuplicates() { assertThat(listManifestFiles()).isEmpty(); table .newRowDelta() .addRows(FILE_A) .addRows(DataFiles.builder(SPEC).copy(FILE_A).build()) .addRows(FILE_A) .commit(); assertThat(table.currentSnapshot().summary()) .hasSize(11) .containsEntry(SnapshotSummary.ADDED_FILES_PROP, "1") .containsEntry(SnapshotSummary.ADDED_FILE_SIZE_PROP, "10") .containsEntry(SnapshotSummary.ADDED_RECORDS_PROP, "1") .containsEntry(SnapshotSummary.CHANGED_PARTITION_COUNT_PROP, "1") .containsEntry(SnapshotSummary.TOTAL_DATA_FILES_PROP, "1") .containsEntry(SnapshotSummary.TOTAL_DELETE_FILES_PROP, "0") .containsEntry(SnapshotSummary.TOTAL_EQ_DELETES_PROP, "0") .containsEntry(SnapshotSummary.TOTAL_POS_DELETES_PROP, "0") .containsEntry(SnapshotSummary.TOTAL_FILE_SIZE_PROP, "10") .containsEntry(SnapshotSummary.TOTAL_RECORDS_PROP, "1"); }
@Override public boolean equals(Object that) { if (that instanceof IdResponse) { IdResponse<?> thatIdResponse = (IdResponse<?>) that; return (this._key == null)? thatIdResponse._key == null : this._key.equals(thatIdResponse._key); } else { return false; } }
@Test public void testEquals() { IdResponse<Long> longResponse1 = new IdResponse<>(1L); IdResponse<Long> longResponse2 = new IdResponse<>(1L); IdResponse<Long> nullLongResponse = new IdResponse<>(null); IdResponse<String> stringResponse = new IdResponse<>("hello"); IdResponse<String> nullStringResponse = new IdResponse<>(null); // equals and non-null. Assert.assertTrue(longResponse1.equals(longResponse2)); // equals and null Assert.assertTrue(nullLongResponse.equals(nullStringResponse)); Assert.assertTrue(nullStringResponse.equals(nullLongResponse)); // unequal and non-null Assert.assertFalse(longResponse1.equals(stringResponse)); // unequal and one null Assert.assertFalse(longResponse1.equals(nullLongResponse)); Assert.assertFalse(nullLongResponse.equals(longResponse1)); }
public double frequencyBasedSample(SplittableRandom rng, long totalObservations) { if ((totalObservations != this.totalObservations) || (cdf == null)) { regenerateCDF(totalObservations); } int lookup = Util.sampleFromCDF(cdf,rng); return values[lookup]; }
@Test public void samplingTest() { SplittableRandom rng = new SplittableRandom(1); CategoricalInfo c; c = generateEmptyInfo(); c.frequencyBasedSample(rng, 50); assertEquals(1,c.values.length); assertEquals(0,c.values[0],DELTA); assertEquals(1,c.cdf.length); assertEquals(1.0,c.cdf[0],DELTA); for (int i = 0; i < 50; i++) { assertEquals(0.0,c.frequencyBasedSample(rng, 50),DELTA); } c = generateOneValueInfo(); c.frequencyBasedSample(rng, 50); assertEquals(2,c.values.length); assertEquals(0,c.values[0],DELTA); assertEquals(5,c.values[1],DELTA); assertEquals(2,c.values.length); assertEquals(0.5,c.cdf[0],DELTA); assertEquals(1.0,c.cdf[1],DELTA); double sum = 0; double posCount = 0.0; for (int i = 0; i < NUM_SAMPLES; i++) { double sample = c.frequencyBasedSample(rng, 50); if (sample > DELTA) { posCount++; } sum += sample; } assertEquals(0.5,posCount/NUM_SAMPLES,1e-1); assertEquals(2.5,sum/NUM_SAMPLES,1e-1); c = generateOneValueInfo(); c.frequencyBasedSample(rng,1000); assertEquals(2,c.values.length); assertEquals(0,c.values[0],DELTA); assertEquals(5,c.values[1],DELTA); assertEquals(2,c.values.length); assertEquals(0.975,c.cdf[0],DELTA); assertEquals(1.0,c.cdf[1],DELTA); c = generateFullInfo(); c.frequencyBasedSample(rng,50); assertEquals(5,c.values.length); assertEquals(5,c.cdf.length); checkValueAndProb(c,0.0,0.6); checkValueAndProb(c,-1.0,0.1); checkValueAndProb(c,2.0,0.1); checkValueAndProb(c,3.0,0.1); checkValueAndProb(c,4.0,0.1); c = generateFullInfo(); c.frequencyBasedSample(rng, 100); assertEquals(5,c.values.length); assertEquals(5,c.cdf.length); checkValueAndProb(c,0.0,0.8); checkValueAndProb(c,-1.0,0.05); checkValueAndProb(c,2.0,0.05); checkValueAndProb(c,3.0,0.05); checkValueAndProb(c,4.0,0.05); }
public int format(String... args) throws UsageException { CommandLineOptions parameters = processArgs(args); if (parameters.version()) { errWriter.println(versionString()); return 0; } if (parameters.help()) { throw new UsageException(); } JavaFormatterOptions options = JavaFormatterOptions.builder() .style(parameters.aosp() ? Style.AOSP : Style.GOOGLE) .formatJavadoc(parameters.formatJavadoc()) .build(); if (parameters.stdin()) { return formatStdin(parameters, options); } else { return formatFiles(parameters, options); } }
@Test public void keepGoingWhenFilesDontExist() throws Exception { Path a = testFolder.newFile("A.java").toPath(); Path b = testFolder.newFile("B.java").toPath(); File cFile = testFolder.newFile("C.java"); Path c = cFile.toPath(); cFile.delete(); Files.write(a, "class A{}\n".getBytes(UTF_8)); Files.write(b, "class B{}\n".getBytes(UTF_8)); StringWriter out = new StringWriter(); StringWriter err = new StringWriter(); Main main = new Main(new PrintWriter(out, true), new PrintWriter(err, true), System.in); int exitCode = main.format( "", a.toAbsolutePath().toString(), c.toAbsolutePath().toString(), b.toAbsolutePath().toString()); // Formatter returns failure if a file was not present. assertThat(exitCode).isEqualTo(1); // Present files were correctly formatted. assertThat(out.toString()).isEqualTo("class A {}\nclass B {}\n"); // File not found still showed error. assertThat(err.toString()).isNotEmpty(); }
static TimelineFilterList parseMetricFilters(String expr) throws TimelineParseException { return parseFilters(new TimelineParserForNumericFilters(expr)); }
@Test void testMetricFiltersParsing() throws Exception { String expr = "(((key11 ne 234 AND key12 gt 23) AND " + "(key13 lt 34 OR key14 ge 567)) OR (key21 lt 24 OR key22 le 45))"; TimelineFilterList expectedList = new TimelineFilterList( Operator.OR, new TimelineFilterList( Operator.AND, new TimelineFilterList( Operator.AND, new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "key11", 234, false), new TimelineCompareFilter(TimelineCompareOp.GREATER_THAN, "key12", 23, true) ), new TimelineFilterList( Operator.OR, new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "key13", 34, true), new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "key14", 567, true) ) ), new TimelineFilterList( Operator.OR, new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "key21", 24, true), new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "key22", 45, true) ) ); verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); expr = "abc ene 234"; expectedList = new TimelineFilterList( new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "abc", 234, true) ); verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); expr = "abc ne 234"; expectedList = new TimelineFilterList( new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "abc", 234, false) ); verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); expr = "abc ne 234 AND def gt 23"; expectedList = new TimelineFilterList( new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "abc", 234, false), new TimelineCompareFilter(TimelineCompareOp.GREATER_THAN, "def", 23, true) ); verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); expr = "(abc ne 234 AND def gt 23)"; expectedList = new TimelineFilterList( new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "abc", 234, false), new TimelineCompareFilter(TimelineCompareOp.GREATER_THAN, "def", 23, true) ); 
verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); expr = "abc ne 234 AND def gt 23 OR rst lt 24"; expectedList = new TimelineFilterList( Operator.OR, new TimelineFilterList( new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "abc", 234, false), new TimelineCompareFilter(TimelineCompareOp.GREATER_THAN, "def", 23, true) ), new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "rst", 24, true) ); verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); expr = "abc ne 234 AND def gt 23 OR rst lt 24 OR xyz le 456"; expectedList = new TimelineFilterList( Operator.OR, new TimelineFilterList( new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "abc", 234, false), new TimelineCompareFilter(TimelineCompareOp.GREATER_THAN, "def", 23, true) ), new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "rst", 24, true), new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "xyz", 456, true) ); verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); expr = "abc ne 234 AND def gt 23 OR rst lt 24 OR xyz le 456 AND pqr ge 2"; expectedList = new TimelineFilterList( new TimelineFilterList( Operator.OR, new TimelineFilterList( new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "abc", 234, false), new TimelineCompareFilter(TimelineCompareOp.GREATER_THAN, "def", 23, true) ), new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "rst", 24, true), new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "xyz", 456, true) ), new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "pqr", 2, true) ); verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); // Test with unnecessary spaces. 
expr = " abc ne 234 AND def gt 23 OR rst lt " + " 24 OR xyz le 456 AND pqr ge 2 "; expectedList = new TimelineFilterList( new TimelineFilterList( Operator.OR, new TimelineFilterList( new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "abc", 234, false), new TimelineCompareFilter(TimelineCompareOp.GREATER_THAN, "def", 23, true) ), new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "rst", 24, true), new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "xyz", 456, true) ), new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "pqr", 2, true) ); verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); expr = "(((key11 ne 234 AND key12 gt 23 OR key13 lt 24 OR key14 le 456 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "OR key22 le 45))"; expectedList = new TimelineFilterList( Operator.OR, new TimelineFilterList( Operator.AND, new TimelineFilterList( new TimelineFilterList( Operator.OR, new TimelineFilterList( new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "key11", 234, false), new TimelineCompareFilter( TimelineCompareOp.GREATER_THAN, "key12", 23, true) ), new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "key13", 24, true), new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "key14", 456, true) ), new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "key15", 2, true) ), new TimelineFilterList( Operator.OR, new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "key16", 34, true), new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "key17", 567, true) ) ), new TimelineFilterList( Operator.OR, new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "key21", 24, true), new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "key22", 45, true) ) ); verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); expr = " ( ( ( key11 ne 234 AND key12 gt " + "23 OR key13 lt 24 OR key14 le 456 AND key15 ge 2" + " ) AND 
( key16 lt 34 OR key17 ge 567 ) ) OR " + "( key21 lt 24 OR key22 le 45 ) ) "; expectedList = new TimelineFilterList( Operator.OR, new TimelineFilterList( Operator.AND, new TimelineFilterList( new TimelineFilterList( Operator.OR, new TimelineFilterList( new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, "key11", 234, false), new TimelineCompareFilter( TimelineCompareOp.GREATER_THAN, "key12", 23, true) ), new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "key13", 24, true), new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "key14", 456, true) ), new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "key15", 2, true) ), new TimelineFilterList( Operator.OR, new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "key16", 34, true), new TimelineCompareFilter(TimelineCompareOp.GREATER_OR_EQUAL, "key17", 567, true) ) ), new TimelineFilterList( Operator.OR, new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, "key21", 24, true), new TimelineCompareFilter(TimelineCompareOp.LESS_OR_EQUAL, "key22", 45, true) ) ); verifyFilterList(expr, TimelineReaderWebServicesUtils.parseMetricFilters(expr), expectedList); expr = "(((key11 ne 234 AND key12 gt 23 OR key13 lt 24 OR key14 le 456 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "OR key22 le 45)"; try { TimelineReaderWebServicesUtils.parseMetricFilters(expr); fail("Improper brackers. Exception should have been thrown."); } catch (TimelineParseException e) { } expr = "(((key11 ne 234 AND key12 gt v3 OR key13 lt 24 OR key14 le 456 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "OR key22 le 45))"; try { TimelineReaderWebServicesUtils.parseMetricFilters(expr); fail("Non Numeric value. 
Exception should have been thrown."); } catch (TimelineParseException e) { } expr = "(((key11 ne (234 AND key12 gt 3 OR key13 lt 24 OR key14 le 456 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "OR key22 le 45))"; try { TimelineReaderWebServicesUtils.parseMetricFilters(expr); fail("Unexpected opening bracket. Exception should have been thrown."); } catch (TimelineParseException e) { } expr = "(((k)ey11 ne 234 AND key12 gt 3 OR key13 lt 24 OR key14 le 456 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "OR key22 le 45))"; try { TimelineReaderWebServicesUtils.parseMetricFilters(expr); fail("Unexpected closing bracket. Exception should have been thrown."); } catch (TimelineParseException e) { } expr = "(((key11 rs 234 AND key12 gt 3 OR key13 lt 24 OR key14 le 456 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "OR key22 le 45))"; try { TimelineReaderWebServicesUtils.parseMetricFilters(expr); fail("Improper compare op. Exception should have been thrown."); } catch (TimelineParseException e) { } expr = "(((key11 ne 234 PI key12 gt 3 OR key13 lt 24 OR key14 le 456 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "OR key22 le 45))"; try { TimelineReaderWebServicesUtils.parseMetricFilters(expr); fail("Improper op. Exception should have been thrown."); } catch (TimelineParseException e) { } expr = "(((key11 ne 234 PI key12 gt 3 OR key13 lt 24 OR key14 le 456 " + "AND key15 ge 2) AND (key16 lt 34 OR key17 ge 567)) OR (key21 lt 24 " + "OR key22 le 45))"; try { TimelineReaderWebServicesUtils.parseMetricFilters(expr); fail("Improper op. Exception should have been thrown."); } catch (TimelineParseException e) { } expr = "(key11 ne 234 AND key12 gt 3)) OR (key13 lt 24 OR key14 le 456)"; try { TimelineReaderWebServicesUtils.parseMetricFilters(expr); fail("Unbalanced brackets. 
Exception should have been thrown."); } catch (TimelineParseException e) { } expr = "(key11 rne 234 AND key12 gt 3) OR (key13 lt 24 OR key14 le 456)"; try { TimelineReaderWebServicesUtils.parseMetricFilters(expr); fail("Invalid compareop. Exception should have been thrown."); } catch (TimelineParseException e) { } expr = "(key11 ne 234 AND key12 gt 3) OR (key13 lt 24 OR key14 le"; try { TimelineReaderWebServicesUtils.parseMetricFilters(expr); fail("Compareop cant be parsed. Exception should have been thrown."); } catch (TimelineParseException e) { } assertNull(TimelineReaderWebServicesUtils.parseMetricFilters(null)); assertNull(TimelineReaderWebServicesUtils.parseMetricFilters(" ")); }
@Override public MetadataNode child(String name) { try { Integer brokerId = Integer.valueOf(name); BrokerRegistration registration = image.brokers().get(brokerId); if (registration == null) return null; return new MetadataLeafNode(registration.toString()); } catch (NumberFormatException e) { return null; } }
@Test public void testUnknownChild() { assertNull(NODE.child("2")); }
public static NetFlowV5Packet parsePacket(ByteBuf bb) { final int readableBytes = bb.readableBytes(); final NetFlowV5Header header = parseHeader(bb.slice(bb.readerIndex(), HEADER_LENGTH)); final int packetLength = HEADER_LENGTH + header.count() * RECORD_LENGTH; if (header.count() <= 0 || readableBytes < packetLength) { throw new CorruptFlowPacketException("Insufficient data (expected: " + packetLength + " bytes, actual: " + readableBytes + " bytes)"); } final ImmutableList.Builder<NetFlowV5Record> records = ImmutableList.builder(); int offset = HEADER_LENGTH; for (int i = 0; i < header.count(); i++) { records.add(parseRecord(bb.slice(offset + bb.readerIndex(), RECORD_LENGTH))); offset += RECORD_LENGTH; } return NetFlowV5Packet.create(header, records.build(), offset); }
@Test public void testParse1() throws IOException { final byte[] b = Resources.toByteArray(Resources.getResource("netflow-data/netflow-v5-1.dat")); NetFlowV5Packet packet = NetFlowV5Parser.parsePacket(Unpooled.wrappedBuffer(b)); assertNotNull(packet); NetFlowV5Header h = packet.header(); assertEquals(5, h.version()); assertEquals(2, h.count()); assertEquals(3381L, h.sysUptime()); assertEquals(1430591888L, h.unixSecs()); assertEquals(280328000, h.unixNsecs()); final List<NetFlowV5Record> records = packet.records(); assertEquals(2, records.size()); final NetFlowV5Record record1 = records.get(0); assertEquals(InetAddresses.forString("10.0.2.15"), record1.dstAddr()); assertEquals(6, record1.protocol()); assertEquals(0, record1.srcAs()); assertEquals(InetAddresses.forString("10.0.2.2"), record1.srcAddr()); assertEquals(2577L, record1.last()); assertEquals(22, record1.dstPort()); assertEquals(230L, record1.octetCount()); assertEquals(54435, record1.srcPort()); assertEquals(0, record1.srcMask()); assertEquals(0, record1.tos()); assertEquals(0, record1.inputIface()); assertEquals(InetAddresses.forString("0.0.0.0"), record1.nextHop()); assertEquals(16, record1.tcpFlags()); assertEquals(0, record1.dstAs()); assertEquals(0, record1.outputIface()); assertEquals(4294967295L, record1.first()); assertEquals(0, record1.dstMask()); assertEquals(5L, record1.packetCount()); final NetFlowV5Record record2 = records.get(1); assertEquals(InetAddresses.forString("10.0.2.2"), record2.dstAddr()); assertEquals(6, record2.protocol()); assertEquals(0, record2.srcAs()); assertEquals(InetAddresses.forString("10.0.2.15"), record2.srcAddr()); assertEquals(2577L, record2.last()); assertEquals(54435, record2.dstPort()); assertEquals(304L, record2.octetCount()); assertEquals(22, record2.srcPort()); assertEquals(0, record2.srcMask()); assertEquals(0, record2.tos()); assertEquals(0, record2.inputIface()); assertEquals(InetAddresses.forString("0.0.0.0"), record2.nextHop()); assertEquals(24, 
record2.tcpFlags()); assertEquals(0, record2.dstAs()); assertEquals(0, record2.outputIface()); assertEquals(4294967295L, record2.first()); assertEquals(0, record2.dstMask()); assertEquals(4L, record2.packetCount()); }
public void processConsumeResult( final ConsumeConcurrentlyStatus status, final ConsumeConcurrentlyContext context, final ConsumeRequest consumeRequest) { if (consumeRequest.getMsgs().isEmpty()) { return; } int ackIndex = context.getAckIndex(); String topic = consumeRequest.getMessageQueue().getTopic(); switch (status) { case CONSUME_SUCCESS: if (ackIndex >= consumeRequest.getMsgs().size()) { ackIndex = consumeRequest.getMsgs().size() - 1; } int ok = ackIndex + 1; int failed = consumeRequest.getMsgs().size() - ok; this.getConsumerStatsManager().incConsumeOKTPS(consumerGroup, topic, ok); this.getConsumerStatsManager().incConsumeFailedTPS(consumerGroup, topic, failed); break; case RECONSUME_LATER: ackIndex = -1; this.getConsumerStatsManager().incConsumeFailedTPS(consumerGroup, topic, consumeRequest.getMsgs().size()); break; default: break; } //ack if consume success for (int i = 0; i <= ackIndex; i++) { this.defaultMQPushConsumerImpl.ackAsync(consumeRequest.getMsgs().get(i), consumerGroup); consumeRequest.getPopProcessQueue().ack(); } //consume later if consume fail for (int i = ackIndex + 1; i < consumeRequest.getMsgs().size(); i++) { MessageExt msgExt = consumeRequest.getMsgs().get(i); consumeRequest.getPopProcessQueue().ack(); if (msgExt.getReconsumeTimes() >= this.defaultMQPushConsumerImpl.getMaxReconsumeTimes()) { checkNeedAckOrDelay(msgExt); continue; } int delayLevel = context.getDelayLevelWhenNextConsume(); changePopInvisibleTime(consumeRequest.getMsgs().get(i), consumerGroup, delayLevel); } }
@Test public void testProcessConsumeResult() { ConsumeConcurrentlyContext context = mock(ConsumeConcurrentlyContext.class); ConsumeMessagePopConcurrentlyService.ConsumeRequest consumeRequest = mock(ConsumeMessagePopConcurrentlyService.ConsumeRequest.class); when(consumeRequest.getMsgs()).thenReturn(Arrays.asList(createMessageExt(), createMessageExt())); MessageQueue messageQueue = mock(MessageQueue.class); when(messageQueue.getTopic()).thenReturn(defaultTopic); when(consumeRequest.getMessageQueue()).thenReturn(messageQueue); PopProcessQueue processQueue = mock(PopProcessQueue.class); when(processQueue.ack()).thenReturn(0); when(consumeRequest.getPopProcessQueue()).thenReturn(processQueue); when(defaultMQPushConsumerImpl.getPopDelayLevel()).thenReturn(new int[]{1, 10}); popService.processConsumeResult(ConsumeConcurrentlyStatus.CONSUME_SUCCESS, context, consumeRequest); verify(defaultMQPushConsumerImpl, times(1)).ackAsync(any(MessageExt.class), any()); }
@Override @Nonnull public <T> Future<T> submit(@Nonnull Callable<T> task) { throwRejectedExecutionExceptionIfShutdown(); try { T result = task.call(); return new CompletedFuture<>(result, null); } catch (Exception e) { return new CompletedFuture<>(null, e); } }
@Test void testSubmitRunnableWithResultAndNoopShutdown() { final CompletableFuture<Thread> future = new CompletableFuture<>(); testWithNoopShutdown( testInstance -> testInstance.submit(() -> future.complete(Thread.currentThread()), null)); assertThat(future).isCompletedWithValue(Thread.currentThread()); }
public static String version() { if (null == VERSION.get()) { String detectedVersion; try { detectedVersion = versionFromJar(); // use unknown version in case exact implementation version can't be found from the jar // (this can happen if the DataStream class appears multiple times in the same classpath // such as with shading) detectedVersion = detectedVersion != null ? detectedVersion : FLINK_UNKNOWN_VERSION; } catch (Exception e) { detectedVersion = FLINK_UNKNOWN_VERSION; } VERSION.set(detectedVersion); } return VERSION.get(); }
@Test public void testVersion() { assertThat(FlinkPackage.version()).isEqualTo("1.18.1"); }
public static Optional<ShardingConditionValue> generate(final ExpressionSegment predicate, final Column column, final List<Object> params, final TimestampServiceRule timestampServiceRule) { if (predicate instanceof BinaryOperationExpression) { return COMPARE_OPERATOR_GENERATOR.generate((BinaryOperationExpression) predicate, column, params, timestampServiceRule); } if (predicate instanceof InExpression) { return IN_OPERATOR_GENERATOR.generate((InExpression) predicate, column, params, timestampServiceRule); } if (predicate instanceof BetweenExpression) { return BETWEEN_OPERATOR_GENERATOR.generate((BetweenExpression) predicate, column, params, timestampServiceRule); } return Optional.empty(); }
@Test void assertGenerateBinaryOperationIsExpression() { ConditionValueCompareOperatorGenerator conditionValueCompareOperatorGenerator = new ConditionValueCompareOperatorGenerator(); BinaryOperationExpression rightValue = new BinaryOperationExpression(0, 0, mock(ColumnSegment.class), new LiteralExpressionSegment(0, 0, "null"), "IS", null); Optional<ShardingConditionValue> actual = conditionValueCompareOperatorGenerator.generate(rightValue, column, new LinkedList<>(), mock(TimestampServiceRule.class)); Optional<ShardingConditionValue> expected = ConditionValueGeneratorFactory.generate(rightValue, column, new LinkedList<>(), mock(TimestampServiceRule.class)); assertTrue(actual.isPresent() && expected.isPresent()); assertThat(actual.get().getTableName(), is(expected.get().getTableName())); assertThat(actual.get().getColumnName(), is(expected.get().getColumnName())); }
@Override public long longValue() { return value; }
@Test public void testLongValue() { assertEquals(100, MilliPct.ofMilliPct(100).longValue()); assertEquals(-100, MilliPct.ofMilliPct(-100).longValue()); }
public CompletableFuture<Map<TopicIdPartition, ShareAcknowledgeResponseData.PartitionData>> releaseAcquiredRecords( String groupId, String memberId ) { log.trace("Release acquired records request for groupId: {}, memberId: {}", groupId, memberId); List<TopicIdPartition> topicIdPartitions = cachedTopicIdPartitionsInShareSession( groupId, Uuid.fromString(memberId)); if (topicIdPartitions.isEmpty()) { return CompletableFuture.completedFuture(Collections.emptyMap()); } Map<TopicIdPartition, CompletableFuture<Errors>> futuresMap = new HashMap<>(); topicIdPartitions.forEach(topicIdPartition -> { SharePartition sharePartition = partitionCacheMap.get(sharePartitionKey(groupId, topicIdPartition)); if (sharePartition == null) { log.error("No share partition found for groupId {} topicPartition {} while releasing acquired topic partitions", groupId, topicIdPartition); futuresMap.put(topicIdPartition, CompletableFuture.completedFuture(Errors.UNKNOWN_TOPIC_OR_PARTITION)); } else { CompletableFuture<Errors> future = sharePartition.releaseAcquiredRecords(memberId).thenApply(throwable -> { if (throwable.isPresent()) { return Errors.forException(throwable.get()); } return Errors.NONE; }); futuresMap.put(topicIdPartition, future); } }); CompletableFuture<Void> allFutures = CompletableFuture.allOf( futuresMap.values().toArray(new CompletableFuture[futuresMap.size()])); return allFutures.thenApply(v -> { Map<TopicIdPartition, ShareAcknowledgeResponseData.PartitionData> result = new HashMap<>(); futuresMap.forEach((topicIdPartition, future) -> result.put(topicIdPartition, new ShareAcknowledgeResponseData.PartitionData() .setPartitionIndex(topicIdPartition.partition()) .setErrorCode(future.join().code()))); return result; }); }
@Test public void testReleaseAcquiredRecordsWithIncorrectMemberId() { String groupId = "grp"; Uuid memberId = Uuid.randomUuid(); TopicIdPartition tp1 = new TopicIdPartition(memberId, new TopicPartition("foo", 0)); ShareSessionCache cache = mock(ShareSessionCache.class); ShareSession shareSession = mock(ShareSession.class); // Member with random Uuid so that it does not match the memberId. when(cache.get(new ShareSessionKey(groupId, Uuid.randomUuid()))).thenReturn(shareSession); ImplicitLinkedHashCollection<CachedSharePartition> partitionMap = new ImplicitLinkedHashCollection<>(3); partitionMap.add(new CachedSharePartition(tp1)); when(shareSession.partitionMap()).thenReturn(partitionMap); SharePartitionManager sharePartitionManager = SharePartitionManagerBuilder.builder() .withCache(cache) .build(); CompletableFuture<Map<TopicIdPartition, ShareAcknowledgeResponseData.PartitionData>> resultFuture = sharePartitionManager.releaseAcquiredRecords(groupId, memberId.toString()); Map<TopicIdPartition, ShareAcknowledgeResponseData.PartitionData> result = resultFuture.join(); assertTrue(result.isEmpty()); }
@Override public boolean alterOffsets(Map<String, String> connectorConfig, Map<Map<String, ?>, Map<String, ?>> offsets) { AbstractConfig config = new AbstractConfig(CONFIG_DEF, connectorConfig); String filename = config.getString(FILE_CONFIG); if (filename == null || filename.isEmpty()) { throw new ConnectException("Offsets cannot be modified if the '" + FILE_CONFIG + "' configuration is unspecified. " + "This is because stdin is used for input and offsets are not tracked."); } // This connector makes use of a single source partition at a time which represents the file that it is configured to read from. // However, there could also be source partitions from previous configurations of the connector. for (Map.Entry<Map<String, ?>, Map<String, ?>> partitionOffset : offsets.entrySet()) { Map<String, ?> offset = partitionOffset.getValue(); if (offset == null) { // We allow tombstones for anything; if there's garbage in the offsets for the connector, we don't // want to prevent users from being able to clean it up using the REST API continue; } if (!offset.containsKey(POSITION_FIELD)) { throw new ConnectException("Offset objects should either be null or contain the key '" + POSITION_FIELD + "'"); } // The 'position' in the offset represents the position in the file's byte stream and should be a non-negative long value if (!(offset.get(POSITION_FIELD) instanceof Long)) { throw new ConnectException("The value for the '" + POSITION_FIELD + "' key in the offset is expected to be a Long value"); } long offsetPosition = (Long) offset.get(POSITION_FIELD); if (offsetPosition < 0) { throw new ConnectException("The value for the '" + POSITION_FIELD + "' key in the offset should be a non-negative value"); } Map<String, ?> partition = partitionOffset.getKey(); if (partition == null) { throw new ConnectException("Partition objects cannot be null"); } if (!partition.containsKey(FILENAME_FIELD)) { throw new ConnectException("Partition objects should contain the key '" + FILENAME_FIELD 
+ "'"); } } // Let the task check whether the actual value for the offset position is valid for the configured file on startup return true; }
@Test public void testAlterOffsetsTombstones() { Function<Map<String, ?>, Boolean> alterOffsets = partition -> connector.alterOffsets( sourceProperties, Collections.singletonMap(partition, null) ); assertTrue(alterOffsets.apply(null)); assertTrue(alterOffsets.apply(Collections.emptyMap())); assertTrue(alterOffsets.apply(Collections.singletonMap(FILENAME_FIELD, FILENAME))); assertTrue(alterOffsets.apply(Collections.singletonMap(FILENAME_FIELD, "/someotherfilename"))); assertTrue(alterOffsets.apply(Collections.singletonMap("garbage_partition_key", "garbage_partition_value"))); }
@Override public void eventAdded( KettleLoggingEvent event ) { Object messageObject = event.getMessage(); checkNotNull( messageObject, "Expected log message to be defined." ); if ( messageObject instanceof LogMessage ) { LogMessage message = (LogMessage) messageObject; LoggingObjectInterface loggingObject = logObjProvider.apply( message.getLogChannelId() ); if ( loggingObject == null || ( loggingObject.getObjectType() == GENERAL && "Y".equals( EnvUtil.getSystemProperty( Const.KETTLE_LOG_GENERAL_OBJECTS_TO_DI_LOGGER ) ) ) ) { // this can happen if logObject has been discarded while log events are still in flight. logToLogger( diLogger, message.getLevel(), message.getSubject() + " " + message.getMessage() ); } else if ( loggingObject.getObjectType() == TRANS || loggingObject.getObjectType() == STEP || loggingObject.getObjectType() == DATABASE ) { logToLogger( transLogger, message.getLevel(), loggingObject, message ); } else if ( loggingObject.getObjectType() == JOB || loggingObject.getObjectType() == JOBENTRY ) { logToLogger( jobLogger, message.getLevel(), loggingObject, message ); } } }
@Test public void testTransWithAndWithoutFilename() { when( logObjProvider.apply( logChannelId ) ).thenReturn( loggingObject ); when( loggingObject.getObjectType() ).thenReturn( LoggingObjectType.TRANS ); when( loggingObject.getObjectName() ).thenReturn( "TestTrans" ); when( loggingObject.getFilename() ).thenReturn( "filename" ); when( loggingObject.getRepositoryDirectory() ).thenReturn( repositoryDirectory ); when( repositoryDirectory.getPath() ).thenReturn( "/" ); when( message.getLevel() ).thenReturn( LogLevel.BASIC ); listener.eventAdded( logEvent ); verify( transLogger ).info( "[filename] " + msgText ); when( repositoryDirectory.getPath() ).thenReturn( testPath ); listener.eventAdded( logEvent ); verify( transLogger ).info( "[" + testPath + "/filename] " + msgText ); when( loggingObject.getFilename() ).thenReturn( null ); listener.eventAdded( logEvent ); verify( transLogger ).info( "[" + testPath + "/TestTrans" + RepositoryObjectType.TRANSFORMATION.getExtension() + "] " + msgText ); }
public static Instance deepCopy(Instance source) { Instance target = new Instance(); target.setInstanceId(source.getInstanceId()); target.setIp(source.getIp()); target.setPort(source.getPort()); target.setWeight(source.getWeight()); target.setHealthy(source.isHealthy()); target.setEnabled(source.isEnabled()); target.setEphemeral(source.isEphemeral()); target.setClusterName(source.getClusterName()); target.setServiceName(source.getServiceName()); target.setMetadata(new HashMap<>(source.getMetadata())); return target; }
@Test void testDeepCopy() { Instance source = new Instance(); source.setInstanceId("instanceId"); source.setIp("1.1.1.1"); source.setPort(8890); source.setWeight(1); source.setHealthy(true); source.setEnabled(true); source.setEphemeral(true); source.setClusterName("custerName"); source.setServiceName("serviceName"); Map<String, String> metaData = new HashMap<>(); metaData.put("k1", "v1"); metaData.put("k2", "v2"); source.setMetadata(new HashMap<>(metaData)); Instance instance = InstanceUtil.deepCopy(source); assertNotNull(instance); }
@Deprecated public static String getJwt(JwtClaims claims) throws JoseException { String jwt; RSAPrivateKey privateKey = (RSAPrivateKey) getPrivateKey( jwtConfig.getKey().getFilename(),jwtConfig.getKey().getPassword(), jwtConfig.getKey().getKeyName()); // A JWT is a JWS and/or a JWE with JSON claims as the payload. // In this example it is a JWS nested inside a JWE // So we first create a JsonWebSignature object. JsonWebSignature jws = new JsonWebSignature(); // The payload of the JWS is JSON content of the JWT Claims jws.setPayload(claims.toJson()); // The JWT is signed using the sender's private key jws.setKey(privateKey); // Get provider from security config file, it should be two digit // And the provider id will set as prefix for keyid in the token header, for example: 05100 // if there is no provider id, we use "00" for the default value String provider_id = ""; if (jwtConfig.getProviderId() != null) { provider_id = jwtConfig.getProviderId(); if (provider_id.length() == 1) { provider_id = "0" + provider_id; } else if (provider_id.length() > 2) { logger.error("provider_id defined in the security.yml file is invalid; the length should be 2"); provider_id = provider_id.substring(0, 2); } } jws.setKeyIdHeaderValue(provider_id + jwtConfig.getKey().getKid()); // Set the signature algorithm on the JWT/JWS that will integrity protect the claims jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256); // Sign the JWS and produce the compact serialization, which will be the inner JWT/JWS // representation, which is a string consisting of three dot ('.') separated // base64url-encoded parts in the form Header.Payload.Signature jwt = jws.getCompactSerialization(); return jwt; }
@Test public void normalPetStoreJwt() throws Exception { JwtClaims claims = ClaimsUtil.getTestClaims("steve", "EMPLOYEE", "f7d42348-c647-4efb-a52d-4c5787421e72", Arrays.asList("write:pets", "read:pets"), "user"); claims.setExpirationTimeMinutesInTheFuture(10); String jwt = JwtIssuer.getJwt(claims, long_kid, KeyUtil.deserializePrivateKey(long_key, KeyUtil.RSA)); System.out.println("***JWT***: " + jwt); }
void handleStart(Exchange exchange, MetricRegistry registry, String metricsName) { String propertyName = getPropertyName(metricsName); Timer.Context context = getTimerContextFromExchange(exchange, propertyName); if (context == null) { Timer timer = registry.timer(metricsName); context = timer.time(); exchange.setProperty(propertyName, context); } else { LOG.warn("Timer \"{}\" already running", metricsName); } }
@Test public void testHandleStartAlreadyRunning() { when(exchange.getProperty(PROPERTY_NAME, Timer.Context.class)).thenReturn(context); producer.handleStart(exchange, registry, METRICS_NAME); inOrder.verify(exchange, times(1)).getProperty(PROPERTY_NAME, Timer.Context.class); inOrder.verifyNoMoreInteractions(); }
public static Schema create(Type type) { switch (type) { case STRING: return new StringSchema(); case BYTES: return new BytesSchema(); case INT: return new IntSchema(); case LONG: return new LongSchema(); case FLOAT: return new FloatSchema(); case DOUBLE: return new DoubleSchema(); case BOOLEAN: return new BooleanSchema(); case NULL: return new NullSchema(); default: throw new AvroRuntimeException("Can't create a: " + type); } }
@Test void intAsLongDefaultValue() { Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.LONG), "doc", 1); assertTrue(field.hasDefaultValue()); assertEquals(1L, field.defaultVal()); assertEquals(1L, GenericData.get().getDefaultValue(field)); }
public CustomFieldMappings mergeWith(final CustomFieldMapping changedMapping) { final Set<CustomFieldMapping> modifiedMappings = new HashSet<>(this); modifiedMappings.removeIf(m -> changedMapping.fieldName().equals(m.fieldName())); modifiedMappings.add(changedMapping); return new CustomFieldMappings(modifiedMappings); }
@Test void testReturnsOriginalMappingsIfMergedWithNullMappings() { CustomFieldMappings customFieldMappings = new CustomFieldMappings(List.of()); assertSame(customFieldMappings, customFieldMappings.mergeWith((CustomFieldMappings) null)); }
public Data getOldValueData() { if (oldValueData == null && serializationService != null) { oldValueData = serializationService.toData(oldValue); } return oldValueData; }
@Test public void testGetOldValueData_withObjectValue() { assertEquals(toData("oldValue"), objectEvent.getOldValueData()); }
public static <T> Supplier<T> recover(Supplier<T> supplier, Predicate<T> resultPredicate, UnaryOperator<T> resultHandler) { return () -> { T result = supplier.get(); if(resultPredicate.test(result)){ return resultHandler.apply(result); } return result; }; }
@Test public void shouldRecoverSupplierFromSpecificExceptions() { Supplier<String> supplier = () -> { throw new IllegalArgumentException("BAM!"); }; Supplier<String> supplierWithRecovery = SupplierUtils.recover(supplier, asList(IllegalArgumentException.class, IOException.class), (ex) -> "Bla"); String result = supplierWithRecovery.get(); assertThat(result).isEqualTo("Bla"); }
public BingTile findParent() { return findParent(zoomLevel - 1); }
@Test public void testFindParent() { assertEquals(BingTile.fromQuadKey("0123").findParent().toQuadKey(), "012"); assertEquals(BingTile.fromQuadKey("1").findParent().toQuadKey(), ""); assertEquals(BingTile.fromQuadKey("0123").findParent(1).toQuadKey(), "0"); assertEquals(BingTile.fromQuadKey("0123").findParent(4).toQuadKey(), "0123"); assertThatThrownBy(() -> BingTile.fromQuadKey("0123").findParent(5)) .hasMessage(format("newZoom must be less than or equal to current zoom %s: %s", 4, 5)); assertThatThrownBy(() -> BingTile.fromQuadKey("").findParent()) .hasMessage(format("newZoom must be greater than or equal to 0: %s", -1)); assertThatThrownBy(() -> BingTile.fromQuadKey("12").findParent(-1)) .hasMessage(format("newZoom must be greater than or equal to 0: %s", -1)); }
String currentZone() { String urlString = String.format("%s/computeMetadata/v1/instance/zone", endpoint); String zoneResponse = callGet(urlString); return lastPartOf(zoneResponse, "/"); }
@Test public void currentZone() { // given stubFor(get(urlEqualTo("/computeMetadata/v1/instance/zone")) .withHeader("Metadata-Flavor", equalTo("Google")) .willReturn(aResponse().withStatus(HttpURLConnection.HTTP_OK).withBody(zoneResponse(ZONE)))); // when String result = gcpMetadataApi.currentZone(); // then assertEquals(ZONE, result); }
public BigDecimal calculateProductGramsForRequiredFiller(Filler filler, BigDecimal fillerGrams) { if (filler == null || fillerGrams == null || fillerGrams.doubleValue() <= 0) { return BigDecimal.valueOf(0); } if (filler.equals(Filler.PROTEIN)) { return calculateProductGramsForRequiredProteins(fillerGrams).setScale(0, RoundingMode.HALF_DOWN); } else if (filler.equals(Filler.CARBOHYDRATE)) { return calculateProductGramsForRequiredCarbohydrates(fillerGrams).setScale(0, RoundingMode.HALF_DOWN); } else if (filler.equals(Filler.FAT)) { return calculateProductGramsForRequiredFats(fillerGrams).setScale(0, RoundingMode.HALF_DOWN); } return BigDecimal.valueOf(0); }
@Test void calculateProductGramsForRequiredFiller_nullFirstParam() { BigDecimal result = product.calculateProductGramsForRequiredFiller(null, BigDecimal.valueOf(123)); assertEquals(BigDecimal.valueOf(0), result); }
@Override public Authentication validateRequest(ServletRequest request, ServletResponse response, boolean mandatory) throws ServerAuthException { JWT_LOGGER.trace("Authentication request received for " + request.toString()); if (!(request instanceof HttpServletRequest) && !(response instanceof HttpServletResponse)) { return Authentication.UNAUTHENTICATED; } String serializedJWT; HttpServletRequest req = (HttpServletRequest) request; // we'll skip the authentication for CORS preflight requests if (HttpMethod.OPTIONS.name().equalsIgnoreCase(req.getMethod())) { return Authentication.NOT_CHECKED; } serializedJWT = getJwtFromBearerAuthorization(req); if (serializedJWT == null) { serializedJWT = getJwtFromCookie(req); } if (serializedJWT == null) { String loginURL = _authenticationProviderUrlGenerator.apply(req); JWT_LOGGER.info("No JWT token found, sending redirect to " + loginURL); try { ((HttpServletResponse) response).sendRedirect(loginURL); return Authentication.SEND_CONTINUE; } catch (IOException e) { JWT_LOGGER.error("Couldn't authenticate request", e); throw new ServerAuthException(e); } } else { try { SignedJWT jwtToken = SignedJWT.parse(serializedJWT); String userName = jwtToken.getJWTClaimsSet().getSubject(); request.setAttribute(JWT_TOKEN_REQUEST_ATTRIBUTE, serializedJWT); UserIdentity identity = login(userName, jwtToken, request); if (identity == null) { ((HttpServletResponse) response).setStatus(HttpStatus.UNAUTHORIZED_401); return Authentication.SEND_FAILURE; } else { return new UserAuthentication(getAuthMethod(), identity); } } catch (ParseException pe) { String loginURL = _authenticationProviderUrlGenerator.apply(req); JWT_LOGGER.warn("Unable to parse the JWT token, redirecting back to the login page", pe); try { ((HttpServletResponse) response).sendRedirect(loginURL); } catch (IOException e) { throw new ServerAuthException(e); } } } return Authentication.SEND_FAILURE; }
@Test public void testFailedLoginWithUserNotFound() throws Exception { UserStore testUserStore = new UserStore(); testUserStore.addUser(TEST_USER_2, SecurityUtils.NO_CREDENTIAL, new String[] {USER_ROLE}); TokenGenerator.TokenAndKeys tokenAndKeys = TokenGenerator.generateToken(TEST_USER); JwtLoginService loginService = new JwtLoginService(new UserStoreAuthorizationService(testUserStore), tokenAndKeys.publicKey(), null); Authenticator.AuthConfiguration configuration = mock(Authenticator.AuthConfiguration.class); expect(configuration.getLoginService()).andReturn(loginService); expect(configuration.getIdentityService()).andReturn(new DefaultIdentityService()); expect(configuration.isSessionRenewedOnAuthentication()).andReturn(true); Request request = niceMock(Request.class); expect(request.getMethod()).andReturn(HttpMethod.GET.asString()); expect(request.getHeader(HttpHeader.AUTHORIZATION.asString())).andReturn(null); request.setAttribute(JwtAuthenticator.JWT_TOKEN_REQUEST_ATTRIBUTE, tokenAndKeys.token()); expectLastCall().andVoid(); expect(request.getCookies()).andReturn(new Cookie[] {new Cookie(JWT_TOKEN, tokenAndKeys.token())}); expect(request.getAttribute(JwtAuthenticator.JWT_TOKEN_REQUEST_ATTRIBUTE)).andReturn(tokenAndKeys.token()); HttpServletResponse response = mock(HttpServletResponse.class); response.setStatus(HttpStatus.UNAUTHORIZED_401); expectLastCall().andVoid(); replay(configuration, request, response); JwtAuthenticator authenticator = new JwtAuthenticator(TOKEN_PROVIDER, JWT_TOKEN); authenticator.setConfiguration(configuration); Authentication authentication = authenticator.validateRequest(request, response, true); verify(configuration, request, response); assertNotNull(authentication); assertEquals(Authentication.SEND_FAILURE, authentication); }
@Override @DSTransactional // 多数据源,使用 @DSTransactional 保证本地事务,以及数据源的切换 public void updateTenant(TenantSaveReqVO updateReqVO) { // 校验存在 TenantDO tenant = validateUpdateTenant(updateReqVO.getId()); // 校验租户名称是否重复 validTenantNameDuplicate(updateReqVO.getName(), updateReqVO.getId()); // 校验租户域名是否重复 validTenantWebsiteDuplicate(updateReqVO.getWebsite(), updateReqVO.getId()); // 校验套餐被禁用 TenantPackageDO tenantPackage = tenantPackageService.validTenantPackage(updateReqVO.getPackageId()); // 更新租户 TenantDO updateObj = BeanUtils.toBean(updateReqVO, TenantDO.class); tenantMapper.updateById(updateObj); // 如果套餐发生变化,则修改其角色的权限 if (ObjectUtil.notEqual(tenant.getPackageId(), updateReqVO.getPackageId())) { updateTenantRoleMenu(tenant.getId(), tenantPackage.getMenuIds()); } }
@Test public void testUpdateTenant_notExists() { // 准备参数 TenantSaveReqVO reqVO = randomPojo(TenantSaveReqVO.class); // 调用, 并断言异常 assertServiceException(() -> tenantService.updateTenant(reqVO), TENANT_NOT_EXISTS); }
@Override public void unsetStoragePolicy(Path src) throws IOException { super.unsetStoragePolicy(fullPath(src)); }
@Test(timeout = 30000) public void testUnsetStoragePolicy() throws Exception { Path storagePolicyPath = new Path("/storagePolicy"); Path chRootedStoragePolicyPath = new Path("/a/b/storagePolicy"); Configuration conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); URI chrootUri = URI.create("mockfs://foo/a/b"); ChRootedFileSystem chrootFs = new ChRootedFileSystem(chrootUri, conf); FileSystem mockFs = ((FilterFileSystem) chrootFs.getRawFileSystem()) .getRawFileSystem(); chrootFs.unsetStoragePolicy(storagePolicyPath); verify(mockFs).unsetStoragePolicy(chRootedStoragePolicyPath); }
public void storeNewShortIds( final ReportWorkItemStatusRequest request, final ReportWorkItemStatusResponse reply) { checkArgument( request.getWorkItemStatuses() != null && reply.getWorkItemServiceStates() != null && request.getWorkItemStatuses().size() == reply.getWorkItemServiceStates().size(), "RequestWorkItemStatus request and response are unbalanced, status: %s, states: %s", request.getWorkItemStatuses(), reply.getWorkItemServiceStates()); for (int i = 0; i < request.getWorkItemStatuses().size(); i++) { WorkItemServiceState state = reply.getWorkItemServiceStates().get(i); WorkItemStatus status = request.getWorkItemStatuses().get(i); if (state.getMetricShortId() == null) { continue; } checkArgument( status.getCounterUpdates() != null, "Response has shortids but no corresponding CounterUpdate"); for (MetricShortId shortIdMsg : state.getMetricShortId()) { int metricIndex = MoreObjects.firstNonNull(shortIdMsg.getMetricIndex(), 0); checkArgument( metricIndex < status.getCounterUpdates().size(), "Received aggregate index outside range of sent update %s >= %s", shortIdMsg.getMetricIndex(), status.getCounterUpdates().size()); CounterUpdate update = status.getCounterUpdates().get(metricIndex); cache.insert(update, checkNotNull(shortIdMsg.getShortId(), "Shortid should be non-null")); } } }
@Test public void testValidateShortIdsButNoUpdate() { CounterShortIdCache cache = new CounterShortIdCache(); ReportWorkItemStatusRequest request = new ReportWorkItemStatusRequest(); ReportWorkItemStatusResponse reply = new ReportWorkItemStatusResponse(); request.setWorkItemStatuses(Arrays.asList(new WorkItemStatus())); reply.setWorkItemServiceStates(createWorkServiceState(new Long[] {1000L})); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("Response has shortids but no corresponding CounterUpdate"); cache.storeNewShortIds(request, reply); }
public static boolean isDeepLinkBlackList(Activity activity) { if (activity != null) { for (String activityName : mDeepLinkBlackList) { try { Class<?> clazz = Class.forName(activityName); if (clazz.isAssignableFrom(activity.getClass())) { return true; } } catch (Exception e) { SALog.printStackTrace(e); } } } return false; }
@Test public void isDeepLinkBlackList() { Assert.assertFalse(ChannelUtils.isDeepLinkBlackList(Robolectric.setupActivity(DeepLinkManagerTest.DeepActivity.class))); }
@Override public Request transformRequest(Request request, ServiceInstance instance) { if (instance != null) { MetadataContextHolder.get().setLoadbalancer(LOAD_BALANCER_SERVICE_INSTANCE, instance); } return request; }
@Test public void test() throws Throwable { transformer.transformRequest(clientRequest, serviceInstance); assertThat(MetadataContextHolder.get().getLoadbalancerMetadata().get(LOAD_BALANCER_SERVICE_INSTANCE)).isEqualTo(serviceInstance); }
@Override public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) { super.onDataReceived(device, data); if (data.size() != 9 && data.size() != 11) { onInvalidDataReceived(device, data); return; } final boolean crcPresent = data.size() == 11; if (crcPresent) { final int actualCrc = CRC16.MCRF4XX(data.getValue(), 0, 9); final int expectedCrc = data.getIntValue(Data.FORMAT_UINT16_LE, 9); if (actualCrc != expectedCrc) { onContinuousGlucoseMonitorSessionStartTimeReceivedWithCrcError(device, data); return; } } final Calendar calendar = DateTimeDataCallback.readDateTime(data, 0); final Integer timeZoneOffset = TimeZoneDataCallback.readTimeZone(data, 7); // [minutes] final DSTOffsetCallback.DSTOffset dstOffset = DSTOffsetDataCallback.readDSTOffset(data, 8); if (calendar == null || timeZoneOffset == null || dstOffset == null) { onInvalidDataReceived(device, data); return; } final TimeZone timeZone = new TimeZone() { @Override public int getOffset(final int era, final int year, final int month, final int day, final int dayOfWeek, final int milliseconds) { return (timeZoneOffset + dstOffset.offset) * 60000; // convert minutes to milliseconds } @Override public void setRawOffset(final int offsetMillis) { throw new UnsupportedOperationException("Can't set raw offset for this TimeZone"); } @Override public int getRawOffset() { return timeZoneOffset * 60000; } @Override public boolean useDaylightTime() { return true; } @Override public boolean inDaylightTime(final Date date) { // Use of DST is dependent on the input data only return dstOffset.offset > 0; } @Override public int getDSTSavings() { return dstOffset.offset * 60000; } // TODO add TimeZone ID // @Override // public String getID() { // return super.getID(); // } }; calendar.setTimeZone(timeZone); onContinuousGlucoseMonitorSessionStartTimeReceived(device, calendar, crcPresent); }
@Test public void onContinuousGlucoseMonitorSessionStartTimeReceived_noYear() { final Data data = new Data(new byte[] {(byte) 0, 0, 4, 24, 13, 8, 24, 8, 2 }); callback.onDataReceived(null, data); assertTrue(success); assertFalse(verified); assertFalse(result.isSet(Calendar.YEAR)); assertEquals(Calendar.APRIL, result.get(Calendar.MONTH)); assertEquals(24, result.get(Calendar.DATE)); assertEquals(13, result.get(Calendar.HOUR_OF_DAY)); assertEquals(8, result.get(Calendar.MINUTE)); assertEquals(24, result.get(Calendar.SECOND)); assertEquals(8 * 60000, result.get(Calendar.ZONE_OFFSET)); assertEquals(2 * 15 * 60000, result.get(Calendar.DST_OFFSET)); }
public String getConfigClass(String className) { return getHeader() + "\n\n" + // getRootClassDeclaration(root, className) + "\n\n" + // indentCode(INDENTATION, getFrameworkCode()) + "\n\n" + // ConfigGenerator.generateContent(INDENTATION, root, true) + "\n" + // "}\n"; }
@Test void verify_generated_class_against_reference() throws IOException { String testDefinition = String.join("\n", Files.readAllLines(FileSystems.getDefault().getPath(DEF_NAME))); List<String> referenceClassLines = Files.readAllLines(FileSystems.getDefault().getPath(REFERENCE_NAME)); DefParser parser = new DefParser("allfeatures", new StringReader(testDefinition)); InnerCNode root = parser.getTree(); JavaClassBuilder builder = new JavaClassBuilder(root, parser.getNormalizedDefinition(), null, null); String[] configClassLines = builder.getConfigClass("AllfeaturesConfig").split("\n"); for (var line : configClassLines) { System.out.println(line); } for (int i = 0; i < referenceClassLines.size(); i++) { if (configClassLines.length <= i) fail("Missing lines in generated config class. First missing line:\n" + referenceClassLines.get(i)); assertEquals(referenceClassLines.get(i), configClassLines[i], "Line " + i); } }
@Override public boolean equals(@Nullable Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } GcsPath paths = (GcsPath) o; return bucket.equals(paths.bucket) && object.equals(paths.object); }
@Test public void testEquals() { GcsPath a = GcsPath.fromComponents(null, "a/b/c"); GcsPath a2 = GcsPath.fromComponents(null, "a/b/c"); assertFalse(a.isAbsolute()); assertFalse(a2.isAbsolute()); GcsPath b = GcsPath.fromComponents("bucket", "a/b/c"); GcsPath b2 = GcsPath.fromComponents("bucket", "a/b/c"); assertTrue(b.isAbsolute()); assertTrue(b2.isAbsolute()); assertEquals(a, a); assertThat(a, Matchers.not(Matchers.equalTo(b))); assertThat(b, Matchers.not(Matchers.equalTo(a))); assertEquals(a, a2); assertEquals(a2, a); assertEquals(b, b2); assertEquals(b2, b); assertThat(a, Matchers.not(Matchers.equalTo(Paths.get("/tmp/foo")))); assertNotNull(a); }
@Override public void print(final Printed<K, V> printed) { Objects.requireNonNull(printed, "printed can't be null"); final PrintedInternal<K, V> printedInternal = new PrintedInternal<>(printed); final String name = new NamedInternal(printedInternal.name()).orElseGenerateWithPrefix(builder, PRINTING_NAME); final ProcessorParameters<? super K, ? super V, ?, ?> processorParameters = new ProcessorParameters<>(printedInternal.build(this.name), name); final ProcessorGraphNode<? super K, ? super V> printNode = new ProcessorGraphNode<>(name, processorParameters); builder.addGraphNode(graphNode, printNode); }
@Test public void shouldNotAllowNullPrintedOnPrint() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.print(null)); assertThat(exception.getMessage(), equalTo("printed can't be null")); }
public Analysis analyze(Statement statement) { return analyze(statement, false); }
@Test public void testGroupByWithQualifiedName2() { // TODO: verify output analyze("SELECT t1.a FROM t1 GROUP BY a"); }
@Override public Object decode(Response response, Type type) throws IOException, DecodeException { if (response.status() == 404 || response.status() == 204) if (JSONObject.class.isAssignableFrom((Class<?>) type)) return new JSONObject(); else if (JSONArray.class.isAssignableFrom((Class<?>) type)) return new JSONArray(); else if (String.class.equals(type)) return null; else throw new DecodeException(response.status(), format("%s is not a type supported by this decoder.", type), response.request()); if (response.body() == null) return null; try (Reader reader = response.body().asReader(response.charset())) { Reader bodyReader = (reader.markSupported()) ? reader : new BufferedReader(reader); bodyReader.mark(1); if (bodyReader.read() == -1) { return null; // Empty body } bodyReader.reset(); return decodeBody(response, type, bodyReader); } catch (JSONException jsonException) { if (jsonException.getCause() != null && jsonException.getCause() instanceof IOException) { throw (IOException) jsonException.getCause(); } throw new DecodeException(response.status(), jsonException.getMessage(), response.request(), jsonException); } }
@Test void notFoundDecodesToEmpty() throws IOException { Response response = Response.builder() .status(404) .reason("Not found") .headers(Collections.emptyMap()) .request(request) .build(); assertThat(((JSONObject) new JsonDecoder().decode(response, JSONObject.class)).isEmpty()) .isTrue(); }
public void splitRegion(final long leftId, final Region right) { Requires.requireNonNull(right, "right"); Requires.requireNonNull(right.getRegionEpoch(), "right.regionEpoch"); final StampedLock stampedLock = this.stampedLock; final long stamp = stampedLock.writeLock(); try { final Region left = this.regionTable.get(leftId); Requires.requireNonNull(left, "left"); final byte[] leftStartKey = BytesUtil.nullToEmpty(left.getStartKey()); final byte[] leftEndKey = left.getEndKey(); final long rightId = right.getId(); final byte[] rightStartKey = right.getStartKey(); final byte[] rightEndKey = right.getEndKey(); Requires.requireNonNull(rightStartKey, "rightStartKey"); Requires.requireTrue(BytesUtil.compare(leftStartKey, rightStartKey) < 0, "leftStartKey must < rightStartKey"); if (leftEndKey == null || rightEndKey == null) { Requires.requireTrue(leftEndKey == rightEndKey, "leftEndKey must == rightEndKey"); } else { Requires.requireTrue(BytesUtil.compare(leftEndKey, rightEndKey) == 0, "leftEndKey must == rightEndKey"); Requires.requireTrue(BytesUtil.compare(rightStartKey, rightEndKey) < 0, "rightStartKey must < rightEndKey"); } final RegionEpoch leftEpoch = left.getRegionEpoch(); leftEpoch.setVersion(leftEpoch.getVersion() + 1); left.setEndKey(rightStartKey); this.regionTable.put(rightId, right.copy()); this.rangeTable.put(rightStartKey, rightId); } finally { stampedLock.unlockWrite(stamp); } }
@Test public void splitRegionTest() { RegionRouteTable table = new RegionRouteTable(); Region region = makeRegion(-1, null, null); table.addOrUpdateRegion(region); Region newRegion = makeRegion(1, BytesUtil.writeUtf8("t"), null); table.splitRegion(-1, newRegion); Region found = table.findRegionByKey(BytesUtil.writeUtf8("a")); assertEquals(-1, found.getId()); found = table.findRegionByKey(BytesUtil.writeUtf8("w")); assertEquals(1, found.getId()); }
public static List<Endpoint> listenerListToEndPoints( String input, Map<ListenerName, SecurityProtocol> nameToSecurityProto ) { return listenerListToEndPoints(input, n -> { SecurityProtocol result = nameToSecurityProto.get(n); if (result == null) { throw new IllegalArgumentException("No security protocol defined for listener " + n.value()); } return result; }); }
@Test public void testAnotherListenerListToEndPointsWithIpV6() { assertEquals(Arrays.asList( new Endpoint("SASL_SSL", SecurityProtocol.SASL_SSL, "fe80::b1da:69ca:57f7:63d8%3", 9092)), SocketServerConfigs.listenerListToEndPoints("SASL_SSL://[fe80::b1da:69ca:57f7:63d8%3]:9092", SocketServerConfigs.DEFAULT_NAME_TO_SECURITY_PROTO)); }
/**
 * Splits a Groovy inline expression on top-level commas, leaving commas nested
 * inside ${...} / $->{...} placeholders intact. Each returned segment is trimmed.
 */
public static List<String> split(final String inlineExpression) {
    final List<String> segments = new ArrayList<>();
    final StringBuilder current = new StringBuilder();
    int depth = 0;
    for (int index = 0; index < inlineExpression.length(); index++) {
        final char ch = inlineExpression.charAt(index);
        if (ch == ',') {
            handleSplitter(depth, current, ch, segments);
        } else if (ch == '$') {
            depth = handleDollarSign(inlineExpression, index, depth, current, ch);
        } else if (ch == '}') {
            depth = handleClosingBracket(depth, current, ch);
        } else {
            current.append(ch);
        }
    }
    // Flush the trailing segment, if any.
    if (current.length() > 0) {
        segments.add(current.toString().trim());
    }
    return segments;
}
@Test
void assertSplit() {
    // Plain comma-separated values are split and trimmed.
    assertThat(GroovyUtils.split(" t_order_0, t_order_1 "), is(Arrays.asList("t_order_0", "t_order_1")));
    // Commas inside ${...} placeholders must not split the expression.
    assertThat(GroovyUtils.split("t_order_${null}"), is(Collections.singletonList("t_order_${null}")));
    assertThat(GroovyUtils.split("t_order_${'xx'}"), is(Collections.singletonList("t_order_${'xx'}")));
    assertThat(GroovyUtils.split("t_order_${[0, 1, 2]},t_order_item_${[0, 2]}"), is(Arrays.asList("t_order_${[0, 1, 2]}", "t_order_item_${[0, 2]}")));
    assertThat(GroovyUtils.split("t_order_${0..2},t_order_item_${0..1}"), is(Arrays.asList("t_order_${0..2}", "t_order_item_${0..1}")));
    // Nested placeholders, including the $->{...} alias, stay intact as one segment.
    assertThat(GroovyUtils.split("t_${[\"new${1+2}\",'old']}_order_${1..2}"), is(Collections.singletonList("t_${[\"new${1+2}\",'old']}_order_${1..2}")));
    assertThat(GroovyUtils.split("t_$->{[\"new$->{1+2}\",'old']}_order_$->{1..2}"), is(Collections.singletonList("t_$->{[\"new$->{1+2}\",'old']}_order_$->{1..2}")));
}
/**
 * Returns the current value of the failure counter for getAppPriority calls.
 */
public int getAppPriorityFailedRetrieved() {
    return this.numGetAppPriorityFailedRetrieved.value();
}
@Test
public void testGetAppPriorityRetrievedFailed() {
    // One simulated failure must bump the counter by exactly one.
    long failuresBefore = metrics.getAppPriorityFailedRetrieved();
    badSubCluster.getAppPriorityFailed();
    Assert.assertEquals(failuresBefore + 1, metrics.getAppPriorityFailedRetrieved());
}
/**
 * Applies the configuration to this mapping and pushes it through to the
 * wrapped raw table mapping as well.
 */
@Override
public void setConf(Configuration conf) {
    super.setConf(conf);
    getRawMapping().setConf(conf);
}
@Test
public void testTableCaching() throws IOException {
    File mapFile = File.createTempFile(getClass().getSimpleName() + ".testTableCaching", ".txt");
    // Two entries: one space-separated, one tab-separated.
    Files.asCharSink(mapFile, StandardCharsets.UTF_8).write(
        hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
    mapFile.deleteOnExit();
    TableMapping mapping = new TableMapping();
    Configuration conf = new Configuration();
    conf.set(NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY, mapFile.getCanonicalPath());
    mapping.setConf(conf);
    List<String> names = new ArrayList<String>();
    names.add(hostName1);
    names.add(hostName2);
    List<String> result1 = mapping.resolve(names);
    assertEquals(names.size(), result1.size());
    assertEquals("/rack1", result1.get(0));
    assertEquals("/rack2", result1.get(1));
    // unset the file, see if it gets read again
    conf.set(NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY, "some bad value for a file");
    List<String> result2 = mapping.resolve(names);
    // The first resolution must have been cached; the bad path is never re-read.
    assertEquals(result1, result2);
}
/**
 * Rounds the given epoch-millis timestamp up to the next matching time by
 * delegating to the Calendar-based overload.
 */
public Calendar ceil(long t) {
    final Calendar when = new GregorianCalendar(Locale.US);
    when.setTimeInMillis(t);
    return ceil(when);
}
@Test public void testCeil2() throws Exception { // make sure that lower fields are really reset correctly CronTab x = new CronTab("15,45 3 * * *"); Calendar c = new GregorianCalendar(2000, Calendar.MARCH, 1, 2, 30); compare(new GregorianCalendar(2000, Calendar.MARCH, 1, 3, 15), x.ceil(c)); }
/**
 * Reads a tag (32-bit value) in big-endian byte order starting at {@code off};
 * a tag shares the byte layout of a big-endian int.
 */
public static int bytesToTagBE(byte[] bytes, int off) {
    final int tag = bytesToIntBE(bytes, off);
    return tag;
}
@Test
public void testBytesToTagBE() {
    // Big-endian bytes of the PixelData tag must round-trip to Tag.PixelData.
    assertEquals(Tag.PixelData, ByteUtils.bytesToTagBE(TAG_PIXEL_DATA_BE, 0));
}
/**
 * Integer overload: widens to double and delegates to the Double overload,
 * propagating null.
 */
@Udf(description = "Returns the hyperbolic cosine of an INT value")
public Double cosh(
    @UdfParameter(
        value = "value",
        description = "The value in radians to get the hyperbolic cosine of."
    )
    final Integer value
) {
    if (value == null) {
        return cosh((Double) null);
    }
    return cosh(value.doubleValue());
}
@Test
public void shouldHandleZero() {
    // cosh(0) == 1 for each numeric overload (Double, Integer, Long).
    assertThat(udf.cosh(0.0), closeTo(1.0, 0.000000000000001));
    assertThat(udf.cosh(0), closeTo(1.0, 0.000000000000001));
    assertThat(udf.cosh(0L), closeTo(1.0, 0.000000000000001));
}
/**
 * Asks a single SDK connection to reconnect, optionally redirecting it to
 * another server given as "ip:port".
 *
 * Returns false only when the reset request fails with an unexpected error;
 * unknown connection ids and already-closed connections still yield true.
 *
 * NOTE(review): the connectionId is only copied onto the reset request inside
 * the "redirectAddress contains ':'" branch; without a redirect address the
 * request is sent without it — confirm that is intentional.
 */
public boolean loadSingle(String connectionId, String redirectAddress) {
    Connection connection = getConnection(connectionId);
    if (connection != null) {
        // Only SDK-sourced connections understand the reset protocol.
        if (connection.getMetaInfo().isSdkSource()) {
            ConnectResetRequest connectResetRequest = new ConnectResetRequest();
            if (StringUtils.isNotBlank(redirectAddress) && redirectAddress.contains(Constants.COLON)) {
                String[] split = redirectAddress.split(Constants.COLON);
                connectResetRequest.setServerIp(split[0]);
                connectResetRequest.setServerPort(split[1]);
                connectResetRequest.setConnectionId(connectionId);
            }
            try {
                // 3s budget for the client to acknowledge the reset.
                connection.request(connectResetRequest, 3000L);
            } catch (ConnectionAlreadyClosedException e) {
                // The peer is already gone; drop our bookkeeping for it.
                unregister(connectionId);
            } catch (Exception e) {
                LOGGER.error("error occurs when expel connection, connectionId: {} ", connectionId, e);
                return false;
            }
        }
    }
    return true;
}
@Test
void testLoadSingle() throws NacosException {
    // Only SDK-sourced connections are eligible for a reset request.
    Mockito.when(connectionMeta.isSdkSource()).thenReturn(true);
    // NOTE(review): no assertion on the outcome — this only verifies the call
    // completes without throwing.
    connectionManager.loadSingle(connectId, clientIp);
}
/**
 * Parses a PostgreSQL-style range string into a Range of ZonedDateTime.
 * When both bounds are present and carry different zones, the offset
 * difference must be fully explained by a daylight-saving transition
 * (computed against the system default zone); otherwise the input is rejected.
 *
 * NOTE(review): the (ZoneOffset) casts assume the parser only ever produces
 * offset-based zones (e.g. "-06", "+05:30"), never region ids — verify.
 */
public static Range<ZonedDateTime> zonedDateTimeRange(String rangeStr) {
    Range<ZonedDateTime> range = ofString(rangeStr, parseZonedDateTime().compose(unquote()), ZonedDateTime.class);
    if (range.hasLowerBound() && range.hasUpperBound() && !EMPTY.equals(rangeStr)) {
        ZoneId lowerZone = range.lower().getZone();
        ZoneId upperZone = range.upper().getZone();
        if (!lowerZone.equals(upperZone)) {
            // DST offset in effect at each bound, per the system default zone.
            Duration lowerDst = ZoneId.systemDefault().getRules().getDaylightSavings(range.lower().toInstant());
            Duration upperDst = ZoneId.systemDefault().getRules().getDaylightSavings(range.upper().toInstant());
            long dstSeconds = upperDst.minus(lowerDst).getSeconds();
            if (dstSeconds < 0) {
                dstSeconds *= -1;
            }
            long zoneDriftSeconds = ((ZoneOffset) lowerZone).getTotalSeconds() - ((ZoneOffset) upperZone).getTotalSeconds();
            if (zoneDriftSeconds < 0) {
                zoneDriftSeconds *= -1;
            }
            // The absolute zone drift must equal the absolute DST delta; else
            // the bounds are in genuinely different time zones.
            if (dstSeconds != zoneDriftSeconds) {
                throw new IllegalArgumentException("The upper and lower bounds must be in same time zone!");
            }
        }
    }
    return range;
}
@Test
public void zonedDateTimeTest() {
    // Fractional seconds from 1 to 6 digits must all parse.
    assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.1-06,)"));
    assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.12-06,)"));
    assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.123-06,)"));
    assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.1234-06,)"));
    assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.12345-06,)"));
    assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.123456-06,)"));
    // Offsets with minutes and an explicit "infinity" upper bound are accepted.
    assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.123456+05:30,)"));
    assertNotNull(Range.zonedDateTimeRange("[2019-03-27 16:33:10.123456-06,infinity)"));
}
/**
 * Expands a shard-name template against the base filename and appends the suffix.
 * Template runs: 'S...' -> zero-padded shard number, 'N...' -> zero-padded shard
 * count, 'P...' -> pane string, 'W...' -> window string. Pane/window runs are
 * only substituted when the corresponding argument is non-null; otherwise the
 * run is left verbatim in the output (no appendReplacement is issued for it).
 */
public static ResourceId constructName(
    ResourceId baseFilename,
    String shardTemplate,
    String suffix,
    int shardNum,
    int numShards,
    @Nullable String paneStr,
    @Nullable String windowStr) {
  String prefix = extractFilename(baseFilename);
  // Matcher API works with StringBuffer, rather than StringBuilder.
  StringBuffer sb = new StringBuffer();
  sb.append(prefix);
  Matcher m = SHARD_FORMAT_RE.matcher(shardTemplate);
  while (m.find()) {
    boolean isCurrentShardNum = (m.group(1).charAt(0) == 'S');
    boolean isNumberOfShards = (m.group(1).charAt(0) == 'N');
    boolean isPane = (m.group(1).charAt(0) == 'P') && paneStr != null;
    boolean isWindow = (m.group(1).charAt(0) == 'W') && windowStr != null;
    // Pad with as many zeros as the matched run is long (e.g. "SSS" -> "000");
    // DecimalFormat widens automatically for larger values.
    char[] zeros = new char[m.end() - m.start()];
    Arrays.fill(zeros, '0');
    DecimalFormat df = new DecimalFormat(String.valueOf(zeros));
    if (isCurrentShardNum) {
      String formatted = df.format(shardNum);
      m.appendReplacement(sb, formatted);
    } else if (isNumberOfShards) {
      String formatted = df.format(numShards);
      m.appendReplacement(sb, formatted);
    } else if (isPane) {
      m.appendReplacement(sb, paneStr);
    } else if (isWindow) {
      m.appendReplacement(sb, windowStr);
    }
  }
  m.appendTail(sb);
  sb.append(suffix);
  return baseFilename
      .getCurrentDirectory()
      .resolve(sb.toString(), StandardResolveOptions.RESOLVE_FILE);
}
@Test
public void testConstructNameWithLargeShardCount() {
    // Shard number 100 and count 5000 exceed the two-character "SS"/"NN"
    // padding; the format must widen the field rather than truncate.
    assertEquals(
        "/out-100-of-5000.txt",
        constructName("/out", "-SS-of-NN", ".txt", 100, 5000, null, null));
}
/**
 * Adds a batching strategy to the set consulted when classifying tasks.
 */
public void registerStrategy(BatchingStrategy<?, ?, ?> strategy) {
    this._strategies.add(strategy);
}
@Test
public void testExecuteBatchFailure() {
    // Strategy whose batch execution always blows up.
    RecordingStrategy<Integer, Integer, String> strategy =
        new RecordingStrategy<Integer, Integer, String>((key, promise) -> promise.done(String.valueOf(key)), key -> key % 2) {
        @Override
        public void executeBatch(Integer group, Batch<Integer, String> batch) {
            throw new RuntimeException();
        }
    };
    _batchingSupport.registerStrategy(strategy);
    // Each batchable task must recover from the batch failure with "failed".
    Task<String> task = Task.par(strategy.batchable(0).recover(e -> "failed"),
        strategy.batchable(1).recover(e -> "failed"),
        strategy.batchable(2).recover(e -> "failed"))
        .map("concat", (s0, s1, s2) -> s0 + s1 + s2);
    String result = runAndWait("TestBatchingSupport.testExecuteBatchFailure", task);
    assertEquals(result, "failedfailedfailed");
    // All keys were classified, but nothing was recorded as executed because
    // executeBatch threw before recording anything.
    assertTrue(strategy.getClassifiedKeys().contains(0));
    assertTrue(strategy.getClassifiedKeys().contains(1));
    assertTrue(strategy.getClassifiedKeys().contains(2));
    assertEquals(strategy.getExecutedBatches().size(), 0);
    assertEquals(strategy.getExecutedSingletons().size(), 0);
}
/**
 * Persists a mail send log row capturing the recipient, account and template
 * snapshot, and returns the generated log id.
 */
@Override
public Long createMailLog(Long userId, Integer userType, String toMail, MailAccountDO account,
                          MailTemplateDO template, String templateContent, Map<String, Object> templateParams,
                          Boolean isSend) {
    MailLogDO.MailLogDOBuilder logDOBuilder = MailLogDO.builder();
    // Set the status depending on whether the mail will actually be sent.
    logDOBuilder.sendStatus(Objects.equals(isSend, true) ? MailSendStatusEnum.INIT.getStatus()
            : MailSendStatusEnum.IGNORE.getStatus())
        // Recipient / account information.
        .userId(userId).userType(userType).toMail(toMail)
        .accountId(account.getId()).fromMail(account.getMail())
        // Template-related fields, snapshotted at send time.
        .templateId(template.getId()).templateCode(template.getCode()).templateNickname(template.getNickname())
        .templateTitle(template.getTitle()).templateContent(templateContent).templateParams(templateParams);
    // Persist to the database and return the generated id.
    MailLogDO logDO = logDOBuilder.build();
    mailLogMapper.insert(logDO);
    return logDO.getId();
}
@Test
public void testCreateMailLog() {
    // Prepare arguments.
    Long userId = randomLongId();
    Integer userType = randomEle(UserTypeEnum.values()).getValue();
    String toMail = randomEmail();
    MailAccountDO account = randomPojo(MailAccountDO.class);
    MailTemplateDO template = randomPojo(MailTemplateDO.class);
    String templateContent = randomString();
    Map<String, Object> templateParams = randomTemplateParams();
    Boolean isSend = true;
    // Mock methods (none needed).
    // Invoke.
    Long logId = mailLogService.createMailLog(userId, userType, toMail, account,
        template, templateContent, templateParams, isSend);
    // Assert: the persisted row mirrors every input and is in INIT state.
    MailLogDO log = mailLogMapper.selectById(logId);
    assertNotNull(log);
    assertEquals(MailSendStatusEnum.INIT.getStatus(), log.getSendStatus());
    assertEquals(userId, log.getUserId());
    assertEquals(userType, log.getUserType());
    assertEquals(toMail, log.getToMail());
    assertEquals(account.getId(), log.getAccountId());
    assertEquals(account.getMail(), log.getFromMail());
    assertEquals(template.getId(), log.getTemplateId());
    assertEquals(template.getCode(), log.getTemplateCode());
    assertEquals(template.getNickname(), log.getTemplateNickname());
    assertEquals(template.getTitle(), log.getTemplateTitle());
    assertEquals(templateContent, log.getTemplateContent());
    assertEquals(templateParams, log.getTemplateParams());
}
/**
 * One-shot migration: blanks out non-empty root query strings on dashboard
 * search documents. Skips entirely when a MigrationCompleted marker already
 * exists, and records the number of modified documents on completion.
 */
@Override
public void upgrade() {
    if (clusterConfigService.get(MigrationCompleted.class) != null) {
        LOG.debug("Migration already completed.");
        return;
    }
    // Match dashboards with at least one non-empty query string and clear them.
    final UpdateResult updateResult = searchesCollection
        .updateMany(
            and(
                isDashboard(),
                atLeastOneQueryHasNonEmptyQueryString()
            ),
            makeQueryStringEmpty(),
            forNonEmptyQueryStrings()
        );
    writeMigrationCompleted(updateResult.getModifiedCount());
}
@Test
@MongoDBFixtures("V20200409083200_RemoveRootQueriesFromMigratedDashboards/sample.json")
public void findsCorrectDocuments() throws JsonProcessingException, JSONException {
    migration.upgrade();
    // Migrated dashboards: every root query string was blanked out.
    assertThat(rootQueryStrings("5d6ce7bd5d1eb45af534399e")).allMatch(String::isEmpty);
    assertThat(rootQueryStrings("5da9bc1b3a6a1d0d2f07faf2")).allMatch(String::isEmpty);
    assertThat(rootQueryStrings("5dad673d6131be4f08ceea77")).allMatch(String::isEmpty);
    assertThat(rootQueryStrings("5dbbf604799412036075d78f")).allMatch(String::isEmpty);
    // Remaining documents (presumably non-dashboard searches in the fixture)
    // keep their original root queries untouched.
    assertThat(rootQueryStrings("5da9bbb944300ca38bc5da3e")).containsExactlyInAnyOrder("author:\"$author$\" AND project:\"graylog2-server\"", "author:\"$author$\"");
    assertThat(rootQueryStrings("5da9bbba12993f3904b41217")).containsExactlyInAnyOrder("author:\"$author$\" AND project:\"graylog2-server\"", "author:\"$author$\"");
}
/**
 * Renders the given AST back into SQL text, trimming any trailing newlines
 * produced by the formatter.
 */
public static String formatSql(final AstNode root) {
    final StringBuilder sql = new StringBuilder();
    new Formatter(sql).process(root, 0);
    final String rendered = sql.toString();
    return StringUtils.stripEnd(rendered, "\n");
}
@Test
public void shouldFormatTumblingWindow() {
    // Given:
    final String statementString = "CREATE STREAM S AS SELECT ITEMID, COUNT(*) FROM ORDERS WINDOW TUMBLING (SIZE 7 DAYS) GROUP BY ITEMID;";
    final Statement statement = parseSingle(statementString);
    final String result = SqlFormatter.formatSql(statement);
    // Then: the window clause survives the round trip and EMIT CHANGES is appended.
    assertThat(result, is("CREATE STREAM S AS SELECT\n"
        + " ITEMID,\n"
        + " COUNT(*)\n"
        + "FROM ORDERS ORDERS\n"
        + "WINDOW TUMBLING ( SIZE 7 DAYS ) \n"
        + "GROUP BY ITEMID\n"
        + "EMIT CHANGES"));
}
/**
 * Logs the fatal fault, runs the configured terminating action (best effort),
 * then halts or exits the JVM with status 1. Declared to return
 * RuntimeException so callers can "throw" it, but the normal path terminates
 * the process and returns null.
 */
@Override
public RuntimeException handleFault(String failureMessage, Throwable cause) {
    if (cause != null) {
        log.error("Encountered fatal fault: {}", failureMessage, cause);
    } else {
        log.error("Encountered fatal fault: {}", failureMessage);
    }
    try {
        action.run();
    } catch (Throwable t) {
        // The action must not prevent termination.
        log.error("Failed to run terminating action.", t);
    }
    final int statusCode = 1;
    if (shouldHalt) {
        Exit.halt(statusCode);
    } else {
        Exit.exit(statusCode);
    }
    return null;
}
@Test
public void testExitIsCalled() {
    AtomicBoolean exitCalled = new AtomicBoolean(false);
    Exit.setExitProcedure(terminatingProcedure(exitCalled));
    AtomicBoolean actionCalled = new AtomicBoolean(false);
    // The terminating action must run before Exit.exit is invoked.
    Runnable action = () -> {
        assertFalse(exitCalled.get());
        actionCalled.set(true);
    };
    try {
        new ProcessTerminatingFaultHandler.Builder()
            .setShouldHalt(false)
            .setAction(action)
            .build()
            .handleFault("", null);
    } finally {
        // Always restore the real exit procedure for other tests.
        Exit.resetExitProcedure();
    }
    assertTrue(exitCalled.get());
    assertTrue(actionCalled.get());
}
/**
 * JAXB adapter hook: parses the XML attribute/element text into a DateTime
 * using the shared FORMATTER pattern.
 */
@Override
public DateTime unmarshal(String v) throws Exception {
    return DateTime.parse(v, FORMATTER);
}
@Test
public void unmarshal() throws Exception {
    XmlDateTimeAdapter xmlAdapter = new XmlDateTimeAdapter();
    // 1672347891171 ms is the epoch representation of the ISO string below.
    String isoTimestamp = "2022-12-29T21:04:51.171Z";
    assertEquals(Instant.ofEpochMilli(1672347891171L).toDateTime(), xmlAdapter.unmarshal(isoTimestamp));
}
/**
 * REST: deploys the service described in the JSON body under the given
 * application id. The app catalog (Solr) entry is written first; on failure
 * the handler returns 400 with the error text. Only then is the YARN service
 * created and 202 returned with a small JSON status payload.
 *
 * NOTE(review): failures from YarnServiceClient.createApp are not reflected
 * in the response — confirm that is intentional.
 */
@POST
@Path("{id}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deploy(@PathParam("id") String id, Service service) {
    AppCatalogSolrClient sc = new AppCatalogSolrClient();
    try {
        sc.deployApp(id, service);
    } catch (SolrServerException | IOException e) {
        return Response.status(Status.BAD_REQUEST).entity(e.toString()).build();
    }
    YarnServiceClient yc = new YarnServiceClient();
    yc.createApp(service);
    String output = "{\"status\":\"Application deployed.\",\"id\":\"" + service.getName() + "\"}";
    return Response.status(Status.ACCEPTED).entity(output).build();
}
@Test
void testDeploy() throws Exception {
    String id = "application 1";
    // NOTE(review): this stubs AppListController itself and then asserts on the
    // stub's canned response, so no production deploy logic is exercised.
    AppListController ac = Mockito.mock(AppListController.class);
    Service service = new Service();
    Response expected = Response.ok().build();
    when(ac.deploy(id, service)).thenReturn(Response.ok().build());
    final Response actual = ac.deploy(id, service);
    assertEquals(expected.getStatus(), actual.getStatus());
}
/**
 * Delegates to the SizeDeterminer, which invokes the callback with the
 * target's dimensions once they can be determined.
 */
@Override
public final void getSize(@NonNull SizeReadyCallback cb) {
    sizeDeterminer.getSize(cb);
}
@Test
public void getSize_withWrapContentHeightAndValidWidth_returnsWidthAndDisplayDimen() {
    int width = 100;
    // Fixed width, wrap_content height.
    LayoutParams params = new FrameLayout.LayoutParams(width, LayoutParams.WRAP_CONTENT);
    view.setLayoutParams(params);
    setDisplayDimens(200, 100);
    parent.getLayoutParams().height = 200;
    activity.visible();
    target.getSize(cb);
    // Width comes from the layout params; the wrap_content height resolves to 200.
    verify(cb).onSizeReady(width, 200);
}
/**
 * Reports whether ANSI colours are disabled; defaults to false (coloured
 * output) when the property is absent.
 */
@Override
public boolean isMonochrome() {
    final Optional<Boolean> colorsDisabled =
        configurationParameters.getBoolean(ANSI_COLORS_DISABLED_PROPERTY_NAME);
    return colorsDisabled.orElse(false);
}
@Test
void isMonochrome() {
    // Property "true" -> monochrome output.
    MapConfigurationParameters colorsOff = new MapConfigurationParameters(
        Constants.ANSI_COLORS_DISABLED_PROPERTY_NAME, "true");
    assertTrue(new CucumberEngineOptions(colorsOff).isMonochrome());
    // Property "false" -> coloured output.
    MapConfigurationParameters colorsOn = new MapConfigurationParameters(
        Constants.ANSI_COLORS_DISABLED_PROPERTY_NAME, "false");
    assertFalse(new CucumberEngineOptions(colorsOn).isMonochrome());
}
/**
 * Builds a serializable converter from a Beam Row to protobuf-encoded bytes
 * for the message type named {@code messageName}, using the descriptor set
 * loaded from {@code fileDescriptorPath}.
 */
public static SerializableFunction<Row, byte[]> getRowToProtoBytes(
    String fileDescriptorPath, String messageName) {
  ProtoSchemaInfo dynamicProtoDomain = getProtoDomain(fileDescriptorPath, messageName);
  ProtoDomain protoDomain = dynamicProtoDomain.getProtoDomain();
  @SuppressWarnings("unchecked")
  ProtoDynamicMessageSchema<DynamicMessage> protoDynamicMessageSchema =
      ProtoDynamicMessageSchema.forDescriptor(protoDomain, messageName);
  return new SimpleFunction<Row, byte[]>() {
    @Override
    public byte[] apply(Row input) {
      // Row -> DynamicMessage via the schema, then serialize to bytes.
      SerializableFunction<Row, DynamicMessage> res =
          protoDynamicMessageSchema.getFromRowFunction();
      return res.apply(input).toByteArray();
    }
  };
}
@Test
public void testRowToProtoFunction() {
    // A row matching SCHEMA, including nested address fields.
    Row row = Row.withSchema(SCHEMA)
        .withFieldValue("id", 1234)
        .withFieldValue("name", "Doe")
        .withFieldValue("active", false)
        .withFieldValue("address.city", "seattle")
        .withFieldValue("address.street", "fake street")
        .withFieldValue("address.zip_code", "TO-1234")
        .withFieldValue("address.state", "wa")
        .build();
    // Conversion must produce non-null serialized proto bytes.
    Assert.assertNotNull(
        ProtoByteUtils.getRowToProtoBytes(DESCRIPTOR_PATH, MESSAGE_NAME).apply(row));
}
/**
 * Returns the menu list for the current tenant, removing menus the tenant has
 * not been granted when multi-tenancy is enabled.
 */
@Override
public List<MenuDO> getMenuListByTenant(MenuListReqVO reqVO) {
    List<MenuDO> menus = getMenuList(reqVO);
    // When multi-tenancy is enabled, filter out menus not granted to the tenant.
    tenantService.handleTenantMenu(menuIds -> menus.removeIf(menu -> !CollUtil.contains(menuIds, menu.getId())));
    return menus;
}
@Test
public void testGetMenuListByTenant() {
    // Mock data: two enabled menus (100, 102) and one disabled (101).
    MenuDO menu100 = randomPojo(MenuDO.class, o -> o.setId(100L).setStatus(CommonStatusEnum.ENABLE.getStatus()));
    menuMapper.insert(menu100);
    MenuDO menu101 = randomPojo(MenuDO.class, o -> o.setId(101L).setStatus(CommonStatusEnum.DISABLE.getStatus()));
    menuMapper.insert(menu101);
    MenuDO menu102 = randomPojo(MenuDO.class, o -> o.setId(102L).setStatus(CommonStatusEnum.ENABLE.getStatus()));
    menuMapper.insert(menu102);
    // Mock the tenant filter so only menus 100 and 101 remain visible.
    Set<Long> menuIds = asSet(100L, 101L);
    doNothing().when(tenantService).handleTenantMenu(argThat(handler -> {
        handler.handle(menuIds);
        return true;
    }));
    // Prepare arguments: query only ENABLE menus.
    MenuListReqVO reqVO = new MenuListReqVO().setStatus(CommonStatusEnum.ENABLE.getStatus());
    // Invoke.
    List<MenuDO> result = menuService.getMenuListByTenant(reqVO);
    // Assert: only menu 100 passes both the status and the tenant filter.
    assertEquals(1, result.size());
    assertPojoEquals(menu100, result.get(0));
}
/**
 * Outbound close hook: when a close status is configured and the channel is
 * still active, sends a CloseWebSocketFrame (unless one was already sent),
 * arms the close-sent timeout, and defers the real channel close until the
 * close-frame future completes. Otherwise closes immediately.
 *
 * NOTE(review): assumes write(...) assigns {@code closeSent} — otherwise the
 * addListener call below would NPE; verify against the write implementation.
 */
@Override
public void close(final ChannelHandlerContext ctx, final ChannelPromise promise) throws Exception {
    if (closeStatus == null || !ctx.channel().isActive()) {
        ctx.close(promise);
    } else {
        if (closeSent == null) {
            write(ctx, new CloseWebSocketFrame(closeStatus), ctx.newPromise());
        }
        flush(ctx);
        applyCloseSentTimeout(ctx);
        closeSent.addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) {
                // Complete the caller's close only after the close frame settled.
                ctx.close(promise);
            }
        });
    }
}
@Test
public void testTimeout() throws Exception {
    final AtomicReference<ChannelPromise> ref = new AtomicReference<ChannelPromise>();
    // Close timeout of 1 ms so the close-sent timeout fires almost immediately.
    WebSocketProtocolHandler handler = new WebSocketProtocolHandler(
            false, WebSocketCloseStatus.NORMAL_CLOSURE, 1) { };
    // Outbound handler that captures the close-frame promise without completing
    // it, simulating a peer that never acknowledges the close.
    EmbeddedChannel channel = new EmbeddedChannel(new ChannelOutboundHandlerAdapter() {
        @Override
        public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
            ref.set(promise);
            ReferenceCountUtil.release(msg);
        }
    }, handler);
    ChannelFuture future = channel.writeAndFlush(new CloseWebSocketFrame());
    ChannelHandlerContext ctx = channel.pipeline().context(WebSocketProtocolHandler.class);
    handler.close(ctx, ctx.newPromise());
    do {
        Thread.sleep(10);
        channel.runPendingTasks();
    } while (!future.isDone());
    // The close must fail with a handshake exception, leaving the captured
    // write promise incomplete.
    assertThat(future.cause(), Matchers.instanceOf(WebSocketHandshakeException.class));
    assertFalse(ref.get().isDone());
    assertFalse(channel.finish());
}
/**
 * Resolves the given (possibly multi-part) table name through Spark's catalog
 * machinery and unwraps the underlying Iceberg table.
 */
public static org.apache.iceberg.Table loadIcebergTable(SparkSession spark, String name)
    throws ParseException, NoSuchTableException {
  CatalogAndIdentifier catalogAndIdentifier = catalogAndIdentifier(spark, name);
  TableCatalog tableCatalog = asTableCatalog(catalogAndIdentifier.catalog);
  Table loaded = tableCatalog.loadTable(catalogAndIdentifier.identifier);
  return toIcebergTable(loaded);
}
@Test
public void testLoadIcebergTable() throws Exception {
    // Register a Hive-backed Spark catalog on the fly.
    spark.conf().set("spark.sql.catalog.hive", SparkCatalog.class.getName());
    spark.conf().set("spark.sql.catalog.hive.type", "hive");
    spark.conf().set("spark.sql.catalog.hive.default-namespace", "default");
    String tableFullName = "hive.default.tbl";
    sql("CREATE TABLE %s (c1 bigint, c2 string, c3 string) USING iceberg", tableFullName);
    Table table = Spark3Util.loadIcebergTable(spark, tableFullName);
    // The resolved Iceberg table reports its fully qualified name.
    Assert.assertTrue(table.name().equals(tableFullName));
}
/**
 * Forwards the record to every child of the current processor node, restoring
 * the "current node" pointer afterwards so downstream bookkeeping stays
 * consistent even if a child throws.
 */
@SuppressWarnings("unchecked")
@Override
public <K, V> void forward(final Record<K, V> record) {
    final ProcessorNode<?, ?, ?, ?> previousNode = currentNode();
    try {
        for (final ProcessorNode<?, ?, ?, ?> child : currentNode().children()) {
            // Each child sees itself as the current node while processing.
            setCurrentNode(child);
            ((ProcessorNode<K, V, ?, ?>) child).process(record);
        }
    } finally {
        setCurrentNode(previousNode);
    }
}
@Test
public void shouldFailToForwardUsingToParameter() {
    // The global context only supports forwarding to all children; the
    // To-parameter overload must be rejected.
    assertThrows(IllegalStateException.class, () -> globalContext.forward(null, null, To.all()));
}
/**
 * Submits a forwarding objective for the device. Fast path: when the objective
 * has no next-id dependency, or the referenced next group already exists, or
 * queueing is declined, it is handed straight to the installer executor;
 * otherwise it stays queued waiting for its next group.
 */
@Override
public void forward(DeviceId deviceId, ForwardingObjective forwardingObjective) {
    checkPermission(FLOWRULE_WRITE);
    if (forwardingObjective.nextId() == null ||
            flowObjectiveStore.getNextGroup(forwardingObjective.nextId()) != null ||
            !queueFwdObjective(deviceId, forwardingObjective)) {
        // fast path
        installerExecutor.execute(new ObjectiveProcessor(deviceId, forwardingObjective, installerExecutor));
    }
}
@Test
public void deviceUpEvent() throws TestUtilsException {
    TrafficSelector selector = DefaultTrafficSelector.emptySelector();
    TrafficTreatment treatment = DefaultTrafficTreatment.emptyTreatment();
    // Announce device d2 to the manager's internal device listener.
    DeviceEvent event = new DeviceEvent(DeviceEvent.Type.DEVICE_ADDED, d2);
    DeviceListener listener = TestUtils.getField(manager, "deviceListener");
    assertThat(listener, notNullValue());
    listener.event(event);
    ForwardingObjective forward = DefaultForwardingObjective.builder()
        .fromApp(NetTestTools.APP_ID)
        .withFlag(ForwardingObjective.Flag.SPECIFIC)
        .withSelector(selector)
        .withTreatment(treatment)
        .makePermanent()
        .add();
    manager.forward(id2, forward);
    // new device should have an objective now
    TestTools.assertAfter(RETRY_MS, () -> assertThat(forwardingObjectives, hasSize(1)));
    assertThat(forwardingObjectives, hasItem("of:d2"));
    // Only a forwarding objective was submitted; filtering/next stay empty.
    assertThat(filteringObjectives, hasSize(0));
    assertThat(nextObjectives, hasSize(0));
}
/**
 * Cache-bypassing window fetch: resolves the provider's underlying stores and
 * returns the iterator from the first store whose uncached fetch yields
 * results for the key/time range.
 */
public static WindowStoreIterator<ValueAndTimestamp<GenericRow>> fetch(
    final ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>> store,
    final GenericKey key,
    final Instant lower,
    final Instant upper
) {
    Objects.requireNonNull(key, "key can't be null");
    final List<ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>>> stores = getStores(store);
    final Function<ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>>,
        WindowStoreIterator<ValueAndTimestamp<GenericRow>>> fetchFunc =
            windowStore -> fetchUncached(windowStore, key, lower, upper);
    return findFirstNonEmptyIterator(stores, fetchFunc);
}
@Test
public void shouldAvoidNonWindowStore() throws IllegalAccessException {
    // Wire a metered store wrapping an inner layer; the bypass must unwrap
    // down to the real window store before fetching.
    when(provider.stores(any(), any())).thenReturn(ImmutableList.of(meteredWindowStore));
    SERDES_FIELD.set(meteredWindowStore, serdes);
    when(serdes.rawKey(any())).thenReturn(BYTES);
    when(meteredWindowStore.wrapped()).thenReturn(wrappedWindowStore);
    when(wrappedWindowStore.wrapped()).thenReturn(stateStore);
    when(wrappedWindowStore.fetch(any(), any(), any())).thenReturn(windowStoreIterator);
    when(windowStoreIterator.hasNext()).thenReturn(false);
    WindowStoreCacheBypass.fetch(
        store, SOME_KEY, Instant.ofEpochMilli(100), Instant.ofEpochMilli(200));
    // The fetch must hit the unwrapped store directly with the raw key bytes.
    verify(wrappedWindowStore).fetch(
        new Bytes(BYTES), Instant.ofEpochMilli(100L), Instant.ofEpochMilli(200L));
}
/**
 * Wraps every line in the list to the given column width, preserving order.
 */
public static List<String> wrapText(List<String> list, int columnWidth) {
    final List<String> wrapped = new ArrayList<>(list.size());
    for (final String line : list) {
        wrapped.add(wrapText(line, columnWidth));
    }
    return wrapped;
}
@Test
public void testWrapText_String_int_zero() {
    // NOTE(review): despite the "_zero" suffix this exercises a NEGATIVE width;
    // negative column widths must be rejected. Consider renaming the test.
    Assertions.assertThrows(IllegalArgumentException.class, () -> StringUtils.wrapText("test", -1));
}
/**
 * Returns true when a node exists at the given ZooKeeper path; any error while
 * checking is deliberately treated as "does not exist".
 */
@Override
public boolean checkExists(String path) {
    try {
        return client.checkExists().forPath(path) != null;
    } catch (Exception ignored) {
        // Best effort: treat failures the same as a missing node.
        return false;
    }
}
@Test
void testCreateContent4Persistent() {
    String nodePath = "/curatorTest4CrContent/content.data";
    String payload = "createContentTest";
    // Start from a clean slate.
    curatorClient.delete(nodePath);
    assertThat(curatorClient.checkExists(nodePath), is(false));
    assertNull(curatorClient.getContent(nodePath));
    // Create a persistent (non-ephemeral) node carrying the payload.
    curatorClient.createOrUpdate(nodePath, payload, false);
    assertThat(curatorClient.checkExists(nodePath), is(true));
    assertEquals(curatorClient.getContent(nodePath), payload);
}
/**
 * Maps an S3 bucket ACL onto a POSIX-style owner mode for the given user:
 * Read grants contribute r+x (0500), Write grants w (0200) and FullControl
 * rwx (0700); grants for other grantees are ignored.
 */
public static short translateBucketAcl(AccessControlList acl, String userId) {
    short mode = (short) 0;
    for (Grant grant : acl.getGrantsAsList()) {
        Permission perm = grant.getPermission();
        Grantee grantee = grant.getGrantee();
        if (perm.equals(Permission.Read)) {
            if (isUserIdInGrantee(grantee, userId)) {
                // If the bucket is readable by the user, add r and x to the owner mode.
                mode |= (short) 0500;
            }
        } else if (perm.equals(Permission.Write)) {
            if (isUserIdInGrantee(grantee, userId)) {
                // If the bucket is writable by the user, +w to the owner mode.
                mode |= (short) 0200;
            }
        } else if (perm.equals(Permission.FullControl)) {
            if (isUserIdInGrantee(grantee, userId)) {
                // If the user has full control to the bucket, +rwx to the owner mode.
                mode |= (short) 0700;
            }
        }
    }
    return mode;
}
@Test
public void translatePermissionWithNullId() {
    // Emulate a corner case when returned grantee does not have ID from some S3 compatible UFS
    mUserGrantee.setIdentifier(null);
    mAcl.grantPermission(mUserGrantee, Permission.Read);
    // A null grantee id must never match, so no mode bits are granted.
    Assert.assertEquals((short) 0000, S3AUtils.translateBucketAcl(mAcl, OTHER_ID));
}
/**
 * Builds an IPv4 address from its 32-bit integer representation
 * (most-significant byte first).
 */
public static Ip4Address valueOf(int value) {
    final byte[] octets = ByteBuffer.allocate(INET_BYTE_LENGTH).putInt(value).array();
    return new Ip4Address(octets);
}
@Test
public void testComparisonIPv4() {
    Ip4Address addr1, addr2, addr3, addr4;
    // Low first octet: equal, greater-than and less-than orderings.
    addr1 = Ip4Address.valueOf("1.2.3.4");
    addr2 = Ip4Address.valueOf("1.2.3.4");
    addr3 = Ip4Address.valueOf("1.2.3.3");
    addr4 = Ip4Address.valueOf("1.2.3.5");
    assertTrue(addr1.compareTo(addr2) == 0);
    assertTrue(addr1.compareTo(addr3) > 0);
    assertTrue(addr1.compareTo(addr4) < 0);
    // First octet 255 exercises comparison with the sign bit set.
    addr1 = Ip4Address.valueOf("255.2.3.4");
    addr2 = Ip4Address.valueOf("255.2.3.4");
    addr3 = Ip4Address.valueOf("255.2.3.3");
    addr4 = Ip4Address.valueOf("255.2.3.5");
    assertTrue(addr1.compareTo(addr2) == 0);
    assertTrue(addr1.compareTo(addr3) > 0);
    assertTrue(addr1.compareTo(addr4) < 0);
}
/**
 * Commits a single job by routing it through the multi-job commit path with
 * the generic OTHER operation.
 */
@Override
public void commitJob(JobContext originalContext) throws IOException {
    commitJobs(Collections.singletonList(originalContext), Operation.OTHER);
}
@Test
public void testSuccessfulMultipleTasksPartitionedWrite() throws IOException {
    HiveIcebergOutputCommitter committer = new HiveIcebergOutputCommitter();
    // Partitioned table with two writer tasks.
    Table table = table(temp.getRoot().getPath(), true);
    JobConf conf = jobConf(table, 2);
    List<Record> expected = writeRecords(table.name(), 2, 0, true, false, conf);
    committer.commitJob(new JobContextImpl(conf, JOB_ID));
    // 4 data files expected after commit (presumably 2 tasks x 2 partitions —
    // verify against the writeRecords fixture), and data must match.
    HiveIcebergTestUtils.validateFiles(table, conf, JOB_ID, 4);
    HiveIcebergTestUtils.validateData(table, expected, 1);
}