focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Convenience overload that delegates to the full sendMessage overload, passing
 * null for the three intermediate arguments and 0 for the count — presumably
 * callback / publish-info / retry settings; confirm against the 12-arg overload.
 */
public SendResult sendMessage(
    final String addr,
    final String brokerName,
    final Message msg,
    final SendMessageRequestHeader requestHeader,
    final long timeoutMillis,
    final CommunicationMode communicationMode,
    final SendMessageContext context,
    final DefaultMQProducerImpl producer
) throws RemotingException, MQBrokerException, InterruptedException {
    return sendMessage(addr, brokerName, msg, requestHeader, timeoutMillis, communicationMode, null, null, null, 0, context, producer);
}
// A ONEWAY send gets no broker response, so the client API returns a null SendResult.
@Test
public void testSendMessageOneWay_Success() throws RemotingException, InterruptedException, MQBrokerException {
    doNothing().when(remotingClient).invokeOneway(anyString(), any(RemotingCommand.class), anyLong());
    SendResult sendResult = mqClientAPI.sendMessage(brokerAddr, brokerName, msg, new SendMessageRequestHeader(),
        3 * 1000, CommunicationMode.ONEWAY, new SendMessageContext(), defaultMQProducerImpl);
    assertThat(sendResult).isNull();
}
/**
 * Renders the value in the display unit with up to two decimal places,
 * dropping the fractional part entirely when it rounds to ".00".
 */
@Override
public String toString() {
    double value = getValue(displayUnit);
    String wholePart = String.format(Locale.ENGLISH, "%d", (long) value);
    String twoDecimals = String.format(Locale.ENGLISH, "%.2f", value);
    // Whole number (after 2-decimal rounding): omit the decimals.
    if (twoDecimals.equals(wholePart + ".00")) {
        return wholePart + displayUnit.getUnitString();
    }
    return twoDecimals + displayUnit.getUnitString();
}
@Test
public void testToString() {
    assertByteSizeString("42B", "42 B");
    assertByteSizeString("42KB", "42 KB");
    assertByteSizeString("42MB", "42 MB");
    assertByteSizeString("42GB", "42 GB");
    assertByteSizeString("42TB", "42 TB");
    assertByteSizeString("42PB", "42 PB");
    assertByteSizeString("42.20KB", "42.2 KB");
    // NOTE(review): every other case uses a spaced second argument ("42.33 KB"
    // pattern); the unspaced "42.33KB" here looks like a typo — confirm the
    // intended semantics of assertByteSizeString's second parameter.
    assertByteSizeString("42.33KB", "42.33KB");
}
/**
 * Exchanges a refresh token for a new access token: validates the token and its
 * owning client, revokes all access tokens derived from it, rejects expired
 * refresh tokens, and finally issues a fresh access token.
 */
@Override
public OAuth2AccessTokenDO refreshAccessToken(String refreshToken, String clientId) {
    // Look up the refresh token record.
    OAuth2RefreshTokenDO refreshTokenDO = oauth2RefreshTokenMapper.selectByRefreshToken(refreshToken);
    if (refreshTokenDO == null) {
        throw exception0(GlobalErrorCodeConstants.BAD_REQUEST.getCode(), "无效的刷新令牌");
    }
    // Verify the requesting client matches the client the token was issued to.
    OAuth2ClientDO clientDO = oauth2ClientService.validOAuthClientFromCache(clientId);
    if (ObjectUtil.notEqual(clientId, refreshTokenDO.getClientId())) {
        throw exception0(GlobalErrorCodeConstants.BAD_REQUEST.getCode(), "刷新令牌的客户端编号不正确");
    }
    // Revoke all access tokens derived from this refresh token (DB rows and Redis cache).
    List<OAuth2AccessTokenDO> accessTokenDOs = oauth2AccessTokenMapper.selectListByRefreshToken(refreshToken);
    if (CollUtil.isNotEmpty(accessTokenDOs)) {
        oauth2AccessTokenMapper.deleteBatchIds(convertSet(accessTokenDOs, OAuth2AccessTokenDO::getId));
        oauth2AccessTokenRedisDAO.deleteList(convertSet(accessTokenDOs, OAuth2AccessTokenDO::getAccessToken));
    }
    // If the refresh token itself has expired, delete it and reject the request.
    if (DateUtils.isExpired(refreshTokenDO.getExpiresTime())) {
        oauth2RefreshTokenMapper.deleteById(refreshTokenDO.getId());
        throw exception0(GlobalErrorCodeConstants.UNAUTHORIZED.getCode(), "刷新令牌已过期");
    }
    // Issue a new access token bound to this refresh token and client.
    return createOAuth2AccessToken(refreshTokenDO, clientDO);
}
// Refreshing with an expired refresh token must fail with 401 and delete the token row.
@Test
public void testRefreshAccessToken_expired() {
    // Prepare arguments.
    String refreshToken = randomString();
    String clientId = randomString();
    // Mock the client lookup.
    OAuth2ClientDO clientDO = randomPojo(OAuth2ClientDO.class).setClientId(clientId);
    when(oauth2ClientService.validOAuthClientFromCache(eq(clientId))).thenReturn(clientDO);
    // Insert a refresh token that expired one day ago.
    OAuth2RefreshTokenDO refreshTokenDO = randomPojo(OAuth2RefreshTokenDO.class)
        .setRefreshToken(refreshToken).setClientId(clientId)
        .setExpiresTime(LocalDateTime.now().minusDays(1));
    oauth2RefreshTokenMapper.insert(refreshTokenDO);
    // Invoke and assert: 401 raised, and the expired token removed from the table.
    assertServiceException(() -> oauth2TokenService.refreshAccessToken(refreshToken, clientId),
        new ErrorCode(401, "刷新令牌已过期"));
    assertEquals(0, oauth2RefreshTokenMapper.selectCount());
}
/**
 * Sets the model's visibility and notifies the adapter of a change,
 * doing nothing when the model is already in the requested state.
 */
protected void showModel(EpoxyModel<?> model, boolean show) {
    boolean alreadyInRequestedState = model.isShown() == show;
    if (alreadyInRequestedState) {
        return;
    }
    model.show(show);
    notifyModelChanged(model);
}
@Test
public void testShowModel() {
    TestModel testModel = new TestModel();
    testModel.hide();
    testAdapter.addModels(testModel);
    // Showing a hidden model must flip the flag and notify a change at its position.
    testAdapter.showModel(testModel);
    verify(observer).onItemRangeChanged(0, 1, null);
    assertTrue(testModel.isShown());
    checkDifferState();
}
/**
 * Combines a two-digit feature code and a two-digit error code into a
 * single four-digit vendor code (featureCode in the upper two digits).
 */
private static int getErrorCode(final int featureCode, final int errorCode) {
    Preconditions.checkArgument(0 <= featureCode && featureCode < 100, "The value range of feature code should be [0, 100).");
    Preconditions.checkArgument(0 <= errorCode && errorCode < 100, "The value range of error code should be [0, 100).");
    return errorCode + featureCode * 100;
}
@Test
void assertToSQLException() {
    // Feature code 1 + error code 1 yield vendor code 20101 — presumably a fixed
    // type prefix combined with featureCode * 100 + errorCode; confirm in FeatureSQLException.
    SQLException actual = new FeatureSQLException(XOpenSQLState.GENERAL_ERROR, 1, 1, "reason") {
    }.toSQLException();
    assertThat(actual.getSQLState(), is(XOpenSQLState.GENERAL_ERROR.getValue()));
    assertThat(actual.getErrorCode(), is(20101));
    assertThat(actual.getMessage(), is("reason"));
}
/** Creates a FOP producer bound to this endpoint's factory and configured output format. */
@Override
public Producer createProducer() throws Exception {
    return new FopProducer(this, fopFactory, outputType.getFormatExtended());
}
// End-to-end: rendering with a user-password header must produce an encrypted PDF
// that opens with that password.
@Test
public void encryptPdfWithUserPassword() throws Exception {
    if (!canTest()) {
        // cannot run on CI
        return;
    }
    Endpoint endpoint = context().getEndpoint("fop:pdf");
    Producer producer = endpoint.createProducer();
    Exchange exchange = new DefaultExchange(context);
    final String password = "secret";
    exchange.getIn().setHeader("CamelFop.Encrypt.userPassword", password);
    exchange.getIn().setBody(FopHelper.decorateTextWithXSLFO("Test Content"));
    producer.process(exchange);
    // Load the produced PDF with the password and verify the encryption flag.
    try (InputStream inputStream = exchange.getMessage().getBody(InputStream.class)) {
        PDDocument document = Loader.loadPDF(new RandomAccessReadBuffer(inputStream), password);
        assertTrue(document.isEncrypted());
    }
}
/**
 * Builds the review form for the given request code by resolving the review
 * group and then loading the template it references.
 */
@Transactional(readOnly = true)
public TemplateResponse generateReviewForm(String reviewRequestCode) {
    ReviewGroup group = reviewGroupRepository.findByReviewRequestCode(reviewRequestCode)
            .orElseThrow(() -> new ReviewGroupNotFoundByReviewRequestCodeException(reviewRequestCode));
    Template template = templateRepository.findById(group.getTemplateId())
            .orElseThrow(() -> new TemplateNotFoundByReviewGroupException(group.getId(), group.getTemplateId()));
    return templateMapper.mapToTemplateResponse(group, template);
}
// A review-request code that matches no group (trailing space makes it invalid)
// must raise ReviewGroupNotFoundByReviewRequestCodeException.
@Test
void 잘못된_리뷰_요청_코드로_리뷰_작성폼을_요청할_경우_예외가_발생한다() {
    // given
    ReviewGroup reviewGroup = new ReviewGroup("리뷰이명", "프로젝트명", "reviewRequestCode", "groupAccessCode");
    reviewGroupRepository.save(reviewGroup);
    // when, then
    assertThatThrownBy(() -> templateService.generateReviewForm(reviewGroup.getReviewRequestCode() + " "))
        .isInstanceOf(ReviewGroupNotFoundByReviewRequestCodeException.class);
}
/**
 * Broadcasts a projects-deleted event to every registered listener.
 * Empty sets are ignored; safelyCallListener presumably isolates listener
 * failures so one bad listener does not stop the rest — confirm its contract.
 */
@Override
public void onProjectsDeleted(Set<DeletedProject> projects) {
    checkNotNull(projects, "projects can't be null");
    if (projects.isEmpty()) {
        return;
    }
    Arrays.stream(listeners)
        .forEach(safelyCallListener(listener -> listener.onProjectsDeleted(projects)));
}
// With zero registered listeners, broadcasting must be a silent no-op.
@Test
@UseDataProvider("oneOrManyDeletedProjects")
public void onProjectsDeleted_does_not_fail_if_there_is_no_listener(Set<DeletedProject> projects) {
    assertThatCode(() -> underTestNoListeners.onProjectsDeleted(projects)).doesNotThrowAnyException();
}
/**
 * Verifies the email code for the account bound to the Mijn DigiD session
 * identified by the session header, delegating to the account service.
 */
@PostMapping("/verify")
public EmailVerifyResult verifyEmail(@RequestBody @Valid EmailVerifyRequest request,
        @RequestHeader(MijnDigidSession.MIJN_DIGID_SESSION_HEADER) String mijnDigiDsessionId) {
    MijnDigidSession mijnDigiDSession = retrieveMijnDigiDSession(mijnDigiDsessionId);
    return accountService.verifyEmail(mijnDigiDSession.getAccountId(), request);
}
// The controller must pass the service's verification result through unchanged.
@Test
public void validEmailVerify() {
    EmailVerifyRequest request = new EmailVerifyRequest();
    request.setVerificationCode("code");
    // Stub the service with a fully populated result.
    EmailVerifyResult result = new EmailVerifyResult();
    result.setStatus(Status.OK);
    result.setError("error");
    result.setRemainingAttempts(6);
    when(accountService.verifyEmail(anyLong(), any())).thenReturn(result);
    EmailVerifyResult verifyResult = emailController.verifyEmail(request, mijnDigiDSession.getId());
    assertEquals(Status.OK, verifyResult.getStatus());
    assertEquals("error", verifyResult.getError());
    assertEquals(6, verifyResult.getRemainingAttempts());
}
/**
 * Builds a row comparator over the full list of order-by expressions,
 * delegating to the ranged overload with [0, orderByExpressions.size()).
 */
public static Comparator<Object[]> getComparator(List<OrderByExpressionContext> orderByExpressions,
        ColumnContext[] orderByColumnContexts, boolean nullHandlingEnabled) {
    return getComparator(orderByExpressions, orderByColumnContexts, nullHandlingEnabled, 0,
        orderByExpressions.size());
}
// DESC with NULLS LAST: non-null values in descending order, null at the end.
@Test
public void testDescNullsLast() {
    List<OrderByExpressionContext> orderBys =
        Collections.singletonList(new OrderByExpressionContext(COLUMN1, DESC, NULLS_LAST));
    setUpSingleColumnRows();
    _rows.sort(OrderByComparatorFactory.getComparator(orderBys, ENABLE_NULL_HANDLING));
    assertEquals(extractColumn(_rows, COLUMN1_INDEX), Arrays.asList(2, 1, null));
}
/**
 * Returns the normalized resource map augmented with the two memory entries
 * (off-heap and on-heap) that are tracked separately from normalizedResources.
 */
public Map<String, Double> toNormalizedMap() {
    Map<String, Double> ret = this.normalizedResources.toNormalizedMap();
    ret.put(Constants.COMMON_OFFHEAP_MEMORY_RESOURCE_NAME, offHeap);
    ret.put(Constants.COMMON_ONHEAP_MEMORY_RESOURCE_NAME, onHeap);
    return ret;
}
// A non-acker component must get the generic component CPU setting (50),
// not the acker-specific one (40).
@Test
public void testNonAckerCPUSetting() {
    Map<String, Object> topoConf = new HashMap<>();
    topoConf.put(Config.TOPOLOGY_ACKER_CPU_PCORE_PERCENT, 40);
    topoConf.put(Config.TOPOLOGY_COMPONENT_CPU_PCORE_PERCENT, 50);
    NormalizedResourceRequest request = new NormalizedResourceRequest(topoConf, "notAnAckerComponent");
    Map<String, Double> normalizedMap = request.toNormalizedMap();
    Double cpu = normalizedMap.get(Constants.COMMON_CPU_RESOURCE_NAME);
    assertNotNull(cpu);
    assertEquals(50, cpu, 0.001);
}
/**
 * Resolves the given path relative to the working directory, honoring the link
 * options; the trailing 0 is presumably the initial symlink-depth counter —
 * confirm against the 4-arg overload. Throws NoSuchFileException when an
 * intermediate component is missing or not a directory.
 */
public DirectoryEntry lookUp(
        File workingDirectory, JimfsPath path, Set<? super LinkOption> options) throws IOException {
    checkNotNull(path);
    checkNotNull(options);
    DirectoryEntry result = lookUp(workingDirectory, path, options, 0);
    if (result == null) {
        // an intermediate file in the path did not exist or was not a directory
        throw new NoSuchFileException(path.toString());
    }
    return result;
}
// With NOFOLLOW_LINKS the final symlink itself must resolve (even a self-loop),
// rather than being followed.
@Test
public void testLookup_relative_finalSymlink_nofollowLinks() throws IOException {
    assertExists(lookup("four/five", NOFOLLOW_LINKS), "four", "five");
    assertExists(lookup("four/six", NOFOLLOW_LINKS), "four", "six");
    assertExists(lookup("four/loop", NOFOLLOW_LINKS), "four", "loop");
}
/**
 * Checks whether the given character sequence could be Base64 content.
 * Null or too-short input is rejected immediately; any multi-byte character
 * disqualifies the string before delegating to the byte-level check.
 */
public static boolean isBase64(CharSequence base64) {
    if (base64 == null || base64.length() < 2) {
        return false;
    }
    final byte[] bytes = StrUtil.utf8Bytes(base64);
    if (bytes.length != base64.length()) {
        // Unequal lengths mean multi-byte characters are present, which can never be Base64.
        return false;
    }
    return isBase64(bytes);
}
// Valid Base64 with embedded newlines passes; a '=' that is not at the very end
// of the content makes the string invalid.
@Test
public void isBase64Test2() {
    String base64 = "dW1kb3MzejR3bmljM2J6djAyZzcwbWk5M213Nnk3cWQ3eDJwOHFuNXJsYmMwaXhxbmg0dmxrcmN0anRkbmd3\n"
        + "ZzcyZWFwanI2NWNneTg2dnp6cmJoMHQ4MHpxY2R6c3pjazZtaQ==";
    assertTrue(Base64.isBase64(base64));
    // '=' is not at the end of the content.
    base64 = "dW1kb3MzejR3bmljM2J6=djAyZzcwbWk5M213Nnk3cWQ3eDJwOHFuNXJsYmMwaXhxbmg0dmxrcmN0anRkbmd3\n"
        + "ZzcyZWFwanI2NWNneTg2dnp6cmJoMHQ4MHpxY2R6c3pjazZtaQ=";
    assertFalse(Base64.isBase64(base64));
}
/** Loads a post by its primary key via the mapper. */
@Override
public PostDO getPost(Long id) {
    return postMapper.selectById(id);
}
@Test
public void testGetPost() {
    // Mock data.
    PostDO dbPostDO = randomPostDO();
    postMapper.insert(dbPostDO);
    // Prepare arguments.
    Long id = dbPostDO.getId();
    // Invoke.
    PostDO post = postService.getPost(id);
    // Assert the loaded row matches the inserted one field-for-field.
    assertNotNull(post);
    assertPojoEquals(dbPostDO, post);
}
/** Delegates column-name lookup to the underlying JDBC ResultSetMetaData (1-based index). */
@Override
public String getColumnName(final int columnIndex) throws SQLException {
    return resultSetMetaData.getColumnName(columnIndex);
}
// Column 1 of the fixture must report its name through the delegate.
@Test
void assertGetColumnName() throws SQLException {
    assertThat(queryResultMetaData.getColumnName(1), is("order_id"));
}
@Udf(description = "Converts a string representation of a date in the given format"
    + " into the number of milliseconds since 1970-01-01 00:00:00 UTC/GMT."
    + " Single quotes in the timestamp format can be escaped with '',"
    + " for example: 'yyyy-MM-dd''T''HH:mm:ssX'."
    + " The system default time zone is used when no time zone is explicitly provided.")
public long stringToTimestamp(
    @UdfParameter(
        description = "The string representation of a date.") final String formattedTimestamp,
    @UdfParameter(
        description = "The format pattern should be in the format expected by"
            + " java.time.format.DateTimeFormatter.") final String formatPattern) {
  // NB: We do not perform a null here preferring to throw an exception as
  // there is no sentinel value for a "null" Date.
  try {
    // Parsers are cached per format pattern; any cache or parse failure is
    // rewrapped as a KsqlFunctionException with both inputs in the message.
    final StringToTimestampParser timestampParser = parsers.get(formatPattern);
    return timestampParser.parse(formattedTimestamp);
  } catch (final ExecutionException | RuntimeException e) {
    throw new KsqlFunctionException("Failed to parse timestamp '" + formattedTimestamp
        + "' with formatter '" + formatPattern
        + "': " + e.getMessage(), e);
  }
}
// A null timestamp string must surface as a KsqlFunctionException, not a bare NPE.
@Test
public void shouldThrowOnNullDate() {
    // When:
    final Exception e = assertThrows(
        KsqlFunctionException.class,
        () -> udf.stringToTimestamp(null, "yyyy-MM-dd")
    );
    // Then:
    assertThat(e.getMessage(), Matchers.containsString("Failed to parse timestamp 'null' with formatter"));
}
/**
 * Transforms a changelog row stream into update-style output: carry-over rows
 * are removed first, then DELETE/INSERT pairs are folded into updates, and
 * any null placeholders the update iterator emits are filtered out.
 */
public static Iterator<Row> computeUpdates(
    Iterator<Row> rowIterator, StructType rowType, String[] identifierFields) {
  Iterator<Row> withoutCarryovers = removeCarryovers(rowIterator, rowType);
  ChangelogIterator updateIterator =
      new ComputeUpdateIterator(withoutCarryovers, rowType, identifierFields);
  return Iterators.filter(updateIterator, Objects::nonNull);
}
// Null handling: matching DELETE/INSERT pairs on null non-identifier columns are
// carry-overs (dropped); differing non-identifier values become UPDATE pairs;
// differing identifier values stay as separate DELETE/INSERT rows.
@Test
public void testRowsWithNullValue() {
    final List<Row> rowsWithNull =
        Lists.newArrayList(
            new GenericRowWithSchema(new Object[] {2, null, null, DELETE, 0, 0}, null),
            new GenericRowWithSchema(new Object[] {3, null, null, INSERT, 0, 0}, null),
            new GenericRowWithSchema(new Object[] {4, null, null, DELETE, 0, 0}, null),
            new GenericRowWithSchema(new Object[] {4, null, null, INSERT, 0, 0}, null),
            // mixed null and non-null value in non-identifier columns
            new GenericRowWithSchema(new Object[] {5, null, null, DELETE, 0, 0}, null),
            new GenericRowWithSchema(new Object[] {5, null, "data", INSERT, 0, 0}, null),
            // mixed null and non-null value in identifier columns
            new GenericRowWithSchema(new Object[] {6, null, null, DELETE, 0, 0}, null),
            new GenericRowWithSchema(new Object[] {6, "name", null, INSERT, 0, 0}, null));
    Iterator<Row> iterator =
        ChangelogIterator.computeUpdates(rowsWithNull.iterator(), SCHEMA, IDENTIFIER_FIELDS);
    List<Row> result = Lists.newArrayList(iterator);
    assertEquals(
        "Rows should match",
        Lists.newArrayList(
            new Object[] {2, null, null, DELETE, 0, 0},
            new Object[] {3, null, null, INSERT, 0, 0},
            new Object[] {5, null, null, UPDATE_BEFORE, 0, 0},
            new Object[] {5, null, "data", UPDATE_AFTER, 0, 0},
            new Object[] {6, null, null, DELETE, 0, 0},
            new Object[] {6, "name", null, INSERT, 0, 0}),
        rowsToJava(result));
}
/**
 * Writes one byte into the internal buffer, flushing first when the buffer is full.
 */
@Override
public void write(int b) throws IOException {
    if (bufferIdx >= buffer.length) {
        flushInternalBuffer();
    }
    buffer[bufferIdx++] = (byte) b;
}
// Repeated single-byte writes must still surface the secondary stream's failure.
@Test
void testSecondaryWriteFail() throws Exception {
    DuplicatingCheckpointOutputStream duplicatingStream = createDuplicatingStreamWithFailingSecondary();
    testFailingSecondaryStream(
        duplicatingStream,
        () -> {
            for (int i = 0; i < 128; i++) {
                duplicatingStream.write(42);
            }
        });
}
/**
 * Returns the documentation base URL with the suffix appended;
 * a null suffix yields the bare base URL.
 */
@Override
public String getDocumentationLink(@Nullable String suffix) {
    if (suffix == null) {
        return documentationBaseUrl;
    }
    return documentationBaseUrl + suffix;
}
// SNAPSHOT builds must link to the "latest" documentation rather than a versioned path.
@Test
public void getDocumentationLink_whenSnapshot_returnLatest() {
    when(sonarQubeVersion.get().qualifier()).thenReturn("SNAPSHOT");
    documentationLinkGenerator = new DefaultDocumentationLinkGenerator(sonarQubeVersion, configuration);
    String generatedLink = documentationLinkGenerator.getDocumentationLink(TEST_SUFFIX);
    assertThat(generatedLink).isEqualTo(DOCUMENTATION_PUBLIC_URL + "latest/documentation/analyzing-source-code/scm-integration/");
}
/**
 * Kills this container's worker via the resource isolation manager.
 * Kill failures are counted in numKillExceptions before being rethrown.
 */
@Override
public void kill() throws IOException {
    LOG.info("Killing {}:{}", supervisorId, workerId);
    // Start the shutdown timer only once, on the first kill attempt.
    if (shutdownTimer == null) {
        shutdownTimer = shutdownDuration.time();
    }
    try {
        if (resourceIsolationManager != null) {
            resourceIsolationManager.kill(getWorkerUser(), workerId);
        }
    } catch (IOException e) {
        // Record the failure in the metric before propagating.
        numKillExceptions.mark();
        throw e;
    }
}
// kill() must route through the isolation manager's kill path only, and
// forceKill() through the force-kill path only.
@Test
public void testKill() throws Exception {
    final String topoId = "test_topology";
    final Map<String, Object> superConf = new HashMap<>();
    AdvancedFSOps ops = mock(AdvancedFSOps.class);
    when(ops.doRequiredTopoFilesExist(superConf, topoId)).thenReturn(true);
    LocalAssignment la = new LocalAssignment();
    la.set_topology_id(topoId);
    MockResourceIsolationManager iso = new MockResourceIsolationManager();
    String workerId = "worker-id";
    MockContainer mc = new MockContainer(ContainerType.LAUNCH, superConf, "SUPERVISOR", 6628, 8080,
        la, iso, workerId, new HashMap<>(), ops, new StormMetricsRegistry());
    iso.allWorkerIds.add(workerId);
    assertEquals(Collections.EMPTY_LIST, iso.killedWorkerIds);
    assertEquals(Collections.EMPTY_LIST, iso.forceKilledWorkerIds);
    // Regular kill touches killedWorkerIds only.
    mc.kill();
    assertEquals(iso.allWorkerIds, iso.killedWorkerIds);
    assertEquals(Collections.EMPTY_LIST, iso.forceKilledWorkerIds);
    iso.killedWorkerIds.clear();
    // Force kill touches forceKilledWorkerIds only.
    mc.forceKill();
    assertEquals(Collections.EMPTY_LIST, iso.killedWorkerIds);
    assertEquals(iso.allWorkerIds, iso.forceKilledWorkerIds);
}
/**
 * Applies frontend config key/value pairs from the request; mysql_server_version
 * is additionally mirrored into the global session variable. Returns OK on
 * success, or INTERNAL_ERROR with the message when the config update fails.
 */
@Override
public TSetConfigResponse setConfig(TSetConfigRequest request) throws TException {
    try {
        // keys and values are parallel lists; a size mismatch indicates a bad request.
        Preconditions.checkState(request.getKeys().size() == request.getValues().size());
        Map<String, String> configs = new HashMap<>();
        for (int i = 0; i < request.getKeys().size(); i++) {
            String key = request.getKeys().get(i);
            String value = request.getValues().get(i);
            configs.put(key, value);
            // mysql_server_version is also mirrored into the global variable.
            if ("mysql_server_version".equalsIgnoreCase(key)) {
                if (!Strings.isNullOrEmpty(value)) {
                    GlobalVariable.version = value;
                }
            }
        }
        GlobalStateMgr.getCurrentState().getNodeMgr().setFrontendConfig(configs);
        return new TSetConfigResponse(new TStatus(TStatusCode.OK));
    } catch (DdlException e) {
        TStatus status = new TStatus(TStatusCode.INTERNAL_ERROR);
        status.setError_msgs(Lists.newArrayList(e.getMessage()));
        return new TSetConfigResponse(status);
    }
}
// Setting mysql_server_version must succeed and update the global variable.
@Test
public void testSetFrontendConfig() throws TException {
    FrontendServiceImpl impl = new FrontendServiceImpl(exeEnv);
    TSetConfigRequest request = new TSetConfigRequest();
    request.keys = Lists.newArrayList("mysql_server_version");
    request.values = Lists.newArrayList("5.1.1");
    TSetConfigResponse result = impl.setConfig(request);
    // The response status was previously ignored entirely; assert the call succeeded
    // so an INTERNAL_ERROR response can no longer pass silently.
    Assert.assertEquals(TStatusCode.OK, result.getStatus().getStatus_code());
    Assert.assertEquals("5.1.1", GlobalVariable.version);
}
/**
 * Writes the low 16 bits of v at the current position and advances pos.
 * MEM.putShort writes directly into the byte array — presumably an Unsafe-style
 * accessor using native byte order; confirm against the MEM declaration.
 */
@Override
public void writeShort(final int v) throws IOException {
    ensureAvailable(SHORT_SIZE_IN_BYTES);
    MEM.putShort(buffer, ARRAY_BYTE_BASE_OFFSET + pos, (short) v);
    pos += SHORT_SIZE_IN_BYTES;
}
// Writing the same value in both byte orders must read back correctly
// with the matching endianness flags.
@Test
public void testWriteShortForVByteOrder() throws Exception {
    short expected = 100;
    out.writeShort(expected, ByteOrder.LITTLE_ENDIAN);
    out.writeShort(expected, ByteOrder.BIG_ENDIAN);
    short actual1 = Bits.readShort(out.buffer, 0, false);
    short actual2 = Bits.readShort(out.buffer, 2, true);
    assertEquals(expected, actual1);
    assertEquals(expected, actual2);
}
/** Returns the stored length value. */
@Override
public int length() {
    return length;
}
// length() varies with the covariance form: scalar variance, diagonal
// vector, and full matrix give 4, 6, and 9 respectively for this mu.
@Test
public void testLength() {
    System.out.println("length");
    MultivariateGaussianDistribution instance = new MultivariateGaussianDistribution(mu, 1.0);
    assertEquals(4, instance.length());
    instance = new MultivariateGaussianDistribution(mu, sigma[0]);
    assertEquals(6, instance.length());
    instance = new MultivariateGaussianDistribution(mu, Matrix.of(sigma));
    assertEquals(9, instance.length());
}
/**
 * Parses and validates a property value. The legacy run-script property is
 * validated as-is without resolution; any other property must resolve to a
 * known config item, whose parsed value is then validated.
 */
@Override
public Object parse(final String property, final Object value) {
    if (property.equalsIgnoreCase(KsqlConstants.LEGACY_RUN_SCRIPT_STATEMENTS_CONTENT)) {
        validator.validate(property, value);
        return value;
    }
    // Unknown properties are rejected here rather than passed through.
    final ConfigItem item = resolver.resolve(property, true)
        .orElseThrow(() -> new PropertyNotFoundException(property));
    final Object parsed = item.parseValue(value);
    validator.validate(item.getPropertyName(), parsed);
    return parsed;
}
// Validator failures must propagate out of parse() unchanged.
@Test(expected = IllegalArgumentException.class)
public void shouldThrowIfValidatorThrows() {
    // Given:
    doThrow(new IllegalArgumentException("Boom"))
        .when(validator).validate(anyString(), any(Object.class));
    // When:
    parser.parse(ProducerConfig.LINGER_MS_CONFIG, "100");
}
/**
 * Builds the global-phase setup for a rank profile, or returns null when the
 * profile defines no global-phase expression. Collects rerank count, hidden
 * names, match features, feature renames, and normalizer wiring from the
 * profile's feature-execution properties.
 */
static GlobalPhaseSetup maybeMakeSetup(RankProfilesConfig.Rankprofile rp, RankProfilesEvaluator modelEvaluator) {
    var model = modelEvaluator.modelForRankProfile(rp.name());
    // Index configured normalizers by name for lookup below.
    Map<String, RankProfilesConfig.Rankprofile.Normalizer> availableNormalizers = new HashMap<>();
    for (var n : rp.normalizer()) {
        availableNormalizers.put(n.name(), n);
    }
    Supplier<FunctionEvaluator> functionEvaluatorSource = null;
    int rerankCount = -1;
    Set<String> namesToHide = new HashSet<>();
    Set<String> matchFeatures = new HashSet<>();
    Map<String, String> renameFeatures = new HashMap<>();
    String toRename = null;
    // Scan properties; "vespa.feature.rename" entries arrive as (from, to)
    // pairs spread over two consecutive property values.
    for (var prop : rp.fef().property()) {
        if (prop.name().equals("vespa.globalphase.rerankcount")) {
            rerankCount = Integer.parseInt(prop.value());
        }
        if (prop.name().equals("vespa.rank.globalphase")) {
            functionEvaluatorSource = () -> model.evaluatorOf("globalphase");
        }
        if (prop.name().equals("vespa.hidden.matchfeature")) {
            namesToHide.add(prop.value());
        }
        if (prop.name().equals("vespa.match.feature")) {
            matchFeatures.add(prop.value());
        }
        if (prop.name().equals("vespa.feature.rename")) {
            if (toRename == null) {
                toRename = prop.value();
            } else {
                renameFeatures.put(toRename, prop.value());
                toRename = null;
            }
        }
    }
    if (rerankCount < 0) {
        // Default rerank count when not configured.
        rerankCount = 100;
    }
    // Only profiles with a global-phase expression produce a setup.
    if (functionEvaluatorSource != null) {
        var mainResolver = new InputResolver(matchFeatures, renameFeatures, availableNormalizers.keySet());
        var evaluator = functionEvaluatorSource.get();
        var allInputs = List.copyOf(evaluator.function().arguments());
        mainResolver.resolve(allInputs);
        List<NormalizerSetup> normalizers = new ArrayList<>();
        for (var input : mainResolver.usedNormalizers) {
            var cfg = availableNormalizers.get(input);
            String normInput = cfg.input();
            if (matchFeatures.contains(normInput) || renameFeatures.containsValue(normInput)) {
                // The normalizer input is already materialized as a match feature: read it directly.
                Supplier<Evaluator> normSource = () -> new DummyEvaluator(normInput);
                normalizers.add(makeNormalizerSetup(cfg, matchFeatures, renameFeatures, normSource,
                        List.of(normInput), rerankCount));
            } else {
                // Otherwise the input must itself be evaluated as a ranking function.
                Supplier<FunctionEvaluator> normSource = () -> model.evaluatorOf(normInput);
                var normInputs = List.copyOf(normSource.get().function().arguments());
                var normSupplier = SimpleEvaluator.wrap(normSource);
                normalizers.add(makeNormalizerSetup(cfg, matchFeatures, renameFeatures, normSupplier,
                        normInputs, rerankCount));
            }
        }
        Supplier<Evaluator> supplier = SimpleEvaluator.wrap(functionEvaluatorSource);
        var gfun = new FunEvalSpec(supplier, mainResolver.fromQuery, mainResolver.fromMF);
        var defaultValues = extraDefaultQueryFeatureValues(rp, mainResolver.fromQuery, normalizers);
        return new GlobalPhaseSetup(gfun, rerankCount, namesToHide, normalizers, defaultValues);
    }
    return null;
}
// A profile using functions-with-arguments must expose each invocation as a
// distinct match feature (sorted here for deterministic assertions).
@Test
void funcWithArgsSetup() {
    RankProfilesConfig rpCfg = readConfig("with_mf_funargs");
    assertEquals(1, rpCfg.rankprofile().size());
    RankProfilesEvaluator rpEvaluator = createEvaluator(rpCfg);
    var setup = GlobalPhaseSetup.maybeMakeSetup(rpCfg.rankprofile().get(0), rpEvaluator);
    assertNotNull(setup);
    assertEquals(0, setup.normalizers.size());
    assertEquals(3, setup.matchFeaturesToHide.size());
    assertEquals(0, setup.globalPhaseEvalSpec.fromQuery().size());
    var wantMF = setup.globalPhaseEvalSpec.fromMF();
    assertEquals(4, wantMF.size());
    wantMF.sort((a, b) -> a.matchFeatureName().compareTo(b.matchFeatureName()));
    assertEquals("plusOne(2)", wantMF.get(0).matchFeatureName());
    assertEquals("plusOne(attribute(foo2))", wantMF.get(1).matchFeatureName());
    assertEquals("useAttr(t1,42)", wantMF.get(2).matchFeatureName());
    assertEquals("withIndirect(foo1)", wantMF.get(3).matchFeatureName());
}
/**
 * Removes all values for the given header name and returns the first one,
 * or null when the name is absent.
 *
 * Fix: the null check now runs before hashing. Previously
 * hashingStrategy.hashCode(name) was evaluated first, so a null name could
 * NPE inside the hashing strategy before checkNotNull ever ran, producing a
 * less helpful error. Callers still see an NPE for null input, now
 * consistently from checkNotNull with the parameter name.
 */
@Override
public V getAndRemove(K name) {
    checkNotNull(name, "name");
    int h = hashingStrategy.hashCode(name);
    return remove0(h, index(h), name);
}
@Test
public void testGetAndRemove() {
    TestDefaultHeaders headers = newInstance();
    headers.add(of("name1"), of("value1"));
    headers.add(of("name2"), of("value2"), of("value3"));
    headers.add(of("name3"), of("value4"), of("value5"), of("value6"));
    // getAndRemove returns only the first value but removes every value for the name.
    assertEquals(of("value1"), headers.getAndRemove(of("name1"), of("defaultvalue")));
    assertEquals(of("value2"), headers.getAndRemove(of("name2")));
    assertNull(headers.getAndRemove(of("name2")));
    assertEquals(asList(of("value4"), of("value5"), of("value6")), headers.getAllAndRemove(of("name3")));
    assertEquals(0, headers.size());
    // Missing names: null without a default, the supplied default otherwise.
    assertNull(headers.getAndRemove(of("noname")));
    assertEquals(of("defaultvalue"), headers.getAndRemove(of("noname"), of("defaultvalue")));
}
/** Returns this component's file attributes; only FILE components carry them. */
@Override
public FileAttributes getFileAttributes() {
    checkState(this.type == Type.FILE, "Only component of type FILE have a FileAttributes object");
    return this.fileAttributes;
}
// The isTest flag set on FileAttributes must round-trip through the component.
@Test
public void isUnitTest_returns_true_if_IsTest_is_set_in_BatchComponent() {
    ComponentImpl component = buildSimpleComponent(FILE, "file")
        .setFileAttributes(new FileAttributes(true, null, 1))
        .build();
    assertThat(component.getFileAttributes().isUnitTest()).isTrue();
}
/**
 * Jetty authenticator entry point: extracts a JWT from the Bearer header or a
 * cookie, redirects to the login provider when absent, and logs the user in
 * from the token's subject claim. CORS preflight (OPTIONS) requests skip
 * authentication entirely.
 */
@Override
public Authentication validateRequest(ServletRequest request, ServletResponse response, boolean mandatory)
        throws ServerAuthException {
    JWT_LOGGER.trace("Authentication request received for " + request.toString());
    // Non-HTTP exchanges cannot carry a JWT.
    if (!(request instanceof HttpServletRequest) && !(response instanceof HttpServletResponse)) {
        return Authentication.UNAUTHENTICATED;
    }
    String serializedJWT;
    HttpServletRequest req = (HttpServletRequest) request;
    // we'll skip the authentication for CORS preflight requests
    if (HttpMethod.OPTIONS.name().equalsIgnoreCase(req.getMethod())) {
        return Authentication.NOT_CHECKED;
    }
    // Token may arrive as a Bearer Authorization header or as a cookie.
    serializedJWT = getJwtFromBearerAuthorization(req);
    if (serializedJWT == null) {
        serializedJWT = getJwtFromCookie(req);
    }
    if (serializedJWT == null) {
        // No token: redirect the client to the configured login provider.
        String loginURL = _authenticationProviderUrlGenerator.apply(req);
        JWT_LOGGER.info("No JWT token found, sending redirect to " + loginURL);
        try {
            ((HttpServletResponse) response).sendRedirect(loginURL);
            return Authentication.SEND_CONTINUE;
        } catch (IOException e) {
            JWT_LOGGER.error("Couldn't authenticate request", e);
            throw new ServerAuthException(e);
        }
    } else {
        try {
            SignedJWT jwtToken = SignedJWT.parse(serializedJWT);
            String userName = jwtToken.getJWTClaimsSet().getSubject();
            request.setAttribute(JWT_TOKEN_REQUEST_ATTRIBUTE, serializedJWT);
            UserIdentity identity = login(userName, jwtToken, request);
            if (identity == null) {
                // login() rejected the token: report 401.
                ((HttpServletResponse) response).setStatus(HttpStatus.UNAUTHORIZED_401);
                return Authentication.SEND_FAILURE;
            } else {
                return new UserAuthentication(getAuthMethod(), identity);
            }
        } catch (ParseException pe) {
            // Malformed token: send the client back to the login page.
            String loginURL = _authenticationProviderUrlGenerator.apply(req);
            JWT_LOGGER.warn("Unable to parse the JWT token, redirecting back to the login page", pe);
            try {
                ((HttpServletResponse) response).sendRedirect(loginURL);
            } catch (IOException e) {
                throw new ServerAuthException(e);
            }
        }
    }
    return Authentication.SEND_FAILURE;
}
// A tokenless request must be redirected to the provider URL (with the
// original endpoint substituted) and report SEND_CONTINUE.
@Test
public void testRedirect() throws IOException, ServerAuthException {
    JwtAuthenticator authenticator = new JwtAuthenticator(TOKEN_PROVIDER, JWT_TOKEN);
    HttpServletRequest request = mock(HttpServletRequest.class);
    expect(request.getMethod()).andReturn(HttpMethod.GET.asString());
    expect(request.getQueryString()).andReturn(null);
    expect(request.getHeader(HttpHeader.AUTHORIZATION.asString())).andReturn(null);
    expect(request.getCookies()).andReturn(new Cookie[] {});
    expect(request.getRequestURL()).andReturn(new StringBuffer(CRUISE_CONTROL_ENDPOINT));
    HttpServletResponse response = mock(HttpServletResponse.class);
    response.sendRedirect(TOKEN_PROVIDER.replace(JwtAuthenticator.REDIRECT_URL, CRUISE_CONTROL_ENDPOINT));
    expectLastCall().andVoid();
    replay(request, response);
    Authentication actualAuthentication = authenticator.validateRequest(request, response, true);
    verify(request, response);
    assertEquals(Authentication.SEND_CONTINUE, actualAuthentication);
}
/** Reports whether the last value read was SQL NULL (JDBC wasNull contract). */
@Override
public final boolean wasNull() {
    return wasNull;
}
// Before any value is read, wasNull must report false.
@Test
void assertWasNull() {
    QueryResult queryResult = mock(QueryResult.class);
    streamMergedResult.setCurrentQueryResult(queryResult);
    assertFalse(streamMergedResult.wasNull());
}
public static boolean isUnclosedQuote(final String line) { // CHECKSTYLE_RULES.ON: CyclomaticComplexity int quoteStart = -1; for (int i = 0; i < line.length(); ++i) { if (quoteStart < 0 && isQuoteChar(line, i)) { quoteStart = i; } else if (quoteStart >= 0 && isTwoQuoteStart(line, i) && !isEscaped(line, i)) { // Together, two quotes are effectively an escaped quote and don't act as a quote character. // Skip the next quote char, since it's coupled with the first. i++; } else if (quoteStart >= 0 && isQuoteChar(line, i) && !isEscaped(line, i)) { quoteStart = -1; } } final int commentInd = line.indexOf(COMMENT); if (commentInd < 0) { return quoteStart >= 0; } else if (quoteStart < 0) { return false; } else { return commentInd > quoteStart; } }
@Test
public void shouldFindUnclosedQuote_escapedThree() {
    // Given: the quote's would-be closer is escaped, so the quote stays open.
    final String line = "some line 'this is in a quote\\\'";
    // Then:
    assertThat(UnclosedQuoteChecker.isUnclosedQuote(line), is(true));
}
/** Validates that the matcher value does not exceed 255 characters. */
public void validateMatcher() throws ValidationException {
    validate(Validator.lengthValidator(255), getMatcher());
}
// A short matcher must pass validation without throwing.
@Test
void shouldValidateMatcherForLessThan255() throws Exception {
    user = new User("UserName", new String[]{"Jez,Pavan"}, "user@mail.com", true);
    user.validateMatcher();
}
/**
 * Scans each classpath: glue package for classes and registers every
 * InjectorSource implementation found with the container, deduplicated.
 */
@Override
public void loadGlue(Glue glue, List<URI> gluePaths) {
    gluePaths.stream()
        .filter(gluePath -> CLASSPATH_SCHEME.equals(gluePath.getScheme()))
        .map(ClasspathSupport::packageName)
        .map(classFinder::scanForClassesInPackage)
        .flatMap(Collection::stream)
        .filter(InjectorSource.class::isAssignableFrom)
        .distinct()
        .forEach(container::addClass);
}
// A null glue-path list is a programming error and must fail fast with NPE.
@Test()
void list_of_uris_cant_be_null() {
    GuiceBackend backend = new GuiceBackend(factory, classLoader);
    assertThrows(NullPointerException.class, () -> backend.loadGlue(glue, null));
}
/**
 * Advances to the next row, exhausting the current query result before
 * stepping through the remaining ones in order.
 */
@Override
public boolean next() throws SQLException {
    if (getCurrentQueryResult().next()) {
        return true;
    }
    // Current result exhausted: walk the remaining results until one has a row.
    while (queryResults.hasNext()) {
        setCurrentQueryResult(queryResults.next());
        if (getCurrentQueryResult().next()) {
            return true;
        }
    }
    return false;
}
// Three one-row result sets merged must yield exactly three rows, then exhaust.
@Test
void assertNextForResultSetsAllNotEmpty() throws SQLException {
    List<QueryResult> queryResults = Arrays.asList(mock(QueryResult.class, RETURNS_DEEP_STUBS),
        mock(QueryResult.class, RETURNS_DEEP_STUBS), mock(QueryResult.class, RETURNS_DEEP_STUBS));
    for (QueryResult each : queryResults) {
        when(each.next()).thenReturn(true, false);
    }
    ShardingDQLResultMerger resultMerger = new ShardingDQLResultMerger(TypedSPILoader.getService(DatabaseType.class, "MySQL"));
    ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS);
    when(database.getName()).thenReturn(DefaultDatabase.LOGIC_NAME);
    MergedResult actual = resultMerger.merge(queryResults, selectStatementContext, database, mock(ConnectionContext.class));
    assertTrue(actual.next());
    assertTrue(actual.next());
    assertTrue(actual.next());
    assertFalse(actual.next());
}
/** Returns the cached MAJOR_VERSION constant. */
public static int getMajorVersion() {
    return MAJOR_VERSION;
}
// Smoke test: version lookup must not throw on the running JVM.
@Test
void getMajorVersion() {
    assertDoesNotThrow(JVM::getMajorVersion);
}
/**
 * Resolves the close timeout for a URL. Precedence: the URL's close.timeout
 * parameter, then the close-timeout system property, then the idle timeout.
 * The result must be at least twice the heartbeat interval, otherwise live
 * connections could be closed between heartbeats.
 */
public static int getCloseTimeout(URL url) {
    String configuredCloseTimeout = System.getProperty(Constants.CLOSE_TIMEOUT_CONFIG_KEY);
    int defaultCloseTimeout = -1;
    if (StringUtils.isNotEmpty(configuredCloseTimeout)) {
        try {
            defaultCloseTimeout = Integer.parseInt(configuredCloseTimeout);
        } catch (NumberFormatException e) {
            // use default heartbeat
        }
    }
    if (defaultCloseTimeout < 0) {
        defaultCloseTimeout = getIdleTimeout(url);
    }
    int closeTimeout = url.getParameter(Constants.CLOSE_TIMEOUT_KEY, defaultCloseTimeout);
    int heartbeat = getHeartbeat(url);
    if (closeTimeout < heartbeat * 2) {
        throw new IllegalStateException("closeTimeout < heartbeatInterval * 2");
    }
    return closeTimeout;
}
// Covers the fallback chain (heartbeat.timeout default, explicit close.timeout)
// and the rejection of timeouts below twice the heartbeat interval.
@Test
void testGetCloseTimeout() {
    URL url1 = URL.valueOf("dubbo://127.0.0.1:12345?heartbeat=10000");
    URL url2 = URL.valueOf("dubbo://127.0.0.1:12345?heartbeat=10000&heartbeat.timeout=50000");
    URL url3 = URL.valueOf("dubbo://127.0.0.1:12345?heartbeat=10000&heartbeat.timeout=10000");
    URL url4 = URL.valueOf("dubbo://127.0.0.1:12345?close.timeout=30000&heartbeat=10000&heartbeat.timeout=10000");
    URL url5 = URL.valueOf("dubbo://127.0.0.1:12345?close.timeout=40000&heartbeat=10000&heartbeat.timeout=50000");
    URL url6 = URL.valueOf("dubbo://127.0.0.1:12345?close.timeout=10000&heartbeat=10000&heartbeat.timeout=10000");
    Assertions.assertEquals(30000, UrlUtils.getCloseTimeout(url1));
    Assertions.assertEquals(50000, UrlUtils.getCloseTimeout(url2));
    Assertions.assertThrows(RuntimeException.class, () -> UrlUtils.getCloseTimeout(url3));
    Assertions.assertThrows(RuntimeException.class, () -> UrlUtils.getCloseTimeout(url4));
    Assertions.assertEquals(40000, UrlUtils.getCloseTimeout(url5));
    Assertions.assertThrows(RuntimeException.class, () -> UrlUtils.getCloseTimeout(url6));
}
/**
 * Converts an arbitrary Java object into its Ruby-side representation.
 *
 * <p>{@code null} maps to Ruby nil; otherwise a registered per-class converter is
 * used when available, falling back to the slower generic conversion path.
 *
 * @param o the object to convert; may be {@code null}
 * @return the converted Ruby object
 */
public static Object convert(final Object o) {
    if (o == null) {
        return RubyUtil.RUBY.getNil();
    }
    final Class<?> sourceClass = o.getClass();
    final Valuefier.Converter direct = CONVERTER_MAP.get(sourceClass);
    return direct != null ? direct.convert(o) : fallbackConvert(o, sourceClass);
}
// A java.time.LocalDateTime should valuefy into a JRuby Timestamp wrapper.
@Test
public void testLocalDateTime() {
    LocalDateTime ldt = LocalDateTime.now();
    Object result = Valuefier.convert(ldt);
    assertEquals(JrubyTimestampExtLibrary.RubyTimestamp.class, result.getClass());
}
/**
 * Creates a {@link Read} transform pre-populated with KafkaIO's defaults:
 * no topics/partitions, default consumer factory and properties, unbounded record
 * count, processing-time timestamps, and redistribution disabled.
 *
 * @param <K> key type of the records to read
 * @param <V> value type of the records to read
 * @return a default-configured {@code Read} builder result
 */
public static <K, V> Read<K, V> read() {
    return new AutoValue_KafkaIO_Read.Builder<K, V>()
            .setTopics(new ArrayList<>())
            .setTopicPartitions(new ArrayList<>())
            .setConsumerFactoryFn(KafkaIOUtils.KAFKA_CONSUMER_FACTORY_FN)
            .setConsumerConfig(KafkaIOUtils.DEFAULT_CONSUMER_PROPERTIES)
            .setMaxNumRecords(Long.MAX_VALUE)
            .setCommitOffsetsInFinalizeEnabled(false)
            .setDynamicRead(false)
            .setTimestampPolicyFactory(TimestampPolicyFactory.withProcessingTime())
            .setConsumerPollingTimeout(2L)
            .setRedistributed(false)
            .setAllowDuplicates(false)
            .setRedistributeNumKeys(0)
            .build();
}
// A consumer factory that fails position() should only produce a WARN log while
// fetching the latest offset, not abort the reader.
@Test
public void testUnboundedReaderLogsCommitFailure() throws Exception {
    List<String> topics = ImmutableList.of("topic_a");
    PositionErrorConsumerFactory positionErrorConsumerFactory = new PositionErrorConsumerFactory();
    UnboundedSource<KafkaRecord<Integer, Long>, KafkaCheckpointMark> source = KafkaIO.<Integer, Long>read()
            .withBootstrapServers("myServer1:9092,myServer2:9092")
            .withTopics(topics)
            .withConsumerFactoryFn(positionErrorConsumerFactory)
            .withKeyDeserializer(IntegerDeserializer.class)
            .withValueDeserializer(LongDeserializer.class)
            .makeSource();
    UnboundedReader<KafkaRecord<Integer, Long>> reader = source.createReader(null, null);
    reader.start();
    unboundedReaderExpectedLogs.verifyWarn("exception while fetching latest offset for partition");
    reader.close();
}
/**
 * Executes a Telegram Bot API request synchronously.
 *
 * @param request the request to send
 * @return the typed API response
 */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    // Delegate directly to the underlying API transport.
    final R response = api.send(request);
    return response;
}
// Verifies admin-member permission flags for the test bot; the three duplicated
// canManageChat() assertions in the original have been collapsed into one.
@Test
public void getChatAdministrators() {
    GetChatAdministratorsResponse response = bot.execute(new GetChatAdministrators(groupId));
    for (ChatMember chatMember : response.administrators()) {
        ChatMemberTest.check(chatMember);
        if (chatMember.user().firstName().equals("Test Bot")) {
            // A bot cannot edit its own admin rights.
            assertFalse(chatMember.canBeEdited());
            assertTrue(chatMember.canChangeInfo());
            assertTrue(chatMember.canDeleteMessages());
            assertTrue(chatMember.canInviteUsers());
            assertTrue(chatMember.canRestrictMembers());
            assertTrue(chatMember.canPinMessages());
            assertTrue(chatMember.canPromoteMembers());
            assertTrue(chatMember.canManageVoiceChats());
            assertTrue(chatMember.canManageVideoChats());
            assertTrue(chatMember.canManageChat());
            assertTrue(chatMember.canPostStories());
            assertTrue(chatMember.canEditStories());
            assertTrue(chatMember.canDeleteStories());
        }
    }
}
/**
 * Returns the cookies associated with the incoming request.
 *
 * @return the request cookie list held by this context
 */
@Override
public List<HttpCookie> getRequestCookies() {
    final List<HttpCookie> cookies = _requestCookies;
    return cookies;
}
// Cookies placed into the request context's local attribute must be returned
// as-is (same instance) by the resource context.
@Test
public void testCookiesLocalAttr() throws Exception {
    URI uri = URI.create("resources");
    RequestContext requestContext = new RequestContext();
    List<HttpCookie> localCookies = Collections.singletonList(new HttpCookie("test", "value"));
    requestContext.putLocalAttr(ServerResourceContext.CONTEXT_COOKIES_KEY, localCookies);
    ServerResourceContext resourceContext = new ResourceContextImpl(
        new PathKeysImpl(), new TestResourceContext.MockRequest(uri), requestContext);
    // Assert that request cookies are retrieved from the local attribute.
    Assert.assertSame(resourceContext.getRequestCookies(), localCookies);
}
/**
 * Searches the graph for paths from {@code src} to {@code dst}.
 *
 * @param graph    the graph to search
 * @param src      source vertex
 * @param dst      destination vertex
 * @param weigher  edge weigher; a hop-count default is substituted when {@code null}
 * @param maxPaths maximum number of paths to return
 * @return the search result
 */
@Override
public Result<V, E> search(Graph<V, E> graph, V src, V dst, EdgeWeigher<V, E> weigher, int maxPaths) {
    checkArguments(graph, src, dst);
    EdgeWeigher<V, E> effectiveWeigher = weigher;
    if (effectiveWeigher == null) {
        effectiveWeigher = new DefaultEdgeWeigher<>();
    }
    return internalSearch(graph, src, dst, effectiveWeigher, maxPaths);
}
// Searching from a vertex (A) that is not in the graph must be rejected.
@Test(expected = IllegalArgumentException.class)
public void noSuchSourceArgument() {
    graphSearch().search(new AdjacencyListsGraph<>(of(B, C), of(new TestEdge(B, C))), A, H, weigher, 1);
}
/**
 * Queries audit logs by operator name, optionally restricted to a time range.
 *
 * <p>When both bounds are {@code null} a plain name lookup is performed;
 * otherwise the time-bounded query is used.
 *
 * @param opName    operator name to match
 * @param startDate range start, may be {@code null}
 * @param endDate   range end, may be {@code null}
 * @param page      page index
 * @param size      page size
 * @return matching logs converted to DTOs
 */
@Override
public List<ApolloAuditLogDTO> queryLogsByOpName(String opName, Date startDate, Date endDate, int page, int size) {
    if (startDate != null || endDate != null) {
        return ApolloAuditUtil.logListToDTOList(
            logService.findByOpNameAndTime(opName, startDate, endDate, page, size));
    }
    return ApolloAuditUtil.logListToDTOList(logService.findByOpName(opName, page, size));
}
// With both date bounds null, the API must use the name-only lookup (never the
// time-ranged query) and pass the page arguments through unchanged.
@Test
public void testQueryLogsByOpNameCaseDateIsNull() {
    final String opName = "query-op-name";
    final Date startDate = null;
    final Date endDate = null;
    {
        List<ApolloAuditLog> logList = MockBeanFactory.mockAuditLogListByLength(size);
        Mockito.when(logService.findByOpName(Mockito.eq(opName), Mockito.eq(page), Mockito.eq(size)))
            .thenReturn(logList);
    }
    List<ApolloAuditLogDTO> dtoList = api.queryLogsByOpName(opName, startDate, endDate, page, size);
    Mockito.verify(logService, Mockito.times(1))
        .findByOpName(Mockito.eq(opName), Mockito.eq(page), Mockito.eq(size));
    assertEquals(size, dtoList.size());
}
/**
 * Creates a {@link DynamicTableSource} for the given resolved catalog table.
 *
 * @deprecated kept for callers that do not supply enrichment options; newer code
 *     paths pass additional context. Prefer the non-deprecated overload.
 *
 * @param catalog          optional catalog that may provide a table factory
 * @param objectIdentifier fully qualified table identifier
 * @param catalogTable     resolved table definition
 * @param configuration    read-only configuration for factory discovery
 * @param classLoader      class loader used to locate factories
 * @param isTemporary      whether the table is temporary
 * @return the created dynamic table source
 */
@Deprecated
public static DynamicTableSource createTableSource(
        @Nullable Catalog catalog,
        ObjectIdentifier objectIdentifier,
        ResolvedCatalogTable catalogTable,
        ReadableConfig configuration,
        ClassLoader classLoader,
        boolean isTemporary) {
    final DefaultDynamicTableContext context = new DefaultDynamicTableContext(
            objectIdentifier,
            catalogTable,
            Collections.emptyMap(),
            configuration,
            classLoader,
            isTemporary);
    return createDynamicTableSource(
            getDynamicTableFactory(DynamicTableSourceFactory.class, catalog, context),
            objectIdentifier,
            catalogTable,
            Collections.emptyMap(),
            configuration,
            classLoader,
            isTemporary);
}
// Omitting the 'connector' option should fall back to the managed table source.
@Test
void testManagedConnector() {
    final Map<String, String> options = createAllOptions();
    options.remove("connector");
    final DynamicTableSource actualSource = createTableSource(SCHEMA, options);
    assertThat(actualSource).isExactlyInstanceOf(TestManagedTableSource.class);
}
/**
 * Loads a config holder from raw XML content.
 *
 * <p>Deserializes the editable config first, hands it to the optional callback,
 * then validates it to produce the effective config.
 *
 * @param content  raw config XML
 * @param callback optional hook invoked with the editable config; may be {@code null}
 * @return a holder pairing the validated config with its editable form
 * @throws Exception if deserialization or validation fails
 */
public GoConfigHolder loadConfigHolder(final String content, Callback callback) throws Exception {
    LOGGER.debug("[Config Save] Loading config holder");
    final CruiseConfig configForEdit = deserializeConfig(content);
    if (callback != null) {
        callback.call(configForEdit);
    }
    final CruiseConfig config = preprocessAndValidate(configForEdit);
    return new GoConfigHolder(config, configForEdit);
}
// An approval of type "success" with allowOnlyOnSuccess="true" must round-trip
// through config loading/migration with both attributes intact.
@Test
void shouldLoadAllowOnlySuccessOnSuccessApprovalType() throws Exception {
    String content = config(
            """
                    <pipelines group="first">
                      <pipeline name="pipeline">
                        <materials>
                          <hg url="/hgrepo"/>
                        </materials>
                        <stage name="mingle">
                          <approval type="success" allowOnlyOnSuccess="true" />
                          <jobs>
                            <job name="functional">
                              <tasks>
                                <exec command="echo">
                                  <runif status="passed" />
                                </exec>
                              </tasks>
                            </job>
                          </jobs>
                        </stage>
                      </pipeline>
                    </pipelines>""", CONFIG_SCHEMA_VERSION);
    CruiseConfig config = xmlLoader.loadConfigHolder(goConfigMigration.upgradeIfNecessary(content)).config;
    Approval approval = config
            .getPipelineConfigByName(new CaseInsensitiveString("pipeline"))
            .getStage("mingle")
            .getApproval();
    assertThat(approval.getType()).isEqualTo("success");
    assertThat(approval.isAllowOnlyOnSuccess()).isEqualTo(true);
}
/**
 * Registers a rule under the given ref, or returns the existing registration.
 *
 * @param ref     unique rule reference (uuid)
 * @param ruleKey repository/key pair for the rule; must not be {@code null}
 * @return the registered rule (new or previously registered)
 * @throws IllegalArgumentException if a different RuleKey is already registered for {@code ref}
 */
@Override
public Rule register(String ref, RuleKey ruleKey) {
    requireNonNull(ruleKey, "ruleKey can not be null");
    final Rule existing = rulesByUuid.get(ref);
    if (existing == null) {
        final Rule created = new Rule(ref, ruleKey.repository(), ruleKey.rule());
        rulesByUuid.put(ref, created);
        return created;
    }
    // A ref may only ever map to one repository/key pair.
    final boolean sameRepository = ruleKey.repository().equals(existing.repository());
    final boolean sameKey = ruleKey.rule().equals(existing.key());
    if (!sameRepository || !sameKey) {
        throw new IllegalArgumentException(format(
            "Specified RuleKey '%s' is not equal to the one already registered in repository for ref %s: '%s'",
            ruleKey, ref, RuleKey.of(existing.repository(), existing.key())));
    }
    return existing;
}
// Each registration should yield a Rule carrying exactly the ref, repository,
// and key it was created from; repeated with varying inputs.
@Test
public void register_returns_Rule_object_created_from_arguments() {
    for (int i = 0; i < someRandomInt(); i++) {
        String repository = SOME_REPOSITORY + i;
        String ruleKey = String.valueOf(i);
        Rule rule = underTest.register(Integer.toString(i), RuleKey.of(repository, ruleKey));
        assertThat(rule.ref()).isEqualTo(Integer.toString(i));
        assertThat(rule.repository()).isEqualTo(repository);
        assertThat(rule.key()).isEqualTo(ruleKey);
    }
}
/**
 * Hystrix fallback: logs the execution failure (if any) and delegates to the
 * configured callback URI with the causing exception.
 *
 * @return a {@code Mono} completing when the fallback handling finishes
 */
@Override
protected Mono<Void> getFallback() {
    if (isFailedExecution()) {
        LOG.error("hystrix execute have error: ", getExecutionException());
    }
    final Throwable exception = getExecutionException();
    return fallback(exchange, getCallBackUri(), exception);
}
// With no exchange wired up, invoking the fallback is expected to NPE.
@Test
public void testGetFallback() {
    assertThrows(NullPointerException.class, () ->
        StepVerifier.create(hystrixCommandOnThread.getFallback()).expectSubscription().verifyComplete());
}
/**
 * Returns the number of buffered records, serialized on this wrapper's monitor.
 *
 * @return buffered record count reported by the wrapped group
 */
@Override
synchronized int numBuffered() {
    final int bufferedCount = wrapped.numBuffered();
    return bufferedCount;
}
// The per-partition count must be delegated to the wrapped group exactly once.
@Test
public void testNumBufferedWithTopicPartition() {
    final TopicPartition partition = new TopicPartition("topic", 0);
    final int numBuffered = 1;
    when(wrapped.numBuffered(partition)).thenReturn(numBuffered);
    final int result = synchronizedPartitionGroup.numBuffered(partition);
    assertEquals(numBuffered, result);
    verify(wrapped, times(1)).numBuffered(partition);
}
/**
 * Returns {@code true} when the collection is non-null and contains at least one element.
 *
 * @param collection the collection to check; may be {@code null}
 * @param <T> element type
 * @return {@code true} iff the collection is neither {@code null} nor empty
 */
public static <T> boolean isNotNullOrEmpty(Collection<T> collection) {
    final boolean nullOrEmpty = isNullOrEmpty(collection);
    return !nullOrEmpty;
}
// An empty (but non-null) collection is not "not null or empty".
@Test
void isNotNullOrEmptyIsFalseForEmptyCollection() {
    assertThat(isNotNullOrEmpty(new ArrayList<>())).isFalse();
}
/**
 * Executes a CREATE FUNCTION statement: analyzes the function body, rejects
 * bodies that invoke dynamically registered SQL functions, then registers the
 * function either on the session (temporary) or via the function manager.
 *
 * @return an immediately-completed future ({@code null} result)
 */
@Override
public ListenableFuture<?> execute(CreateFunction statement, TransactionManager transactionManager, Metadata metadata, AccessControl accessControl, QueryStateMachine stateMachine, List<Expression> parameters)
{
    Map<NodeRef<com.facebook.presto.sql.tree.Parameter>, Expression> parameterLookup = parameterExtractor(statement, parameters);
    Session session = stateMachine.getSession();
    Analyzer analyzer = new Analyzer(session, metadata, sqlParser, accessControl, Optional.empty(), parameters, parameterLookup, stateMachine.getWarningCollector());
    Analysis analysis = analyzer.analyze(statement);
    // SqlFunctionHandle marks a dynamically registered function; such functions
    // cannot be referenced from another SQL function body.
    if (analysis.getFunctionHandles().values().stream()
            .anyMatch(SqlFunctionHandle.class::isInstance)) {
        throw new PrestoException(NOT_SUPPORTED, "Invoking a dynamically registered function in SQL function body is not supported");
    }
    SqlInvokedFunction function = createSqlInvokedFunction(statement, metadata, analysis);
    if (statement.isTemporary()) {
        // Temporary functions live only on the current session.
        stateMachine.addSessionFunction(new SqlFunctionId(function.getSignature().getName(), function.getSignature().getArgumentTypes()), function);
    }
    else {
        metadata.getFunctionAndTypeManager().createFunction(function, statement.isReplace());
    }
    return immediateFuture(null);
}
// CREATE TEMPORARY FUNCTION must register the function on the session state
// machine rather than in the global function manager.
@Test
public void testCreateTemporaryFunction() {
    SqlParser parser = new SqlParser();
    String sqlString = "CREATE TEMPORARY FUNCTION foo() RETURNS int RETURN 1";
    CreateFunction statement = (CreateFunction) parser.createStatement(sqlString, ParsingOptions.builder().build());
    TransactionManager transactionManager = createTestTransactionManager();
    QueryStateMachine stateMachine = createQueryStateMachine(sqlString, TEST_SESSION, false, transactionManager, executorService, metadataManager);
    new CreateFunctionTask(parser).execute(statement, transactionManager, metadataManager, new AllowAllAccessControl(), stateMachine, emptyList());
    assertEquals(stateMachine.getAddedSessionFunctions().size(), 1);
}
/**
 * Forwards a watermark downstream, enforcing monotonicity.
 *
 * <p>Watermarks that do not advance past the current maximum are dropped.
 * Exceptions from downstream are wrapped in {@link ExceptionInChainedOperatorException}
 * unless they already are one.
 *
 * @param watermark the watermark to emit
 */
@Override
public void emitWatermark(Watermark watermark) {
    final long candidate = watermark.getTimestamp();
    // Drop regressions and duplicates: watermarks must strictly advance.
    if (candidate <= maxWatermarkSoFar) {
        return;
    }
    maxWatermarkSoFar = candidate;
    watermarkEmitted.updateCurrentEffectiveWatermark(maxWatermarkSoFar);
    try {
        markActiveInternally();
        output.emitWatermark(
                new org.apache.flink.streaming.api.watermark.Watermark(candidate));
    } catch (ExceptionInChainedOperatorException e) {
        throw e;
    } catch (Exception e) {
        throw new ExceptionInChainedOperatorException(e);
    }
}
// Regressing (10 after 17) and duplicate watermarks must be dropped; only the
// strictly increasing sequence 12, 17, 18 reaches the output.
@Test
void testWatermarksDoNotRegress() {
    final CollectingDataOutput<Object> testingOutput = new CollectingDataOutput<>();
    final WatermarkToDataOutput wmOutput = new WatermarkToDataOutput(testingOutput);
    wmOutput.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(12L));
    wmOutput.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(17L));
    wmOutput.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(10L));
    wmOutput.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(18L));
    wmOutput.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(17L));
    wmOutput.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(18L));
    assertThat(testingOutput.events)
            .contains(new Watermark(12L), new Watermark(17L), new Watermark(18L));
}
/**
 * Configures worker logging from pipeline options.
 *
 * <p>Replicates the legacy DataflowWorkerLoggingOptions behavior (default level
 * plus per-logger overrides) while preferring the newer SdkHarnessOptions when
 * the deprecated options are left at their defaults. Also rebinds System.out/err
 * to the logging handler when message levels are configured.
 *
 * @param options logging options for the worker
 * @throws RuntimeException if called before {@code initialize()}
 */
public static synchronized void configure(DataflowWorkerLoggingOptions options) {
    if (!initialized) {
        throw new RuntimeException("configure() called before initialize()");
    }
    // For compatibility reason, we do not call SdkHarnessOptions.getConfiguredLoggerFromOptions
    // to config the logging for legacy worker, instead replicate the config steps used for
    // DataflowWorkerLoggingOptions for default log level and log level overrides.
    SdkHarnessOptions harnessOptions = options.as(SdkHarnessOptions.class);
    boolean usedDeprecated = false;
    // default value for both DefaultSdkHarnessLogLevel and DefaultWorkerLogLevel are INFO
    Level overrideLevel = getJulLevel(harnessOptions.getDefaultSdkHarnessLogLevel());
    // A non-INFO deprecated default takes precedence and marks deprecated usage.
    if (options.getDefaultWorkerLogLevel() != null && options.getDefaultWorkerLogLevel() != INFO) {
        overrideLevel = getJulLevel(options.getDefaultWorkerLogLevel());
        usedDeprecated = true;
    }
    LogManager.getLogManager().getLogger(ROOT_LOGGER_NAME).setLevel(overrideLevel);
    // Deprecated per-logger overrides win over the SDK-harness overrides.
    if (options.getWorkerLogLevelOverrides() != null) {
        for (Map.Entry<String, DataflowWorkerLoggingOptions.Level> loggerOverride :
                options.getWorkerLogLevelOverrides().entrySet()) {
            Logger logger = Logger.getLogger(loggerOverride.getKey());
            logger.setLevel(getJulLevel(loggerOverride.getValue()));
            // Keep strong references so the JUL LogManager does not GC the config.
            configuredLoggers.add(logger);
        }
        usedDeprecated = true;
    } else if (harnessOptions.getSdkHarnessLogLevelOverrides() != null) {
        for (Map.Entry<String, SdkHarnessOptions.LogLevel> loggerOverride :
                harnessOptions.getSdkHarnessLogLevelOverrides().entrySet()) {
            Logger logger = Logger.getLogger(loggerOverride.getKey());
            logger.setLevel(getJulLevel(loggerOverride.getValue()));
            configuredLoggers.add(logger);
        }
    }
    // If the options specify a level for messages logged to System.out/err, we need to reconfigure
    // the corresponding stream adapter.
    if (options.getWorkerSystemOutMessageLevel() != null) {
        System.out.close();
        System.setOut(
            JulHandlerPrintStreamAdapterFactory.create(
                loggingHandler,
                SYSTEM_OUT_LOG_NAME,
                getJulLevel(options.getWorkerSystemOutMessageLevel()),
                Charset.defaultCharset()));
    }
    if (options.getWorkerSystemErrMessageLevel() != null) {
        System.err.close();
        System.setErr(
            JulHandlerPrintStreamAdapterFactory.create(
                loggingHandler,
                SYSTEM_ERR_LOG_NAME,
                getJulLevel(options.getWorkerSystemErrMessageLevel()),
                Charset.defaultCharset()));
    }
    if (usedDeprecated) {
        LOG.warn(
            "Deprecated DataflowWorkerLoggingOptions are used for log level settings."
                + "Consider using options defined in SdkHarnessOptions for forward compatibility.");
    }
}
// With the default level raised to ERROR, System.out (logged below ERROR) is
// filtered out while System.err still gets through.
@Test
public void testSystemOutRespectsFilterConfig() throws IOException {
    DataflowWorkerLoggingOptions options = PipelineOptionsFactory.as(DataflowWorkerLoggingOptions.class);
    options.setDefaultWorkerLogLevel(DataflowWorkerLoggingOptions.Level.ERROR);
    DataflowWorkerLoggingInitializer.configure(options);
    System.out.println("sys.out");
    System.err.println("sys.err");
    List<String> actualLines = retrieveLogLines();
    assertThat(actualLines, not(hasItem(containsString("sys.out"))));
    assertThat(actualLines, hasItem(containsString("sys.err")));
}
/**
 * Returns {@code true} if the SQL text contains only whitespace/comments.
 *
 * <p>Lexes the input and checks that every token before EOF sits on the
 * hidden channel (whitespace and comments).
 *
 * @param sql the SQL text to inspect
 * @return {@code true} iff no visible token exists
 */
public static boolean isEmptyStatement(String sql) {
    final TokenSource tokens = getLexer(sql, ImmutableSet.of());
    Token token = tokens.nextToken();
    while (token.getType() != Token.EOF) {
        if (token.getChannel() != Token.HIDDEN_CHANNEL) {
            return false;
        }
        token = tokens.nextToken();
    }
    return true;
}
// Whitespace and comment-only inputs are empty; any visible token is not.
@Test
public void testIsEmptyStatement() {
    assertTrue(isEmptyStatement(""));
    assertTrue(isEmptyStatement(" "));
    assertTrue(isEmptyStatement("\t\n "));
    assertTrue(isEmptyStatement("--foo\n --what"));
    assertTrue(isEmptyStatement("/* oops */"));
    assertFalse(isEmptyStatement("x"));
    assertFalse(isEmptyStatement("select"));
    assertFalse(isEmptyStatement("123"));
    assertFalse(isEmptyStatement("z#oops"));
}
/**
 * Extracts the identity prefix of a content string: everything before the
 * first word separator.
 *
 * @param content the raw content string
 * @return the substring preceding the separator
 * @throws IllegalArgumentException if the separator is absent
 */
public static String getContentIdentity(String content) {
    final int separatorIndex = content.indexOf(WORD_SEPARATOR);
    if (separatorIndex < 0) {
        throw new IllegalArgumentException("content does not contain separator");
    }
    return content.substring(0, separatorIndex);
}
// Content with no separator must be rejected with IllegalArgumentException.
@Test
void testGetContentIdentityFail() {
    assertThrows(IllegalArgumentException.class, () -> {
        String content = "aabbb";
        ContentUtils.getContentIdentity(content);
    });
}
/**
 * Serializes the object to a compact (non-pretty) JSON string.
 *
 * @param o the object to serialize
 * @return the JSON representation
 */
public static String toJson(Object o) {
    final boolean prettyPrint = false;
    return toJson(o, prettyPrint);
}
// A simple POJO should serialize with alphabetically ordered keys and convert
// to an equivalent map view.
@Test
void testBeanConversion() {
    SimplePojo pojo = new SimplePojo();
    String s = JsonUtils.toJson(pojo);
    Match.that(s).isEqualTo("{\"bar\":0,\"foo\":null}");
    Map<String, Object> map = Json.of(pojo).asMap();
    Match.that(map).isEqualTo("{ foo: null, bar: 0 }");
}
/**
 * Generates a class template spec for the schema, tracking the schema location
 * for the duration of processing.
 *
 * @param schema   the data schema to process
 * @param location where the schema was resolved from
 * @return the generated class template spec
 */
public ClassTemplateSpec generate(DataSchema schema, DataSchemaLocation location) {
    pushCurrentLocation(location);
    final ClassTemplateSpec spec = processSchema(schema, null, null);
    popCurrentLocation();
    return spec;
}
// Fields typed with custom-typed (typeref) schemas must carry custom-class
// info in the generated record spec, matching the declared Java custom type.
@Test(dataProvider = "customTypeDataForRecord")
public void testCustomInfoForRecordFields(final List<DataSchema> customTypedSchemas) {
    final List<RecordDataSchema.Field> fields = customTypedSchemas.stream()
        .map(RecordDataSchema.Field::new)
        .peek(field -> field.setName("field_" + _uniqueNumberGenerator.getAndIncrement(), null))
        .collect(Collectors.toList());
    final RecordDataSchema record = new RecordDataSchema(new Name(INPUT_SCHEMA_NAME), RecordDataSchema.RecordType.RECORD);
    record.setFields(fields, null);
    final TemplateSpecGenerator generator = new TemplateSpecGenerator(_resolver);
    final RecordTemplateSpec spec = (RecordTemplateSpec) generator.generate(record, _location);
    for (int i = 0; i < customTypedSchemas.size(); ++i) {
        Assert.assertNotNull(spec.getFields().get(i).getCustomInfo());
        Assert.assertEquals(spec.getFields().get(i).getCustomInfo().getCustomClass().getClassName(),
            CustomTypeUtil.getJavaCustomTypeClassNameFromSchema((TyperefDataSchema) customTypedSchemas.get(i)));
    }
}
/**
 * Computes the physical (rolling) index name for a model and a time bucket.
 *
 * <p>The time bucket is first scaled to day precision by dividing away the
 * finer components (seconds: /1_000_000, minute: /10_000, hour: /100), then
 * compressed by the configured day step. Super-dataset records use their own
 * (typically larger) day step.
 *
 * @param model      the storage model
 * @param timeBucket a time bucket whose precision matches the model's downsampling
 * @return the index name, optionally suffixed with the compressed day bucket
 */
static String writeIndexName(Model model, long timeBucket) {
    String tableName = IndexController.INSTANCE.getTableName(model);
    if (model.isRecord() && model.isSuperDataset()) {
        // Super datasets roll with a dedicated day step; bucket is second precision.
        return tableName + Const.LINE + compressTimeBucket(timeBucket / 1000000, SUPER_DATASET_DAY_STEP);
    } else {
        switch (model.getDownsampling()) {
            case None:
                return tableName;
            case Hour:
                return tableName + Const.LINE + compressTimeBucket(timeBucket / 100, DAY_STEP);
            case Minute:
                return tableName + Const.LINE + compressTimeBucket(timeBucket / 10000, DAY_STEP);
            case Day:
                return tableName + Const.LINE + compressTimeBucket(timeBucket, DAY_STEP);
            case Second:
                return tableName + Const.LINE + compressTimeBucket(timeBucket / 1000000, DAY_STEP);
            default:
                throw new UnexpectedException("Unexpected down sampling value, " + model.getDownsampling());
        }
    }
}
// Verifies day-step compression for super-dataset (step 1) vs normal models
// (step 3), and that advancing the bucket by one day rolls every index name.
@Test
public void testIndexRolling() {
    long secondTimeBucket = 2020_0809_1010_59L;
    long minuteTimeBucket = 2020_0809_1010L;
    Assertions.assertEquals(
        "superDatasetModel-20200809",
        writeIndexName(superDatasetModel, secondTimeBucket)
    );
    Assertions.assertEquals(
        "records-all-20200807",
        writeIndexName(normalRecordModel, secondTimeBucket)
    );
    Assertions.assertEquals(
        "metrics-all-20200807",
        writeIndexName(normalMetricsModel, minuteTimeBucket)
    );
    secondTimeBucket += 1000000;
    minuteTimeBucket += 10000;
    Assertions.assertEquals(
        "superDatasetModel-20200810",
        writeIndexName(superDatasetModel, secondTimeBucket)
    );
    Assertions.assertEquals(
        "records-all-20200810",
        writeIndexName(normalRecordModel, secondTimeBucket)
    );
    Assertions.assertEquals(
        "metrics-all-20200810",
        writeIndexName(normalMetricsModel, minuteTimeBucket)
    );
}
/**
 * Expands an Eddystone-URL compressed byte array back into a URL string.
 *
 * <p>The low nibble of the first byte selects the protocol prefix; subsequent
 * bytes are either TLD expansion codes or literal characters. Two consecutive
 * zero bytes terminate the encoded URL.
 *
 * @param compressedURL the Eddystone-URL encoded bytes (must be non-empty)
 * @return the expanded URL string
 */
public static String uncompress(byte[] compressedURL) {
    // Local, single-threaded buffer: StringBuilder avoids StringBuffer's
    // unnecessary synchronization.
    StringBuilder url = new StringBuilder();
    switch (compressedURL[0] & 0x0f) {
        case EDDYSTONE_URL_PROTOCOL_HTTP_WWW:
            url.append(URL_PROTOCOL_HTTP_WWW_DOT);
            break;
        case EDDYSTONE_URL_PROTOCOL_HTTPS_WWW:
            url.append(URL_PROTOCOL_HTTPS_WWW_DOT);
            break;
        case EDDYSTONE_URL_PROTOCOL_HTTP:
            url.append(URL_PROTOCOL_HTTP_COLON_SLASH_SLASH);
            break;
        case EDDYSTONE_URL_PROTOCOL_HTTPS:
            url.append(URL_PROTOCOL_HTTPS_COLON_SLASH_SLASH);
            break;
        default:
            // Unknown protocol nibble: emit no prefix, decode the rest as-is.
            break;
    }
    byte lastByte = -1;
    for (int i = 1; i < compressedURL.length; i++) {
        byte b = compressedURL[i];
        // Two consecutive zero bytes mark the end of the encoded URL.
        if (lastByte == 0 && b == 0) {
            break;
        }
        lastByte = b;
        String tld = topLevelDomainForByte(b);
        if (tld != null) {
            url.append(tld);
        } else {
            url.append((char) b);
        }
    }
    return url.toString();
}
// Bytes with no TLD expansion code decode as literal characters after the
// protocol prefix (0x02 == "http://").
@Test
public void testUncompressWithoutTLD() throws MalformedURLException {
    String testURL = "http://xxx";
    byte[] testBytes = {0x02, 'x', 'x', 'x'};
    assertEquals(testURL, UrlBeaconUrlCompressor.uncompress(testBytes));
}
/**
 * Tokenizes a key=value formatted string into a map of fields.
 *
 * <p>Inputs without an '=' yield an empty map; null/empty inputs are returned
 * unchanged.
 *
 * @param value the raw message text; may be {@code null} or empty
 * @return a {@code Map<String, String>} of extracted fields, an empty map, or
 *         the original value when null/empty
 */
@Override
public Object convert(String value) {
    if (isNullOrEmpty(value)) {
        return value;
    }
    if (value.contains("=")) {
        final Map<String, String> fields = new HashMap<>();
        Matcher m = PATTERN.matcher(value);
        while (m.find()) {
            // NOTE(review): groupCount() is a property of the pattern, not of the
            // current match, so this guard is effectively constant — presumably a
            // defensive no-op; confirm before removing.
            if (m.groupCount() != 2) {
                continue;
            }
            fields.put(removeQuotes(m.group(1)), removeQuotes(m.group(2)));
        }
        return fields;
    } else {
        return Collections.emptyMap();
    }
}
// Single-quoted values must be extracted with the quotes stripped.
@Test
public void testFilterWithSingleQuotedValue() {
    TokenizerConverter f = new TokenizerConverter(new HashMap<String, Object>());
    @SuppressWarnings("unchecked")
    Map<String, String> result = (Map<String, String>) f.convert("otters in k1='v1' more otters");
    assertEquals(1, result.size());
    assertEquals("v1", result.get("k1"));
}
/**
 * Resolves the SQL type of an expression with no lambda type mappings.
 *
 * @param expression the expression to type-check
 * @return the resolved SQL type
 */
public SqlType getExpressionSqlType(final Expression expression) {
    // Delegate with an empty lambda-type mapping.
    return this.getExpressionSqlType(expression, Collections.emptyMap());
}
// A null literal inside an array constructor must not break type inference;
// the element type comes from the non-null element (BIGINT).
@Test
public void shouldEvaluateTypeForCreateArrayExpressionWithNull() {
    // Given:
    Expression expression = new CreateArrayExpression(
        ImmutableList.of(
            new UnqualifiedColumnReferenceExp(COL0),
            new NullLiteral()
        )
    );
    // When:
    final SqlType type = expressionTypeManager.getExpressionSqlType(expression);
    // Then:
    assertThat(type, is(SqlTypes.array(SqlTypes.BIGINT)));
}
/**
 * Returns the parent of the given path.
 *
 * <p>The path is normalized first so that {@code .}, {@code ..}, and trailing
 * slashes are resolved before taking the parent.
 *
 * @param path the path whose parent to compute
 * @return the parent path
 * @throws InvalidPathException if the path is malformed
 */
public static String getParent(String path) throws InvalidPathException {
    final String normalized = cleanPath(path);
    return getParentCleaned(normalized);
}
// Parent resolution after normalization: non-root parents, root parents via
// "..", and the root's own parent (which stays root).
@Test
public void getParent() throws InvalidPathException {
    // get a parent that is non-root
    assertEquals("/foo", PathUtils.getParent("/foo/bar"));
    assertEquals("/foo", PathUtils.getParent("/foo/bar/"));
    assertEquals("/foo", PathUtils.getParent("/foo/./bar/"));
    assertEquals("/foo", PathUtils.getParent("/foo/././bar/"));
    // get a parent that is root
    assertEquals("/", PathUtils.getParent("/foo"));
    assertEquals("/", PathUtils.getParent("/foo/bar/../"));
    assertEquals("/", PathUtils.getParent("/foo/../bar/"));
    // get parent of root
    assertEquals("/", PathUtils.getParent("/"));
    assertEquals("/", PathUtils.getParent("/foo/bar/../../"));
    assertEquals("/", PathUtils.getParent("/foo/../bar/../"));
}
/**
 * Handles game chat messages, raising Grand Exchange notifications.
 *
 * <p>Completed-offer messages use the dedicated completion toggle; all other
 * Grand Exchange messages use the general notification toggle.
 *
 * @param event the incoming chat message event
 */
@Subscribe
public void onChatMessage(ChatMessage event) {
    if (event.getType() != ChatMessageType.GAMEMESSAGE) {
        return;
    }
    final String message = Text.removeTags(event.getMessage());
    if (!message.startsWith("Grand Exchange:")) {
        return;
    }
    final boolean offerComplete = message.startsWith("Grand Exchange: Finished");
    notifier.notify(offerComplete ? config.notifyOnOfferComplete() : config.enableNotifications(), message);
}
// A "Finished buying" GE message (tags stripped) must trigger a notification
// through the offer-complete setting.
@Test
public void testNotifyComplete() {
    when(grandExchangeConfig.notifyOnOfferComplete()).thenReturn(Notification.ON);
    ChatMessage chatMessage = new ChatMessage();
    chatMessage.setType(ChatMessageType.GAMEMESSAGE);
    chatMessage.setMessage("<col=006000>Grand Exchange: Finished buying 1 x Acorn.</col>");
    grandExchangePlugin.onChatMessage(chatMessage);
    verify(notifier).notify(any(Notification.class), anyString());
}
/**
 * Sends a request and exposes the eventual response as a future.
 *
 * <p>Bridges the callback-based overload into the future-based API.
 *
 * @param request        the request to send
 * @param requestContext per-request context
 * @param <T> response entity type
 * @return a future completing with the response
 */
@Override
public <T> ResponseFuture<T> sendRequest(Request<T> request, RequestContext requestContext) {
    final FutureCallback<Response<T>> futureCallback = new FutureCallback<>();
    sendRequest(request, requestContext, futureCallback);
    return new ResponseFutureImpl<>(futureCallback);
}
// Verifies that a mocked error response surfaces through the future as a
// RestLiResponseException carrying every error field (status, details, code,
// doc URL, request id) — or as null when configured not to throw.
@SuppressWarnings("deprecation")
@Test(dataProvider = TestConstants.RESTLI_PROTOCOL_1_2_PREFIX + "sendRequestAndGetResponseOptions")
public void testRestLiResponseExceptionFuture(SendRequestOption sendRequestOption,
                                              GetResponseOption getResponseOption,
                                              TimeoutOption timeoutOption,
                                              ProtocolVersionOption versionOption,
                                              ProtocolVersion protocolVersion,
                                              String errorResponseHeaderName,
                                              ContentType contentType)
    throws RemoteInvocationException, TimeoutException, InterruptedException, IOException {
    final String ERR_KEY = "someErr";
    final String ERR_VALUE = "WHOOPS!";
    final String ERR_MSG = "whoops2";
    final int HTTP_CODE = 400;
    final int APP_CODE = 666;
    final String CODE = "INVALID_INPUT";
    final String DOC_URL = "https://example.com/errors/invalid-input";
    final String REQUEST_ID = "abc123";
    RestClient client = mockClient(ERR_KEY, ERR_VALUE, ERR_MSG, HTTP_CODE, APP_CODE, CODE, DOC_URL, REQUEST_ID,
        protocolVersion, errorResponseHeaderName);
    Request<EmptyRecord> request = mockRequest(EmptyRecord.class, versionOption, contentType);
    RequestBuilder<Request<EmptyRecord>> requestBuilder = mockRequestBuilder(request);
    ResponseFuture<EmptyRecord> future = sendRequest(sendRequestOption,
        determineErrorHandlingBehavior(getResponseOption), client, request, requestBuilder);
    RestLiResponseException e = getErrorResponse(getResponseOption, future, timeoutOption);
    if (getResponseOption == GetResponseOption.GET_RESPONSE_ENTITY_EXPLICIT_NO_THROW) {
        // The no-throw mode suppresses the exception entirely.
        Assert.assertNull(e);
    } else {
        Assert.assertEquals(HTTP_CODE, e.getStatus());
        Assert.assertEquals(ERR_VALUE, e.getErrorDetails().get(ERR_KEY));
        Assert.assertEquals(APP_CODE, e.getServiceErrorCode());
        Assert.assertEquals(ERR_MSG, e.getServiceErrorMessage());
        Assert.assertEquals(CODE, e.getCode());
        Assert.assertEquals(DOC_URL, e.getDocUrl());
        Assert.assertEquals(REQUEST_ID, e.getRequestId());
        Assert.assertEquals(EmptyRecord.class.getCanonicalName(), e.getErrorDetailType());
        Assert.assertNotNull(e.getErrorDetailsRecord());
        Assert.assertTrue(e.getErrorDetailsRecord() instanceof EmptyRecord);
    }
}
/**
 * Adds every count from another counter map into this one.
 *
 * <p>Each (outerKey, innerKey, count) triple of {@code other} is folded in via
 * {@code incrementCount}, so existing counts accumulate rather than being replaced.
 *
 * @param other the counter map whose counts to merge in
 */
public void incrementAll(CounterMap<F, S> other) {
    other.maps.forEach((outerKey, innerCounter) ->
        innerCounter.entrySet().forEach(innerEntry ->
            incrementCount(outerKey, innerEntry.getKey(), innerEntry.getValue().get())));
}
// Merging counterMapB into counterMapA must accumulate overlapping cells,
// create new cells, leave untouched cells intact, and grow totalSize.
@Test
public void testIncrementAll() {
    CounterMap<Integer, Integer> counterMapA = new CounterMap<>();
    counterMapA.incrementCount(0, 0, 1);
    counterMapA.incrementCount(0, 1, 1);
    counterMapA.incrementCount(0, 2, 1);
    counterMapA.incrementCount(1, 0, 1);
    counterMapA.incrementCount(1, 1, 1);
    counterMapA.incrementCount(1, 2, 1);
    CounterMap<Integer, Integer> counterMapB = new CounterMap<>();
    counterMapB.incrementCount(1, 1, 1);
    counterMapB.incrementCount(2, 1, 1);
    counterMapA.incrementAll(counterMapB);
    assertEquals(2.0, counterMapA.getCount(1,1), 1e-5);
    assertEquals(1.0, counterMapA.getCount(2,1), 1e-5);
    assertEquals(1.0, counterMapA.getCount(0,0), 1e-5);
    assertEquals(7, counterMapA.totalSize());
    counterMapA.setCount(2, 1, 17);
    assertEquals(17.0, counterMapA.getCount(2, 1), 1e-5);
}
/**
 * Returns the floating IPs associated with the named router.
 *
 * @param routerName router name; must be non-null and non-empty
 * @return the (possibly empty) set of matching floating IPs
 * @throws IllegalArgumentException if {@code routerName} is null or empty
 */
@Override
public Set<KubevirtFloatingIp> floatingIpsByRouter(String routerName) {
    checkArgument(!Strings.isNullOrEmpty(routerName), ERR_NULL_ROUTER_NAME);
    final Set<KubevirtFloatingIp> matching = new HashSet<>();
    for (KubevirtFloatingIp fip : kubevirtRouterStore.floatingIps()) {
        if (routerName.equals(fip.routerName())) {
            matching.add(fip);
        }
    }
    return matching;
}
// A disassociated floating IP created for ROUTER_NAME must still be returned
// by the router-name lookup.
@Test
public void testGetFloatingIpsByRouterName() {
    createBasicFloatingIpDisassociated();
    assertEquals("Number of floating IPs did not match",
        1, target.floatingIpsByRouter(ROUTER_NAME).size());
}
/**
 * Converts the in-memory realtime segment into an immutable on-disk segment.
 *
 * <p>Builds a segment generator config mirroring the realtime table's index
 * configuration, commits the mutable segment, then drives the index creation
 * (row-major or column-major), finally reporting per-column partition counts.
 *
 * @param segmentVersion target segment format version; {@code null} keeps the default
 * @param serverMetrics  sink for partition-count gauges
 * @throws Exception if segment generation fails
 */
public void build(@Nullable SegmentVersion segmentVersion, ServerMetrics serverMetrics) throws Exception {
    SegmentGeneratorConfig genConfig = new SegmentGeneratorConfig(_tableConfig, _dataSchema);
    // The segment generation code in SegmentColumnarIndexCreator will throw
    // exception if start and end time in time column are not in acceptable
    // range. We don't want the realtime consumption to stop (if an exception
    // is thrown) and thus the time validity check is explicitly disabled for
    // realtime segment generation
    genConfig.setSegmentTimeValueCheck(false);
    if (_columnIndicesForRealtimeTable.getInvertedIndexColumns() != null) {
        genConfig.setIndexOn(StandardIndexes.inverted(), IndexConfig.ENABLED,
            _columnIndicesForRealtimeTable.getInvertedIndexColumns());
    }
    if (_columnIndicesForRealtimeTable.getVarLengthDictionaryColumns() != null) {
        genConfig.setVarLengthDictionaryColumns(_columnIndicesForRealtimeTable.getVarLengthDictionaryColumns());
    }
    if (segmentVersion != null) {
        genConfig.setSegmentVersion(segmentVersion);
    }
    genConfig.setTableName(_tableName);
    genConfig.setOutDir(_outputPath);
    genConfig.setSegmentName(_segmentName);
    addIndexOrDefault(genConfig, StandardIndexes.text(), _columnIndicesForRealtimeTable.getTextIndexColumns(),
        new TextIndexConfigBuilder(genConfig.getFSTIndexType()).build());
    addIndexOrDefault(genConfig, StandardIndexes.fst(), _columnIndicesForRealtimeTable.getFstIndexColumns(),
        new FstIndexConfig(genConfig.getFSTIndexType()));
    SegmentPartitionConfig segmentPartitionConfig = _realtimeSegmentImpl.getSegmentPartitionConfig();
    genConfig.setSegmentPartitionConfig(segmentPartitionConfig);
    genConfig.setNullHandlingEnabled(_nullHandlingEnabled);
    genConfig.setSegmentZKPropsConfig(_segmentZKPropsConfig);
    // flush any artifacts to disk to improve mutable to immutable segment conversion
    _realtimeSegmentImpl.commit();
    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    try (PinotSegmentRecordReader recordReader = new PinotSegmentRecordReader()) {
        // When a sorted column is configured, iterate docs in its sorted order.
        int[] sortedDocIds = _columnIndicesForRealtimeTable.getSortedColumn() != null
            ? _realtimeSegmentImpl.getSortedDocIdIterationOrderWithSortedColumn(
                _columnIndicesForRealtimeTable.getSortedColumn()) : null;
        recordReader.init(_realtimeSegmentImpl, sortedDocIds);
        RealtimeSegmentSegmentCreationDataSource dataSource =
            new RealtimeSegmentSegmentCreationDataSource(_realtimeSegmentImpl, recordReader);
        driver.init(genConfig, dataSource, RecordEnricherPipeline.getPassThroughPipeline(),
            TransformPipeline.getPassThroughPipeline());
        if (!_enableColumnMajor) {
            driver.build();
        } else {
            driver.buildByColumn(_realtimeSegmentImpl);
        }
    }
    if (segmentPartitionConfig != null) {
        Map<String, ColumnPartitionConfig> columnPartitionMap = segmentPartitionConfig.getColumnPartitionMap();
        for (String columnName : columnPartitionMap.keySet()) {
            int numPartitions = driver.getSegmentStats().getColumnProfileFor(columnName).getPartitions().size();
            serverMetrics.addValueToTableGauge(_tableName, ServerGauge.REALTIME_SEGMENT_NUM_PARTITIONS, numPartitions);
        }
    }
}
// Builds an immutable v3 segment from generated rows via the row-major converter path
// (column-major builder explicitly disabled) and verifies the resulting segment metadata:
// version, doc count, time column/range, column set, and start/end offsets.
@Test
public void test10RecordsIndexedRowMajorSegmentBuilder() throws Exception {
  File tmpDir = new File(TMP_DIR, "tmp_" + System.currentTimeMillis());
  // Table config exercises every index flavor: inverted, sorted, range,
  // no-dictionary, var-length dictionary, and on-heap dictionary columns.
  TableConfig tableConfig = new TableConfigBuilder(TableType.REALTIME).setTableName("testTable")
      .setTimeColumnName(DATE_TIME_COLUMN)
      .setInvertedIndexColumns(Lists.newArrayList(STRING_COLUMN1, LONG_COLUMN1))
      .setSortedColumn(LONG_COLUMN1)
      .setRangeIndexColumns(Lists.newArrayList(STRING_COLUMN2))
      .setNoDictionaryColumns(Lists.newArrayList(LONG_COLUMN2))
      .setVarLengthDictionaryColumns(Lists.newArrayList(STRING_COLUMN3))
      .setOnHeapDictionaryColumns(Lists.newArrayList(LONG_COLUMN3))
      .setColumnMajorSegmentBuilderEnabled(false)
      .build();
  Schema schema = new Schema.SchemaBuilder()
      .addSingleValueDimension(STRING_COLUMN1, FieldSpec.DataType.STRING)
      .addSingleValueDimension(STRING_COLUMN2, FieldSpec.DataType.STRING)
      .addSingleValueDimension(STRING_COLUMN3, FieldSpec.DataType.STRING)
      .addSingleValueDimension(STRING_COLUMN4, FieldSpec.DataType.STRING)
      .addSingleValueDimension(LONG_COLUMN1, FieldSpec.DataType.LONG)
      .addSingleValueDimension(LONG_COLUMN2, FieldSpec.DataType.LONG)
      .addSingleValueDimension(LONG_COLUMN3, FieldSpec.DataType.LONG)
      .addMultiValueDimension(MV_INT_COLUMN, FieldSpec.DataType.INT)
      .addMetric(LONG_COLUMN4, FieldSpec.DataType.LONG)
      .addDateTime(DATE_TIME_COLUMN, FieldSpec.DataType.LONG, "1:MILLISECONDS:EPOCH", "1:MILLISECONDS")
      .build();
  String tableNameWithType = tableConfig.getTableName();
  String segmentName = "testTable__0__0__123456";
  IndexingConfig indexingConfig = tableConfig.getIndexingConfig();
  DictionaryIndexConfig varLengthDictConf = new DictionaryIndexConfig(false, true);
  RealtimeSegmentConfig.Builder realtimeSegmentConfigBuilder =
      new RealtimeSegmentConfig.Builder().setTableNameWithType(tableNameWithType).setSegmentName(segmentName)
          .setStreamName(tableNameWithType).setSchema(schema).setTimeColumnName(DATE_TIME_COLUMN).setCapacity(1000)
          .setAvgNumMultiValues(3)
          .setIndex(Sets.newHashSet(LONG_COLUMN2), StandardIndexes.dictionary(), DictionaryIndexConfig.DISABLED)
          // NOTE(review): the double Sets.newHashSet(...) wrapping looks unintentional — confirm
          // against the setIndex signature.
          .setIndex(Sets.newHashSet(Sets.newHashSet(STRING_COLUMN3)), StandardIndexes.dictionary(), varLengthDictConf)
          .setIndex(Sets.newHashSet(STRING_COLUMN1, LONG_COLUMN1), StandardIndexes.inverted(), IndexConfig.ENABLED)
          .setSegmentZKMetadata(getSegmentZKMetadata(segmentName)).setOffHeap(true)
          .setMemoryManager(new DirectMemoryManager(segmentName))
          .setStatsHistory(RealtimeSegmentStatsHistory.deserialzeFrom(new File(tmpDir, "stats")))
          .setConsumerDir(new File(tmpDir, "consumerDir").getAbsolutePath());
  // create mutable segment impl
  MutableSegmentImpl mutableSegmentImpl = new MutableSegmentImpl(realtimeSegmentConfigBuilder.build(), null);
  List<GenericRow> rows = generateTestData();
  for (GenericRow row : rows) {
    mutableSegmentImpl.index(row, null);
  }
  File outputDir = new File(tmpDir, "outputDir");
  SegmentZKPropsConfig segmentZKPropsConfig = new SegmentZKPropsConfig();
  segmentZKPropsConfig.setStartOffset("1");
  segmentZKPropsConfig.setEndOffset("100");
  ColumnIndicesForRealtimeTable cdc = new ColumnIndicesForRealtimeTable(indexingConfig.getSortedColumn().get(0),
      indexingConfig.getInvertedIndexColumns(), null, null, indexingConfig.getNoDictionaryColumns(),
      indexingConfig.getVarLengthDictionaryColumns());
  RealtimeSegmentConverter converter =
      new RealtimeSegmentConverter(mutableSegmentImpl, segmentZKPropsConfig, outputDir.getAbsolutePath(), schema,
          tableNameWithType, tableConfig, segmentName, cdc, false);
  // null ServerMetrics: the converter must tolerate an absent metrics sink.
  converter.build(SegmentVersion.v3, null);
  File indexDir = new File(outputDir, segmentName);
  SegmentMetadataImpl segmentMetadata = new SegmentMetadataImpl(indexDir);
  assertEquals(segmentMetadata.getVersion(), SegmentVersion.v3);
  assertEquals(segmentMetadata.getTotalDocs(), rows.size());
  assertEquals(segmentMetadata.getTimeColumn(), DATE_TIME_COLUMN);
  assertEquals(segmentMetadata.getTimeUnit(), TimeUnit.MILLISECONDS);
  // Rows are generated in time order, so first/last row bound the segment's time range.
  long expectedStartTime = (long) rows.get(0).getValue(DATE_TIME_COLUMN);
  assertEquals(segmentMetadata.getStartTime(), expectedStartTime);
  long expectedEndTime = (long) rows.get(rows.size() - 1).getValue(DATE_TIME_COLUMN);
  assertEquals(segmentMetadata.getEndTime(), expectedEndTime);
  assertTrue(segmentMetadata.getAllColumns().containsAll(schema.getColumnNames()));
  assertEquals(segmentMetadata.getStartOffset(), "1");
  assertEquals(segmentMetadata.getEndOffset(), "100");
  testSegment(rows, indexDir, tableConfig, segmentMetadata);
}
@GET @Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 }) @Override public ClusterInfo get() { return getClusterInfo(); }
@Test public void testClusterMetricsXML() throws JSONException, Exception { WebResource r = resource(); ClientResponse response = r.path("ws").path("v1").path("cluster") .path("metrics").accept("application/xml").get(ClientResponse.class); assertEquals(MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8, response.getType().toString()); String xml = response.getEntity(String.class); verifyClusterMetricsXML(xml); }
/**
 * Builds the full argument array for a resource method invocation.
 *
 * <p>Positional arguments (keys/batch inputs resolved earlier) are copied into the front of the
 * array; every remaining declared parameter is then filled in based on its {@code ParamType}
 * (path key, header, projection mask, paging context, attachments, query/action parameter, ...).
 * A parameter that cannot be resolved from the request falls through to the default-value
 * handling at the bottom of the loop.
 *
 * @param positionalArguments  arguments already resolved by the routing layer
 * @param resourceMethod       descriptor of the method being invoked
 * @param context              per-request context (path keys, headers, projections, streams)
 * @param template             action-parameter record, or null for non-action methods
 * @param resourceMethodConfig method-level configuration (e.g. query-param validation)
 * @return the completed argument array, same length as the method's parameter list
 * @throws RoutingException        (400) for invalid/missing/unknown parameters
 * @throws RestLiServiceException  (500) for malformed parameter defaults, (400) for
 *                                 unexpected request attachments
 */
@SuppressWarnings("deprecation")
static Object[] buildArgs(final Object[] positionalArguments,
    final ResourceMethodDescriptor resourceMethod,
    final ServerResourceContext context,
    final DynamicRecordTemplate template,
    final ResourceMethodConfig resourceMethodConfig)
{
  List<Parameter<?>> parameters = resourceMethod.getParameters();
  // Positional args occupy the first slots; the rest are filled below.
  Object[] arguments = Arrays.copyOf(positionalArguments, parameters.size());
  fixUpComplexKeySingletonArraysInArguments(arguments);
  boolean attachmentsDesired = false;
  for (int i = positionalArguments.length; i < parameters.size(); ++i)
  {
    Parameter<?> param = parameters.get(i);
    try
    {
      if (param.getParamType() == Parameter.ParamType.KEY || param.getParamType() == Parameter.ParamType.ASSOC_KEY_PARAM)
      {
        // Association/path key: absent values fall through to default handling.
        Object value = context.getPathKeys().get(param.getName());
        if (value != null)
        {
          arguments[i] = value;
          continue;
        }
      }
      else if (param.getParamType() == Parameter.ParamType.CALLBACK)
      {
        // Callback is injected by the invoker, not built here.
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PARSEQ_CONTEXT_PARAM || param.getParamType() == Parameter.ParamType.PARSEQ_CONTEXT)
      {
        continue; // don't know what to fill in yet
      }
      else if (param.getParamType() == Parameter.ParamType.HEADER)
      {
        HeaderParam headerParam = param.getAnnotations().get(HeaderParam.class);
        String value = context.getRequestHeaders().get(headerParam.value());
        arguments[i] = value;
        continue;
      }
      //Since we have multiple different types of MaskTrees that can be passed into resource methods,
      //we must evaluate based on the param type (annotation used)
      else if (param.getParamType() == Parameter.ParamType.PROJECTION || param.getParamType() == Parameter.ParamType.PROJECTION_PARAM)
      {
        arguments[i] = context.getProjectionMask();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.METADATA_PROJECTION_PARAM)
      {
        arguments[i] = context.getMetadataProjectionMask();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PAGING_PROJECTION_PARAM)
      {
        arguments[i] = context.getPagingProjectionMask();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.CONTEXT || param.getParamType() == Parameter.ParamType.PAGING_CONTEXT_PARAM)
      {
        // Paging context: request values merged over the parameter's declared default.
        PagingContext ctx = RestUtils.getPagingContext(context, (PagingContext) param.getDefaultValue());
        arguments[i] = ctx;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PATH_KEYS || param.getParamType() == Parameter.ParamType.PATH_KEYS_PARAM)
      {
        arguments[i] = context.getPathKeys();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PATH_KEY_PARAM)
      {
        Object value = context.getPathKeys().get(param.getName());
        if (value != null)
        {
          arguments[i] = value;
          continue;
        }
      }
      else if (param.getParamType() == Parameter.ParamType.RESOURCE_CONTEXT || param.getParamType() == Parameter.ParamType.RESOURCE_CONTEXT_PARAM)
      {
        arguments[i] = context;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.VALIDATOR_PARAM)
      {
        // Fresh validator per invocation, configured from the resource's annotations.
        RestLiDataValidator validator = new RestLiDataValidator(resourceMethod.getResourceModel().getResourceClass().getAnnotations(),
            resourceMethod.getResourceModel().getValueClass(), resourceMethod.getMethodType());
        arguments[i] = validator;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.RESTLI_ATTACHMENTS_PARAM)
      {
        arguments[i] = context.getRequestAttachmentReader();
        attachmentsDesired = true;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.UNSTRUCTURED_DATA_WRITER_PARAM)
      {
        // The OutputStream is passed to the resource implementation in a synchronous call. Upon return of the
        // resource method, all the bytes would haven't written to the OutputStream. The EntityStream would have
        // contained all the bytes by the time data is requested. The ownership of the OutputStream is passed to
        // the ByteArrayOutputStreamWriter, which is responsible of closing the OutputStream if necessary.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        context.setResponseEntityStream(EntityStreams.newEntityStream(new ByteArrayOutputStreamWriter(out)));
        arguments[i] = new UnstructuredDataWriter(out, context);
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.UNSTRUCTURED_DATA_REACTIVE_READER_PARAM)
      {
        arguments[i] = new UnstructuredDataReactiveReader(context.getRequestEntityStream(), context.getRawRequest().getHeader(RestConstants.HEADER_CONTENT_TYPE));
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.POST)
      {
        // handle action parameters
        if (template != null)
        {
          DataMap data = template.data();
          if (data.containsKey(param.getName()))
          {
            arguments[i] = template.getValue(param);
            continue;
          }
        }
      }
      else if (param.getParamType() == Parameter.ParamType.QUERY)
      {
        Object value;
        // Structured (DataTemplate) query params are deserialized; everything else is coerced.
        if (DataTemplate.class.isAssignableFrom(param.getType()))
        {
          value = buildDataTemplateArgument(context.getStructuredParameter(param.getName()), param, resourceMethodConfig.shouldValidateQueryParams());
        }
        else
        {
          value = buildRegularArgument(context, param, resourceMethodConfig.shouldValidateQueryParams());
        }
        if (value != null)
        {
          arguments[i] = value;
          continue;
        }
      }
      else if (param.getParamType() == Parameter.ParamType.BATCH || param.getParamType() == Parameter.ParamType.RESOURCE_KEY)
      {
        // should not come to this routine since it should be handled by passing in positionalArguments
        throw new RoutingException("Parameter '" + param.getName() + "' should be passed in as a positional argument", HttpStatus.S_400_BAD_REQUEST.getCode());
      }
      else
      {
        // unknown param type
        throw new RoutingException(
            "Parameter '" + param.getName() + "' has an unknown parameter type '" + param.getParamType().name() + "'",
            HttpStatus.S_400_BAD_REQUEST.getCode());
      }
    }
    catch (TemplateRuntimeException e)
    {
      throw new RoutingException("Parameter '" + param.getName() + "' is invalid", HttpStatus.S_400_BAD_REQUEST.getCode());
    }
    try
    {
      // Handling null-valued parameters not provided in resource context or entity body
      // check if it is optional parameter
      if (param.isOptional() && param.hasDefaultValue())
      {
        arguments[i] = param.getDefaultValue();
      }
      else if (param.isOptional() && !param.getType().isPrimitive())
      {
        // optional primitive parameter must have default value or provided
        arguments[i] = null;
      }
      else
      {
        throw new RoutingException("Parameter '" + param.getName() + "' is required", HttpStatus.S_400_BAD_REQUEST.getCode());
      }
    }
    catch (ResourceConfigException e)
    {
      // Parameter default value format exception should result in server error code 500.
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Parameter '" + param.getName() + "' default value is invalid", e);
    }
  }
  //Verify that if the resource method did not expect attachments, and attachments were present, that we drain all
  //incoming attachments and send back a bad request. We must take precaution here since simply ignoring the request
  //attachments is not correct behavior here. Ignoring other request level constructs such as headers or query parameters
  //that were not needed is safe, but not for request attachments.
  if (!attachmentsDesired && context.getRequestAttachmentReader() != null)
  {
    throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST,
        "Resource method endpoint invoked does not accept any request attachments.");
  }
  return arguments;
}
@Test(dataProvider = "noOpParameterData") public void testNoOpParamType(Class<?> dataType, Parameter.ParamType paramType) { String paramKey = "testParam"; ServerResourceContext mockResourceContext = EasyMock.createMock(ServerResourceContext.class); @SuppressWarnings({"unchecked","rawtypes"}) Parameter<?> param = new Parameter(paramKey, dataType, null, false, null, paramType, false, AnnotationSet.EMPTY); List<Parameter<?>> parameters = Collections.singletonList(param); Object[] results = ArgumentBuilder.buildArgs(new Object[0], getMockResourceMethod(parameters), mockResourceContext, null, getMockResourceMethodConfig(false)); Assert.assertEquals(results[0], null); }
static boolean explicitlyEc2Configured(AwsConfig awsConfig) { return !isNullOrEmptyAfterTrim(awsConfig.getHostHeader()) && awsConfig.getHostHeader().startsWith("ec2"); }
@Test public void explicitlyEc2Configured() { assertTrue(AwsClientConfigurator.explicitlyEc2Configured(AwsConfig.builder().setHostHeader("ec2").build())); assertTrue(AwsClientConfigurator.explicitlyEc2Configured( AwsConfig.builder().setHostHeader("ec2.us-east-1.amazonaws.com").build())); assertFalse(AwsClientConfigurator.explicitlyEc2Configured( AwsConfig.builder().setHostHeader("ecs.us-east-1.amazonaws.com").build())); assertFalse(AwsClientConfigurator.explicitlyEc2Configured(AwsConfig.builder().build())); }
@Override public int compare(ChronoZonedDateTime<?> date1, ChronoZonedDateTime<?> date2) { return ChronoZonedDateTime.timeLineOrder().compare(date1, date2); }
@Test void should_disregard_time_zone_difference() { ZonedDateTime now = ZonedDateTime.now(); ZonedDateTime inParis = now.withZoneSameInstant(ZoneId.of("Europe/Paris")); ZonedDateTime inNewYork = now.withZoneSameInstant(ZoneId.of("America/New_York")); assertThat(inParis.compareTo(inNewYork)).as("Built-in comparison should report that they differ").isNotZero(); assertThat(comparator.compare(inParis, inNewYork)).isZero(); }
public static long convertBytesToLong(byte[] bytes) { byte[] paddedBytes = paddingTo8Byte(bytes); long temp = 0L; for (int i = 7; i >= 0; i--) { temp = temp | (((long) paddedBytes[i] & 0xff) << (7 - i) * 8); } return temp; }
@Test public void testConvertBytesToLong() { long[] tests = new long[] {Long.MIN_VALUE, -1L, 0, 1L, Long.MAX_VALUE}; for (int i = 0; i < tests.length; i++) { assertEquals(BinaryUtil.convertBytesToLong(convertLongToBytes(tests[i])), tests[i]); } }
public static JibContainerBuilder toJibContainerBuilder( Path projectRoot, Path buildFilePath, Build buildCommandOptions, CommonCliOptions commonCliOptions, ConsoleLogger logger) throws InvalidImageReferenceException, IOException { BuildFileSpec buildFile = toBuildFileSpec(buildFilePath, buildCommandOptions.getTemplateParameters()); Optional<BaseImageSpec> baseImageSpec = buildFile.getFrom(); JibContainerBuilder containerBuilder = baseImageSpec.isPresent() ? createJibContainerBuilder(baseImageSpec.get(), commonCliOptions, logger) : Jib.fromScratch(); buildFile.getCreationTime().ifPresent(containerBuilder::setCreationTime); buildFile.getFormat().ifPresent(containerBuilder::setFormat); containerBuilder.setEnvironment(buildFile.getEnvironment()); containerBuilder.setLabels(buildFile.getLabels()); containerBuilder.setVolumes(buildFile.getVolumes()); containerBuilder.setExposedPorts(buildFile.getExposedPorts()); buildFile.getUser().ifPresent(containerBuilder::setUser); buildFile.getWorkingDirectory().ifPresent(containerBuilder::setWorkingDirectory); buildFile.getEntrypoint().ifPresent(containerBuilder::setEntrypoint); buildFile.getCmd().ifPresent(containerBuilder::setProgramArguments); Optional<LayersSpec> layersSpec = buildFile.getLayers(); if (layersSpec.isPresent()) { containerBuilder.setFileEntriesLayers(Layers.toLayers(projectRoot, layersSpec.get())); } return containerBuilder; }
@Test public void testToJibContainerBuilder_requiredProperties() throws URISyntaxException, IOException, InvalidImageReferenceException { Path buildfile = Paths.get(Resources.getResource("buildfiles/projects/allDefaults/jib.yaml").toURI()); JibContainerBuilder jibContainerBuilder = BuildFiles.toJibContainerBuilder( buildfile.getParent(), buildfile, buildCli, commonCliOptions, consoleLogger); ContainerBuildPlan resolved = jibContainerBuilder.toContainerBuildPlan(); Assert.assertEquals("scratch", resolved.getBaseImage()); Assert.assertEquals(ImmutableSet.of(new Platform("amd64", "linux")), resolved.getPlatforms()); Assert.assertEquals(Instant.EPOCH, resolved.getCreationTime()); Assert.assertEquals(ImageFormat.Docker, resolved.getFormat()); Assert.assertTrue(resolved.getEnvironment().isEmpty()); Assert.assertTrue(resolved.getLabels().isEmpty()); Assert.assertTrue(resolved.getVolumes().isEmpty()); Assert.assertTrue(resolved.getExposedPorts().isEmpty()); Assert.assertNull(resolved.getUser()); Assert.assertNull(resolved.getWorkingDirectory()); Assert.assertNull(resolved.getEntrypoint()); Assert.assertTrue(resolved.getLayers().isEmpty()); }
static List<byte[]> readPayloadFile(String payloadFilePath, String payloadDelimiter) throws IOException { List<byte[]> payloadByteList = new ArrayList<>(); if (payloadFilePath != null) { Path path = Paths.get(payloadFilePath); System.out.println("Reading payloads from: " + path.toAbsolutePath()); if (Files.notExists(path) || Files.size(path) == 0) { throw new IllegalArgumentException("File does not exist or empty file provided."); } String[] payloadList = new String(Files.readAllBytes(path), StandardCharsets.UTF_8).split(payloadDelimiter); System.out.println("Number of messages read: " + payloadList.length); for (String payload : payloadList) { payloadByteList.add(payload.getBytes(StandardCharsets.UTF_8)); } } return payloadByteList; }
@Test public void testReadPayloadFile() throws Exception { File payloadFile = createTempFile("Hello\nKafka"); String payloadFilePath = payloadFile.getAbsolutePath(); String payloadDelimiter = "\n"; List<byte[]> payloadByteList = ProducerPerformance.readPayloadFile(payloadFilePath, payloadDelimiter); assertEquals(2, payloadByteList.size()); assertEquals("Hello", new String(payloadByteList.get(0))); assertEquals("Kafka", new String(payloadByteList.get(1))); Utils.delete(payloadFile); }
@Override public CompletableFuture<ClusterInfo> getBrokerClusterInfo(String address, long timeoutMillis) { CompletableFuture<ClusterInfo> future = new CompletableFuture<>(); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.GET_BROKER_CLUSTER_INFO, null); remotingClient.invoke(address, request, timeoutMillis).thenAccept(response -> { if (response.getCode() == ResponseCode.SUCCESS) { ClusterInfo clusterInfo = ClusterInfo.decode(response.getBody(), ClusterInfo.class); future.complete(clusterInfo); } else { log.warn("getBrokerClusterInfo getResponseCommand failed, {} {}", response.getCode(), response.getRemark()); future.completeExceptionally(new MQClientException(response.getCode(), response.getRemark())); } }); return future; }
@Test public void assertGetBrokerClusterInfoWithSuccess() throws Exception { ClusterInfo responseBody = new ClusterInfo(); setResponseSuccess(RemotingSerializable.encode(responseBody)); CompletableFuture<ClusterInfo> actual = mqClientAdminImpl.getBrokerClusterInfo(defaultBrokerAddr, defaultTimeout); ClusterInfo result = actual.get(); assertNotNull(result); }
@Override public NodeId get() { return new SimpleNodeId(readOrGenerate(filename)); }
@Test void testNonexistentFile() throws IOException { final Path nodeIdPath = tempDir.resolve(NODE_ID_FILENAME); final String filename = nodeIdPath.toAbsolutePath().toString(); final FilePersistedNodeIdProvider provider = new FilePersistedNodeIdProvider(filename); // first let the logic generate and persist a new ID final NodeId nodeId = provider.get(); final String generatedNodeId = nodeId.getNodeId(); Assertions.assertThat(generatedNodeId).isNotBlank(); // verify that content of the file is the same as the returned ID Assertions.assertThat(Files.readString(Path.of(filename))).isEqualTo(generatedNodeId); // now let's start again, but with a file that already contains an ID final FilePersistedNodeIdProvider anotherProvider = new FilePersistedNodeIdProvider(filename); Assertions.assertThat(anotherProvider.get().getNodeId()).isEqualTo(generatedNodeId); }
@ApiOperation(value = "Delete a comment on a historic process instance", tags = { "History Process" }, code = 204) @ApiResponses(value = { @ApiResponse(code = 204, message = "Indicates the historic process instance and comment were found and the comment is deleted. Response body is left empty intentionally."), @ApiResponse(code = 404, message = "Indicates the requested historic process instance was not found or the historic process instance does not have a comment with the given ID.") }) @DeleteMapping(value = "/history/historic-process-instances/{processInstanceId}/comments/{commentId}") @ResponseStatus(HttpStatus.NO_CONTENT) public void deleteComment(@ApiParam(name = "processInstanceId") @PathVariable("processInstanceId") String processInstanceId, @ApiParam(name = "commentId") @PathVariable("commentId") String commentId) { HistoricProcessInstance instance = getHistoricProcessInstanceFromRequest(processInstanceId); Comment comment = taskService.getComment(commentId); if (comment == null || comment.getProcessInstanceId() == null || !comment.getProcessInstanceId().equals(instance.getId())) { throw new FlowableObjectNotFoundException("Process instance '" + instance.getId() + "' does not have a comment with id '" + commentId + "'.", Comment.class); } taskService.deleteComment(commentId); }
@Test @Deployment(resources = { "org/flowable/rest/service/api/repository/oneTaskProcess.bpmn20.xml" }) public void testGetComments() throws Exception { ProcessInstance pi = null; try { pi = runtimeService.startProcessInstanceByKey("oneTaskProcess"); // Add a comment as "kermit" identityService.setAuthenticatedUserId("kermit"); Comment comment = taskService.addComment(null, pi.getId(), "This is a comment..."); identityService.setAuthenticatedUserId(null); CloseableHttpResponse response = executeRequest( new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_HISTORIC_PROCESS_INSTANCE_COMMENT_COLLECTION, pi.getId())), HttpStatus.SC_OK); assertThat(response.getStatusLine().getStatusCode()).isEqualTo(HttpStatus.SC_OK); JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent()); closeResponse(response); assertThat(responseNode).isNotNull(); assertThatJson(responseNode) .when(Option.IGNORING_EXTRA_FIELDS) .isEqualTo(" [{" + " id: '" + comment.getId() + "'," + " author: 'kermit'," + " message: 'This is a comment...'," + " taskId: null," + " taskUrl: null," + " processInstanceId: '" + pi.getProcessInstanceId() + "'," + " processInstanceUrl: '" + SERVER_URL_PREFIX + RestUrls .createRelativeResourceUrl(RestUrls.URL_HISTORIC_PROCESS_INSTANCE_COMMENT, pi.getId(), comment.getId()) + "'" + "}]"); // Test with unexisting task closeResponse( executeRequest(new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_TASK_COMMENT_COLLECTION, "unexistingtask")), HttpStatus.SC_NOT_FOUND)); } finally { if (pi != null) { List<Comment> comments = taskService.getProcessInstanceComments(pi.getId()); for (Comment c : comments) { taskService.deleteComment(c.getId()); } } } }
static List<byte[]> readCertificates(Path path) throws CertificateException { final byte[] bytes; try { bytes = Files.readAllBytes(path); } catch (IOException e) { throw new CertificateException("Couldn't read certificates from file: " + path, e); } final String content = new String(bytes, StandardCharsets.US_ASCII); final Matcher m = CERT_PATTERN.matcher(content); final List<byte[]> certs = new ArrayList<>(); int start = 0; while (m.find(start)) { final String s = m.group(1); byte[] der = Base64.getDecoder().decode(CharMatcher.breakingWhitespace().removeFrom(s)); certs.add(der); start = m.end(); } if (certs.isEmpty()) { throw new CertificateException("No certificates found in file: " + path); } return certs; }
@Test(expected = CertificateException.class) public void readCertificatesFailsOnDirectory() throws Exception { final File folder = temporaryFolder.newFolder(); PemReader.readCertificates(folder.toPath()); }
@Override public Object getValue(final int columnIndex, final Class<?> type) throws SQLException { return queryResult.getValue(columnIndex, type); }
@Test void assertGetValue() throws SQLException { QueryResult queryResult = mock(QueryResult.class); when(queryResult.getValue(1, Object.class)).thenReturn("1"); TransparentMergedResult actual = new TransparentMergedResult(queryResult); assertThat(actual.getValue(1, Object.class).toString(), is("1")); }
public byte[] readAll() throws IOException { if (pos == 0 && count == buf.length) { pos = count; return buf; } byte[] ret = new byte[count - pos]; super.read(ret); return ret; }
@Test public void testReadAll() throws IOException { assertEquals(TEST_DATA.length, exposedStream.available()); byte[] data = exposedStream.readAll(); assertArrayEquals(TEST_DATA, data); assertSame(TEST_DATA, data); assertEquals(0, exposedStream.available()); }
public Schema toCamelCase() { return this.getFields().stream() .map( field -> { FieldType innerType = field.getType(); if (innerType.getRowSchema() != null) { Schema innerCamelCaseSchema = innerType.getRowSchema().toCamelCase(); innerType = innerType.toBuilder().setRowSchema(innerCamelCaseSchema).build(); field = field.toBuilder().setType(innerType).build(); } return field .toBuilder() .setName(CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL, field.getName())) .build(); }) .collect(toSchema()); }
@Test public void testToCamelCase() { Schema innerSchema = Schema.builder() .addStringField("my_first_nested_string_field") .addStringField("my_second_nested_string_field") .build(); Schema schema = Schema.builder() .addStringField("my_first_string_field") .addStringField("my_second_string_field") .addRowField("my_row_field", innerSchema) .build(); Schema expectedInnerCamelCaseSchema = Schema.builder() .addStringField("myFirstNestedStringField") .addStringField("mySecondNestedStringField") .build(); Schema expectedCamelCaseSchema = Schema.builder() .addStringField("myFirstStringField") .addStringField("mySecondStringField") .addRowField("myRowField", expectedInnerCamelCaseSchema) .build(); assertTrue(schema.toCamelCase().hasField("myRowField")); assertEquals( expectedInnerCamelCaseSchema, schema.toCamelCase().getField("myRowField").getType().getRowSchema()); assertEquals(expectedCamelCaseSchema, schema.toCamelCase()); }
@Override public CompletableFuture<Void> updateOrCreateTopic(String address, CreateTopicRequestHeader requestHeader, long timeoutMillis) { CompletableFuture<Void> future = new CompletableFuture<>(); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.UPDATE_AND_CREATE_TOPIC, requestHeader); remotingClient.invoke(address, request, timeoutMillis).thenAccept(response -> { if (response.getCode() == ResponseCode.SUCCESS) { future.complete(null); } else { log.warn("updateOrCreateTopic getResponseCommand failed, {} {}, header={}", response.getCode(), response.getRemark(), requestHeader); future.completeExceptionally(new MQClientException(response.getCode(), response.getRemark())); } }); return future; }
@Test public void assertUpdateOrCreateTopicWithSuccess() throws Exception { setResponseSuccess(null); CreateTopicRequestHeader requestHeader = mock(CreateTopicRequestHeader.class); CompletableFuture<Void> actual = mqClientAdminImpl.updateOrCreateTopic(defaultBrokerAddr, requestHeader, defaultTimeout); assertNull(actual.get()); }
@Override public void validateSmsCode(SmsCodeValidateReqDTO reqDTO) { validateSmsCode0(reqDTO.getMobile(), reqDTO.getCode(), reqDTO.getScene()); }
@Test public void validateSmsCode_expired() { // 准备参数 SmsCodeValidateReqDTO reqDTO = randomPojo(SmsCodeValidateReqDTO.class, o -> { o.setMobile("15601691300"); o.setScene(randomEle(SmsSceneEnum.values()).getScene()); }); // mock 数据 SqlConstants.init(DbType.MYSQL); smsCodeMapper.insert(randomPojo(SmsCodeDO.class, o -> o.setMobile(reqDTO.getMobile()) .setScene(reqDTO.getScene()).setCode(reqDTO.getCode()).setUsed(false) .setCreateTime(LocalDateTime.now().minusMinutes(6)))); // 调用,并断言异常 assertServiceException(() -> smsCodeService.validateSmsCode(reqDTO), SMS_CODE_EXPIRED); }
@Override public void encode(Event event, OutputStream output) throws IOException { String outputString = (format == null ? JSON_MAPPER.writeValueAsString(event.getData()) : StringInterpolation.evaluate(event, format)) + delimiter; output.write(outputString.getBytes(charset)); }
@Test public void testEncodeWithCharset() throws IOException { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); byte[] rightSingleQuoteInUtf8 = {(byte) 0xE2, (byte) 0x80, (byte) 0x99}; String rightSingleQuote = new String(rightSingleQuoteInUtf8, Charset.forName("UTF-8")); // encode with cp-1252 Map<String, Object> config = new HashMap<>(); config.put("charset", "cp1252"); config.put("format", "%{message}"); config.put("delimiter", ""); Event e1 = new Event(Collections.singletonMap("message", rightSingleQuote)); Line cp1252encoder = new Line(new ConfigurationImpl(config), new TestContext()); byte[] rightSingleQuoteInCp1252 = {(byte) 0x92}; cp1252encoder.encode(e1, outputStream); byte[] resultBytes = outputStream.toByteArray(); Assert.assertArrayEquals(rightSingleQuoteInCp1252, resultBytes); }
public static void returnCompressor(Compressor compressor) { if (compressor == null) { return; } // if the compressor can't be reused, don't pool it. if (compressor.getClass().isAnnotationPresent(DoNotPool.class)) { compressor.end(); return; } compressor.reset(); if (payback(compressorPool, compressor)) { updateLeaseCount(compressorCounts, compressor, -1); } }
@Test(timeout = 10000) public void testDoNotPoolCompressorNotUseableAfterReturn() throws Exception { final GzipCodec gzipCodec = new GzipCodec(); gzipCodec.setConf(new Configuration()); // BuiltInGzipCompressor is an explicit example of a Compressor with the @DoNotPool annotation final Compressor compressor = new BuiltInGzipCompressor(new Configuration()); CodecPool.returnCompressor(compressor); final CompressionOutputStream outputStream = gzipCodec.createOutputStream(new ByteArrayOutputStream(), compressor); LambdaTestUtils.intercept( AlreadyClosedException.class, "compress called on closed compressor", "Compressor from Codec with @DoNotPool should not be " + "useable after returning to CodecPool", () -> outputStream.write(1)); }
@Override public Stadium findById(final Long id) { return stadiumRepository.findById(id); }
@Test void findById는_존재하지_않는_경기장_요청에_예외를_반환한다() { // given final Long stadiumId = -999L; // when // then assertThatThrownBy(() -> stadiumReadService.findById(stadiumId)) .isInstanceOf(StadiumNotFoundException.class); }
@SuppressWarnings("unchecked") public static SelType box(Object o) { if (o == null) { // returned null from a method, representing void or object return SelType.NULL; } SelTypes type = fromClazzToSelType(o.getClass()); switch (type) { case STRING: return SelString.of((String) o); case LONG: return SelLong.of(((Number) o).longValue()); case DOUBLE: return SelDouble.of(((Number) o).doubleValue()); case BOOLEAN: return SelBoolean.of((Boolean) o); case STRING_ARRAY: case LONG_ARRAY: case DOUBLE_ARRAY: case BOOLEAN_ARRAY: return SelArray.of(o, type); case MAP: return SelMap.of((Map<String, Object>) o); case DATETIME: return SelJodaDateTime.of((DateTime) o); case DATETIME_PROPERTY: return SelJodaDateTimeProperty.of((DateTime.Property) o); } throw new UnsupportedOperationException( "Not support to box an object " + o + " for type " + type.name()); }
@Test public void testBox() { Object[] testObjects = new Object[] { null, "abc", 1, 1L, true, new String[] {"foo", "bar"}, new Long[] {1L, 2L}, new long[] {1L, 2L}, new Integer[] {3, 4}, new int[] {5, 6, 7}, new Boolean[] {true, false}, new boolean[] {}, new HashMap(), new DateTime(DateTimeZone.UTC), new DateTime(DateTimeZone.UTC).dayOfWeek() }; String[] expectedResults = new String[] { "NULL: NULL", "STRING: abc", "LONG: 1", "LONG: 1", "BOOLEAN: true", "STRING_ARRAY: [foo, bar]", "LONG_ARRAY: [1, 2]", "LONG_ARRAY: [1, 2]", "LONG_ARRAY: [3, 4]", "LONG_ARRAY: [5, 6, 7]", "BOOLEAN_ARRAY: [true, false]", "BOOLEAN_ARRAY: []", "MAP: {}", "DATETIME: 1970-01-01T00:00:12.345Z", "DATETIME_PROPERTY: Property[dayOfWeek]" }; for (int i = 0; i < testObjects.length; ++i) { SelType res = SelTypeUtil.box(testObjects[i]); assertEquals(expectedResults[i], res.type() + ": " + res.toString()); } }
/**
 * Collects the patient IDs carried by the given attributes: the main patient ID plus
 * every entry of the Other Patient IDs Sequence, de-duplicated in encounter order.
 *
 * @param attrs the DICOM attributes to scan
 * @return the set of IDs; empty when no ID is present
 */
public static Set<IDWithIssuer> pidsOf(Attributes attrs) {
    IDWithIssuer mainPid = IDWithIssuer.pidOf(attrs);
    Sequence otherPids = attrs.getSequence(Tag.OtherPatientIDsSequence);
    if (otherPids == null || otherPids.isEmpty()) {
        // No secondary IDs: the result is exactly the main ID, if any.
        return mainPid == null ? Collections.emptySet() : Collections.singleton(mainPid);
    }
    // LinkedHashSet preserves insertion order; capacity sized to avoid rehashing.
    Set<IDWithIssuer> result = new LinkedHashSet<>((1 + otherPids.size()) << 1);
    if (mainPid != null) {
        result.add(mainPid);
    }
    for (Attributes item : otherPids) {
        addTo(IDWithIssuer.pidOf(item), result);
    }
    return result;
}
@Test
public void no_main_id_returns_all() {
    Attributes rootWithMainId = createIdWithNS(NS);
    rootWithMainId.newSequence(OtherPatientIDsSequence, 0);
    Sequence other = rootWithMainId.getSequence(OtherPatientIDsSequence);
    Attributes otherPatientId = otherPatientIds("other_ns");
    other.add(otherPatientId);

    Set<IDWithIssuer> all = IDWithIssuer.pidsOf(rootWithMainId);

    // Fixed: JUnit's assertEquals signature is (message, expected, actual); the
    // original call had expected and actual swapped, so a failure would have
    // reported "expected <actual-size> but was <2>" — misleading when debugging.
    assertEquals("Same pid but for different issuer should not be removed!",
            2, all.size());
}
/**
 * Wraps a single 32-bit value as a {@link BytesInput}.
 *
 * @param intValue the value to wrap
 * @return a BytesInput view of the value
 */
public static BytesInput fromInt(int intValue) {
    // Static factory over the dedicated single-int implementation.
    return new IntBytesInput(intValue);
}
@Test
public void testFromInt() throws IOException {
    int value = RANDOM.nextInt();

    // Expected wire form: the value written little-endian into four bytes.
    ByteArrayOutputStream expectedBytes = new ByteArrayOutputStream(4);
    BytesUtils.writeIntLittleEndian(expectedBytes, value);
    byte[] expected = expectedBytes.toByteArray();

    Supplier<BytesInput> factory = () -> BytesInput.fromInt(value);
    validate(expected, factory);
}
/**
 * Initialises this algorithm: keeps the raw configuration and resolves the
 * underlying cryptographic SPI implementation for this algorithm type.
 *
 * @param props algorithm configuration properties
 */
@Override
public void init(final Properties props) {
    this.props = props;
    // The SPI loader validates the properties (e.g. key material) and may throw.
    cryptographicAlgorithm = TypedSPILoader.getService(CryptographicAlgorithm.class, getType(), props);
}
@Test
void assertCreateNewInstanceWithEmptyAESKey() {
    // An empty aes-key-value must be rejected during initialisation.
    Properties emptyKeyProps = PropertiesBuilder.build(new Property("aes-key-value", ""));
    assertThrows(AlgorithmInitializationException.class, () -> encryptAlgorithm.init(emptyKeyProps));
}
/**
 * Encodes a trace context into a {@code TraceTransferBean}: fields are joined with
 * {@code CONTENT_SPLITOR} and each record is terminated by {@code FIELD_SPLITOR};
 * the message id and every message key are additionally collected as transfer keys.
 *
 * <p>The exact field order per trace type is a wire contract consumed by the trace
 * decoder — do not reorder the appends.
 *
 * @param ctx the trace context to encode; may be {@code null}
 * @return the transfer bean, or {@code null} when {@code ctx} is {@code null}
 */
public static TraceTransferBean encoderFromContextBean(TraceContext ctx) {
    if (ctx == null) {
        return null;
    }
    //build message trace of the transferring entity content bean
    TraceTransferBean transferBean = new TraceTransferBean();
    StringBuilder sb = new StringBuilder(256);
    switch (ctx.getTraceType()) {
        case Pub: {
            TraceBean bean = ctx.getTraceBeans().get(0);
            //append the content of context and traceBean to transferBean's TransData
            // Pub layout (14 fields): type, timestamp, region, group, topic, msgId,
            // tags, keys, storeHost, bodyLength, costTime, msgType, offsetMsgId, success.
            sb.append(ctx.getTraceType()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getTimeStamp()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getRegionId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getGroupName()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTopic()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getMsgId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTags()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getKeys()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getStoreHost()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getBodyLength()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getCostTime()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getMsgType().ordinal()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getOffsetMsgId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.isSuccess()).append(TraceConstants.FIELD_SPLITOR);
        }
        break;
        case SubBefore: {
            // One record per traced message.
            for (TraceBean bean : ctx.getTraceBeans()) {
                sb.append(ctx.getTraceType()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getTimeStamp()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getRegionId()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getGroupName()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getRequestId()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(bean.getMsgId()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(bean.getRetryTimes()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(bean.getKeys()).append(TraceConstants.FIELD_SPLITOR);
            }
        }
        break;
        case SubAfter: {
            for (TraceBean bean : ctx.getTraceBeans()) {
                sb.append(ctx.getTraceType()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getRequestId()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(bean.getMsgId()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getCostTime()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.isSuccess()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(bean.getKeys()).append(TraceConstants.CONTENT_SPLITOR)
                    .append(ctx.getContextCode()).append(TraceConstants.CONTENT_SPLITOR);
                // Timestamp and group are appended only off-cloud, keeping the
                // cloud-channel record shorter.
                if (!ctx.getAccessChannel().equals(AccessChannel.CLOUD)) {
                    sb.append(ctx.getTimeStamp()).append(TraceConstants.CONTENT_SPLITOR);
                    sb.append(ctx.getGroupName());
                }
                sb.append(TraceConstants.FIELD_SPLITOR);
            }
        }
        break;
        case EndTransaction: {
            TraceBean bean = ctx.getTraceBeans().get(0);
            // EndTransaction layout (13 fields): type, timestamp, region, group, topic,
            // msgId, tags, keys, storeHost, msgType, transactionId, transactionState,
            // fromTransactionCheck.
            sb.append(ctx.getTraceType()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getTimeStamp()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getRegionId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(ctx.getGroupName()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTopic()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getMsgId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTags()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getKeys()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getStoreHost()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getMsgType().ordinal()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTransactionId()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.getTransactionState().name()).append(TraceConstants.CONTENT_SPLITOR)
                .append(bean.isFromTransactionCheck()).append(TraceConstants.FIELD_SPLITOR);
        }
        break;
        default:
    }
    transferBean.setTransData(sb.toString());
    // Transfer keys: the message id plus every user key (space-separated in getKeys()).
    for (TraceBean bean : ctx.getTraceBeans()) {
        transferBean.getTransKey().add(bean.getMsgId());
        if (bean.getKeys() != null && bean.getKeys().length() > 0) {
            String[] keys = bean.getKeys().split(MessageConst.KEY_SEPARATOR);
            transferBean.getTransKey().addAll(Arrays.asList(keys));
        }
    }
    return transferBean;
}
@Test
public void testEndTrxTraceDataFormatTest() {
    // Build an EndTransaction trace context with one fully-populated bean.
    TraceContext context = new TraceContext();
    context.setTraceType(TraceType.EndTransaction);
    context.setGroupName("PID-test");
    context.setRegionId("DefaultRegion");
    context.setTimeStamp(time);

    TraceBean traceBean = new TraceBean();
    traceBean.setTopic("topic-test");
    traceBean.setKeys("Keys");
    traceBean.setTags("Tags");
    traceBean.setMsgId("AC1415116D1418B4AAC217FE1B4E0000");
    traceBean.setStoreHost("127.0.0.1:10911");
    traceBean.setMsgType(MessageType.Trans_msg_Commit);
    traceBean.setTransactionId("transactionId");
    traceBean.setTransactionState(LocalTransactionState.COMMIT_MESSAGE);
    traceBean.setFromTransactionCheck(false);

    List<TraceBean> beans = new ArrayList<>();
    beans.add(traceBean);
    context.setTraceBeans(beans);

    TraceTransferBean transferBean = TraceDataEncoder.encoderFromContextBean(context);
    String transData = transferBean.getTransData();
    Assert.assertNotNull(transData);

    // An EndTransaction record carries exactly 13 CONTENT_SPLITOR-separated fields.
    String[] fields = transData.split(String.valueOf(TraceConstants.CONTENT_SPLITOR));
    Assert.assertEquals(13, fields.length);
}
/**
 * Variant of {@code unorderedIndex(keyFunction, valueFunction)} that keeps the
 * stream elements themselves as the multimap values.
 *
 * @param keyFunction derives the multimap key from each element
 * @return a collector building an {@code ImmutableSetMultimap}
 */
public static <K, E> Collector<E, ImmutableSetMultimap.Builder<K, E>, ImmutableSetMultimap<K, E>> unorderedIndex(Function<? super E, K> keyFunction) {
    // Identity value function: each element is stored under its computed key.
    return unorderedIndex(keyFunction, Function.identity());
}
@Test
public void unorderedIndex_with_valueFunction_parallel_stream() {
    // Collecting a huge list in parallel must still index every distinct element.
    SetMultimap<String, String> indexed = HUGE_LIST.parallelStream().collect(unorderedIndex(identity(), identity()));
    assertThat(indexed.keySet()).isEqualTo(HUGE_SET);
}
/**
 * Convenience overload of {@code schemas(Class, boolean)} that passes {@code false}
 * for the second flag (semantics of the flag are defined by that overload).
 *
 * @param cls the class to generate schemas for
 * @return the generated schema map
 */
public <T> Map<String, Object> schemas(Class<? extends T> cls) {
    return this.schemas(cls, false);
}
@SuppressWarnings("unchecked")
@Test
void task() throws URISyntaxException {
    Helpers.runApplicationContext((applicationContext) -> {
        JsonSchemaGenerator generator = applicationContext.getBean(JsonSchemaGenerator.class);

        Map<String, Object> schema = generator.schemas(Task.class);

        var definitions = (Map<String, Map<String, Object>>) schema.get("definitions");
        var taskDefinition = definitions.get(Task.class.getName());
        // Task is polymorphic, so its schema must expose the subtype alternatives.
        Assertions.assertNotNull(taskDefinition.get("oneOf"));
    });
}
/**
 * Lists the generated-code column rows belonging to one table.
 *
 * @param tableId id of the codegen table
 * @return the columns of that table
 */
@Override
public List<CodegenColumnDO> getCodegenColumnListByTableId(Long tableId) {
    // Simple pass-through to the mapper query.
    return codegenColumnMapper.selectListByTableId(tableId);
}
@Test public void testGetCodegenColumnListByTableId() { // mock 数据 CodegenColumnDO column01 = randomPojo(CodegenColumnDO.class); codegenColumnMapper.insert(column01); CodegenColumnDO column02 = randomPojo(CodegenColumnDO.class); codegenColumnMapper.insert(column02); // 准备参数 Long tableId = column01.getTableId(); // 调用 List<CodegenColumnDO> result = codegenService.getCodegenColumnListByTableId(tableId); // 断言 assertEquals(1, result.size()); assertPojoEquals(column01, result.get(0)); }
/**
 * Maximum number of requests allowed per connection, read from client config
 * with the library default as fallback.
 *
 * @return the configured or default limit
 */
@Override
public int getMaxRequestsPerConnection() {
    return clientConfig.getPropertyAsInteger(MAX_REQUESTS_PER_CONNECTION, DEFAULT_MAX_REQUESTS_PER_CONNECTION);
}
@Test
void testGetMaxRequestsPerConnectionOverride() {
    // An explicit client-config value must win over the built-in default.
    clientConfig.set(MAX_REQUESTS_PER_CONNECTION, 2000);
    assertEquals(2000, connectionPoolConfig.getMaxRequestsPerConnection());
}
/**
 * Reconciles current cluster resources against the configured requirement:
 * idle-timed-out task managers and unused pending task managers are released as long
 * as the remaining resources still fulfil the requirement; otherwise they are kept,
 * and if resources are still short, new pending task managers are requested.
 *
 * @param taskManagerResourceInfoProvider view of registered and pending task managers
 * @return the release/allocate decisions bundled in a {@code ResourceReconcileResult}
 */
@Override
public ResourceReconcileResult tryReconcileClusterResources(
        TaskManagerResourceInfoProvider taskManagerResourceInfoProvider) {
    ResourceReconcileResult.Builder builder = ResourceReconcileResult.builder();

    // Partition registered task managers into "idle beyond the timeout" and the rest.
    List<TaskManagerInfo> taskManagersIdleTimeout = new ArrayList<>();
    List<TaskManagerInfo> taskManagersNonTimeout = new ArrayList<>();
    long currentTime = System.currentTimeMillis();
    taskManagerResourceInfoProvider
            .getRegisteredTaskManagers()
            .forEach(
                    taskManagerInfo -> {
                        if (taskManagerInfo.isIdle()
                                && currentTime - taskManagerInfo.getIdleSince()
                                        >= taskManagerTimeout.toMilliseconds()) {
                            taskManagersIdleTimeout.add(taskManagerInfo);
                        } else {
                            taskManagersNonTimeout.add(taskManagerInfo);
                        }
                    });

    // Partition pending task managers into unused and in-use (has slot allocations).
    List<PendingTaskManager> pendingTaskManagersNonUse = new ArrayList<>();
    List<PendingTaskManager> pendingTaskManagersInuse = new ArrayList<>();
    taskManagerResourceInfoProvider
            .getPendingTaskManagers()
            .forEach(
                    pendingTaskManager -> {
                        if (pendingTaskManager.getPendingSlotAllocationRecords().isEmpty()) {
                            pendingTaskManagersNonUse.add(pendingTaskManager);
                        } else {
                            pendingTaskManagersInuse.add(pendingTaskManager);
                        }
                    });

    // Running totals of the resources we have decided to keep.
    ResourceProfile resourcesToKeep = ResourceProfile.ZERO;
    ResourceProfile resourcesInTotal = ResourceProfile.ZERO;
    boolean resourceFulfilled = false;

    // check whether available resources of used (pending) task manager is enough.
    ResourceProfile resourcesAvailableOfNonIdle =
            getAvailableResourceOfTaskManagers(taskManagersNonTimeout);
    ResourceProfile resourcesInTotalOfNonIdle =
            getTotalResourceOfTaskManagers(taskManagersNonTimeout);
    resourcesToKeep = resourcesToKeep.merge(resourcesAvailableOfNonIdle);
    resourcesInTotal = resourcesInTotal.merge(resourcesInTotalOfNonIdle);
    if (isRequiredResourcesFulfilled(resourcesToKeep, resourcesInTotal)) {
        resourceFulfilled = true;
    } else {
        // Non-idle registered TMs alone are not enough; also count in-use pending TMs.
        ResourceProfile resourcesAvailableOfNonIdlePendingTaskManager =
                getAvailableResourceOfPendingTaskManagers(pendingTaskManagersInuse);
        ResourceProfile resourcesInTotalOfNonIdlePendingTaskManager =
                getTotalResourceOfPendingTaskManagers(pendingTaskManagersInuse);
        resourcesToKeep = resourcesToKeep.merge(resourcesAvailableOfNonIdlePendingTaskManager);
        resourcesInTotal = resourcesInTotal.merge(resourcesInTotalOfNonIdlePendingTaskManager);
    }

    // try reserve or release unused (pending) task managers
    for (TaskManagerInfo taskManagerInfo : taskManagersIdleTimeout) {
        if (resourceFulfilled
                || isRequiredResourcesFulfilled(resourcesToKeep, resourcesInTotal)) {
            resourceFulfilled = true;
            builder.addTaskManagerToRelease(taskManagerInfo);
        } else {
            // Still short on resources: reserve this idle TM instead of releasing it.
            resourcesToKeep = resourcesToKeep.merge(taskManagerInfo.getAvailableResource());
            resourcesInTotal = resourcesInTotal.merge(taskManagerInfo.getTotalResource());
        }
    }
    for (PendingTaskManager pendingTaskManager : pendingTaskManagersNonUse) {
        if (resourceFulfilled
                || isRequiredResourcesFulfilled(resourcesToKeep, resourcesInTotal)) {
            resourceFulfilled = true;
            builder.addPendingTaskManagerToRelease(pendingTaskManager);
        } else {
            resourcesToKeep = resourcesToKeep.merge(pendingTaskManager.getUnusedResource());
            resourcesInTotal =
                    resourcesInTotal.merge(pendingTaskManager.getTotalResourceProfile());
        }
    }

    if (!resourceFulfilled) {
        // fulfill required resources
        tryFulFillRequiredResourcesWithAction(
                resourcesToKeep, resourcesInTotal, builder::addPendingTaskManagerToAllocate);
    }
    return builder.build();
}
@Test void testRedundantResourceShouldBeReserved() { final TaskManagerInfo taskManagerInUse = new TestingTaskManagerInfo( DEFAULT_SLOT_RESOURCE.multiply(5), DEFAULT_SLOT_RESOURCE.multiply(2), DEFAULT_SLOT_RESOURCE); final TestingTaskManagerInfo taskManagerIdle = new TestingTaskManagerInfo( DEFAULT_SLOT_RESOURCE.multiply(5), DEFAULT_SLOT_RESOURCE.multiply(5), DEFAULT_SLOT_RESOURCE); taskManagerIdle.setIdleSince(System.currentTimeMillis() - 10); final PendingTaskManager pendingTaskManagerIdle = new PendingTaskManager(DEFAULT_SLOT_RESOURCE.multiply(5), NUM_OF_SLOTS); final TaskManagerResourceInfoProvider taskManagerResourceInfoProvider = TestingTaskManagerResourceInfoProvider.newBuilder() .setRegisteredTaskManagersSupplier( () -> Arrays.asList(taskManagerInUse, taskManagerIdle)) .setPendingTaskManagersSupplier( () -> Collections.singletonList(pendingTaskManagerIdle)) .build(); DefaultResourceAllocationStrategy strategy = createStrategy(1); ResourceReconcileResult result = strategy.tryReconcileClusterResources(taskManagerResourceInfoProvider); // pending task manager should release at first assertThat(result.getPendingTaskManagersToRelease()) .containsExactly(pendingTaskManagerIdle); // idle task manager should reserved for redundant assertThat(result.getTaskManagersToRelease()).isEmpty(); }
/**
 * Blocks until the given future completes, polling with no upper time bound.
 *
 * @param future the request future to wait on
 */
public void poll(RequestFuture<?> future) {
    // Re-check after every poll round; each round may block up to Long.MAX_VALUE ms.
    while (!future.isDone()) {
        poll(time.timer(Long.MAX_VALUE), future);
    }
}
@Test
public void blockOnlyForRetryBackoffIfNoInflightRequests() {
    long retryBackoffMs = 100L;
    NetworkClient networkClientMock = mock(NetworkClient.class);
    ConsumerNetworkClient client = new ConsumerNetworkClient(new LogContext(),
            networkClientMock, metadata, time, retryBackoffMs, 1000, Integer.MAX_VALUE);

    // With zero in-flight requests, poll should block for the retry backoff only.
    when(networkClientMock.inFlightRequestCount()).thenReturn(0);
    client.poll(time.timer(Long.MAX_VALUE), () -> true);
    verify(networkClientMock).poll(eq(retryBackoffMs), anyLong());
}
/**
 * Entry point of the fluent routing DSL.
 *
 * @return a fresh builder for composing a router function
 */
public static Builder route() {
    return new RouterFunctionBuilder();
}
@Test
void and() {
    HandlerFunction<ServerResponse> handler = request -> ServerResponse.ok().build();
    RouterFunction<ServerResponse> neverMatches = request -> Optional.empty();
    RouterFunction<ServerResponse> alwaysMatches = request -> Optional.of(handler);

    // The composition falls through the non-matching function to the matching one.
    RouterFunction<ServerResponse> composed = neverMatches.and(alwaysMatches);
    assertThat(composed).isNotNull();

    Optional<HandlerFunction<ServerResponse>> routed = composed.route(request);
    assertThat(routed).isPresent();
    assertThat(routed).contains(handler);
}
/**
 * Queries a single process definition by its code within a project.
 */
@Operation(summary = "queryProcessDefinitionByCode", description = "QUERY_PROCESS_DEFINITION_BY_CODE_NOTES")
@Parameters({
        @Parameter(name = "code", description = "PROCESS_DEFINITION_CODE", required = true, schema = @Schema(implementation = long.class, example = "123456789"))
})
@GetMapping(value = "/{code}")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_DETAIL_OF_PROCESS_DEFINITION_ERROR)
public Result queryProcessDefinitionByCode(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                           @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
                                           @PathVariable(value = "code", required = true) long code) {
    // Delegate to the service; the returned map carries status plus the definition payload.
    Map<String, Object> serviceResult =
            processDefinitionService.queryProcessDefinitionByCode(loginUser, projectCode, code);
    return returnDataList(serviceResult);
}
@Test
public void testQueryProcessDefinitionByCode() {
    String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
    long projectCode = 1L;
    long definitionCode = 1L;

    // Definition the mocked service hands back.
    ProcessDefinition definition = new ProcessDefinition();
    definition.setProjectCode(projectCode);
    definition.setDescription("desc test");
    definition.setCode(definitionCode);
    definition.setLocations(locations);
    definition.setName("dag_test");

    Map<String, Object> serviceResult = new HashMap<>();
    putMsg(serviceResult, Status.SUCCESS);
    serviceResult.put(Constants.DATA_LIST, definition);
    Mockito.when(processDefinitionService.queryProcessDefinitionByCode(user, projectCode, definitionCode))
            .thenReturn(serviceResult);

    Result response =
            processDefinitionController.queryProcessDefinitionByCode(user, projectCode, definitionCode);
    Assertions.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
public ImmutableMap<String, Pipeline> resolvePipelines(PipelineMetricRegistry pipelineMetricRegistry) { final Map<String, Rule> ruleNameMap = resolveRules(); // Read all pipelines and parse them final ImmutableMap.Builder<String, Pipeline> pipelineIdMap = ImmutableMap.builder(); try (final var pipelineStream = pipelineDaoSupplier.get()) { pipelineStream.forEach(pipelineDao -> { Pipeline pipeline; try { pipeline = ruleParser.parsePipeline(pipelineDao.id(), pipelineDao.source()); } catch (ParseException e) { LOG.warn("Ignoring non parseable pipeline <{}/{}> with errors <{}>", pipelineDao.title(), pipelineDao.id(), e.getErrors()); pipeline = Pipeline.empty("Failed to parse pipeline: " + pipelineDao.id()); } //noinspection ConstantConditions pipelineIdMap.put(pipelineDao.id(), resolvePipeline(pipelineMetricRegistry, pipeline, ruleNameMap)); }); } return pipelineIdMap.build(); }
@Test
void resolvePipelinesWithMissingRule() {
    final var pipelineRegistry =
            PipelineMetricRegistry.create(metricRegistry, Pipeline.class.getName(), Rule.class.getName());
    // No rules are registered at all, so "test-rule-1" cannot be resolved.
    final var pipelineResolver = new PipelineResolver(
            new PipelineRuleParser(new FunctionRegistry(Map.of())),
            PipelineResolverConfig.of(Stream::of, () -> Stream.of(pipeline1))
    );

    final var resolved = pipelineResolver.resolvePipelines(pipelineRegistry);

    assertThat(resolved).hasSize(1);
    assertThat(resolved.get("pipeline-1")).satisfies(pipeline -> {
        assertThat(pipeline.id()).isEqualTo("pipeline-1");
        assertThat(pipeline.name()).isEqualTo("test-pipeline-1");
        assertThat(pipeline.stages()).hasSize(1);
        assertThat(pipeline.stages().first()).satisfies(stage -> {
            assertThat(stage.stage()).isEqualTo(5);
            assertThat(stage.match()).isEqualTo(Stage.Match.EITHER);
            assertThat(stage.ruleReferences()).isEqualTo(List.of("test-rule-1"));
            assertThat(stage.getRules()).hasSize(1);
            // The unresolved reference becomes a placeholder rule.
            assertThat(stage.getRules().get(0)).satisfies(rule -> {
                assertThat(rule.id()).isNull();
                assertThat(rule.name())
                        .withFailMessage("Unresolved rules should have a static title")
                        .isEqualTo("Unresolved rule test-rule-1");
                assertThat(rule.when().evaluateBool(EvaluationContext.emptyContext()))
                        .withFailMessage("Unresolved rules should evaluate to false")
                        .isEqualTo(false);
            });
        });
    });
}
/**
 * Two-argument overload that forwards to the three-argument variant with a
 * {@code null} third argument (its semantics are defined by that overload —
 * presumably "no continuation/cursor"; confirm against the interface docs).
 *
 * @param value      the exact index key to match
 * @param descending whether to iterate in descending order
 * @return the batched record iterator
 */
@Override
public Iterator<IndexKeyEntries> getSqlRecordIteratorBatch(@Nonnull Comparable value, boolean descending) {
    return getSqlRecordIteratorBatch(value, descending, null);
}
@Test
public void getRecordsUsingExactValueDescending() {
    // Descending iteration over key == 1 must yield the reversed order.
    var expected = List.of(7, 4, 1);
    var iterator = store.getSqlRecordIteratorBatch(1, true);
    assertResult(expected, iterator);
}
/**
 * Removes every comment attached to the given (deleted) board.
 *
 * @param deletedBoardId id of the board whose comments are purged
 */
public void deleteAllCommentsByBoardId(final Long deletedBoardId) {
    commentRepository.deleteAllByBoardId(deletedBoardId);
}
@Test
void 댓글을_게시글_id로_모두_제거한다() {
    // given: one persisted comment
    Comment savedComment = commentRepository.save(댓글_생성());

    // when: purging by board id must not throw
    assertDoesNotThrow(() -> commentService.deleteAllCommentsByBoardId(savedComment.getBoardId()));

    // then: no comments remain for that board
    List<CommentSimpleResponse> remaining =
            commentQueryService.findAllCommentsByBoardId(savedComment.getBoardId(), 1L, null, 10);
    assertThat(remaining).isEmpty();
}
// Private: instances are created through the static factory (e.g. Namespace.of).
// NOTE(review): the array is stored without a defensive copy — callers of this
// constructor must not mutate the array afterwards; confirm the factory copies it.
private Namespace(String[] levels) {
    this.levels = levels;
}
@Test
public void testNamespace() {
    String[] parts = {"a", "b", "c", "d"};
    Namespace ns = Namespace.of(parts);

    assertThat(ns).isNotNull();
    assertThat(ns.levels()).hasSize(4);
    assertThat(ns).hasToString("a.b.c.d");
    // Each level must round-trip through the index accessor.
    for (int idx = 0; idx < parts.length; idx++) {
        assertThat(ns.level(idx)).isEqualTo(parts[idx]);
    }
}