focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Sets the executor used to process messages of this reliable topic.
 * May be {@code null}, in which case a default executor is used.
 *
 * @param executor the executor to use, or {@code null}
 * @return this configuration instance, for method chaining
 */
public ReliableTopicConfig setExecutor(Executor executor) {
    this.executor = executor;
    return this;
}
@Test
public void setExecutor() {
    ReliableTopicConfig underTest = new ReliableTopicConfig("foo");
    Executor customExecutor = mock(Executor.class);

    // Setting an executor stores it and exposes it via the getter.
    underTest.setExecutor(customExecutor);
    assertSame(customExecutor, underTest.getExecutor());

    // Setting null clears the executor again.
    underTest.setExecutor(null);
    assertNull(underTest.getExecutor());
}
/**
 * Maps the given request to an SQL query fragment. Offset-based page
 * requests with a positive offset use the offset-based mapper; every
 * other request falls back to the plain amount-request mapper.
 *
 * @param request the request to translate
 * @return the SQL query fragment for the request
 */
public String map(AmountRequest request) {
    if (request instanceof OffsetBasedPageRequest) {
        OffsetBasedPageRequest pageRequest = (OffsetBasedPageRequest) request;
        if (pageRequest.getOffset() > 0L) {
            return sqlOffsetBasedPageRequestMapper.mapToSqlQuery(pageRequest, jobTable);
        }
    }
    return sqlAmountRequestMapper.mapToSqlQuery(request, jobTable);
}
@Test
void sqlJobPageRequestMapperMapsOffsetBasedPageRequestSwitchesToAmountIfOffsetIs0() {
    // An offset of 0 must fall back to the plain amount-based query.
    OffsetBasedPageRequest requestWithZeroOffset = ascOnUpdatedAt(0, 10);

    String sqlFilter = jobPageRequestMapper.map(requestWithZeroOffset);

    assertThat(sqlFilter).isEqualTo(" ORDER BY updatedAt ASC LIMIT :limit");
}
/**
 * Computes the overall health of the cluster by merging the result of every
 * registered cluster health check over the shared node-health state.
 *
 * @return the merged cluster health together with the per-node healths
 * @throws IllegalStateException when clustering is disabled or the shared state is absent
 */
@Override
public ClusterHealth checkCluster() {
    checkState(!nodeInformation.isStandalone(), "Clustering is not enabled");
    checkState(sharedHealthState != null, "HealthState instance can't be null when clustering is enabled");

    Set<NodeHealth> nodeHealths = sharedHealthState.readAll();
    // Fold every check result into a single health, starting from GREEN.
    Health merged = clusterHealthChecks.stream()
        .map(check -> check.check(nodeHealths))
        .reduce(Health.GREEN, (left, right) -> HealthReducer.merge(left, right));
    return new ClusterHealth(merged, nodeHealths);
}
@Test
public void checkCluster_fails_with_ISE_in_standalone() {
    when(nodeInformation.isStandalone()).thenReturn(true);
    HealthCheckerImpl healthChecker = new HealthCheckerImpl(nodeInformation,
            new NodeHealthCheck[0], new ClusterHealthCheck[0], sharedHealthState);

    // Cluster health cannot be computed on a standalone node.
    assertThatThrownBy(healthChecker::checkCluster)
            .isInstanceOf(IllegalStateException.class)
            .hasMessageContaining("Clustering is not enabled");
}
/**
 * Selects creatures whose color equals the configured color.
 * Note: dereferences the creature's color, so a null color throws NPE,
 * matching the original behavior.
 */
@Override
public boolean test(Creature t) {
    final var creatureColor = t.getColor();
    return creatureColor.equals(color);
}
@Test
void testColor() {
    final var green = mock(Creature.class);
    when(green.getColor()).thenReturn(Color.GREEN);
    final var red = mock(Creature.class);
    when(red.getColor()).thenReturn(Color.RED);

    // A GREEN selector accepts green creatures and rejects others.
    final var selector = new ColorSelector(Color.GREEN);
    assertTrue(selector.test(green));
    assertFalse(selector.test(red));
}
/**
 * Broadcasts app-auth configuration changes to websocket clients as JSON.
 *
 * @param appAuthDataList the changed app-auth entries
 * @param eventType the kind of change (create/update/delete/...)
 */
@Override
public void onAppAuthChanged(final List<AppAuthData> appAuthDataList, final DataEventTypeEnum eventType) {
    // Wrap the payload in the websocket envelope before serializing.
    WebsocketData<AppAuthData> websocketData =
            new WebsocketData<>(ConfigGroupEnum.APP_AUTH.name(), eventType.name(), appAuthDataList);
    String json = GsonUtils.getInstance().toJson(websocketData);
    WebsocketCollector.send(json, eventType);
}
@Test
public void testOnAppAuthChanged() {
    // Expected JSON payload the listener should emit for an APP_AUTH UPDATE.
    String message = "{\"groupType\":\"APP_AUTH\",\"eventType\":\"UPDATE\",\"data\":[{\"appKey\":"
            + "\"D9FD95F496C9495DB5604778A13C3D08\",\"appSecret\":\"02D25048AA1E466F8920E68B08E668DE\","
            + "\"enabled\":true,\"paramDataList\":[{\"appName\":\"axiba\",\"appParam\":\"123\"}]"
            + ",\"pathDataList\":[{\"appName\":\"alibaba\",\"path\":\"/1\",\"enabled\":true}]}]}";
    MockedStatic.Verification verification = () -> WebsocketCollector.send(message, DataEventTypeEnum.UPDATE);
    // Stub the static WebsocketCollector.send, trigger the listener, then
    // verify it was invoked with the serialized payload.
    try (MockedStatic<WebsocketCollector> mockedStatic = mockStatic(WebsocketCollector.class)) {
        mockedStatic.when(verification).thenAnswer((Answer<Void>) invocation -> null);
        websocketDataChangedListener.onAppAuthChanged(appAuthDataList, DataEventTypeEnum.UPDATE);
        mockedStatic.verify(verification);
    }
}
/**
 * Creates a POJO upsert target, using the reflection class-name filter
 * configured on the node's SQL service.
 */
@Override
public UpsertTarget create(ExpressionEvalContext evalContext) {
    var sqlService = evalContext.getNodeEngine().getSqlService();
    var reflectionFilter = sqlService.getReflectionClassNameFilter();
    return new PojoUpsertTarget(className, typeNamesByPaths, reflectionFilter);
}
@Test
public void test_create() {
    PojoUpsertTargetDescriptor descriptor = new PojoUpsertTargetDescriptor(Object.class.getName(), emptyMap());
    // RETURNS_MOCKS makes nested calls (e.g. getSqlService()) return mocks too.
    ExpressionEvalContext evalContextMock = mock(Answers.RETURNS_MOCKS);
    NodeEngine nodeEngine = mock(Answers.RETURNS_MOCKS);
    when(evalContextMock.getNodeEngine()).thenReturn(nodeEngine);
    // when
    UpsertTarget target = descriptor.create(evalContextMock);
    // then
    assertThat(target).isInstanceOf(PojoUpsertTarget.class);
}
/**
 * Creates a router from the JSON request body.
 *
 * @param input JSON stream describing the router to create
 * @return 201 CREATED with a Location header pointing at the new router
 * @throws IOException when the input stream cannot be read
 */
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response createRouter(InputStream input) throws IOException {
    log.trace(String.format(MESSAGE_ROUTER, "CREATE"));

    String inputStr = IOUtils.toString(input, REST_UTF8);

    // On a non-active HA node, forward the request to the active node instead
    // of applying it locally. NOTE(review): the DEFAULT_ACTIVE_IP_ADDRESS
    // comparison presumably means "no HA peer configured" — confirm.
    if (!haService.isActive() && !DEFAULT_ACTIVE_IP_ADDRESS.equals(haService.getActiveIp())) {
        return syncPost(haService, ROUTERS, inputStr);
    }

    final NeutronRouter osRouter = (NeutronRouter) jsonToModelEntity(inputStr, NeutronRouter.class);

    adminService.createRouter(osRouter);
    // Location header: base URI + /routers/{id} of the created router.
    UriBuilder locationBuilder = uriInfo.getBaseUriBuilder()
            .path(ROUTERS)
            .path(osRouter.getId());

    return created(locationBuilder.build()).build();
}
@Test
public void testCreateRouterWithDuplicatedId() {
    // HA is active, so the request is handled locally rather than forwarded.
    expect(mockOpenstackHaService.isActive()).andReturn(true).anyTimes();
    replay(mockOpenstackHaService);
    // createRouter rejects the duplicated id with IllegalArgumentException,
    // which the resource is expected to map to HTTP 400.
    mockOpenstackRouterAdminService.createRouter(anyObject());
    expectLastCall().andThrow(new IllegalArgumentException());
    replay(mockOpenstackRouterAdminService);

    final WebTarget wt = target();
    InputStream jsonStream = OpenstackRouterWebResourceTest.class
            .getResourceAsStream("openstack-router.json");

    Response response = wt.path(PATH).request(MediaType.APPLICATION_JSON_TYPE)
            .post(Entity.json(jsonStream));
    final int status = response.getStatus();

    assertThat(status, is(400));
    verify(mockOpenstackRouterAdminService);
}
/**
 * Returns {@code true} when this version is strictly newer (sorts after)
 * the given version.
 *
 * @param otherVersion the version to compare against
 * @return true if this version is greater than {@code otherVersion}
 */
public boolean isNewerThan(JavaSpecVersion otherVersion) {
    final int comparison = this.compareTo(otherVersion);
    return comparison > 0;
}
@Test public void test11newerThan8() throws Exception { // Setup fixture. final JavaSpecVersion eight = new JavaSpecVersion( "1.8" ); final JavaSpecVersion eleven = new JavaSpecVersion( "11" ); // Execute system under test. final boolean result = eleven.isNewerThan( eight ); // Verify results. assertTrue( result ); }
/**
 * Serializes the span as a Zipkin-v2-style JSON object. Only fields that are
 * set are written; absent strings and zero timestamps are omitted entirely.
 */
@Override public void write(MutableSpan span, WriteBuffer b) {
    b.writeByte('{');
    // Tracks whether a field has been written yet so commas are placed correctly.
    boolean wroteField = false;
    if (span.traceId() != null) {
        wroteField = writeFieldBegin(b, "traceId", wroteField);
        b.writeByte('"');
        b.writeAscii(span.traceId());
        b.writeByte('"');
    }
    if (span.parentId() != null) {
        wroteField = writeFieldBegin(b, "parentId", wroteField);
        b.writeByte('"');
        b.writeAscii(span.parentId());
        b.writeByte('"');
    }
    if (span.id() != null) {
        wroteField = writeFieldBegin(b, "id", wroteField);
        b.writeByte('"');
        b.writeAscii(span.id());
        b.writeByte('"');
    }
    if (span.kind() != null) {
        wroteField = writeFieldBegin(b, "kind", wroteField);
        b.writeByte('"');
        b.writeAscii(span.kind().toString());
        b.writeByte('"');
    }
    if (span.name() != null) {
        wroteField = writeFieldBegin(b, "name", wroteField);
        b.writeByte('"');
        // Names are user-supplied, so they must be JSON-escaped.
        jsonEscape(span.name(), b);
        b.writeByte('"');
    }
    long startTimestamp = span.startTimestamp(), finishTimestamp = span.finishTimestamp();
    if (startTimestamp != 0L) {
        wroteField = writeFieldBegin(b, "timestamp", wroteField);
        b.writeAscii(startTimestamp);
        // Duration is only written when a start timestamp exists.
        if (finishTimestamp != 0L) {
            wroteField = writeFieldBegin(b, "duration", wroteField);
            b.writeAscii(finishTimestamp - startTimestamp);
        }
    }
    if (span.localServiceName() != null || span.localIp() != null) {
        wroteField = writeFieldBegin(b, "localEndpoint", wroteField);
        writeEndpoint(b, span.localServiceName(), span.localIp(), span.localPort());
    }
    if (span.remoteServiceName() != null || span.remoteIp() != null) {
        wroteField = writeFieldBegin(b, "remoteEndpoint", wroteField);
        writeEndpoint(b, span.remoteServiceName(), span.remoteIp(), span.remotePort());
    }
    int annotationLength = span.annotationCount();
    if (annotationLength > 0) {
        wroteField = writeFieldBegin(b, "annotations", wroteField);
        b.writeByte('[');
        for (int i = 0; i < annotationLength; ) {
            long timestamp = span.annotationTimestampAt(i);
            String value = span.annotationValueAt(i);
            writeAnnotation(timestamp, value, b);
            if (++i < annotationLength) b.writeByte(',');
        }
        b.writeByte(']');
    }
    int tagCount = span.tagCount();
    // The error tag is appended unless a tag with the same key already exists.
    String errorValue = errorTag.value(span.error(), null);
    String errorTagName = errorValue != null ? errorTag.key() : null;
    boolean writeError = errorTagName != null;
    if (tagCount > 0 || writeError) {
        wroteField = writeFieldBegin(b, "tags", wroteField);
        b.writeByte('{');
        for (int i = 0; i < tagCount; ) {
            String key = span.tagKeyAt(i);
            if (writeError && key.equals(errorTagName)) writeError = false;
            writeKeyValue(b, key, span.tagValueAt(i));
            if (++i < tagCount) b.writeByte(',');
        }
        if (writeError) {
            if (tagCount > 0) b.writeByte(',');
            writeKeyValue(b, errorTagName, errorValue);
        }
        b.writeByte('}');
    }
    if (Boolean.TRUE.equals(span.debug())) {
        wroteField = writeFieldBegin(b, "debug", wroteField);
        b.writeAscii("true");
    }
    if (Boolean.TRUE.equals(span.shared())) {
        // Last field: the returned comma-state flag is intentionally unused.
        writeFieldBegin(b, "shared", wroteField);
        b.writeAscii("true");
    }
    b.writeByte('}');
}
@Test void missingFields_testCases() { jsonWriter.write(MutableSpanTest.PERMUTATIONS.get(0).get(), buffer); assertThat(buffer.toString()).isEqualTo("{}"); // check for simple bugs for (int i = 1, length = MutableSpanTest.PERMUTATIONS.size(); i < length; i++) { buffer.pos = 0; MutableSpan span = MutableSpanTest.PERMUTATIONS.get(i).get(); jsonWriter.write(span, buffer); assertThat(buffer.toString()) .doesNotContain("null") .doesNotContain(":0"); } }
@Restricted(NoExternalUse.class) public static String getHexOfSHA256DigestOf(byte[] input) throws IOException { //get hex string of sha 256 of payload byte[] payloadDigest = Util.getSHA256DigestOf(input); return (payloadDigest != null) ? Util.toHexString(payloadDigest) : null; }
@Test
public void testGetHexOfSHA256DigestOf() throws IOException {
    byte[] input = new byte[] {12, 34, 16};

    String hex = Util.getHexOfSHA256DigestOf(input);

    // assertEquals takes (expected, actual); the original call had the
    // arguments swapped, which yields misleading failure messages.
    assertEquals("134fefbd329986726407a5208107ef07c9e33da779f5068bff191733268fe997", hex);
}
/**
 * Returns the number of available one-time prekeys (EC and post-quantum)
 * for the authenticated device, fetching both counts concurrently.
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Operation(summary = "Get prekey count", description = "Gets the number of one-time prekeys uploaded for this device and still available")
@ApiResponse(responseCode = "200", description = "Body contains the number of available one-time prekeys for the device.", useReturnTypeSchema = true)
@ApiResponse(responseCode = "401", description = "Account authentication check failed.")
public CompletableFuture<PreKeyCount> getStatus(@ReadOnly @Auth final AuthenticatedDevice auth,
    @QueryParam("identity") @DefaultValue("aci") final IdentityType identityType) {

  // Kick off the EC count first, then combine with the PQ count once both resolve.
  return keysManager
      .getEcCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId())
      .thenCombine(
          keysManager.getPqCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId()),
          PreKeyCount::new);
}
@Test
void putKeysPqTestV2() {
    // Fixture: one EC one-time prekey, one signed EC prekey, one PQ one-time
    // prekey and one PQ last-resort prekey, all signed with the valid identity.
    final ECPreKey preKey = KeysHelper.ecPreKey(31337);
    final ECSignedPreKey signedPreKey = KeysHelper.signedECPreKey(31338, AuthHelper.VALID_IDENTITY_KEY_PAIR);
    final KEMSignedPreKey pqPreKey = KeysHelper.signedKEMPreKey(31339, AuthHelper.VALID_IDENTITY_KEY_PAIR);
    final KEMSignedPreKey pqLastResortPreKey = KeysHelper.signedKEMPreKey(31340, AuthHelper.VALID_IDENTITY_KEY_PAIR);
    final SetKeysRequest setKeysRequest = new SetKeysRequest(List.of(preKey), signedPreKey, List.of(pqPreKey), pqLastResortPreKey);
    Response response = resources.getJerseyTest()
            .target("/v2/keys")
            .request()
            .header("Authorization", AuthHelper.getAuthHeader(AuthHelper.VALID_UUID, AuthHelper.VALID_PASSWORD))
            .put(Entity.entity(setKeysRequest, MediaType.APPLICATION_JSON_TYPE));
    assertThat(response.getStatus()).isEqualTo(204);
    // Each key category must be stored through its dedicated KeysManager call.
    ArgumentCaptor<List<ECPreKey>> ecCaptor = ArgumentCaptor.forClass(List.class);
    ArgumentCaptor<List<KEMSignedPreKey>> pqCaptor = ArgumentCaptor.forClass(List.class);
    verify(KEYS).storeEcOneTimePreKeys(eq(AuthHelper.VALID_UUID), eq(SAMPLE_DEVICE_ID), ecCaptor.capture());
    verify(KEYS).storeKemOneTimePreKeys(eq(AuthHelper.VALID_UUID), eq(SAMPLE_DEVICE_ID), pqCaptor.capture());
    verify(KEYS).storePqLastResort(AuthHelper.VALID_UUID, SAMPLE_DEVICE_ID, pqLastResortPreKey);
    assertThat(ecCaptor.getValue()).containsExactly(preKey);
    assertThat(pqCaptor.getValue()).containsExactly(pqPreKey);
    verify(KEYS).storeEcSignedPreKeys(AuthHelper.VALID_UUID, AuthHelper.VALID_DEVICE.getId(), signedPreKey);
}
@Override public void calculate(TradePriceCalculateReqBO param, TradePriceCalculateRespBO result) { // 0. 只有【普通】订单,才计算该优惠 if (ObjectUtil.notEqual(result.getType(), TradeOrderTypeEnum.NORMAL.getType())) { return; } // 获得 SKU 对应的满减送活动 List<RewardActivityMatchRespDTO> rewardActivities = rewardActivityApi.getMatchRewardActivityList( convertSet(result.getItems(), TradePriceCalculateRespBO.OrderItem::getSpuId)); if (CollUtil.isEmpty(rewardActivities)) { return; } // 处理每个满减送活动 rewardActivities.forEach(rewardActivity -> calculate(param, result, rewardActivity)); }
@Test
public void testCalculate_match() {
    // Prepare the request: three selected items across three SKUs.
    TradePriceCalculateReqBO param = new TradePriceCalculateReqBO()
            .setItems(asList(
                    new TradePriceCalculateReqBO.Item().setSkuId(10L).setCount(2).setSelected(true), // matches activity 1
                    new TradePriceCalculateReqBO.Item().setSkuId(20L).setCount(3).setSelected(true), // matches activity 1
                    new TradePriceCalculateReqBO.Item().setSkuId(30L).setCount(4).setSelected(true) // matches activity 2
            ));
    TradePriceCalculateRespBO result = new TradePriceCalculateRespBO()
            .setType(TradeOrderTypeEnum.NORMAL.getType())
            .setPrice(new TradePriceCalculateRespBO.Price())
            .setPromotions(new ArrayList<>())
            .setItems(asList(
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(10L).setCount(2).setSelected(true)
                            .setPrice(100).setSpuId(1L),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(20L).setCount(3).setSelected(true)
                            .setPrice(50).setSpuId(2L),
                    new TradePriceCalculateRespBO.OrderItem().setSkuId(30L).setCount(4).setSelected(true)
                            .setPrice(30).setSpuId(3L)
            ));
    // Make sure all price fields are initialized before calculating.
    TradePriceCalculatorHelper.recountPayPrice(result.getItems());
    TradePriceCalculatorHelper.recountAllPrice(result);
    // Mock the reward-activity lookup: activity 1000 is price-based (spend 200
    // save 70), activity 2000 is count-based with three tiers.
    when(rewardActivityApi.getMatchRewardActivityList(eq(asSet(1L, 2L, 3L)))).thenReturn(asList(
            randomPojo(RewardActivityMatchRespDTO.class, o -> o.setId(1000L).setName("活动 1000 号")
                    .setSpuIds(asList(1L, 2L)).setConditionType(PromotionConditionTypeEnum.PRICE.getType())
                    .setRules(singletonList(new RewardActivityMatchRespDTO.Rule().setLimit(200).setDiscountPrice(70)))),
            randomPojo(RewardActivityMatchRespDTO.class, o -> o.setId(2000L).setName("活动 2000 号")
                    .setSpuIds(singletonList(3L)).setConditionType(PromotionConditionTypeEnum.COUNT.getType())
                    .setRules(asList(new RewardActivityMatchRespDTO.Rule().setLimit(1).setDiscountPrice(10),
                            new RewardActivityMatchRespDTO.Rule().setLimit(2).setDiscountPrice(60), // best satisfiable tier, since count is 4
                            new RewardActivityMatchRespDTO.Rule().setLimit(10).setDiscountPrice(100))))
    ));
    // Invoke the calculator under test.
    tradeRewardActivityPriceCalculator.calculate(param, result);
    // Assert the order-level price summary.
    TradePriceCalculateRespBO.Price price = result.getPrice();
    assertEquals(price.getTotalPrice(), 470);
    assertEquals(price.getDiscountPrice(), 130);
    assertEquals(price.getPointPrice(), 0);
    assertEquals(price.getDeliveryPrice(), 0);
    assertEquals(price.getCouponPrice(), 0);
    assertEquals(price.getPayPrice(), 340);
    assertNull(result.getCouponId());
    // Assert SKU 1.
    assertEquals(result.getItems().size(), 3);
    TradePriceCalculateRespBO.OrderItem orderItem01 = result.getItems().get(0);
    assertEquals(orderItem01.getSkuId(), 10L);
    assertEquals(orderItem01.getCount(), 2);
    assertEquals(orderItem01.getPrice(), 100);
    assertEquals(orderItem01.getDiscountPrice(), 40);
    assertEquals(orderItem01.getDeliveryPrice(), 0);
    assertEquals(orderItem01.getCouponPrice(), 0);
    assertEquals(orderItem01.getPointPrice(), 0);
    assertEquals(orderItem01.getPayPrice(), 160);
    // Assert SKU 2.
    TradePriceCalculateRespBO.OrderItem orderItem02 = result.getItems().get(1);
    assertEquals(orderItem02.getSkuId(), 20L);
    assertEquals(orderItem02.getCount(), 3);
    assertEquals(orderItem02.getPrice(), 50);
    assertEquals(orderItem02.getDiscountPrice(), 30);
    assertEquals(orderItem02.getDeliveryPrice(), 0);
    assertEquals(orderItem02.getCouponPrice(), 0);
    assertEquals(orderItem02.getPointPrice(), 0);
    assertEquals(orderItem02.getPayPrice(), 120);
    // Assert SKU 3.
    TradePriceCalculateRespBO.OrderItem orderItem03 = result.getItems().get(2);
    assertEquals(orderItem03.getSkuId(), 30L);
    assertEquals(orderItem03.getCount(), 4);
    assertEquals(orderItem03.getPrice(), 30);
    assertEquals(orderItem03.getDiscountPrice(), 60);
    assertEquals(orderItem03.getDeliveryPrice(), 0);
    assertEquals(orderItem03.getCouponPrice(), 0);
    assertEquals(orderItem03.getPointPrice(), 0);
    assertEquals(orderItem03.getPayPrice(), 60);
    // Assert the first promotion (activity 1000).
    assertEquals(result.getPromotions().size(), 2);
    TradePriceCalculateRespBO.Promotion promotion01 = result.getPromotions().get(0);
    assertEquals(promotion01.getId(), 1000L);
    assertEquals(promotion01.getName(), "活动 1000 号");
    assertEquals(promotion01.getType(), PromotionTypeEnum.REWARD_ACTIVITY.getType());
    assertEquals(promotion01.getTotalPrice(), 350);
    assertEquals(promotion01.getDiscountPrice(), 70);
    assertTrue(promotion01.getMatch());
    assertEquals(promotion01.getDescription(), "满减送:省 0.70 元");
    assertEquals(promotion01.getItems().size(), 2);
    TradePriceCalculateRespBO.PromotionItem promotionItem011 = promotion01.getItems().get(0);
    assertEquals(promotionItem011.getSkuId(), 10L);
    assertEquals(promotionItem011.getTotalPrice(), 200);
    assertEquals(promotionItem011.getDiscountPrice(), 40);
    TradePriceCalculateRespBO.PromotionItem promotionItem012 = promotion01.getItems().get(1);
    assertEquals(promotionItem012.getSkuId(), 20L);
    assertEquals(promotionItem012.getTotalPrice(), 150);
    assertEquals(promotionItem012.getDiscountPrice(), 30);
    // Assert the second promotion (activity 2000).
    TradePriceCalculateRespBO.Promotion promotion02 = result.getPromotions().get(1);
    assertEquals(promotion02.getId(), 2000L);
    assertEquals(promotion02.getName(), "活动 2000 号");
    assertEquals(promotion02.getType(), PromotionTypeEnum.REWARD_ACTIVITY.getType());
    assertEquals(promotion02.getTotalPrice(), 120);
    assertEquals(promotion02.getDiscountPrice(), 60);
    assertTrue(promotion02.getMatch());
    assertEquals(promotion02.getDescription(), "满减送:省 0.60 元");
    TradePriceCalculateRespBO.PromotionItem promotionItem02 = promotion02.getItems().get(0);
    assertEquals(promotion02.getItems().size(), 1);
    assertEquals(promotionItem02.getSkuId(), 30L);
    assertEquals(promotionItem02.getTotalPrice(), 120);
    assertEquals(promotionItem02.getDiscountPrice(), 60);
}
/**
 * Builds a namespace name from its tenant and namespace components.
 *
 * @param tenant the tenant name
 * @param namespace the namespace within the tenant
 * @return the composed {@code tenant/namespace} name
 */
public static NamespaceName get(String tenant, String namespace) {
    // Validate the components before composing the fully-qualified name.
    validateNamespaceName(tenant, namespace);
    final String fullName = tenant + '/' + namespace;
    return get(fullName);
}
@Test(expectedExceptions = IllegalArgumentException.class)
public void namespace_invalidFormat() {
    // A name without a tenant component ("tenant/namespace") must be rejected.
    NamespaceName.get("namespace");
}
/**
 * Returns the id of the currently bound tenant, or {@code null} when no
 * tenant is set.
 */
@Override
public String getTenantId() {
    final Tenant currentTenant = tenantId.get();
    if (currentTenant == null) {
        return null;
    }
    return currentTenant.tenantId();
}
@Test
void getTenantId() {
    // Starts out unset.
    assertThat(underTest.getTenantId()).isNull();
    // Reflects the most recently set tenant.
    underTest.setTenantId("acme");
    assertThat(underTest.getTenantId()).isEqualTo("acme");
    underTest.setTenantId("muppets");
    assertThat(underTest.getTenantId()).isEqualTo("muppets");
    // Clearing resets to null.
    underTest.clearTenantId();
    assertThat(underTest.getTenantId()).isNull();
}
/**
 * Intercepts a jetty client request before it is sent and routes wrapped
 * requests through the invoker service; failures are recorded on the context.
 */
@Override
protected ExecuteContext doBefore(ExecuteContext context) {
    LogUtils.printHttpRequestBeforePoint(context);
    Request request = (Request) context.getObject();
    if (LOGGER.isLoggable(Level.FINE)) {
        LOGGER.log(Level.FINE, "Request''s classloader is {0}, jettyClientWrapper''s classloader is {1}.",
                new Object[]{Request.class.getClassLoader().getClass().getName(),
                        JettyClientWrapper.class.getClassLoader().getClass().getName()});
    }
    // Only framework-wrapped requests are routed through the invoker.
    if (!(request instanceof JettyClientWrapper)) {
        return context;
    }
    String url = request.getScheme() + HttpConstants.HTTP_URL_DOUBLE_SLASH + request.getHost() + request.getPath();
    Map<String, String> urlInfo = RequestInterceptorUtils.recoverUrl(url);
    RequestInterceptorUtils.printRequestLog("webClient(jetty)", urlInfo);
    Optional<Object> result = invokerService.invoke(
            invokerContext -> buildInvokerFunc(context, invokerContext, request, urlInfo.get(HttpConstants.HTTP_URI_PATH)),
            ex -> ex,
            urlInfo.get(HttpConstants.HTTP_URI_SERVICE));
    if (result.isPresent()) {
        Object obj = result.get();
        // The invoker signals failure by returning the exception object itself.
        if (obj instanceof Exception) {
            LOGGER.log(Level.SEVERE, "Webclient(jetty) request is error, url is " + url, (Exception) obj);
            context.setThrowableOut((Exception) obj);
            return context;
        }
    }
    // The method returns void
    context.skip(null);
    return context;
}
@Test
public void testException() {
    // Test for anomalies: the interceptor must capture a send failure.
    JettyClientWrapper wrapper = Mockito.spy(new JettyClientWrapper(Mockito.mock(HttpClient.class),
            new HttpConversation(), HELLO_URI));
    ReflectUtils.setFieldValue(wrapper, HttpConstants.HTTP_URI_HOST, "www.domain.com");
    ExecuteContext context = ExecuteContext.forMemberMethod(wrapper, method, arguments, null, null);
    // Force send() to fail so the interceptor records the exception on the context.
    Mockito.doThrow(new RuntimeException()).when(wrapper).send(Mockito.isA(CompleteListener.class));
    interceptor.doBefore(context);
    Assert.assertEquals(RuntimeException.class, context.getThrowableOut().getClass());
}
/**
 * Lists the jobs of the given cluster as a paged result.
 *
 * @param clusterId id of the cluster whose jobs are listed
 * @return a success response wrapping the page of jobs
 */
@Operation(summary = "list", description = "List jobs")
@Parameters({
        @Parameter(in = ParameterIn.QUERY, name = "pageNum", schema = @Schema(type = "integer", defaultValue = "1")),
        @Parameter(in = ParameterIn.QUERY, name = "pageSize", schema = @Schema(type = "integer", defaultValue = "10")),
        @Parameter(in = ParameterIn.QUERY, name = "orderBy", schema = @Schema(type = "string", defaultValue = "id")),
        @Parameter(
                in = ParameterIn.QUERY,
                name = "sort",
                description = "asc/desc",
                schema = @Schema(type = "string", defaultValue = "asc"))
})
@GetMapping
public ResponseEntity<PageVO<JobVO>> list(@PathVariable Long clusterId) {
    // NOTE(review): the documented pagination/sort query params are not bound
    // as method parameters — presumably jobService.list reads them from a
    // request-scoped context; confirm against the service implementation.
    return ResponseEntity.success(jobService.list(clusterId));
}
@Test
void listReturnsAllJobs() {
    Long clusterId = 1L;
    PageVO<JobVO> expectedPage = PageVO.of(Arrays.asList(new JobVO(), new JobVO()), 2L);
    when(jobService.list(clusterId)).thenReturn(expectedPage);

    ResponseEntity<PageVO<JobVO>> response = jobController.list(clusterId);

    // The controller wraps the service result unchanged.
    assertTrue(response.isSuccess());
    assertEquals(expectedPage, response.getData());
}
/**
 * Returns whether the given properties contain at least one UTM tracking
 * property (as named by the values of {@code UTM_MAP}).
 *
 * @param properties the event properties, may be {@code null}
 * @return {@code true} if any mapped UTM property key is present
 */
public static boolean hasUtmProperties(JSONObject properties) {
    if (properties == null) {
        return false;
    }
    // Map iteration never yields null entries, so the original per-entry null
    // check was dead code; only the mapped property names (values) matter.
    for (String utmProperty : UTM_MAP.values()) {
        if (properties.has(utmProperty)) {
            return true;
        }
    }
    return false;
}
@Test
public void hasUtmProperties() throws JSONException {
    JSONObject jsonObject = new JSONObject();
    // No UTM keys present yet.
    Assert.assertFalse(ChannelUtils.hasUtmProperties(jsonObject));
    // Let JSONException propagate and fail the test: the original try/catch
    // swallowed it, so the test could pass vacuously if put() threw before
    // the assertion was reached.
    jsonObject.put("$utm_source", "huawei");
    jsonObject.put("$utm_medium", "yingyong");
    Assert.assertTrue(ChannelUtils.hasUtmProperties(jsonObject));
}
/**
 * Permanently deletes the job with the given id inside a transaction.
 *
 * @param id the id of the job to delete
 * @return the number of rows deleted
 * @throws StorageException wrapping any SQLException from the database
 */
@Override
public int deletePermanently(UUID id) {
    // Resources close in reverse order: transaction first, then connection.
    try (final Connection conn = dataSource.getConnection(); final Transaction transaction = new Transaction(conn)) {
        final int amountDeleted = jobTable(conn).deletePermanently(id);
        transaction.commit();
        // Only notify listeners when a row was actually removed.
        notifyJobStatsOnChangeListenersIf(amountDeleted > 0);
        return amountDeleted;
    } catch (SQLException e) {
        throw new StorageException(e);
    }
}
@Test
void deletePermanently_WhenSqlExceptionOccursAJobStorageExceptionIsThrown() throws SQLException {
    // Any SQLException from the driver must surface as a StorageException.
    doThrow(new SQLException("Boem")).when(preparedStatement).executeUpdate();
    assertThatThrownBy(() -> jobStorageProvider.deletePermanently(randomUUID())).isInstanceOf(StorageException.class);
}
/**
 * Validates this logical type against the schema, then attaches it: the
 * type name is stored under the logical-type property and the instance is
 * set via {@code setLogicalType}.
 *
 * @param schema the schema to annotate (mutated in place)
 * @return the same schema instance, for chaining
 */
public Schema addToSchema(Schema schema) {
    validate(schema);
    schema.addProp(LOGICAL_TYPE_PROP, name);
    schema.setLogicalType(this);
    return schema;
}
@Test
void decimalScaleBoundedByPrecision() {
    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
    // decimal(9, 10) is invalid: scale may not exceed precision.
    assertThrows("Should reject precision", IllegalArgumentException.class,
            "Invalid decimal scale: 10 (greater than precision: 9)", () -> {
                LogicalTypes.decimal(9, 10).addToSchema(schema);
                return null;
            });
    // A failed attachment must leave the schema without a logical type.
    assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
}
/** Returns the block (integer) X coordinate of this location's position. */
public int getBlockX() {
    final int blockX = position.blockX();
    return blockX;
}
@Test
public void testGetBlockX() throws Exception {
    World mockWorld = mock(World.class);
    Location underTest = new Location(mockWorld, Vector3.at(TEST_VALUE, 0, 0));

    // The block X coordinate mirrors the vector's X component.
    assertEquals(TEST_VALUE, underTest.getBlockX());
}
/**
 * Appends the pending prefix followed by the text; subsequent outputs are
 * separated by ", ".
 */
@Override
public void output(String text) {
    final String pendingPrefix = prefixes.pop();
    stringBuilder.append(pendingPrefix).append(text);
    // Every later output will be comma-separated from this one.
    prefixes.push(", ");
}
@Test
public void testOutput() {
    NodeStringifier underTest = new NodeStringifier();

    underTest.output("testing 123");
    underTest.output("again");

    // Successive outputs are joined with ", ".
    assertEquals("testing 123, again", underTest.toString());
}
/**
 * Writes the snapshot values to the stream, one decimal value per line,
 * UTF-8 encoded, using the platform line separator.
 */
@Override
public void dump(OutputStream output) {
    try (PrintWriter out = new PrintWriter(new OutputStreamWriter(output, UTF_8))) {
        for (long value : values) {
            // println(long) == printf("%d%n", value): decimal + platform separator.
            out.println(value);
        }
    }
}
@Test
public void dumpsToAStream() {
    final ByteArrayOutputStream sink = new ByteArrayOutputStream();

    snapshot.dump(sink);

    // One value per line, terminated with the platform line separator.
    assertThat(sink.toString())
            .isEqualTo(String.format("1%n2%n3%n4%n5%n"));
}
/**
 * Admin-console authentication filter: applies IP allow/block lists,
 * skips excluded URLs, and redirects unauthenticated users to the login page.
 */
@Override
public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, ServletException {
    HttpServletRequest request = (HttpServletRequest)req;
    HttpServletResponse response = (HttpServletResponse)res;
    // Do not allow framing; OF-997
    response.setHeader("X-Frame-Options", JiveGlobals.getProperty("adminConsole.frame-options", "SAMEORIGIN"));
    // Reset the defaultLoginPage variable
    String loginPage = defaultLoginPage;
    if (loginPage == null) {
        loginPage = request.getContextPath() + (AuthFactory.isOneTimeAccessTokenEnabled() ? "/loginToken.jsp" : "/login.jsp" );
    }
    // Get the page we're on (strip the leading slash and any plugin prefix):
    String url = request.getRequestURI().substring(1);
    if (url.startsWith("plugins/")) {
        url = url.substring("plugins/".length());
    }
    // See if it's contained in the exclude list. If so, skip filter execution
    boolean doExclude = false;
    for (String exclude : excludes) {
        if (testURLPassesExclude(url, exclude)) {
            doExclude = true;
            break;
        }
    }
    // IP allow/block lists also apply to excluded pages when configured so.
    if (!doExclude || IP_ACCESS_IGNORE_EXCLUDES.getValue()) {
        if (!passesBlocklist(req) || !passesAllowList(req)) {
            response.sendError(HttpServletResponse.SC_FORBIDDEN);
            return;
        }
    }
    if (!doExclude) {
        WebManager manager = new WebManager();
        manager.init(request, response, request.getSession(), context);
        boolean haveOneTimeToken = manager.getAuthToken() instanceof AuthToken.OneTimeAuthToken;
        User loggedUser = manager.getUser();
        boolean loggedAdmin = loggedUser == null ? false : adminManager.isUserAdmin(loggedUser.getUsername(), true);
        // No one-time token, not a logged-in admin, and no request-based
        // authentication: redirect to the login page.
        if (!haveOneTimeToken && !loggedAdmin && !authUserFromRequest(request)) {
            response.sendRedirect(getRedirectURL(request, loginPage, null));
            return;
        }
    }
    chain.doFilter(req, res);
}
@Test
public void willRedirectARequestWithoutAServletRequestAuthenticator() throws Exception {
    final AuthCheckFilter underTest = new AuthCheckFilter(adminManager, loginLimitManager);

    underTest.doFilter(request, response, filterChain);

    // Without any authenticator, the filter must redirect to the login page.
    verify(response).sendRedirect(anyString());
}
/**
 * Returns the Redis Cluster hash slot for the given key, computed via the
 * KEYSLOT command.
 */
@Override
public Integer clusterGetSlotForKey(byte[] key) {
    RFuture<Integer> future =
            executorService.readAsync((String) null, StringCodec.INSTANCE, RedisCommands.KEYSLOT, key);
    return syncFuture(future);
}
@Test
public void testClusterGetSlotForKey() {
    // KEYSLOT must resolve to some slot for any key.
    Integer keySlot = connection.clusterGetSlotForKey("123".getBytes());
    assertThat(keySlot).isNotNull();
}
/**
 * Applies the given Jersey client configuration to this builder and to the
 * underlying Apache HTTP client builder.
 *
 * @param configuration the configuration to use
 * @return this builder, for chaining
 */
public JerseyClientBuilder using(JerseyClientConfiguration configuration) {
    this.configuration = configuration;
    apacheHttpClientBuilder.using(configuration);
    return this;
}
@Test
void usesACustomDnsResolver() {
    final DnsResolver resolver = new SystemDefaultDnsResolver();

    builder.using(resolver);

    // The resolver is handed straight through to the Apache client builder.
    verify(apacheHttpClientBuilder).using(resolver);
}
/** Returns the shared dispatcher metadata describing this notification handler. */
public static NotificationDispatcherMetadata newMetadata() {
    return METADATA;
}
@Test
public void myNewIssues_notification_is_enable_at_project_level() {
    NotificationDispatcherMetadata underTest = NewIssuesNotificationHandler.newMetadata();

    // Per-project notification must be enabled in the metadata.
    assertThat(underTest.getProperty(PER_PROJECT_NOTIFICATION)).isEqualTo("true");
}
/**
 * Returns whether the floating IP is associated with a VM, i.e. its port
 * exists and carries a device id.
 *
 * @throws IllegalStateException when the port references a missing network
 */
public static boolean isAssociatedWithVM(OpenstackNetworkService service, NetFloatingIP fip) {
    Port osPort = service.port(fip.getPortId());
    if (osPort == null) {
        return false;
    }
    if (Strings.isNullOrEmpty(osPort.getDeviceId())) {
        return false;
    }
    // A port with a device must reference an existing network; anything else
    // indicates inconsistent state.
    Network osNet = service.network(osPort.getNetworkId());
    if (osNet == null) {
        final String errorFormat = ERR_FLOW + "no network(%s) exists";
        final String error = String.format(errorFormat,
                fip.getFloatingIpAddress(), osPort.getNetworkId());
        throw new IllegalStateException(error);
    }
    return true;
}
@Test
public void testIsAssociatedWithVM() {
    OpenstackNetworkService networkService = new TestOpenstackNetworkService();

    // Freshly built floating IP on portId4: not associated.
    NetFloatingIP floatingIp4 = new NeutronFloatingIP().toBuilder().portId("portId4").build();
    assertFalse(isAssociatedWithVM(networkService, floatingIp4));
    // Class-level fixtures: one unassociated, one associated floating IP.
    assertFalse(isAssociatedWithVM(networkService, floatingIp3));
    assertTrue(isAssociatedWithVM(networkService, floatingIp1));
}
/**
 * Renders the AST as SQL text, trimming any trailing newlines added by the
 * formatter.
 *
 * @param root the AST node to format
 * @return the formatted SQL string
 */
public static String formatSql(final AstNode root) {
    final StringBuilder builder = new StringBuilder();
    final Formatter formatter = new Formatter(builder);
    formatter.process(root, 0);
    return StringUtils.stripEnd(builder.toString(), "\n");
}
@Test
public void shouldFormatOuterJoin() {
    final Join outerJoin = new Join(leftAlias, ImmutableList.of(new JoinedSource(
            Optional.empty(),
            rightAlias,
            JoinedSource.Type.OUTER,
            criteria,
            Optional.of(new WithinExpression(10, TimeUnit.SECONDS)))));

    // An OUTER join renders as FULL OUTER JOIN with its within clause.
    final String expected = "`left` L\nFULL OUTER JOIN `right` R WITHIN 10 SECONDS ON"
            + " (('left.col0' = 'right.col0'))";

    assertEquals(expected, SqlFormatter.formatSql(outerJoin));
}
/**
 * Builds the CI configuration from GitHub Actions environment variables.
 * A missing commit only logs a warning; missing repository/API URL produce
 * a configuration without DevOps platform info.
 */
@Override
public CiConfiguration loadConfiguration() {
    String revision = system.envVariable(PROPERTY_COMMIT);
    if (isEmpty(revision)) {
        LOG.warn("Missing environment variable " + PROPERTY_COMMIT);
    }

    String githubRepository = system.envVariable(GITHUB_REPOSITORY_ENV_VAR);
    String githubApiUrl = system.envVariable(GITHUB_API_URL_ENV_VAR);
    boolean hasPlatformInfo = !isEmpty(githubRepository) && !isEmpty(githubApiUrl);
    if (!hasPlatformInfo) {
        LOG.warn("Missing or empty environment variables: {}, and/or {}", GITHUB_API_URL_ENV_VAR, GITHUB_REPOSITORY_ENV_VAR);
        return new CiConfigurationImpl(revision, getName());
    }
    return new CiConfigurationImpl(revision, getName(), new DevOpsPlatformInfo(githubApiUrl, githubRepository));
}
@Test
public void loadConfiguration_whenMissingGitHubEnvironmentVariables_shouldLogWarn() {
    // Only GITHUB_ACTION is set; repository and API URL variables are absent,
    // so no DevOps platform info is produced and a warning is logged.
    setEnvVariable("GITHUB_ACTION", "build");
    assertThat(underTest.loadConfiguration().getDevOpsPlatformInfo()).isEmpty();
    assertThat(logs.logs(Level.WARN)).contains("Missing or empty environment variables: GITHUB_API_URL, and/or GITHUB_REPOSITORY");
}
/**
 * Fully validates the ALM setting: both the GitLab URL and the decrypted
 * personal access token are checked.
 */
public void validate(AlmSettingDto almSettingDto) {
    validate(ValidationMode.COMPLETE,
            almSettingDto.getUrl(),
            almSettingDto.getDecryptedPersonalAccessToken(encryption));
}
@Test
public void validate_forAuthOnly_onlyValidatesUrl() {
    // AUTH_ONLY mode skips the personal-access-token check.
    underTest.validate(AUTH_ONLY, GITLAB_API_URL, null);
    verify(gitlabHttpClient).checkUrl(GITLAB_API_URL);
}
/**
 * Returns the configured CCSID (coded character set identifier).
 * NOTE(review): the method name "getCssid" looks like a typo of "getCcsid"
 * (the backing field is {@code ccsid}); it cannot be renamed here without
 * breaking existing callers.
 */
public int getCssid() {
    return ccsid;
}
@Test
public void testDefaultCcsid() {
    // The default CCSID is -1 (unset). The accessor name "getCssid" is a
    // known typo in the production API.
    assertEquals(-1, jt400Configuration.getCssid());
}
/**
 * Creates a CatalogLoader from the given properties. A custom catalog-impl
 * takes precedence (and is mutually exclusive with catalog-type); otherwise
 * the catalog-type property selects hive (default), hadoop, or rest.
 *
 * @throws UnsupportedOperationException for an unknown catalog-type
 */
static CatalogLoader createCatalogLoader(
        String name, Map<String, String> properties, Configuration hadoopConf) {
    String catalogImpl = properties.get(CatalogProperties.CATALOG_IMPL);
    if (catalogImpl != null) {
        // catalog-impl and catalog-type are mutually exclusive.
        String catalogType = properties.get(ICEBERG_CATALOG_TYPE);
        Preconditions.checkArgument(
                catalogType == null,
                "Cannot create catalog %s, both catalog-type and catalog-impl are set: catalog-type=%s, catalog-impl=%s",
                name, catalogType, catalogImpl);
        return CatalogLoader.custom(name, properties, hadoopConf, catalogImpl);
    }

    String catalogType = properties.getOrDefault(ICEBERG_CATALOG_TYPE, ICEBERG_CATALOG_TYPE_HIVE);
    switch (catalogType.toLowerCase(Locale.ENGLISH)) {
        case ICEBERG_CATALOG_TYPE_HIVE:
            // The values of properties 'uri', 'warehouse', 'hive-conf-dir' are allowed to be null, in
            // that case it will
            // fallback to parse those values from hadoop configuration which is loaded from classpath.
            String hiveConfDir = properties.get(HIVE_CONF_DIR);
            String hadoopConfDir = properties.get(HADOOP_CONF_DIR);
            Configuration newHadoopConf = mergeHiveConf(hadoopConf, hiveConfDir, hadoopConfDir);
            return CatalogLoader.hive(name, newHadoopConf, properties);
        case ICEBERG_CATALOG_TYPE_HADOOP:
            return CatalogLoader.hadoop(name, hadoopConf, properties);
        case ICEBERG_CATALOG_TYPE_REST:
            return CatalogLoader.rest(name, hadoopConf, properties);
        default:
            throw new UnsupportedOperationException(
                    "Unknown catalog-type: " + catalogType + " (Must be 'hive', 'hadoop' or 'rest')");
    }
}
/** Verifies that setting catalog-impl produces a loader that instantiates the custom catalog class. */
@Test public void testCreateCatalogCustom() { String catalogName = "customCatalog"; props.put(CatalogProperties.CATALOG_IMPL, CustomHadoopCatalog.class.getName()); Catalog catalog = FlinkCatalogFactory.createCatalogLoader(catalogName, props, new Configuration()) .loadCatalog(); assertThat(catalog).isNotNull().isInstanceOf(CustomHadoopCatalog.class); }
/**
 * Resolves a "/"-separated path into the backing map and returns the value found, wrapped in an
 * {@link Optional}.
 *
 * <p>A null/blank path is rejected with {@link IllegalArgumentException}; the path "/" returns the
 * whole map. Each token is resolved against the current node; a failed resolution (null parent or
 * non-map node — surfacing as NPE/CCE from {@code resolve}) yields {@link Optional#empty()}.
 * NOTE(review): catching NPE/CCE is used here as deliberate "path does not exist" control flow.
 */
public Optional<Object> get(String path) { if (path == null || path.trim().isEmpty()) { throw new IllegalArgumentException(String.format("path [%s] is invalid", path)); } path = validatePath(path); if (path.equals("/")) { return Optional.of(map); } String[] pathTokens = path.split(Pattern.quote("/")); Object object = map; for (int i = 1; i < pathTokens.length; i++) { try { object = resolve(pathTokens[i], object); } catch (NullPointerException | ClassCastException e) { return Optional.empty(); } } return Optional.ofNullable(object); }
/**
 * Walks a fixture YAML file and verifies that every level of a nested path resolves to the expected
 * node type (Map for intermediate nodes, String/Integer for leaves) with the expected values.
 */
@Test public void testValidPaths() throws IOException { YamlMapAccessor yamlMapAccessor = createYamlMapAccessor("/YamlMapAccessorTest.yaml"); Optional<Object> optional = yamlMapAccessor.get("/"); assertNotNull(optional); assertTrue(optional.isPresent()); assertNotNull(optional.get()); assertTrue(optional.get() instanceof Map); optional = yamlMapAccessor.get("/httpServer"); assertNotNull(optional); assertTrue(optional.isPresent()); assertNotNull(optional.get()); assertTrue(optional.get() instanceof Map); optional = yamlMapAccessor.get("/httpServer/authentication"); assertNotNull(optional); assertTrue(optional.isPresent()); assertNotNull(optional.get()); assertTrue(optional.get() instanceof Map); optional = yamlMapAccessor.get("/httpServer/authentication/basic"); assertNotNull(optional); assertTrue(optional.isPresent()); assertNotNull(optional.get()); assertTrue(optional.get() instanceof Map); optional = yamlMapAccessor.get("/httpServer/authentication/basic/username"); assertNotNull(optional); assertTrue(optional.isPresent()); assertNotNull(optional.get()); assertTrue(optional.get() instanceof String); assertEquals("Prometheus", optional.get()); optional = yamlMapAccessor.get("/httpServer/authentication/basic/password"); assertNotNull(optional); assertTrue(optional.isPresent()); assertNotNull(optional.get()); assertTrue(optional.get() instanceof String); assertEquals( "c6d52fc2733af33e62b45d4525261e35e04f7b0ec227e4feee8fd3fe1401a2a9", optional.get()); optional = yamlMapAccessor.get("/httpServer/threads"); assertNotNull(optional); assertTrue(optional.isPresent()); assertNotNull(optional.get()); assertTrue(optional.get() instanceof Map); optional = yamlMapAccessor.get("/httpServer/threads/minimum"); assertNotNull(optional); assertTrue(optional.isPresent()); assertNotNull(optional.get()); assertTrue(optional.get() instanceof Integer); assertEquals(1, ((Integer) optional.get()).intValue()); optional = yamlMapAccessor.get("/httpServer/threads/maximum"); 
assertNotNull(optional); assertTrue(optional.isPresent()); assertNotNull(optional.get()); assertTrue(optional.get() instanceof Integer); assertEquals(10, ((Integer) optional.get()).intValue()); optional = yamlMapAccessor.get("/httpServer/threads/keepAlive"); assertNotNull(optional); assertTrue(optional.isPresent()); assertNotNull(optional.get()); assertTrue(optional.get() instanceof Integer); assertEquals(120, ((Integer) optional.get()).intValue()); }
/**
 * Encodes the HTTP/1.x request line: METHOD SP uri SP VERSION CRLF.
 *
 * <p>An empty uri is encoded as " / " (RFC 2616 §5.1.2 absolute path). For an absolute-form uri
 * ("scheme://host...") with no path segment, a "/" is appended — either before the query string
 * (inserted into the uri via StringBuilder, see netty#2732) or after the authority when there is
 * no query. Byte-level writes (medium/short BE constants) emit the exact wire bytes for " / ",
 * "/ " and CRLF.
 */
@Override protected void encodeInitialLine(ByteBuf buf, HttpRequest request) throws Exception { ByteBufUtil.copy(request.method().asciiName(), buf); String uri = request.uri(); if (uri.isEmpty()) { // Add " / " as absolute path if uri is not present. // See https://tools.ietf.org/html/rfc2616#section-5.1.2 ByteBufUtil.writeMediumBE(buf, SPACE_SLASH_AND_SPACE_MEDIUM); } else { CharSequence uriCharSequence = uri; boolean needSlash = false; int start = uri.indexOf("://"); if (start != -1 && uri.charAt(0) != SLASH) { start += 3; // Correctly handle query params. // See https://github.com/netty/netty/issues/2732 int index = uri.indexOf(QUESTION_MARK, start); if (index == -1) { if (uri.lastIndexOf(SLASH) < start) { needSlash = true; } } else { if (uri.lastIndexOf(SLASH, index) < start) { uriCharSequence = new StringBuilder(uri).insert(index, SLASH); } } } buf.writeByte(SP).writeCharSequence(uriCharSequence, CharsetUtil.UTF_8); if (needSlash) { // write "/ " after uri ByteBufUtil.writeShortBE(buf, SLASH_AND_SPACE_SHORT); } else { buf.writeByte(SP); } } request.protocolVersion().encode(buf); ByteBufUtil.writeShortBE(buf, CRLF_SHORT); }
/** Verifies that an absolute path uri ("/") is encoded verbatim into the request line. */
@Test public void testAbsPath() throws Exception { for (ByteBuf buffer : getBuffers()) { HttpRequestEncoder encoder = new HttpRequestEncoder(); encoder.encodeInitialLine(buffer, new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/")); String req = buffer.toString(Charset.forName("US-ASCII")); assertEquals("GET / HTTP/1.1\r\n", req); buffer.release(); } }
/**
 * Returns the annotations configured for the Kubernetes REST service, or an empty map when the
 * option is not set.
 */
public Map<String, String> getRestServiceAnnotations() {
    return flinkConfig
            .getOptional(KubernetesConfigOptions.REST_SERVICE_ANNOTATIONS)
            .orElseGet(Collections::emptyMap);
}
/** Verifies that annotations set via REST_SERVICE_ANNOTATIONS are returned unchanged. */
@Test void testGetRestServiceAnnotations() { final Map<String, String> expectedAnnotations = new HashMap<>(); expectedAnnotations.put("a1", "v1"); expectedAnnotations.put("a2", "v2"); flinkConfig.set(KubernetesConfigOptions.REST_SERVICE_ANNOTATIONS, expectedAnnotations); final Map<String, String> resultAnnotations = kubernetesJobManagerParameters.getRestServiceAnnotations(); assertThat(resultAnnotations).isEqualTo(expectedAnnotations); }
/**
 * Converts an Avro {@link GenericRecord} to a Beam {@link Row} following the given Beam schema.
 *
 * <p>Fields are resolved by name against the record's own Avro schema; a field missing from the
 * record converts from a null value. Conversion failures for a single field are rethrown as
 * {@link IllegalArgumentException} naming the offending field, with the original cause preserved.
 */
public static Row toBeamRow(GenericRecord record, Schema schema, ConversionOptions options) { List<Object> valuesInOrder = schema.getFields().stream() .map( field -> { try { org.apache.avro.Schema.Field avroField = record.getSchema().getField(field.getName()); Object value = avroField != null ? record.get(avroField.pos()) : null; return convertAvroFormat(field.getType(), value, options); } catch (Exception cause) { throw new IllegalArgumentException( "Error converting field " + field + ": " + cause.getMessage(), cause); } }) .collect(toList()); return Row.withSchema(schema).addValues(valuesInOrder).build(); }
/** Verifies conversion of a nested structure: an Avro record containing an array of records that each contain an array of flat rows. */
@Test public void testToBeamRow_avro_array_array_row() { Row flatRowExpected = Row.withSchema(AVRO_FLAT_TYPE).addValues(123L, 123.456, "test", false).build(); Row arrayRowExpected = Row.withSchema(AVRO_ARRAY_TYPE).addValues((Object) Arrays.asList(flatRowExpected)).build(); Row expected = Row.withSchema(AVRO_ARRAY_ARRAY_TYPE) .addValues((Object) Arrays.asList(arrayRowExpected)) .build(); GenericData.Record arrayRecord = new GenericData.Record(AvroUtils.toAvroSchema(AVRO_ARRAY_TYPE)); GenericData.Record flat = new GenericData.Record(AvroUtils.toAvroSchema(AVRO_FLAT_TYPE)); GenericData.Record record = new GenericData.Record(AvroUtils.toAvroSchema(AVRO_ARRAY_ARRAY_TYPE)); flat.put("id", 123L); flat.put("value", 123.456); flat.put("name", "test"); flat.put("valid", false); arrayRecord.put("rows", Arrays.asList(flat)); record.put("array_rows", Arrays.asList(arrayRecord)); Row beamRow = BigQueryUtils.toBeamRow( record, AVRO_ARRAY_ARRAY_TYPE, BigQueryUtils.ConversionOptions.builder().build()); assertEquals(expected, beamRow); }
/** Delegates ordering of timestamps to the shared {@code comparator} field. */
@Override public int compareTo(DateTimeStamp dateTimeStamp) { return comparator.compare(this,dateTimeStamp); }
/** Verifies that with equal date-time strings, the smaller numeric timestamp compares as less than the greater one. */
@Test void testCompareSmallerTimeStamp() { DateTimeStamp smaller = new DateTimeStamp("2018-04-04T10:10:00.586-0100", 123); DateTimeStamp greater = new DateTimeStamp("2018-04-04T10:10:00.586-0100", 124); assertEquals(-1, smaller.compareTo(greater)); }
/**
 * Selects the {@link MessageRouter} implementation based on the producer's configured routing mode:
 * CustomPartition requires a user-supplied router (NPE if absent), SinglePartition pins to a random
 * partition, and RoundRobinPartition (also the default branch) distributes across partitions,
 * honoring batching and the partition-switch frequency (converted micros → millis).
 */
private MessageRouter getMessageRouter() { MessageRouter messageRouter; MessageRoutingMode messageRouteMode = conf.getMessageRoutingMode(); switch (messageRouteMode) { case CustomPartition: messageRouter = Objects.requireNonNull(conf.getCustomMessageRouter()); break; case SinglePartition: messageRouter = new SinglePartitionMessageRouterImpl( ThreadLocalRandom.current().nextInt(topicMetadata.numPartitions()), conf.getHashingScheme()); break; case RoundRobinPartition: default: messageRouter = new RoundRobinPartitionMessageRouterImpl( conf.getHashingScheme(), ThreadLocalRandom.current().nextInt(topicMetadata.numPartitions()), conf.isBatchingEnabled(), TimeUnit.MICROSECONDS.toMillis(conf.batchingPartitionSwitchFrequencyIntervalMicros())); } return messageRouter; }
/** Verifies that CustomPartition mode returns the user-provided router instance. */
@Test public void testCustomMessageRouterInstance() throws NoSuchFieldException, IllegalAccessException { ProducerConfigurationData producerConfigurationData = new ProducerConfigurationData(); producerConfigurationData.setMessageRoutingMode(MessageRoutingMode.CustomPartition); producerConfigurationData.setCustomMessageRouter(new CustomMessageRouter()); MessageRouter messageRouter = getMessageRouter(producerConfigurationData); assertTrue(messageRouter instanceof CustomMessageRouter); }
/** Returns a page of notices matching the request; delegates filtering/paging to the mapper. */
@Override public PageResult<NoticeDO> getNoticePage(NoticePageReqVO reqVO) { return noticeMapper.selectPage(reqVO); }
/** Inserts one matching notice plus title/status mismatches, then verifies paging returns only the matching row. */
@Test public void testGetNoticePage_success() { // 插入前置数据 NoticeDO dbNotice = randomPojo(NoticeDO.class, o -> { o.setTitle("尼古拉斯赵四来啦!"); o.setStatus(CommonStatusEnum.ENABLE.getStatus()); }); noticeMapper.insert(dbNotice); // 测试 title 不匹配 noticeMapper.insert(cloneIgnoreId(dbNotice, o -> o.setTitle("尼古拉斯凯奇也来啦!"))); // 测试 status 不匹配 noticeMapper.insert(cloneIgnoreId(dbNotice, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus()))); // 准备参数 NoticePageReqVO reqVO = new NoticePageReqVO(); reqVO.setTitle("尼古拉斯赵四来啦!"); reqVO.setStatus(CommonStatusEnum.ENABLE.getStatus()); // 调用 PageResult<NoticeDO> pageResult = noticeService.getNoticePage(reqVO); // 验证查询结果经过筛选 assertEquals(1, pageResult.getTotal()); assertEquals(1, pageResult.getList().size()); assertPojoEquals(dbNotice, pageResult.getList().get(0)); }
/** Returns the stored last-seen timestamp value. */
@Override public long lastSeen() { return lastSeen; }
/** Verifies flow-entry accessors, including that lastSeen() is within the last second. */
@Test public void testFlowBasedObject() { final DefaultFlowEntry entry = new DefaultFlowEntry(new IntentTestsMocks.MockFlowRule(1)); assertThat(entry.priority(), is(1)); assertThat(entry.appId(), is((short) 0)); assertThat(entry.lastSeen(), greaterThan(System.currentTimeMillis() - TimeUnit.MILLISECONDS.convert(1, TimeUnit.SECONDS))); }
/** Convenience overload: opens the file without the extra flag (delegates with {@code false}). */
public static File openFile(String path, String fileName) { return openFile(path, fileName, false); }
/** Verifies openFile resolves the same path and name as the pre-created test file. */
@Test void testOpenFileWithPath() { File file = DiskUtils.openFile(testFile.getParent(), testFile.getName(), false); assertNotNull(file); assertEquals(testFile.getPath(), file.getPath()); assertEquals(testFile.getName(), file.getName()); }
/**
 * Returns the property stored under {@code key} as a string array.
 *
 * <p>If the raw value looks like JSON (starts with '{' or '['), it is parsed with the object
 * mapper; otherwise it is split on {@code ARRAY_SEP}. Returns {@code null} when the property is
 * not set.
 *
 * @param key the property key (must not be null)
 * @return the parsed array, or {@code null} when no value is stored under {@code key}
 * @throws IllegalStateException if a JSON-looking value cannot be parsed as a string array
 */
public String[] getArray(@NotNull final String key) {
    final String string = getString(key);
    if (string == null) {
        return null;
    }
    // Guard against StringIndexOutOfBoundsException: the original called charAt(0) before
    // checking for an empty value. An empty string falls through to the plain split path.
    if (!string.isEmpty() && (string.charAt(0) == '{' || string.charAt(0) == '[')) {
        try {
            return objectMapper.readValue(string, String[].class);
        } catch (JsonProcessingException e) {
            // Preserve the cause so the underlying JSON parse failure stays diagnosable.
            throw new IllegalStateException("Unable to read value '" + string + "' as an array", e);
        }
    }
    return string.split(ARRAY_SEP);
}
/** Verifies getArray returns null when the property is absent. */
@Test public void testGetArrayWhereThePropertyIsNotSet() { // WHEN getting the array final String[] array = getSettings().getArray("key"); // THEN null is returned assertThat("Expected the array to be null", array, nullValue()); }
/** A point pair passes only when BOTH the vertical and horizontal separation checks pass. */
@Override public boolean test(Pair<Point, Point> pair) { return testVertical(pair) && testHorizontal(pair); }
/** Verifies the filter rejects a pair when both the horizontal and vertical limits are exceeded. */
@Test public void testHorizAndVertSeparation() { Point p1 = (new PointBuilder()).time(EPOCH).latLong(0.0, 0.0).altitude(Distance.ofFeet(1000.0)).build(); Point p2 = (new PointBuilder()).time(EPOCH).latLong(0.0, 1.0).altitude(Distance.ofFeet(2000.0)).build(); double MAX_HORIZ_SEPARATION_IN_FT = 1000; double MAX_VERT_SEPARATION = 500; CylindricalFilter filter = new CylindricalFilter(MAX_HORIZ_SEPARATION_IN_FT, MAX_VERT_SEPARATION, true); assertFalse(filter.test(Pair.of(p1, p2)), "A rejection holds when both dimensions fail the test"); }
/**
 * Returns {@code childPath} expressed relative to {@code sourceRootPath}, using the URI path form
 * of both. A root of "/" returns the child path unchanged; otherwise the root prefix is stripped.
 */
public static String getRelativePath(Path sourceRootPath, Path childPath) {
    final String child = childPath.toUri().getPath();
    final String root = sourceRootPath.toUri().getPath();
    if (root.equals("/")) {
        return child;
    }
    return child.substring(root.length());
}
/** Verifies that relative to root "/", a child path is returned as-is. */
@Test public void testGetRelativePathRoot() { Path root = new Path("/"); Path child = new Path("/a"); assertThat(DistCpUtils.getRelativePath(root, child)).isEqualTo("/a"); }
/**
 * One duty-cycle iteration of the conductor: process timers, receive client commands (skipped while
 * an async client command is in flight), drain the internal command queue, track stream positions,
 * run name resolution, and free end-of-life resources up to the configured limit. Returns the
 * total work count accumulated across all steps (0 means idle).
 * NOTE(review): step order is significant — do not reorder without understanding the duty cycle.
 */
public int doWork() { final long nowNs = nanoClock.nanoTime(); trackTime(nowNs); int workCount = 0; workCount += processTimers(nowNs); if (!asyncClientCommandInFlight) { workCount += clientCommandAdapter.receive(); } workCount += drainCommandQueue(); workCount += trackStreamPositions(workCount, nowNs); workCount += nameResolver.doWork(cachedEpochClock.time()); workCount += freeEndOfLifeResources(ctx.resourceFreeLimit()); return workCount; }
/** Verifies adding a spy subscription completes without registering a receive channel endpoint. */
@Test void shouldBeAbleToAddSingleSpy() { final long id = driverProxy.addSubscription(spyForChannel(CHANNEL_4000), STREAM_ID_1); driverConductor.doWork(); verify(receiverProxy, never()).registerReceiveChannelEndpoint(any()); verify(receiverProxy, never()).addSubscription(any(), eq(STREAM_ID_1)); verify(mockClientProxy).onSubscriptionReady(eq(id), anyInt()); }
/**
 * Opens an iRODS output stream for the given file: READ_WRITE when appending, WRITE_TRUNCATE
 * otherwise. The returned stream's {@code getStatus()} re-reads the object's attributes from the
 * server after upload (the server returns none on upload itself). Jargon runtime exceptions are
 * unwrapped to their {@link JargonException} cause when possible; all Jargon failures are mapped
 * to {@link BackgroundException}s via the IRODS exception mapping service.
 */
@Override public StatusOutputStream<ObjStat> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException { try { try { final IRODSFileSystemAO fs = session.getClient(); final IRODSFileOutputStream out = fs.getIRODSFileFactory().instanceIRODSFileOutputStream( file.getAbsolute(), status.isAppend() ? DataObjInp.OpenFlags.READ_WRITE : DataObjInp.OpenFlags.WRITE_TRUNCATE); return new StatusOutputStream<ObjStat>(new PackingIrodsOutputStream(out)) { @Override public ObjStat getStatus() throws BackgroundException { // No remote attributes from server returned after upload try { return fs.getObjStat(file.getAbsolute()); } catch(JargonException e) { throw new IRODSExceptionMappingService().map("Failure to read attributes of {0}", e, file); } } }; } catch(JargonRuntimeException e) { if(e.getCause() instanceof JargonException) { throw (JargonException) e.getCause(); } throw new DefaultExceptionMappingService().map(e); } } catch(JargonException e) { throw new IRODSExceptionMappingService().map("Uploading {0} failed", e, file); } }
/** Writes the same content through two concurrent sessions and verifies each file reads back intact. */
@Test public void testWriteConcurrent() throws Exception { final ProtocolFactory factory = new ProtocolFactory(new HashSet<>(Collections.singleton(new IRODSProtocol()))); final Profile profile = new ProfilePlistReader(factory).read( this.getClass().getResourceAsStream("/iRODS (iPlant Collaborative).cyberduckprofile")); final Host host = new Host(profile, profile.getDefaultHostname(), new Credentials( PROPERTIES.get("irods.key"), PROPERTIES.get("irods.secret") )); final IRODSSession session1 = new IRODSSession(host); session1.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()); session1.login(new DisabledLoginCallback(), new DisabledCancelCallback()); final IRODSSession session2 = new IRODSSession(host); session2.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()); session2.login(new DisabledLoginCallback(), new DisabledCancelCallback()); final Path test1 = new Path(new IRODSHomeFinderService(session1).find(), UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)); final Path test2 = new Path(new IRODSHomeFinderService(session2).find(), UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)); final byte[] content = RandomUtils.nextBytes(68400); final OutputStream out1 = new IRODSWriteFeature(session1).write(test1, new TransferStatus().append(false).withLength(content.length), new DisabledConnectionCallback()); final OutputStream out2 = new IRODSWriteFeature(session2).write(test2, new TransferStatus().append(false).withLength(content.length), new DisabledConnectionCallback()); new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out2); // Error code received from iRODS:-23000 new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out1); { final InputStream in1 = session1.getFeature(Read.class).read(test1, new 
TransferStatus(), new DisabledConnectionCallback()); final byte[] buffer1 = new byte[content.length]; IOUtils.readFully(in1, buffer1); in1.close(); assertArrayEquals(content, buffer1); } { final InputStream in2 = session2.getFeature(Read.class).read(test2, new TransferStatus(), new DisabledConnectionCallback()); final byte[] buffer2 = new byte[content.length]; IOUtils.readFully(in2, buffer2); in2.close(); assertArrayEquals(content, buffer2); } session1.close(); session2.close(); }
/**
 * Rewrites the pipeline so every PCollection has exactly one producer.
 *
 * <p>Steps visible in this method: (1) collect producers per PCollection; any PCollection with
 * more than one producer is marked for a new output per producer; (2) for each producing stage or
 * transform, create synthetic "partial" PCollections and record the updated stage/transform
 * (throws IllegalStateException if a producer is neither a stage nor a transform); (3) for each
 * original PCollection that was split, introduce a synthetic Flatten transform that merges the
 * partial PCollections back into the original. Returns the updated components, the introduced
 * Flattens, and the maps of updated stages/transforms.
 */
static DeduplicationResult ensureSingleProducer( QueryablePipeline pipeline, Collection<ExecutableStage> stages, Collection<PipelineNode.PTransformNode> unfusedTransforms) { RunnerApi.Components.Builder unzippedComponents = pipeline.getComponents().toBuilder(); Multimap<PipelineNode.PCollectionNode, StageOrTransform> pcollectionProducers = getProducers(pipeline, stages, unfusedTransforms); Multimap<StageOrTransform, PipelineNode.PCollectionNode> requiresNewOutput = HashMultimap.create(); // Create a synthetic PCollection for each of these nodes. The transforms in the runner // portion of the graph that creates them should be replaced in the result components. The // ExecutableStage must also be rewritten to have updated outputs and transforms. for (Map.Entry<PipelineNode.PCollectionNode, Collection<StageOrTransform>> collectionProducer : pcollectionProducers.asMap().entrySet()) { if (collectionProducer.getValue().size() > 1) { for (StageOrTransform producer : collectionProducer.getValue()) { requiresNewOutput.put(producer, collectionProducer.getKey()); } } } Map<ExecutableStage, ExecutableStage> updatedStages = new LinkedHashMap<>(); Map<String, PipelineNode.PTransformNode> updatedTransforms = new LinkedHashMap<>(); Multimap<String, PipelineNode.PCollectionNode> originalToPartial = HashMultimap.create(); for (Map.Entry<StageOrTransform, Collection<PipelineNode.PCollectionNode>> deduplicationTargets : requiresNewOutput.asMap().entrySet()) { if (deduplicationTargets.getKey().getStage() != null) { StageDeduplication deduplication = deduplicatePCollections( deduplicationTargets.getKey().getStage(), deduplicationTargets.getValue(), unzippedComponents::containsPcollections); for (Entry<String, PipelineNode.PCollectionNode> originalToPartialReplacement : deduplication.getOriginalToPartialPCollections().entrySet()) { originalToPartial.put( originalToPartialReplacement.getKey(), originalToPartialReplacement.getValue()); unzippedComponents.putPcollections( 
originalToPartialReplacement.getValue().getId(), originalToPartialReplacement.getValue().getPCollection()); } updatedStages.put( deduplicationTargets.getKey().getStage(), deduplication.getUpdatedStage()); } else if (deduplicationTargets.getKey().getTransform() != null) { PTransformDeduplication deduplication = deduplicatePCollections( deduplicationTargets.getKey().getTransform(), deduplicationTargets.getValue(), unzippedComponents::containsPcollections); for (Entry<String, PipelineNode.PCollectionNode> originalToPartialReplacement : deduplication.getOriginalToPartialPCollections().entrySet()) { originalToPartial.put( originalToPartialReplacement.getKey(), originalToPartialReplacement.getValue()); unzippedComponents.putPcollections( originalToPartialReplacement.getValue().getId(), originalToPartialReplacement.getValue().getPCollection()); } updatedTransforms.put( deduplicationTargets.getKey().getTransform().getId(), deduplication.getUpdatedTransform()); } else { throw new IllegalStateException( String.format( "%s with no %s or %s", StageOrTransform.class.getSimpleName(), ExecutableStage.class.getSimpleName(), PipelineNode.PTransformNode.class.getSimpleName())); } } Set<PipelineNode.PTransformNode> introducedFlattens = new LinkedHashSet<>(); for (Map.Entry<String, Collection<PipelineNode.PCollectionNode>> partialFlattenTargets : originalToPartial.asMap().entrySet()) { String flattenId = SyntheticComponents.uniqueId("unzipped_flatten", unzippedComponents::containsTransforms); PTransform flattenPartialPCollections = createFlattenOfPartials( flattenId, partialFlattenTargets.getKey(), partialFlattenTargets.getValue()); unzippedComponents.putTransforms(flattenId, flattenPartialPCollections); introducedFlattens.add(PipelineNode.pTransform(flattenId, flattenPartialPCollections)); } Components components = unzippedComponents.build(); return DeduplicationResult.of(components, introducedFlattens, updatedStages, updatedTransforms); }
/** Verifies that a pipeline where every PCollection already has a single producer is left unchanged by deduplication. */
@Test public void unchangedWithNoDuplicates() { /* When all the PCollections are produced by only one transform or stage, the result should be * empty/identical to the input. * * Pipeline: * /-> one -> .out \ * red -> .out -> -> blue -> .out * \-> two -> .out / */ PCollection redOut = PCollection.newBuilder().setUniqueName("red.out").build(); PTransform red = PTransform.newBuilder() .setSpec( FunctionSpec.newBuilder() .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN) .build()) .putOutputs("out", redOut.getUniqueName()) .build(); PCollection oneOut = PCollection.newBuilder().setUniqueName("one.out").build(); PTransform one = PTransform.newBuilder() .setSpec( FunctionSpec.newBuilder() .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN) .build()) .putInputs("in", redOut.getUniqueName()) .putOutputs("out", oneOut.getUniqueName()) .build(); PCollection twoOut = PCollection.newBuilder().setUniqueName("two.out").build(); PTransform two = PTransform.newBuilder() .setSpec( FunctionSpec.newBuilder() .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN) .build()) .putInputs("in", redOut.getUniqueName()) .putOutputs("out", twoOut.getUniqueName()) .build(); PCollection blueOut = PCollection.newBuilder().setUniqueName("blue.out").build(); PTransform blue = PTransform.newBuilder() .setSpec( FunctionSpec.newBuilder() .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN) .build()) .putInputs("one", oneOut.getUniqueName()) .putInputs("two", twoOut.getUniqueName()) .putOutputs("out", blueOut.getUniqueName()) .build(); RunnerApi.Components components = Components.newBuilder() .putTransforms("one", one) .putPcollections(oneOut.getUniqueName(), oneOut) .putTransforms("two", two) .putPcollections(twoOut.getUniqueName(), twoOut) .putTransforms("red", red) .putPcollections(redOut.getUniqueName(), redOut) .putTransforms("blue", blue) .putPcollections(blueOut.getUniqueName(), blueOut) .build(); ExecutableStage oneStage = ImmutableExecutableStage.of( components, 
Environment.getDefaultInstance(), PipelineNode.pCollection(redOut.getUniqueName(), redOut), ImmutableList.of(), ImmutableList.of(), ImmutableList.of(), ImmutableList.of(PipelineNode.pTransform("one", one)), ImmutableList.of(PipelineNode.pCollection(oneOut.getUniqueName(), oneOut)), DEFAULT_WIRE_CODER_SETTINGS); ExecutableStage twoStage = ImmutableExecutableStage.of( components, Environment.getDefaultInstance(), PipelineNode.pCollection(redOut.getUniqueName(), redOut), ImmutableList.of(), ImmutableList.of(), ImmutableList.of(), ImmutableList.of(PipelineNode.pTransform("two", two)), ImmutableList.of(PipelineNode.pCollection(twoOut.getUniqueName(), twoOut)), DEFAULT_WIRE_CODER_SETTINGS); PTransformNode redTransform = PipelineNode.pTransform("red", red); PTransformNode blueTransform = PipelineNode.pTransform("blue", blue); QueryablePipeline pipeline = QueryablePipeline.forPrimitivesIn(components); OutputDeduplicator.DeduplicationResult result = OutputDeduplicator.ensureSingleProducer( pipeline, ImmutableList.of(oneStage, twoStage), ImmutableList.of(redTransform, blueTransform)); assertThat(result.getDeduplicatedComponents(), equalTo(components)); assertThat(result.getDeduplicatedStages().keySet(), empty()); assertThat(result.getDeduplicatedTransforms().keySet(), empty()); assertThat(result.getIntroducedTransforms(), empty()); }
/** Returns true when no side inputs are registered. */
@Override public boolean isEmpty() { return sideInputs.isEmpty(); }
/** Verifies a handler constructed with no side inputs reports empty. */
@Test public void testIsEmptyTrue() { // Create an empty handler SideInputHandler emptySideInputHandler = new SideInputHandler(ImmutableList.of(), InMemoryStateInternals.<Void>forKey(null)); assertTrue(emptySideInputHandler.isEmpty()); }
/** Fetches the cluster topology via the CLUSTER NODES command (read with a null key). */
@Override public Iterable<RedisClusterNode> clusterGetNodes() { return read(null, StringCodec.INSTANCE, CLUSTER_NODES); }
/** Verifies all 6 cluster nodes are returned with populated fields; masters have slot ranges, replicas a master id. */
@Test public void testClusterGetNodes() { Iterable<RedisClusterNode> nodes = connection.clusterGetNodes(); assertThat(nodes).hasSize(6); for (RedisClusterNode redisClusterNode : nodes) { assertThat(redisClusterNode.getLinkState()).isNotNull(); assertThat(redisClusterNode.getFlags()).isNotEmpty(); assertThat(redisClusterNode.getHost()).isNotNull(); assertThat(redisClusterNode.getPort()).isNotNull(); assertThat(redisClusterNode.getId()).isNotNull(); assertThat(redisClusterNode.getType()).isNotNull(); if (redisClusterNode.getType() == NodeType.MASTER) { assertThat(redisClusterNode.getSlotRange().getSlots()).isNotEmpty(); } else { assertThat(redisClusterNode.getMasterId()).isNotNull(); } } }
/**
 * Compresses {@code src} with zstd at the given compression level.
 *
 * @param src   raw bytes to compress
 * @param level zstd compression level
 * @return the compressed bytes
 * @throws IOException if the zstd stream fails; the failure is logged before rethrowing
 */
@Override
public byte[] compress(byte[] src, int level) throws IOException {
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(src.length);
    // try-with-resources guarantees the zstd stream is closed even when write/flush throws;
    // the original leaked it on failure. Closing the stream also finishes the zstd frame.
    // ByteArrayOutputStream.close() is a no-op, so reading it afterwards is safe.
    try (ZstdOutputStream outputStream = new ZstdOutputStream(byteArrayOutputStream, level)) {
        outputStream.write(src);
        outputStream.flush();
    } catch (IOException e) {
        log.error("Failed to compress data by zstd", e);
        throw e;
    }
    return byteArrayOutputStream.toByteArray();
}
/**
 * Compresses arbitrary bytes at level 1.
 * NOTE(review): this test contains no assertion — it only verifies compress() does not throw.
 */
@Test public void testCompressWithInvalidData() throws IOException { byte[] invalidData = new byte[] {-1, -1, -1, -1}; ZstdCompressor compressor = new ZstdCompressor(); compressor.compress(invalidData, 1); }
/** Delegates the single-key session lookup to the wrapped store unchanged. */
@Override public KeyValueIterator<Windowed<Bytes>, byte[]> findSessions(final Bytes key, final long earliestSessionEndTime, final long latestSessionStartTime) { return wrapped().findSessions(key, earliestSessionEndTime, latestSessionStartTime); }
/** Verifies the key-range findSessions overload forwards its arguments to the inner store. */
@Test public void shouldDelegateToUnderlyingStoreWhenFindingSessionRange() { store.findSessions(bytesKey, bytesKey, 0, 1); verify(inner).findSessions(bytesKey, bytesKey, 0, 1); }
/** Returns the pre-computed partition count. */
@Override public int numPartitions() { return partitions; }
/** Verifies the partitioner reports one partition per computed bucket (10 file groups x 10 buckets). */
@Test public void testNumPartitions() { Map<HoodieFileGroupId, Long> comparisons1 = new HashMap<HoodieFileGroupId, Long>() { { IntStream.range(0, 10).forEach(f -> put(new HoodieFileGroupId("p1", "f" + f), 100L)); } }; BucketizedBloomCheckPartitioner p = new BucketizedBloomCheckPartitioner(10000, comparisons1, 10); assertEquals(100, p.numPartitions(), "num partitions must equal total buckets"); }
/**
 * Applies partition pruning to an OLAP scan: a first-time scan (no selected partitions yet) is
 * pruned from scratch, while an already-pruned scan has its selection merged with the new pruning
 * result. Marks the operator as having had the partition-prune rule applied and returns a new
 * expression preserving the original inputs.
 */
@Override public List<OptExpression> transform(OptExpression input, OptimizerContext context) { LogicalOlapScanOperator logicalOlapScanOperator = (LogicalOlapScanOperator) input.getOp(); LogicalOlapScanOperator prunedOlapScanOperator = null; if (logicalOlapScanOperator.getSelectedPartitionId() == null) { prunedOlapScanOperator = OptOlapPartitionPruner.prunePartitions(logicalOlapScanOperator); } else { // do merge pruned partitions with new pruned partitions prunedOlapScanOperator = OptOlapPartitionPruner.mergePartitionPrune(logicalOlapScanOperator); } Utils.setOpAppliedRule(prunedOlapScanOperator, Operator.OP_PARTITION_PRUNE_BIT); return Lists.newArrayList(OptExpression.create(prunedOlapScanOperator, input.getInputs())); }
/** Verifies list-partition pruning with temp partitions selects only the temp-visible partition (10001). */
@Test public void transformForSingleItemListPartitionWithTemp(@Mocked OlapTable olapTable, @Mocked ListPartitionInfo partitionInfo) throws AnalysisException { FeConstants.runningUnitTest = true; ColumnRefFactory columnRefFactory = new ColumnRefFactory(); ColumnRefOperator column = columnRefFactory.create("province", ScalarType.STRING, false); Map<ColumnRefOperator, Column> scanColumnMap = Maps.newHashMap(); scanColumnMap.put(column, new Column("province", Type.STRING, false)); Map<Column, ColumnRefOperator> columnMetaToColRefMap = new HashMap<>(); columnMetaToColRefMap.put(new Column(column.getName(), column.getType()), new ColumnRefOperator(1, column.getType(), column.getName(), false)); PartitionNames partitionNames = new PartitionNames(true, Lists.newArrayList("p1", "p2")); LogicalOlapScanOperator operator = new LogicalOlapScanOperator(olapTable, scanColumnMap, columnMetaToColRefMap, null, -1, null, olapTable.getBaseIndexId(), null, partitionNames, false, Lists.newArrayList(), Lists.newArrayList(), null, false); Partition part1 = new Partition(10001L, "p1", null, null); Partition part2 = new Partition(10002L, "p2", null, null); List<LiteralExpr> p1 = Lists.newArrayList( new PartitionValue("guangdong").getValue(Type.STRING), new PartitionValue("shanghai").getValue(Type.STRING)); List<LiteralExpr> p2 = Lists.newArrayList( new PartitionValue("beijing").getValue(Type.STRING), new PartitionValue("chongqing").getValue(Type.STRING)); Map<Long, List<LiteralExpr>> literalExprValues = new HashMap<>(); literalExprValues.put(10001L, p1); literalExprValues.put(10002L, p2); List<ColumnId> partitionColumns = Lists.newArrayList(ColumnId.create("province")); new Expectations() { { olapTable.getPartitionInfo(); result = partitionInfo; partitionInfo.getType(); result = PartitionType.LIST; partitionInfo.getLiteralExprValues(); result = literalExprValues; olapTable.getPartitions(); result = Lists.newArrayList(part1, part2); minTimes = 0; 
partitionInfo.getPartitionColumns((Map<ColumnId, Column>) any); result = Lists.newArrayList(new Column("province", Type.STRING, false)); minTimes = 0; partitionInfo.getPartitionIds(true); result = Lists.newArrayList(10001L); minTimes = 0; olapTable.getPartition(10001L); result = part1; minTimes = 0; olapTable.getPartition(10002L); result = part2; minTimes = 0; olapTable.getPartition("p1", true); result = part1; minTimes = 0; olapTable.getPartition("p2", true); result = null; minTimes = 0; } }; PartitionPruneRule rule = new PartitionPruneRule(); assertNull(operator.getSelectedPartitionId()); OptExpression optExpression = rule.transform(new OptExpression(operator), new OptimizerContext(new Memo(), columnRefFactory)).get(0); List<Long> selectPartitionIds = ((LogicalOlapScanOperator) optExpression.getOp()).getSelectedPartitionId(); assertEquals(1, selectPartitionIds.size()); long actual = selectPartitionIds.get(0); assertEquals(10001L, actual); }
/**
 * Closes the underlying GZIP stream.
 * NOTE(review): the cast to GZIPInputStream looks unnecessary for calling close() — presumably it
 * acts as a runtime type check on the delegate; confirm before simplifying to delegate.close().
 */
@Override public void close() throws IOException { GZIPInputStream gis = (GZIPInputStream) delegate; gis.close(); }
/** Verifies close() completes without error on a freshly-created GZIP compression input stream. */
@Test public void testClose() throws IOException { inStream = new GZIPCompressionInputStream( createGZIPInputStream(), provider ) { }; inStream.close(); }
/** Returns the value at the head of the deque without removing it, or null when the deque is empty. */
@Override public T peekFirst() { if (_head == null) { return null; } return _head._value; }
/** Verifies peekFirst returns the first element and does not modify the deque. */
@Test public void testPeekFirst() { List<Integer> control = new ArrayList<>(Arrays.asList(1, 2, 3)); LinkedDeque<Integer> q = new LinkedDeque<>(control); Assert.assertEquals(q.peekFirst(), control.get(0)); Assert.assertEquals(q, control); }
/**
 * Creates a track-kerning record storing the degree and the (point size, kern) values at the
 * minimum and maximum ends of the range; all five arguments are stored as-is.
 */
public TrackKern(int degree, float minPointSize, float minKern, float maxPointSize, float maxKern) { this.degree = degree; this.minPointSize = minPointSize; this.minKern = minKern; this.maxPointSize = maxPointSize; this.maxKern = maxKern; }
/** Verifies the constructor stores all five values retrievable via the getters. */
@Test void testTrackKern() { TrackKern trackKern = new TrackKern(0, 1.0f, 1.0f, 10.0f, 10.0f); assertEquals(0, trackKern.getDegree()); assertEquals(1.0f, trackKern.getMinPointSize(), 0.0f); assertEquals(1.0f, trackKern.getMinKern(), 0.0f); assertEquals(10.0f, trackKern.getMaxPointSize(), 0.0f); assertEquals(10.0f, trackKern.getMaxKern(), 0.0f); }
/** Convenience overload: applies template entries to the child queue with the extra flag defaulted to {@code false}. */
public void setTemplateEntriesForChild(CapacitySchedulerConfiguration conf, QueuePath childQueuePath) { setTemplateEntriesForChild(conf, childQueuePath, false); }
/** Verifies a capacity template set on root ("2w") propagates to a child queue's weight. */
@Test public void testRootTemplate() { conf.set(getTemplateKey(ROOT, "capacity"), "2w"); AutoCreatedQueueTemplate template = new AutoCreatedQueueTemplate(conf, ROOT); template.setTemplateEntriesForChild(conf, TEST_QUEUE_A); Assert.assertEquals("root property is not set", 2f, conf.getNonLabeledQueueWeight(TEST_QUEUE_A), 10e-6); }
/**
 * Emits the &lt;proxy-factories&gt; XML section: one &lt;proxy-factory&gt; node per config with its
 * class-name (resolved via classNameOrImplClass) and service attributes. Emits nothing when the
 * list is empty.
 */
private static void proxyFactory(XmlGenerator gen, List<ProxyFactoryConfig> proxyFactories) { if (proxyFactories.isEmpty()) { return; } gen.open("proxy-factories"); for (ProxyFactoryConfig proxyFactory : proxyFactories) { gen.node("proxy-factory", null, "class-name", classNameOrImplClass(proxyFactory.getClassName(), proxyFactory.getFactoryImpl()), "service", proxyFactory.getService()); } gen.close(); }
// Round-trip test: a proxy factory config added to the client config must
// survive XML generation and re-parsing.
@Test public void proxyFactory() { ProxyFactoryConfig expected = new ProxyFactoryConfig(); expected.setClassName(randomString()) .setService(randomString()); clientConfig.addProxyFactoryConfig(expected); List<ProxyFactoryConfig> actual = newConfigViaGenerator().getProxyFactoryConfigs(); assertCollection(clientConfig.getProxyFactoryConfigs(), actual); }
// Computes the weakly connected components of the graph via repeated BFS.
// 'indices' maps each node to its slot in the color array (0 = unvisited,
// 1 = queued, 2 = done). Each outer iteration seeds a new component from
// the first unvisited node; progress is reported per dequeued node, and a
// cancellation request aborts with an empty result list.
public LinkedList<LinkedList<Node>> computeWeaklyConnectedComponents(Graph graph, HashMap<Node, Integer> indices) { int N = graph.getNodeCount(); //Keep track of which nodes have been seen int[] color = new int[N]; Progress.start(progress, N); int seenCount = 0; LinkedList<LinkedList<Node>> components = new LinkedList<>(); while (seenCount < N) { //The search Q LinkedList<Node> Q = new LinkedList<>(); //The component-list LinkedList<Node> component = new LinkedList<>(); //Seed the search Q NodeIterable iter = graph.getNodes(); for (Node next : iter) { if (color[indices.get(next)] == 0) { Q.add(next); iter.doBreak(); break; } } //While there are more nodes to search while (!Q.isEmpty()) { if (isCanceled) { return new LinkedList<>(); } //Get the next Node and add it to the component list Node u = Q.removeFirst(); component.add(u); color[indices.get(u)] = 2; //Iterate over all of u's neighbors EdgeIterable edgeIter = graph.getEdges(u); //For each neighbor for (Edge edge : edgeIter) { Node reachable = graph.getOpposite(u, edge); int id = indices.get(reachable); //If this neighbor is unvisited if (color[id] == 0) { //Mark it as used color[id] = 1; //Add it to the search Q Q.addLast(reachable); } } seenCount++; Progress.progress(progress, seenCount); } components.add(component); } return components; }
// Verifies component counting with a self-loop (node1-node1), an edge
// (node1-node2) and an isolated node3: expects exactly two components.
@Test public void testComputeSelfLoopNodeAndIsolatedNodeWeaklyConnectedComponents() { GraphModel graphModel = GraphModel.Factory.newInstance(); UndirectedGraph undirectedGraph = graphModel.getUndirectedGraph(); Node node1 = graphModel.factory().newNode("0"); Node node2 = graphModel.factory().newNode("1"); Node node3 = graphModel.factory().newNode("2"); undirectedGraph.addNode(node1); undirectedGraph.addNode(node2); undirectedGraph.addNode(node3); Edge edge11 = graphModel.factory().newEdge(node1, node1, false); Edge edge12 = graphModel.factory().newEdge(node1, node2, false); undirectedGraph.addEdge(edge11); undirectedGraph.addEdge(edge12); ConnectedComponents c = new ConnectedComponents(); HashMap<Node, Integer> indices = new HashMap<>(); indices.put(node1, 0); indices.put(node2, 1); indices.put(node3, 2); LinkedList<LinkedList<Node>> components = c.computeWeaklyConnectedComponents(undirectedGraph, indices); assertEquals(components.size(), 2); }
// Records a transaction-init duration sample into the init-time sensor.
public void recordInit(long duration) { initTimeSensor.record(duration); }
// Verifies that recording an init duration updates the txn-init-time-total
// metric.
@Test public void shouldRecordInitTime() { // When: producerMetrics.recordInit(METRIC_VALUE); // Then: assertMetricValue(TXN_INIT_TIME_TOTAL); }
// Loads Oracle table metadata for the requested tables using the current
// connection's schema, and wraps the result in a single SchemaMetaData
// keyed by the material's default schema name. The connection is wrapped
// in MetaDataLoaderConnection and closed via try-with-resources.
@Override public Collection<SchemaMetaData> load(final MetaDataLoaderMaterial material) throws SQLException { Collection<TableMetaData> tableMetaDataList = new LinkedList<>(); try (Connection connection = new MetaDataLoaderConnection(TypedSPILoader.getService(DatabaseType.class, "Oracle"), material.getDataSource().getConnection())) { tableMetaDataList.addAll(getTableMetaDataList(connection, connection.getSchema(), material.getActualTableNames())); } return Collections.singletonList(new SchemaMetaData(material.getDefaultSchemaName(), tableMetaDataList)); }
// Verifies column metadata loading against a mocked Oracle 12.1 connection
// (columns, indexes, primary keys), including primary-key/generated flags
// on 'id' and case-sensitivity on 'name'.
@Test void assertLoadCondition5() throws SQLException { DataSource dataSource = mockDataSource(); ResultSet resultSet = mockTableMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(ALL_TAB_COLUMNS_SQL_CONDITION5).executeQuery()).thenReturn(resultSet); ResultSet indexResultSet = mockIndexMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(ALL_INDEXES_SQL).executeQuery()).thenReturn(indexResultSet); when(dataSource.getConnection().getMetaData().getUserName()).thenReturn("TEST"); ResultSet primaryKeys = mockPrimaryKeysMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(ALL_CONSTRAINTS_SQL_WITH_TABLES).executeQuery()).thenReturn(primaryKeys); when(dataSource.getConnection().getMetaData().getDatabaseMajorVersion()).thenReturn(12); when(dataSource.getConnection().getMetaData().getDatabaseMinorVersion()).thenReturn(1); Collection<SchemaMetaData> actual = getDialectTableMetaDataLoader().load(new MetaDataLoaderMaterial(Collections.singleton("tbl"), dataSource, new OracleDatabaseType(), "sharding_db")); assertTableMetaDataMap(actual); TableMetaData actualTableMetaData = actual.iterator().next().getTables().iterator().next(); Iterator<ColumnMetaData> columnsIterator = actualTableMetaData.getColumns().iterator(); assertThat(columnsIterator.next(), is(new ColumnMetaData("id", Types.INTEGER, true, true, false, true, false, false))); assertThat(columnsIterator.next(), is(new ColumnMetaData("name", Types.VARCHAR, false, false, false, false, false, true))); }
// Resolves the Redis cluster slot for the given key by issuing KEYSLOT
// asynchronously and blocking on the result.
@Override public Integer clusterGetSlotForKey(byte[] key) { RFuture<Integer> f = executorService.readAsync((String)null, StringCodec.INSTANCE, RedisCommands.KEYSLOT, key); return syncFuture(f); }
// Smoke test: KEYSLOT for an arbitrary key returns a non-null slot number.
@Test public void testClusterGetSlotForKey() { Integer slot = connection.clusterGetSlotForKey("123".getBytes()); assertThat(slot).isNotNull(); }
/**
 * Returns the execution {@link Timer} for the given stream rule, lazily
 * registering it in the metric registry on first use.
 *
 * <p>Uses {@code Map.computeIfAbsent} instead of the original get/put pair,
 * which removes the check-then-act race when the backing map is concurrent.
 * {@code MetricRegistry.timer} is idempotent, so even a duplicate
 * registration attempt would return the same timer instance.</p>
 *
 * <p>NOTE(review): as in the original, the cache is keyed by
 * {@code streamRuleId} only — {@code streamId} participates only in the
 * metric name of the first registration. Confirm rule ids are globally
 * unique across streams.</p>
 *
 * @param streamId     id of the stream, used in the metric name
 * @param streamRuleId id of the stream rule; the cache key
 * @return the (possibly newly registered) execution timer
 */
public Timer getExecutionTimer(String streamId, String streamRuleId) {
    return this.streamExecutionTimers.computeIfAbsent(streamRuleId,
            ruleId -> metricRegistry.timer(
                    MetricRegistry.name(Stream.class, streamId, "StreamRule", ruleId, "executionTime")));
}
// Verifies a timer is created and registered under the expected fully
// qualified metric name "<Stream class>.<streamId>.StreamRule.<ruleId>.executionTime".
@Test public void getExecutionTimer() { final Timer timer = streamMetrics.getExecutionTimer("stream-id", "stream-rule-id"); assertThat(timer).isNotNull(); assertThat(metricRegistry.getTimers()) .containsKey("org.graylog2.plugin.streams.Stream.stream-id.StreamRule.stream-rule-id.executionTime"); }
// Extracts the "return_to" parameter from the request and sanitizes it as a
// redirect URL; absent or rejected values yield Optional.empty().
@Override public Optional<String> getReturnTo(HttpRequest request) { return getParameter(request, RETURN_TO_PARAMETER) .flatMap(OAuth2AuthenticationParametersImpl::sanitizeRedirectUrl); }
// Verifies the return_to value is read from the authentication cookie's
// JSON payload and surfaced as the redirect target.
@Test public void get_return_to_parameter() { when(request.getCookies()).thenReturn(new Cookie[]{wrapCookie(AUTHENTICATION_COOKIE_NAME, "{\"return_to\":\"/admin/settings\"}")}); Optional<String> redirection = underTest.getReturnTo(request); assertThat(redirection).contains("/admin/settings"); }
// Converts a scalar operator tree into Kudu predicates via the visitor
// pattern. NOTE(review): returns null (not an empty list) for a null
// operator — callers appear to depend on this; confirm before changing.
public List<KuduPredicate> convert(ScalarOperator operator) { if (operator == null) { return null; } return operator.accept(this, null); }
// Verifies that an OR compound predicate yields no Kudu predicates
// (OR cannot be pushed down, so the result list is empty).
@Test public void testOr() { ConstantOperator intOp = ConstantOperator.createInt(5); ConstantOperator varcharOp = ConstantOperator.createVarchar("abc"); ScalarOperator gt = new BinaryPredicateOperator(BinaryType.GT, F0, intOp); ScalarOperator eq = new BinaryPredicateOperator(BinaryType.EQ, F1, varcharOp); ScalarOperator op = new CompoundPredicateOperator(CompoundPredicateOperator.CompoundType.OR, gt, eq); List<KuduPredicate> result = CONVERTER.convert(op); Assert.assertEquals(result.size(), 0); }
// Merges a value for a key in this open-addressing, linear-probing map.
// The probe walks from the key's hash until it hits either the key or an
// empty slot (value == missingValue). A new value equal to missingValue is
// rejected up front; if the remapping function produces missingValue, the
// entry is removed and the probe chain compacted. Growth is triggered when
// size exceeds the resize threshold.
public int merge(final K key, final int value, final IntIntFunction remappingFunction) { requireNonNull(key); requireNonNull(remappingFunction); final int missingValue = this.missingValue; if (missingValue == value) { throw new IllegalArgumentException("cannot accept missingValue"); } final K[] keys = this.keys; final int[] values = this.values; @DoNotSub final int mask = values.length - 1; @DoNotSub int index = Hashing.hash(key, mask); int oldValue; while (missingValue != (oldValue = values[index])) { if (Objects.equals(keys[index], key)) { break; } index = ++index & mask; } final int newValue = missingValue == oldValue ? value : remappingFunction.apply(oldValue, value); if (missingValue != newValue) { keys[index] = key; values[index] = newValue; if (++size > resizeThreshold) { increaseCapacity(); } } else { keys[index] = null; values[index] = missingValue; --size; compactChain(index); } return newValue; }
// Verifies merge() rejects the map's sentinel missingValue with an
// IllegalArgumentException carrying the documented message.
@Test void mergeThrowsIllegalArgumentExceptionIfValueIsMissingValue() { final int missingValue = 555; final Object2IntHashMap<String> map = new Object2IntHashMap<>(missingValue); final IllegalArgumentException exception = assertThrowsExactly( IllegalArgumentException.class, () -> map.merge("key", missingValue, (v1, v2) -> 42)); assertEquals("cannot accept missingValue", exception.getMessage()); }
// Static factory for a fresh Builder instance.
public static Builder builder() { return new Builder(); }
// Verifies the builder rejects a null components argument at build time
// with an IllegalStateException.
@Test(expected = IllegalStateException.class) public void shouldFailIfNullComponents() { Batch.builder() .setEnvironment(new EnvironmentInformation("Gradle", "1.0")) .setComponents(null) .build(); }
// Estimates the selectivity (filter factor in [0, 1]) of a range predicate
// against a column histogram. Handles, in order: ranges disjoint from the
// histogram's min/max (factor 0); one unknown histogram bound (heuristics
// or unknown); unknown percentile bounds (heuristics based on range length
// and distinct counts); degenerate single-point ranges (1/NDV); and the
// full-range case (proportional to overlapping distinct values). Open/closed
// interval endpoints drive the inclusivity flags passed to the histogram's
// cumulativeProbability, as explained in the inline commentary below.
public static Estimate calculateFilterFactor(StatisticRange range, ConnectorHistogram histogram, Estimate totalDistinctValues, boolean useHeuristics) { boolean openHigh = range.getOpenHigh(); boolean openLow = range.getOpenLow(); Estimate min = histogram.inverseCumulativeProbability(0.0); Estimate max = histogram.inverseCumulativeProbability(1.0); // range is either above or below histogram if ((!max.isUnknown() && (openHigh ? max.getValue() <= range.getLow() : max.getValue() < range.getLow())) || (!min.isUnknown() && (openLow ? min.getValue() >= range.getHigh() : min.getValue() > range.getHigh()))) { return Estimate.of(0.0); } // one of the max/min bounds can't be determined if ((max.isUnknown() && !min.isUnknown()) || (!max.isUnknown() && min.isUnknown())) { // when the range length is 0, the filter factor should be 1/distinct value count if (!useHeuristics) { return Estimate.unknown(); } if (range.length() == 0.0) { return totalDistinctValues.map(distinct -> 1.0 / distinct); } if (isFinite(range.length())) { return Estimate.of(StatisticRange.INFINITE_TO_FINITE_RANGE_INTERSECT_OVERLAP_HEURISTIC_FACTOR); } return Estimate.of(StatisticRange.INFINITE_TO_INFINITE_RANGE_INTERSECT_OVERLAP_HEURISTIC_FACTOR); } // we know the bounds are both known, so calculate the percentile for each bound // The inclusivity arguments can be derived from the open-ness of the interval we're // calculating the filter factor for // e.g. given a variable with values in [0, 10] to calculate the filter of // [1, 9) (openness: false, true) we need the percentile from // [0.0 to 1.0) (openness: false, true) and from [0.0, 9.0) (openness: false, true) // thus for the "lowPercentile" calculation we should pass "false" to be non-inclusive // (same as openness) however, on the high-end we want the inclusivity to be the opposite // of the openness since if it's open, we _don't_ want to include the bound. 
Estimate lowPercentile = histogram.cumulativeProbability(range.getLow(), openLow); Estimate highPercentile = histogram.cumulativeProbability(range.getHigh(), !openHigh); // both bounds are probably infinity, use the infinite-infinite heuristic if (lowPercentile.isUnknown() || highPercentile.isUnknown()) { if (!useHeuristics) { return Estimate.unknown(); } // in the case the histogram has no values if (totalDistinctValues.equals(Estimate.zero()) || range.getDistinctValuesCount() == 0.0) { return Estimate.of(0.0); } // in the case only one is unknown if (((lowPercentile.isUnknown() && !highPercentile.isUnknown()) || (!lowPercentile.isUnknown() && highPercentile.isUnknown())) && isFinite(range.length())) { return Estimate.of(StatisticRange.INFINITE_TO_FINITE_RANGE_INTERSECT_OVERLAP_HEURISTIC_FACTOR); } if (range.length() == 0.0) { return totalDistinctValues.map(distinct -> 1.0 / distinct); } if (!isNaN(range.getDistinctValuesCount())) { return totalDistinctValues.map(distinct -> min(1.0, range.getDistinctValuesCount() / distinct)); } return Estimate.of(StatisticRange.INFINITE_TO_INFINITE_RANGE_INTERSECT_OVERLAP_HEURISTIC_FACTOR); } // in the case the range is a single value, this can occur if the input // filter range is a single value (low == high) OR in the case that the // bounds of the filter or this histogram are infinite. // in the case of infinite bounds, we should return an estimate that // correlates to the overlapping distinct values. 
if (lowPercentile.equals(highPercentile)) { if (!useHeuristics) { return Estimate.zero(); } return totalDistinctValues.map(distinct -> 1.0 / distinct); } // in the case that we return the entire range, the returned factor percent should be // proportional to the number of distinct values in the range if (lowPercentile.equals(Estimate.zero()) && highPercentile.equals(Estimate.of(1.0)) && min.isUnknown() && max.isUnknown()) { if (!useHeuristics) { return Estimate.unknown(); } return totalDistinctValues.flatMap(totalDistinct -> { if (DoubleMath.fuzzyEquals(totalDistinct, 0.0, 1E-6)) { return Estimate.of(1.0); } return Estimate.of(min(1.0, range.getDistinctValuesCount() / totalDistinct)); }) // in the case totalDistinct is NaN or 0 .or(() -> Estimate.of(StatisticRange.INFINITE_TO_INFINITE_RANGE_INTERSECT_OVERLAP_HEURISTIC_FACTOR)); } return lowPercentile.flatMap(lowPercent -> highPercentile.map(highPercent -> highPercent - lowPercent)); }
// Exercises the filter-factor estimator across equal, overlapping,
// single-value, disjoint, empty, and unbounded range/histogram pairs,
// including the distinct-value-limited unbounded heuristics.
@Test public void testCalculateFilterFactor() { StatisticRange zeroToTen = range(0, 10, 10); StatisticRange empty = StatisticRange.empty(); // Equal ranges assertFilterFactor(Estimate.of(1.0), zeroToTen, uniformHist(0, 10), 5); assertFilterFactor(Estimate.of(1.0), zeroToTen, uniformHist(0, 10), 20); // Some overlap assertFilterFactor(Estimate.of(0.5), range(5, 3000, 5), uniformHist(zeroToTen), zeroToTen.getDistinctValuesCount()); // Single value overlap assertFilterFactor(Estimate.of(1.0 / zeroToTen.getDistinctValuesCount()), range(3, 3, 1), uniformHist(zeroToTen), zeroToTen.getDistinctValuesCount()); assertFilterFactor(Estimate.of(1.0 / zeroToTen.getDistinctValuesCount()), range(10, 100, 357), uniformHist(zeroToTen), zeroToTen.getDistinctValuesCount()); // No overlap assertFilterFactor(Estimate.zero(), range(20, 30, 10), uniformHist(zeroToTen), zeroToTen.getDistinctValuesCount()); // Empty ranges assertFilterFactor(Estimate.zero(), zeroToTen, uniformHist(empty), empty.getDistinctValuesCount()); assertFilterFactor(Estimate.zero(), empty, uniformHist(zeroToTen), zeroToTen.getDistinctValuesCount()); // no test for (empty, empty) since any return value is correct assertFilterFactor(Estimate.zero(), unboundedRange(10), uniformHist(empty), empty.getDistinctValuesCount()); assertFilterFactor(Estimate.zero(), empty, uniformHist(unboundedRange(10)), 10); // Unbounded (infinite), NDV-based assertFilterFactor(Estimate.of(0.5), unboundedRange(10), uniformHist(unboundedRange(20)), 20); assertFilterFactor(Estimate.of(1.0), unboundedRange(20), uniformHist(unboundedRange(10)), 10); // NEW TESTS (TPC-H Q2) // unbounded ranges assertFilterFactor(Estimate.of(.5), unboundedRange(0.5), uniformHist(unboundedRange(NaN)), NaN); // unbounded ranges with limited distinct values assertFilterFactor(Estimate.of(0.2), unboundedRange(1.0), domainConstrained(unboundedRange(5.0), uniformHist(unboundedRange(7.0))), 5.0); }
// Parses a semantic-version string into its components via the class-level
// PATTERN (groups: major, minor, patch, unknown, prerelease, build info),
// rethrowing NumberFormatException and non-matching input as
// SemanticVersionParseException.
// NOTE(review): the `"" + version` concatenation is redundant except that it
// renders a null version as "null"; and the major/minor/patch < 0 check is
// presumably unreachable if PATTERN only matches digit groups — confirm
// against the PATTERN definition before simplifying.
public static SemanticVersion parse(String version) throws SemanticVersionParseException { Matcher matcher = PATTERN.matcher(version); if (!matcher.matches()) { throw new SemanticVersionParseException("" + version + " does not match format " + FORMAT); } final int major; final int minor; final int patch; try { major = Integer.parseInt(matcher.group(1)); minor = Integer.parseInt(matcher.group(2)); patch = Integer.parseInt(matcher.group(3)); } catch (NumberFormatException e) { throw new SemanticVersionParseException(e); } final String unknown = matcher.group(4); final String prerelease = matcher.group(5); final String buildInfo = matcher.group(6); if (major < 0 || minor < 0 || patch < 0) { throw new SemanticVersionParseException( String.format("major(%d), minor(%d), and patch(%d) must all be >= 0", major, minor, patch)); } return new SemanticVersion(major, minor, patch, unknown, prerelease, buildInfo); }
// Verifies parsing of plain, rc-suffixed, SNAPSHOT, and vendor (cdh)
// version strings into the expected component tuples.
@Test public void testParse() throws Exception { assertEquals(new SemanticVersion(1, 8, 0), SemanticVersion.parse("1.8.0")); assertEquals(new SemanticVersion(1, 8, 0, true), SemanticVersion.parse("1.8.0rc3")); assertEquals(new SemanticVersion(1, 8, 0, "rc3", "SNAPSHOT", null), SemanticVersion.parse("1.8.0rc3-SNAPSHOT")); assertEquals(new SemanticVersion(1, 8, 0, null, "SNAPSHOT", null), SemanticVersion.parse("1.8.0-SNAPSHOT")); assertEquals(new SemanticVersion(1, 5, 0, null, "cdh5.5.0", null), SemanticVersion.parse("1.5.0-cdh5.5.0")); }
// Parses a JSON string by delegating to parseReader over a StringReader.
public static JsonElement parseString(String json) throws JsonSyntaxException { return parseReader(new StringReader(json)); }
// Verifies whitespace-only input parses to JsonNull rather than throwing.
@Test public void testParseEmptyWhitespaceInput() { JsonElement e = JsonParser.parseString(" "); assertThat(e.isJsonNull()).isTrue(); }
// Returns the generated-key column name; only column index 1 is valid for
// this single-column result-set metadata.
@Override public String getColumnName(final int column) { Preconditions.checkArgument(1 == column); return generatedKeyColumn; }
// Verifies column 1 resolves to the configured generated-key column name.
@Test void assertGetColumnName() throws SQLException { assertThat(actualMetaData.getColumnName(1), is("order_id")); }
// Starts an account request: logs event "3", creates a registration via the
// DigiD client, and maps a NOK status to an error/NOK response. On success
// it initializes a new AppSession (state, flow, registration id, language,
// NFC support), logs "1506" when NFC is unsupported and always logs "6",
// then returns the session id with the current epoch second.
@Override public AppResponse process(Flow flow, RequestAccountRequest request) { digidClient.remoteLog("3"); Map<String, Object> result = digidClient.createRegistration(request); if (result.get(lowerUnderscore(STATUS)).equals("NOK")) { if (result.get(ERROR) != null) { return new StartAccountRequestNokResponse((String) result.get(ERROR), result); } return new NokResponse(); } appSession = new AppSession(); appSession.setState(State.INITIALIZED.name()); appSession.setFlow(flow.getName()); appSession.setRegistrationId(Long.valueOf((Integer) result.get(lowerUnderscore(REGISTRATION_ID)))); appSession.setLanguage(request.getLanguage()); appSession.setNfcSupport(request.getNfcSupport()); if (!request.getNfcSupport()) { digidClient.remoteLog("1506", Map.of(lowerUnderscore(REGISTRATION_ID), appSession.getRegistrationId())); } digidClient.remoteLog("6", Map.of(lowerUnderscore(REGISTRATION_ID), appSession.getRegistrationId())); return new AppSessionResponse(appSession.getId(), Instant.now().getEpochSecond()); }
// Happy-path test: an OK registration result produces an AppSessionResponse
// with an initialized session carrying the flow, registration id, language,
// and NFC flag, and triggers remote log events "3", "6", and "1506".
@Test void processOKTest(){ RequestAccountRequest requestAccountRequest = createRequest(); Long registrationId = 1337L; when(digidClientMock.createRegistration(requestAccountRequest)).thenReturn(Map.of( lowerUnderscore(STATUS), "OK", lowerUnderscore(REGISTRATION_ID), Math.toIntExact(registrationId) )); when(flowMock.getName()).thenReturn(RequestAccountAndAppFlow.NAME); AppResponse appResponse = startAccountRequest.process(flowMock, requestAccountRequest); verify(digidClientMock, times(1)).remoteLog("3"); verify(digidClientMock, times(1)).remoteLog("6", ImmutableMap.of(lowerUnderscore(REGISTRATION_ID), registrationId)); verify(digidClientMock, times(1)).remoteLog("1506", ImmutableMap.of(lowerUnderscore(REGISTRATION_ID), registrationId)); assertTrue(appResponse instanceof AppSessionResponse); assertNotNull(((AppSessionResponse) appResponse).getAppSessionId()); assertEquals(State.INITIALIZED.name(), startAccountRequest.getAppSession().getState()); assertEquals(RequestAccountAndAppFlow.NAME, startAccountRequest.getAppSession().getFlow()); assertEquals(registrationId, startAccountRequest.getAppSession().getRegistrationId()); assertEquals("NL", startAccountRequest.getAppSession().getLanguage()); assertFalse(startAccountRequest.getAppSession().isNfcSupport()); }
// Builds the model's CompilationUnit from the Java template: clones the
// template under the DTO's class/package name, locates the class and its
// default constructor (throwing KiePMMLException / KiePMMLInternalException
// when missing), then fills in the constructor with the target field,
// mining function, package name, and field-type-map population.
public static <T extends Model> CompilationUnit getKiePMMLModelCompilationUnit(final DroolsCompilationDTO<T> droolsCompilationDTO, final String javaTemplate, final String modelClassName) { logger.trace("getKiePMMLModelCompilationUnit {} {} {}", droolsCompilationDTO.getFields(), droolsCompilationDTO.getModel(), droolsCompilationDTO.getPackageName()); String className = droolsCompilationDTO.getSimpleClassName(); CompilationUnit cloneCU = JavaParserUtils.getKiePMMLModelCompilationUnit(className, droolsCompilationDTO.getPackageName(), javaTemplate, modelClassName); ClassOrInterfaceDeclaration modelTemplate = cloneCU.getClassByName(className) .orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + className)); MINING_FUNCTION miningFunction = droolsCompilationDTO.getMINING_FUNCTION(); final ConstructorDeclaration constructorDeclaration = modelTemplate.getDefaultConstructor().orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_DEFAULT_CONSTRUCTOR, modelTemplate.getName()))); String targetField = droolsCompilationDTO.getTargetFieldName(); setConstructor(droolsCompilationDTO.getModel(), constructorDeclaration, modelTemplate.getName(), targetField, miningFunction, droolsCompilationDTO.getPackageName()); addFieldTypeMapPopulation(constructorDeclaration.getBody(), droolsCompilationDTO.getFieldTypeMap()); return cloneCU; }
// Verifies the generated CompilationUnit: correct package declaration, and a
// default constructor that assigns targetField, miningFunction, pmmlMODEL,
// and the sanitized kModulePackageName, plus the field-type-map population
// statements (counted alongside the super() call).
@Test void getKiePMMLModelCompilationUnit() { DataDictionary dataDictionary = new DataDictionary(); String targetFieldString = "target.field"; String targetFieldName =targetFieldString; dataDictionary.addDataFields(new DataField(targetFieldName, OpType.CONTINUOUS, DataType.DOUBLE)); String modelName = "ModelName"; TreeModel model = new TreeModel(); model.setModelName(modelName); model.setMiningFunction(MiningFunction.CLASSIFICATION); MiningField targetMiningField = new MiningField(targetFieldName); targetMiningField.setUsageType(MiningField.UsageType.TARGET); MiningSchema miningSchema = new MiningSchema(); miningSchema.addMiningFields(targetMiningField); model.setMiningSchema(miningSchema); Map<String, KiePMMLOriginalTypeGeneratedType> fieldTypeMap = new HashMap<>(); fieldTypeMap.put(targetFieldString, new KiePMMLOriginalTypeGeneratedType(targetFieldString, getSanitizedClassName(targetFieldString))); String packageName = "net.test"; PMML pmml = new PMML(); pmml.setDataDictionary(dataDictionary); pmml.addModels(model); final CommonCompilationDTO<TreeModel> source = CommonCompilationDTO.fromGeneratedPackageNameAndFields(packageName, pmml, model, new PMMLCompilationContextMock(), "FILENAME"); final DroolsCompilationDTO<TreeModel> droolsCompilationDTO = DroolsCompilationDTO.fromCompilationDTO(source, fieldTypeMap); CompilationUnit retrieved = KiePMMLDroolsModelFactoryUtils.getKiePMMLModelCompilationUnit(droolsCompilationDTO, TEMPLATE_SOURCE, TEMPLATE_CLASS_NAME); assertThat(retrieved.getPackageDeclaration().get().getNameAsString()).isEqualTo(droolsCompilationDTO.getPackageName()); ConstructorDeclaration constructorDeclaration = retrieved.getClassByName(modelName).get().getDefaultConstructor().get(); MINING_FUNCTION miningFunction = MINING_FUNCTION.CLASSIFICATION; PMML_MODEL pmmlModel = PMML_MODEL.byName(model.getClass().getSimpleName()); Map<String, Expression> assignExpressionMap = new HashMap<>(); assignExpressionMap.put("targetField", new 
StringLiteralExpr(targetFieldString)); assignExpressionMap.put("miningFunction", new NameExpr(miningFunction.getClass().getName() + "." + miningFunction.name())); assignExpressionMap.put("pmmlMODEL", new NameExpr(pmmlModel.getClass().getName() + "." + pmmlModel.name())); String expectedKModulePackageName = getSanitizedPackageName(packageName + "." + modelName); assignExpressionMap.put("kModulePackageName", new StringLiteralExpr(expectedKModulePackageName)); assertThat(commonEvaluateAssignExpr(constructorDeclaration.getBody(), assignExpressionMap)).isTrue(); int expectedMethodCallExprs = assignExpressionMap.size() + fieldTypeMap.size() + 1; // The last "1" is for // the super invocation commonEvaluateFieldTypeMap(constructorDeclaration.getBody(), fieldTypeMap, expectedMethodCallExprs); }
// Registers the listener for the queue and subscribes the underlying
// RocketMQ push consumer to it with a wildcard tag filter; returns this for
// chaining. NOTE(review): the MQClientException cause `e` is dropped from
// the rethrown OMSRuntimeException — consider attaching it if the exception
// type supports a cause.
@Override public PushConsumer attachQueue(final String queueName, final MessageListener listener) { this.subscribeTable.put(queueName, listener); try { this.rocketmqPushConsumer.subscribe(queueName, "*"); } catch (MQClientException e) { throw new OMSRuntimeException("-1", String.format("RocketMQ push consumer can't attach to %s.", queueName)); } return this; }
// Verifies an attached listener receives a consumed message with the
// original message id and body intact, and can ack it via the context.
@Test public void testConsumeMessage() { final byte[] testBody = new byte[] {'a', 'b'}; MessageExt consumedMsg = new MessageExt(); consumedMsg.setMsgId("NewMsgId"); consumedMsg.setBody(testBody); consumedMsg.putUserProperty(NonStandardKeys.MESSAGE_DESTINATION, "TOPIC"); consumedMsg.setTopic("HELLO_QUEUE"); consumer.attachQueue("HELLO_QUEUE", new MessageListener() { @Override public void onReceived(Message message, Context context) { assertThat(message.sysHeaders().getString(Message.BuiltinKeys.MESSAGE_ID)).isEqualTo("NewMsgId"); assertThat(((BytesMessage) message).getBody(byte[].class)).isEqualTo(testBody); context.ack(); } }); ((MessageListenerConcurrently) rocketmqPushConsumer .getMessageListener()).consumeMessage(Collections.singletonList(consumedMsg), null); }
// Existence check implemented by attempting an attribute lookup: a
// successful lookup means the path exists; NotfoundException maps to false.
@Override public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException { try { new EueAttributesFinderFeature(session, fileid).find(file); return true; } catch(NotfoundException e) { return false; } }
// Verifies a freshly created file is found as a file but not as a directory
// of the same name, then cleans up by deleting it.
@Test public void testFindFile() throws Exception { final EueResourceIdProvider fileid = new EueResourceIdProvider(session); final Path file = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new EueTouchFeature(session, fileid).touch(file, new TransferStatus()); assertTrue(new EueFindFeature(session, fileid).find(file)); assertFalse(new EueFindFeature(session, fileid).find(new Path(file.getAbsolute(), EnumSet.of(Path.Type.directory)))); new EueDeleteFeature(session, fileid).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
// Pure delegation to the wrapped ExecutorService.
@Override public boolean isTerminated() { return delegate.isTerminated(); }
// Verifies isTerminated() forwards to the underlying executor service and
// performs no other interactions.
@Test public void isTerminated_delegates_to_executorService() { underTest.isTerminated(); inOrder.verify(executorService).isTerminated(); inOrder.verifyNoMoreInteractions(); }
/**
 * Splits one SCRAM config component of the form {@code name=value} into a
 * key/value pair, stripping one pair of surrounding double quotes from the
 * value when present.
 *
 * @param input a single trimmed component, e.g. {@code name="value"}
 * @return an immutable entry of (name, unquoted value)
 * @throws FormatterException when the component contains no equals sign
 */
static Entry<String, String> splitTrimmedConfigStringComponent(String input) {
    // Locate the key/value delimiter; its absence is a format error.
    final int eq = input.indexOf('=');
    if (eq < 0) {
        throw new FormatterException("No equals sign found in SCRAM component: " + input);
    }
    String value = input.substring(eq + 1);
    // Only strip quotes when both are present and the value is long enough
    // to hold an opening and a closing quote.
    if (value.length() >= 2 && value.startsWith("\"") && value.endsWith("\"")) {
        value = value.substring(1, value.length() - 1);
    }
    return new AbstractMap.SimpleImmutableEntry<>(input.substring(0, eq), value);
}
// Verifies a quoted empty value (name="") is unquoted to the empty string.
@Test public void testSplitTrimmedConfigStringComponentOnNameEqualsQuotedEmpty() { assertEquals(new AbstractMap.SimpleImmutableEntry<>("name", ""), ScramParser.splitTrimmedConfigStringComponent("name=\"\"")); }
// Returns the most recent existing backup replay file, or creates a new one
// when none exists; IOExceptions are rethrown unchecked.
@VisibleForTesting BackupReplayFile openOrCreateReplayFile() { try { final Optional<BackupReplayFile> backupReplayFile = latestReplayFile(); if (backupReplayFile.isPresent()) { return backupReplayFile.get(); } return newReplayFile(); } catch (final IOException e) { throw new RuntimeException(e); } }
// Verifies that when a backup file already exists in the backup location,
// it is opened rather than a new file being created.
@Test public void shouldOpenLatestReplayFileWhenOneExists() throws IOException { // Given: backupLocation.newFile("backup_command_topic_111"); // When: final BackupReplayFile replayFile = commandTopicBackup.openOrCreateReplayFile(); // Then: assertThat(replayFile.getPath(), is(String.format( "%s/backup_command_topic_111", backupLocation.getRoot().getAbsolutePath() ))); }
// Convenience overload: registers an instance under the default cluster.
@Override public void registerInstance(String serviceName, String ip, int port) throws NacosException { registerInstance(serviceName, ip, port, Constants.DEFAULT_CLUSTER_NAME); }
// Verifies registration forwards ip/port with default weight 1.0 and the
// default cluster name to the naming proxy.
@Test void testRegisterInstance2() throws NacosException { //given String serviceName = "service1"; String groupName = "group1"; String ip = "1.1.1.1"; int port = 10000; //when client.registerInstance(serviceName, groupName, ip, port); //then verify(proxy, times(1)).registerService(eq(serviceName), eq(groupName), argThat(instance -> instance.getIp().equals(ip) && instance.getPort() == port && Math.abs(instance.getWeight() - 1.0) < 0.01f && instance.getClusterName() .equals(Constants.DEFAULT_CLUSTER_NAME))); }
// POSTs a KSQL statement (with request properties and optional previous
// command sequence number) to the /ksql endpoint and deserializes the
// response body into a KsqlEntityList.
public RestResponse<KsqlEntityList> postKsqlRequest( final String ksql, final Map<String, ?> requestProperties, final Optional<Long> previousCommandSeqNum ) { return post( KSQL_PATH, createKsqlRequest(ksql, requestProperties, previousCommandSeqNum), r -> deserialize(r.getBody(), KsqlEntityList.class) ); }
// Verifies that headers configured on the KsqlTarget are attached to the
// outgoing HTTP request; the request is issued on a separate thread and any
// response error is ignored since only the headers are under test.
@Test public void shouldSendAdditionalHeadersWithKsqlRequest() { // Given: final Map<String, String> additionalHeaders = ImmutableMap.of("h1", "v1", "h2", "v2"); ksqlTarget = new KsqlTarget(httpClient, socketAddress, localProperties, authHeader, HOST, additionalHeaders, RequestOptions.DEFAULT_TIMEOUT); // When: executor.submit(() -> { try { ksqlTarget.postKsqlRequest("some ksql;", Collections.emptyMap(), Optional.empty()); } catch (Exception e) { // ignore response error since this test is just testing headers on the outgoing request } }); assertThatEventually(requestStarted::get, is(true)); handlerCaptor.getValue().handle(Buffer.buffer()); // Then: verify(httpClientRequest).putHeader("h1", "v1"); verify(httpClientRequest).putHeader("h2", "v2"); }
// Selects a node on the given rack whose host is not blacklisted and whose
// load comparator accepts the requested resource (compareAndIncrement also
// reserves capacity). Returns null when no node on the rack qualifies.
public RMNode selectRackLocalNode( String rackName, Set<String> blacklist, Resource request) { Set<NodeId> nodesOnRack = nodeIdsByRack.get(rackName); if (nodesOnRack != null) { for (NodeId nodeId : nodesOnRack) { if (!blacklist.contains(nodeId.getHost())) { ClusterNode node = clusterNodes.get(nodeId); if (node != null && comparator.compareAndIncrement(node, 1, request)) { return nodeByHostName.get(nodeId.getHost()); } } } } return null; }
// Verifies rack-local selection: a node with spare queue capacity is chosen,
// blacklisted hosts are skipped, and exhausted racks yield null.
@Test public void testSelectRackLocalNode() { NodeQueueLoadMonitor selector = new NodeQueueLoadMonitor( NodeQueueLoadMonitor.LoadComparator.QUEUE_LENGTH); RMNode h1 = createRMNode("h1", 1, "rack1", -1, 2, 5); RMNode h2 = createRMNode("h2", 2, "rack2", -1, 5, 5); RMNode h3 = createRMNode("h3", 3, "rack2", -1, 4, 5); selector.addNode(null, h1); selector.addNode(null, h2); selector.addNode(null, h3); selector.updateNode(h1); selector.updateNode(h2); selector.updateNode(h3); // basic test for selecting node which has queue length less // than queue capacity. Set<String> blacklist = new HashSet<>(); RMNode node = selector.selectRackLocalNode( "rack1", blacklist, defaultResourceRequested); Assert.assertEquals("h1", node.getHostName()); // if node has been added to blacklist blacklist.add("h1"); node = selector.selectRackLocalNode( "rack1", blacklist, defaultResourceRequested); Assert.assertNull(node); node = selector.selectRackLocalNode( "rack2", blacklist, defaultResourceRequested); Assert.assertEquals("h3", node.getHostName()); blacklist.add("h3"); node = selector.selectRackLocalNode( "rack2", blacklist, defaultResourceRequested); Assert.assertNull(node); }
// Deprecated API shim: adapts the plain ValueTransformerSupplier to the
// with-key variant and delegates to doTransformValues with an empty name.
@Override @Deprecated public <VR> KStream<K, VR> transformValues(final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, ? extends VR> valueTransformerSupplier, final String... stateStoreNames) { Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null"); return doTransformValues( toValueTransformerWithKeySupplier(valueTransformerSupplier), NamedInternal.empty(), stateStoreNames); }
// Verifies a supplier that returns the same transformer instance on every
// get() call is rejected with the documented IllegalArgumentException.
@Test @SuppressWarnings("deprecation") public void shouldNotAllowBadValueTransformerWithKeySupplierOnTransformValues() { final ValueTransformerWithKey<String, String, String> transformer = valueTransformerWithKeySupplier.get(); final IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, () -> testStream.transformValues(() -> transformer) ); assertThat(exception.getMessage(), containsString("#get() must return a new object each time it is called.")); }
/**
 * Reports whether the given artifact store is non-null and backed by a
 * plugin known to the artifact metadata store.
 *
 * @param artifactStore the store to validate; may be null
 * @return true when the store exists and its plugin id resolves to plugin info
 */
public boolean hasValidPluginAndStore(ArtifactStore artifactStore) {
    // A null store can never resolve to a plugin.
    if (artifactStore == null) {
        return false;
    }
    // Valid only when the metadata store knows the store's plugin id.
    return ArtifactMetadataStore.instance().getPluginInfo(artifactStore.getPluginId()) != null;
}
// Verifies the check fails when the referenced plugin has been removed from
// the metadata store.
@Test public void hasValidPluginAndStore_shouldReturnFalseIfPluginDoesNotExist() { ArtifactMetadataStore.instance().remove("cd.go.s3"); PluggableArtifactConfig pluggableArtifactConfig = new PluggableArtifactConfig("dist", "s3"); assertFalse(pluggableArtifactConfig.hasValidPluginAndStore(new ArtifactStore("s3", "cd.go.s3"))); }
// Tokenizes key=value pairs out of a message string into a map, unquoting
// keys and values; input without any '=' yields an empty map, and null/empty
// input is returned as-is. NOTE(review): m.groupCount() is a property of the
// compiled PATTERN and cannot change per match, so the != 2 guard inside the
// loop is presumably dead — confirm against PATTERN before removing.
@Override public Object convert(String value) { if (isNullOrEmpty(value)) { return value; } if (value.contains("=")) { final Map<String, String> fields = new HashMap<>(); Matcher m = PATTERN.matcher(value); while (m.find()) { if (m.groupCount() != 2) { continue; } fields.put(removeQuotes(m.group(1)), removeQuotes(m.group(2))); } return fields; } else { return Collections.emptyMap(); } }
@Test
public void testFilterWithInvalidKVPairs() {
    // Query-string style "foo=bar" fragments embedded in a URL are not valid
    // k=v tokens, so the converter must produce an empty map.
    final TokenizerConverter converter = new TokenizerConverter(new HashMap<String, Object>());

    @SuppressWarnings("unchecked")
    final Map<String, String> result =
        (Map<String, String>) converter.convert("Ohai I am a message and this is a URL: index.php?foo=bar&baz=bar");

    assertEquals(0, result.size());
}
/**
 * Parses JVM options line by line. Options applicable to the given Java major
 * version are collected; malformed lines are recorded as errors together with
 * their 1-based line number.
 *
 * Fixes: removed the stray empty statement (';') after the catch block and
 * replaced the manual read/break loop with the idiomatic read-then-test form
 * (behavior is unchanged: lineNumber still reflects the line being processed).
 */
static ParseResult parse(final int javaMajorVersion, final BufferedReader br) throws IOException {
    final ParseResult result = new ParseResult();
    int lineNumber = 0;
    String line;
    while ((line = br.readLine()) != null) {
        lineNumber++;
        try {
            jvmOptionFromLine(javaMajorVersion, line).ifPresent(result::appendOption);
        } catch (IllegalArgumentException e) {
            // Malformed option: report the offending line, keep parsing.
            result.appendError(lineNumber, line);
        }
    }
    return result;
}
@Test
public void testErrorLinesAreReportedCorrectly() throws IOException {
    // Mix valid version-scoped/plain options with two malformed lines and
    // check both the accepted options and the 1-based error line numbers.
    final String jvmOptionsContent = String.join(System.lineSeparator(),
        "10-11:-XX:+UseConcMarkSweepGC",
        "invalidOption",
        "-Duser.country=US",
        "anotherInvalidOption");

    final JvmOptionsParser.ParseResult res = JvmOptionsParser.parse(11, asReader(jvmOptionsContent));

    verifyOptions("Option must be present for Java 11",
        "-XX:+UseConcMarkSweepGC" + System.lineSeparator() + "-Duser.country=US", res);
    assertEquals("invalidOption", res.getInvalidLines().get(2));
    assertEquals("anotherInvalidOption", res.getInvalidLines().get(4));
}
/**
 * Convenience overload: renders the whole byte array (treated as length 0
 * when the array is null).
 */
public void appendBytesAsPrintFriendlyString(StringBuilder builder, byte[] phiBytes) {
    final int endPosition = (phiBytes == null) ? 0 : phiBytes.length;
    appendBytesAsPrintFriendlyString(builder, phiBytes, 0, endPosition);
}
@Test public void testAppendBytesAsPrintFriendlyStringWithStartAndEndPositions() { StringBuilder builder = null; try { hl7util.appendBytesAsPrintFriendlyString(builder, null); fail("Exception should be raised with null StringBuilder argument"); } catch (IllegalArgumentException ignoredEx) { // Eat this } builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, null, 0, 1000); assertEquals(hl7util.NULL_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, null, 200, 1000); assertEquals(hl7util.NULL_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, null, -200, 1000); assertEquals(hl7util.NULL_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, null, 0, 0); assertEquals(hl7util.NULL_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, null, 200, 0); assertEquals(hl7util.NULL_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, null, -200, 0); assertEquals(hl7util.NULL_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, null, 0, -1000); assertEquals(hl7util.NULL_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, null, 200, -1000); assertEquals(hl7util.NULL_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, null, -200, -1000); assertEquals(hl7util.NULL_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, new byte[0], 0, 1000); assertEquals(hl7util.EMPTY_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); 
hl7util.appendBytesAsPrintFriendlyString(builder, new byte[0], 200, 1000); assertEquals(hl7util.EMPTY_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, new byte[0], -200, 1000); assertEquals(hl7util.EMPTY_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, new byte[0], 0, 0); assertEquals(hl7util.EMPTY_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, new byte[0], 200, 0); assertEquals(hl7util.EMPTY_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, new byte[0], -200, 0); assertEquals(hl7util.EMPTY_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, new byte[0], 0, -1000); assertEquals(hl7util.EMPTY_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, new byte[0], 200, -1000); assertEquals(hl7util.EMPTY_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, new byte[0], -200, -1000); assertEquals(hl7util.EMPTY_REPLACEMENT_VALUE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, 0, TEST_MESSAGE_BYTES.length); assertEquals(EXPECTED_MESSAGE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, 0, 0); assertEquals("", builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, -14, TEST_MESSAGE_BYTES.length); assertEquals(EXPECTED_MESSAGE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, -14, 0); assertEquals("", 
builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, -14, -14); assertEquals("", builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, -14, 1000000); assertEquals(EXPECTED_MESSAGE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, 0, -14); assertEquals("", builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, 0, 1000000); assertEquals(EXPECTED_MESSAGE, builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, 1000000, TEST_MESSAGE_BYTES.length); assertEquals("", builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, 1000000, 0); assertEquals("", builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, 1000000, -14); assertEquals("", builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, 1000000, 1000000); assertEquals("", builder.toString()); builder = new StringBuilder(); hl7util.appendBytesAsPrintFriendlyString(builder, TEST_MESSAGE_BYTES, 54, 100); assertEquals("ORM^O01|00001|D|2.3|||||||<0x0D CR>PID|1||ICE999999^^^", builder.toString()); }
/**
 * Creates a new Hazelcast instance; falls back to the default configuration
 * lookup ({@code Config.load()}) when no config is supplied.
 */
public static HazelcastInstance newHazelcastInstance(Config config) {
    final Config resolved = (config == null) ? Config.load() : config;
    return newHazelcastInstance(resolved, resolved.getInstanceName(), new DefaultNodeContext());
}
@Test(expected = ExpectedRuntimeException.class)
public void test_NewInstance_failed_beforeNodeStart() throws Exception {
    // A failure thrown from NodeExtension.beforeStart() must abort instance
    // creation and propagate to the caller (expected = ExpectedRuntimeException).
    NodeContext context = new TestNodeContext() {
        @Override
        public NodeExtension createNodeExtension(Node node) {
            // Stub the real extension so beforeStart() throws.
            NodeExtension nodeExtension = super.createNodeExtension(node);
            doThrow(new ExpectedRuntimeException()).when(nodeExtension).beforeStart();
            return nodeExtension;
        }
    };
    Config config = new Config();
    // Disable auto-detection join so startup does not probe the network.
    config.getNetworkConfig().getJoin().getAutoDetectionConfig().setEnabled(false);
    hazelcastInstance = HazelcastInstanceFactory.newHazelcastInstance(config, randomString(), context);
}
/**
 * Widens a big-endian two's-complement number to {@code newLength} bytes,
 * sign-extending negative values with 0xFF. Returns the input array itself
 * when it is already the requested length; throws IllegalArgumentException
 * when the input is longer than the requested length.
 */
@VisibleForTesting
static byte[] padBigEndianBytes(byte[] bigEndianBytes, int newLength) {
    final int length = bigEndianBytes.length;
    if (length == newLength) {
        return bigEndianBytes;
    }
    if (length > newLength) {
        throw new IllegalArgumentException(
            String.format(
                "Buffer size of %d is larger than requested size of %d", length, newLength));
    }
    final byte[] padded = new byte[newLength];
    if (length == 0) {
        return padded;
    }
    final int offset = newLength - length;
    if (bigEndianBytes[0] < 0) {
        // Negative number: sign-extend the prefix.
        Arrays.fill(padded, 0, offset, (byte) 0xFF);
    }
    System.arraycopy(bigEndianBytes, 0, padded, offset, length);
    return padded;
}
@Test
public void testPadBigEndianBytes() {
    // Round-trip: padding a positive BigInteger's bytes to 16 must not
    // change its numeric value.
    final BigInteger original = new BigInteger("12345");
    final byte[] padded = DecimalVectorUtil.padBigEndianBytes(original.toByteArray(), 16);

    assertThat(padded).hasSize(16);
    assertThat(new BigInteger(padded)).isEqualTo(original);
}
@Override
public Metrics toDay() {
    // Down-samples to day granularity: same entity/service and labeled
    // values, only the time bucket is coarsened.
    final MinLabeledFunction dayMetrics = (MinLabeledFunction) createNew();
    dayMetrics.setEntityId(getEntityId());
    dayMetrics.setTimeBucket(toTimeBucketInDay());
    dayMetrics.setServiceId(getServiceId());
    dayMetrics.getValue().copyFrom(getValue());
    return dayMetrics;
}
@Test
public void testToDay() {
    // Feed two labeled samples, roll up to day granularity, and verify the
    // per-label minimum survives the conversion.
    function.accept(MeterEntity.newService("service-test", Layer.GENERAL), HTTP_CODE_COUNT_1);
    function.accept(MeterEntity.newService("service-test", Layer.GENERAL), HTTP_CODE_COUNT_2);
    function.calculate();

    final MinLabeledFunction dayFunction = (MinLabeledFunction) function.toDay();
    dayFunction.calculate();

    assertThat(dayFunction.getValue()).isEqualTo(HTTP_CODE_COUNT_3);
}
/**
 * Captures a replay-session error event into the ring buffer. The encoded
 * payload layout is [sessionId:long][recordingId:long][length-prefixed message].
 * If the claim fails (buffer full), the event is silently dropped.
 */
public void logReplaySessionError(final long sessionId, final long recordingId, final String errorMessage) {
    final int length = SIZE_OF_LONG * 2 + SIZE_OF_INT + errorMessage.length();
    final int captureLength = captureLength(length);
    final ManyToOneRingBuffer buffer = this.ringBuffer;
    final int index = buffer.tryClaim(REPLAY_SESSION_ERROR.toEventCodeId(), encodedLength(captureLength));

    if (index > 0) {
        // Claim succeeded: always commit, even if encoding throws.
        try {
            encodeReplaySessionError(
                (UnsafeBuffer)buffer.buffer(),
                index,
                captureLength,
                length,
                sessionId,
                recordingId,
                errorMessage);
        } finally {
            buffer.commit(index);
        }
    }
}
@Test
void logReplaySessionError() {
    // Position the ring-buffer tail at a non-zero offset so the encoding
    // offsets are exercised away from the buffer start.
    final int offset = ALIGNMENT * 5 + 128;
    logBuffer.putLong(CAPACITY + TAIL_POSITION_OFFSET, offset);
    final long sessionId = 123;
    final long recordingId = Long.MIN_VALUE;
    final String errorMessage = "the error";
    // Expected payload size: two longs + length-prefixed ASCII message.
    final int captureLength = SIZE_OF_LONG * 2 + SIZE_OF_INT + errorMessage.length();

    logger.logReplaySessionError(sessionId, recordingId, errorMessage);

    // Header first, then each encoded field at its fixed little-endian offset.
    verifyLogHeader(logBuffer, offset, REPLAY_SESSION_ERROR.toEventCodeId(), captureLength, captureLength);
    assertEquals(sessionId, logBuffer.getLong(encodedMsgOffset(offset + LOG_HEADER_LENGTH), LITTLE_ENDIAN));
    assertEquals(recordingId,
        logBuffer.getLong(encodedMsgOffset(offset + LOG_HEADER_LENGTH + SIZE_OF_LONG), LITTLE_ENDIAN));
    assertEquals(errorMessage,
        logBuffer.getStringAscii(encodedMsgOffset(offset + LOG_HEADER_LENGTH + SIZE_OF_LONG * 2)));
}
@SuppressWarnings("unused") // Part of required API.
public void execute(
    final ConfiguredStatement<InsertValues> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext
) {
    // Executes an INSERT VALUES statement: resolves the target data source,
    // validates the column list, builds the serialized record, and produces it
    // to the source's topic. Every failure is wrapped in a KsqlException with a
    // consistent "insert failed" message; authorization failures additionally
    // carry a root cause naming the missing ACL.
    final InsertValues insertValues = statement.getStatement();
    final MetaStore metaStore = executionContext.getMetaStore();
    final KsqlConfig config = statement.getSessionConfig().getConfig(true);

    final DataSource dataSource = getDataSource(config, metaStore, insertValues);

    validateInsert(insertValues.getColumns(), dataSource);

    final ProducerRecord<byte[], byte[]> record =
        buildRecord(statement, metaStore, dataSource, serviceContext);

    try {
        producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps());
    } catch (final TopicAuthorizationException e) {
        // TopicAuthorizationException does not give much detailed information about why it failed,
        // except which topics are denied. Here we just add the ACL to make the error message
        // consistent with other authorization error messages.
        final Exception rootCause = new KsqlTopicAuthorizationException(
            AclOperation.WRITE,
            e.unauthorizedTopics()
        );
        throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause);
    } catch (final ClusterAuthorizationException e) {
        // ClusterAuthorizationException is thrown when using idempotent producers
        // and either a topic write permission or a cluster-level idempotent write
        // permission (only applicable for broker versions no later than 2.8) is
        // missing. In this case, we include additional context to help the user
        // distinguish this type of failure from other permissions exceptions
        // such as the ones thrown above when TopicAuthorizationException is caught.
        throw new KsqlException(
            createInsertFailedExceptionMessage(insertValues),
            createClusterAuthorizationExceptionRootCause(dataSource)
        );
    } catch (final KafkaException e) {
        if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) {
            // The error message thrown when an idempotent producer is missing permissions
            // is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException,
            // as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException.
            // ksqlDB handles these two the same way, accordingly.
            // See https://issues.apache.org/jira/browse/KAFKA-14138 for more.
            throw new KsqlException(
                createInsertFailedExceptionMessage(insertValues),
                createClusterAuthorizationExceptionRootCause(dataSource)
            );
        } else {
            throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
        }
    } catch (final Exception e) {
        // Any other failure (serialization, timeouts, ...) is wrapped uniformly.
        throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
    }
}
@Test public void shouldThrowOnInsertKeyHeaders() { // Given: givenSourceStreamWithSchema(SCHEMA_WITH_KEY_HEADERS, SerdeFeatures.of(), SerdeFeatures.of()); final ConfiguredStatement<InsertValues> statement = givenInsertValues( allColumnNames(SCHEMA_WITH_KEY_HEADERS), ImmutableList.of( new StringLiteral("key"), new StringLiteral("str"), new LongLiteral(2L), new NullLiteral(), new NullLiteral() ) ); // When: final Exception e = assertThrows( KsqlException.class, () -> executor.execute(statement, mock(SessionProperties.class), engine, serviceContext) ); // Then: assertThat(e.getMessage(), is("Cannot insert into HEADER columns: HEAD0, HEAD1")); }
/**
 * Returns a description of this topology. Synchronized so the description
 * reflects a consistent snapshot of the internal builder.
 */
public synchronized TopologyDescription describe() {
    final TopologyDescription description = internalTopologyBuilder.describe();
    return description;
}
@Test
public void shouldDescribeMultipleGlobalStoreTopology() {
    // Two independent global stores registered on the topology must both
    // appear in the description built alongside as the expectation.
    addGlobalStoreToTopologyAndExpectedDescription("globalStore1", "source1", "globalTopic1", "processor1", 0);
    addGlobalStoreToTopologyAndExpectedDescription("globalStore2", "source2", "globalTopic2", "processor2", 1);
    assertThat(topology.describe(), equalTo(expectedDescription));
    // hashCode must be consistent with equals for the description objects.
    assertThat(topology.describe().hashCode(), equalTo(expectedDescription.hashCode()));
}
public static Socket fork(ZContext ctx, IAttachedRunnable runnable, Object... args) { Socket pipe = ctx.createSocket(SocketType.PAIR); assert (pipe != null); pipe.bind(String.format(Locale.ENGLISH, "inproc://zctx-pipe-%d", pipe.hashCode())); // Connect child pipe to our pipe ZContext ccontext = ctx.shadow(); Socket cpipe = ccontext.createSocket(SocketType.PAIR); assert (cpipe != null); cpipe.connect(String.format(Locale.ENGLISH, "inproc://zctx-pipe-%d", pipe.hashCode())); // Prepare child thread Thread shim = new ShimThread(ccontext, runnable, args, cpipe); shim.start(); return pipe; }
@Test(timeout = 1000)
public void testCriticalException() throws InterruptedException {
    // An Error thrown from a forked ZThread must reach the context's
    // uncaught-exception handler rather than being swallowed.
    final CountDownLatch stopped = new CountDownLatch(1);
    try (final ZContext ctx = new ZContext()) {
        // The handler releases the latch when the child thread dies.
        ctx.setUncaughtExceptionHandler((t, e) -> stopped.countDown());
        Socket pipe = ZThread.fork(ctx, (args, ctx1, pipe1) -> {
            throw new Error("critical");
        });
        assertThat(pipe, notNullValue());
        // Wait (bounded by the test timeout) for the handler to fire.
        stopped.await();
    }
}
/**
 * Joins path fragments into a single normalized path: blank parts are
 * dropped, each remaining part is trimmed of surrounding slashes, and the
 * results are joined with a single '/' separator/prefix. Returns "/" when
 * nothing remains.
 */
public static String collectPath(String... pathParts) {
    final StringBuilder joined = new StringBuilder();
    for (String part : pathParts) {
        if (StringUtils.isBlank(part)) {
            continue;
        }
        final String trimmed = trimPath(part);
        if (StringUtils.isNotBlank(trimmed)) {
            joined.append(SLASH).append(trimmed);
        }
    }
    if (joined.length() == 0) {
        return String.valueOf(SLASH);
    }
    return joined.toString();
}
@Test(description = "collect path")
public void collectPath() {
    // Fragments with stray leading/trailing slashes collapse into one clean path.
    assertEquals(PathUtils.collectPath("api", "/users/", "{userId}/"), "/api/users/{userId}");
}
@Subscribe
public void onChatMessage(ChatMessage event) {
    // Tracks smelting progress from server (SPAM) chat messages: bars
    // retrieved from the furnace and cannonballs removed from the mould.
    if (event.getType() != ChatMessageType.SPAM) {
        return;
    }

    final String message = event.getMessage();
    if (message.startsWith("You retrieve a bar of")) {
        if (session == null) {
            session = new SmeltingSession();
        }
        session.increaseBarsSmelted();
    } else if (message.endsWith(" to form 8 cannonballs.")) {
        // Double ammo mould: remember the batch size for the removal message.
        cannonBallsMade = 8;
    } else if (message.endsWith(" to form 4 cannonballs.")) {
        cannonBallsMade = 4;
    } else if (message.startsWith("You remove the cannonballs from the mould")) {
        if (session == null) {
            session = new SmeltingSession();
        }
        session.increaseCannonBallsSmelted(cannonBallsMade);
    }
}
@Test
public void testCannonballsDoubleAmmoMould() {
    // Pouring with the double ammo mould followed by removal must credit
    // 8 cannonballs to the session.
    smeltingPlugin.onChatMessage(
        new ChatMessage(null, ChatMessageType.SPAM, "", SMELT_CANNONBALL_DOUBLE_AMMO_MOULD, "", 0));
    smeltingPlugin.onChatMessage(
        new ChatMessage(null, ChatMessageType.SPAM, "", SMELT_CANNONBALL_DONE_MESSAGE, "", 0));

    final SmeltingSession session = smeltingPlugin.getSession();
    assertNotNull(session);
    assertEquals(8, session.getCannonBallsSmelted());
}
/**
 * Resolves each CWE identifier in this set to its human-readable name.
 * The name may be null when the id is unknown to CweDB, so a HashMap
 * (which permits null values) is used rather than a stream collector.
 */
public Map<String, String> getFullCwes() {
    final Map<String, String> namesById = new HashMap<>();
    for (String cwe : cwes) {
        namesById.put(cwe, CweDB.getName(cwe));
    }
    return namesById;
}
@Test
public void testGetFullCwes() {
    CweSet instance = new CweSet();
    instance.addCwe("CWE-89");
    instance.addCwe("CWE-79");

    // Expected id -> human-readable-name resolutions.
    Map<String, String> expected = new HashMap<>();
    expected.put("CWE-79", "Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')");
    expected.put("CWE-89", "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')");

    final Map<String, String> result = instance.getFullCwes();
    expected.forEach((id, name) -> assertTrue(result.get(id).equals(name)));
}
@Override
public void init(Set<String> fields, RecordExtractorConfig recordExtractorConfig) {
    // When no explicit projection is given, extract every column declared in
    // the CSV config; otherwise snapshot the requested field set.
    CSVRecordExtractorConfig csvConfig = (CSVRecordExtractorConfig) recordExtractorConfig;
    _fields = (fields == null || fields.isEmpty())
        ? csvConfig.getColumnNames()
        : ImmutableSet.copyOf(fields);
    _multiValueDelimiter = csvConfig.getMultiValueDelimiter();
}
@Test public void testEscapeCharacterInCSV() throws Exception { // Create CSV config with backslash as escape character. CSVRecordReaderConfig csvRecordReaderConfig = new CSVRecordReaderConfig(); csvRecordReaderConfig.setEscapeCharacter('\\'); // Create a CSV file where records have two values and the second value contains an escaped comma. File escapedFile = new File(_tempDir, "escape.csv"); BufferedWriter writer = new BufferedWriter(new FileWriter(escapedFile)); writer.write("first,second\n"); writer.write("string1, string2\\, string3"); writer.close(); // Try to parse CSV file with escaped comma. CSVRecordReader csvRecordReader = new CSVRecordReader(); HashSet<String> fieldsToRead = new HashSet<>(); fieldsToRead.add("first"); fieldsToRead.add("second"); csvRecordReader.init(escapedFile, fieldsToRead, csvRecordReaderConfig); GenericRow genericRow = new GenericRow(); csvRecordReader.rewind(); // check if parsing succeeded. Assert.assertTrue(csvRecordReader.hasNext()); csvRecordReader.next(genericRow); Assert.assertEquals(genericRow.getValue("first"), "string1"); Assert.assertEquals(genericRow.getValue("second"), "string2, string3"); }
@Override public Column convert(BasicTypeDefine typeDefine) { try { return super.convert(typeDefine); } catch (SeaTunnelRuntimeException e) { PhysicalColumn.PhysicalColumnBuilder builder = PhysicalColumn.builder() .name(typeDefine.getName()) .sourceType(typeDefine.getColumnType()) .nullable(typeDefine.isNullable()) .defaultValue(typeDefine.getDefaultValue()) .comment(typeDefine.getComment()); String kingbaseDataType = typeDefine.getDataType().toUpperCase(); switch (kingbaseDataType) { case KB_TINYINT: builder.dataType(BasicType.BYTE_TYPE); break; case KB_MONEY: builder.dataType(new DecimalType(38, 18)); builder.columnLength(38L); builder.scale(18); break; case KB_BLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength((long) (1024 * 1024 * 1024)); break; case KB_CLOB: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(typeDefine.getLength()); builder.columnLength((long) (1024 * 1024 * 1024)); break; case KB_BIT: builder.dataType(PrimitiveByteArrayType.INSTANCE); // BIT(M) -> BYTE(M/8) long byteLength = typeDefine.getLength() / 8; byteLength += typeDefine.getLength() % 8 > 0 ? 1 : 0; builder.columnLength(byteLength); break; default: throw CommonError.convertToSeaTunnelTypeError( DatabaseIdentifier.KINGBASE, typeDefine.getDataType(), typeDefine.getName()); } return builder.build(); } }
@Test
public void testConvertFloat() {
    // float4 must map to SeaTunnel's FLOAT type, keeping name and source type.
    BasicTypeDefine<Object> typeDefine =
            BasicTypeDefine.builder().name("test").columnType("float4").dataType("float4").build();

    Column column = KingbaseTypeConverter.INSTANCE.convert(typeDefine);

    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.FLOAT_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase());
}
/**
 * Validates a client's stream-management acknowledgement value 'h'
 * (mod 2^32) against the last value we recorded and the newest
 * unacknowledged stanza, delegating to the static overload.
 *
 * @throws IllegalArgumentException when h is negative or exceeds 2^32 - 1
 */
private synchronized boolean validateClientAcknowledgement(long h) {
    if (h < 0) {
        throw new IllegalArgumentException("Argument 'h' cannot be negative, but was: " + h);
    }
    if (h > MASK) {
        throw new IllegalArgumentException("Argument 'h' cannot be larger than 2^32 -1, but was: " + h);
    }

    final long previousH = clientProcessedStanzas.get();
    final Long newestUnackedX;
    if (unacknowledgedServerStanzas.isEmpty()) {
        newestUnackedX = null;
    } else {
        newestUnackedX = unacknowledgedServerStanzas.getLast().x;
    }
    return validateClientAcknowledgement(h, previousH, newestUnackedX);
}
@Test public void testValidateClientAcknowledgement_rollover_edgecase5_unsent() throws Exception { // Setup test fixture. final long MAX = new BigInteger( "2" ).pow( 32 ).longValue() - 1; final long h = 5; final long oldH = MAX - 2; final Long lastUnackedX = 4L; // Execute system under test. final boolean result = StreamManager.validateClientAcknowledgement(h, oldH, lastUnackedX); // Verify results. assertFalse(result); }
@Override
public void execute(GraphModel graphModel) {
    // Computes weakly (and, for directed graphs, strongly) connected
    // components over the visible graph, writing results into node columns.
    isCanceled = false;

    final UndirectedGraph undirectedGraph = graphModel.getUndirectedGraphVisible();
    final Column weaklyConnectedColumn = initializeWeaklyConnectedColumn(graphModel);
    final Column stronglyConnectedColumn =
        isDirected ? initializeStronglyConnectedColumn(graphModel) : null;

    undirectedGraph.readLock();
    try {
        weaklyConnected(undirectedGraph, weaklyConnectedColumn);
        if (isDirected) {
            DirectedGraph directedGraph = graphModel.getDirectedGraphVisible();
            stronglyConnected(directedGraph, graphModel, stronglyConnectedColumn);
        }
    } finally {
        // Always release the read lock, even if component computation fails.
        undirectedGraph.readUnlock();
    }
}
@Test
public void testColumnCreation() {
    // Running the statistic must create the weakly-connected column on the
    // node table, even for a trivial one-node graph.
    final GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(1);
    new ConnectedComponents().execute(graphModel);
    Assert.assertTrue(graphModel.getNodeTable().hasColumn(ConnectedComponents.WEAKLY));
}