Dataset schema: two string columns per row.
- focal_method: the production method under test (string; lengths 13 to 60.9k characters)
- test_case: the unit test exercising that method (string; lengths 25 to 109k characters)
@Override
public void getConfig(FederationConfig.Builder builder) {
    // Register the config of every resolved federation target.
    for (Target target : resolvedTargets.values()) {
        builder.target(target.getTargetConfig());
    }
    // Propagate the optional target selector's global component id, if one is set.
    targetSelector.ifPresent(
            selector -> builder.targetSelector(selector.getGlobalComponentId().stringValue()));
}
@Test
void source_groups_are_inherited_when_inheritDefaultSources_is_true() throws Exception {
    FederationFixture f = new ProvidersWithSourceFixture();
    FederationConfig federationConfig = getConfig(f.federationSearchWithDefaultSources);

    // Exactly one target ("source") is exposed, and it is enabled by default.
    assertEquals(1, federationConfig.target().size());
    FederationConfig.Target target = federationConfig.target(0);
    // Fix: expected value goes first (the original had the arguments reversed,
    // which produces a misleading failure message).
    assertEquals("source", target.id());
    assertTrue(target.useByDefault(), "Not used by default");

    // Both providers' search chains must be inherited by the source target.
    assertEquals(2, target.searchChain().size());
    assertTrue(target.searchChain().stream()
            .map(FederationConfig.Target.SearchChain::providerId)
            .toList().containsAll(List.of("provider1", "provider2")));
}
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    // Thin wrapper: delegate straight to the underlying API client.
    final R response = api.send(request);
    return response;
}
@Test
public void addStickerToSet() {
    // Build the sticker (static format, single emoji, eyes mask) up front for readability.
    InputSticker sticker = new InputSticker(
            "BQADAgADuAAD7yupS4eB23UmZhGuAg",
            Sticker.Format.Static,
            new String[]{"\uD83D\uDE15"})
            .maskPosition(new MaskPosition("eyes", 0f, 0f, 1f));

    BaseResponse response = bot.execute(new AddStickerToSet(chatId, stickerSet, sticker));

    assertTrue(response.isOk());
}
public static RegisteredClient parseRegistered(String jsonString) {
    // Parse the raw JSON text and reuse the JsonElement-based overload.
    return parseRegistered(parser.parse(jsonString));
}
// Round-trips the OpenID Connect dynamic-registration example response through
// parseRegistered and verifies every field of the resulting RegisteredClient:
// credentials, registration metadata, redirect URIs, i18n client name, response
// and grant types, subject type, JWKS/encryption settings, contacts, request URIs.
// Note: client_secret_expires_at is in epoch seconds, hence the * 1000L below.
@Test public void testParseRegistered() { String json = " {\n" + " \"client_id\": \"s6BhdRkqt3\",\n" + " \"client_secret\":\n" + " \"ZJYCqe3GGRvdrudKyZS0XhGv_Z45DuKhCUk0gBR1vZk\",\n" + " \"client_secret_expires_at\": 1577858400,\n" + " \"registration_access_token\":\n" + " \"this.is.an.access.token.value.ffx83\",\n" + " \"registration_client_uri\":\n" + " \"https://server.example.com/connect/register?client_id=s6BhdRkqt3\",\n" + " \"token_endpoint_auth_method\":\n" + " \"client_secret_basic\",\n" + " \"application_type\": \"web\",\n" + " \"redirect_uris\":\n" + " [\"https://client.example.org/callback\",\n" + " \"https://client.example.org/callback2\"],\n" + " \"client_name\": \"My Example\",\n" + " \"client_name#ja-Jpan-JP\":\n" + " \"クライアント名\",\n" + " \"response_types\": [\"code\", \"token\"],\n" + " \"grant_types\": [\"authorization_code\", \"implicit\"],\n" + " \"logo_uri\": \"https://client.example.org/logo.png\",\n" + " \"subject_type\": \"pairwise\",\n" + " \"sector_identifier_uri\":\n" + " \"https://other.example.net/file_of_redirect_uris.json\",\n" + " \"jwks_uri\": \"https://client.example.org/my_public_keys.jwks\",\n" + " \"userinfo_encrypted_response_alg\": \"RSA1_5\",\n" + " \"userinfo_encrypted_response_enc\": \"A128CBC-HS256\",\n" + " \"contacts\": [\"ve7jtb@example.org\", \"mary@example.org\"],\n" + " \"request_uris\":\n" + " [\"https://client.example.org/rf.txt#qpXaRLh_n93TTR9F252ValdatUQvQiJi5BDub2BeznA\"]\n" + " }"; RegisteredClient c = ClientDetailsEntityJsonProcessor.parseRegistered(json); assertEquals("s6BhdRkqt3", c.getClientId()); assertEquals("ZJYCqe3GGRvdrudKyZS0XhGv_Z45DuKhCUk0gBR1vZk", c.getClientSecret()); assertEquals(new Date(1577858400L * 1000L), c.getClientSecretExpiresAt()); assertEquals("this.is.an.access.token.value.ffx83", c.getRegistrationAccessToken()); assertEquals("https://server.example.com/connect/register?client_id=s6BhdRkqt3", c.getRegistrationClientUri()); assertEquals(ClientDetailsEntity.AppType.WEB, 
c.getApplicationType()); assertEquals(ImmutableSet.of("https://client.example.org/callback", "https://client.example.org/callback2"), c.getRedirectUris()); assertEquals("My Example", c.getClientName()); assertEquals(ImmutableSet.of("code", "token"), c.getResponseTypes()); assertEquals(ImmutableSet.of("authorization_code", "implicit"), c.getGrantTypes()); assertEquals("https://client.example.org/logo.png", c.getLogoUri()); assertEquals(ClientDetailsEntity.SubjectType.PAIRWISE, c.getSubjectType()); assertEquals("https://other.example.net/file_of_redirect_uris.json", c.getSectorIdentifierUri()); assertEquals(ClientDetailsEntity.AuthMethod.SECRET_BASIC, c.getTokenEndpointAuthMethod()); assertEquals("https://client.example.org/my_public_keys.jwks", c.getJwksUri()); assertEquals(JWEAlgorithm.RSA1_5, c.getUserInfoEncryptedResponseAlg()); assertEquals(EncryptionMethod.A128CBC_HS256, c.getUserInfoEncryptedResponseEnc()); assertEquals(ImmutableSet.of("ve7jtb@example.org", "mary@example.org"), c.getContacts()); assertEquals(ImmutableSet.of("https://client.example.org/rf.txt#qpXaRLh_n93TTR9F252ValdatUQvQiJi5BDub2BeznA"), c.getRequestUris()); }
public static boolean isConfigurator(URL url) {
    // A URL is a configurator when its protocol is the override protocol...
    if (OVERRIDE_PROTOCOL.equals(url.getProtocol())) {
        return true;
    }
    // ...or when its category (falling back to the default) is "configurators".
    return CONFIGURATORS_CATEGORY.equals(url.getCategory(DEFAULT_CATEGORY));
}
@Test
public void testIsConfigurator() {
    // Plain http URL: not a configurator.
    URL plainUrl = UrlUtils.parseURL("http://example.com", null);
    // override:// protocol: always a configurator.
    URL overrideUrl = UrlUtils.parseURL("override://example.com", null);
    // category=configurators parameter: also a configurator.
    URL categoryUrl = UrlUtils.parseURL("http://example.com?category=configurators", null);

    assertFalse(UrlUtils.isConfigurator(plainUrl));
    assertTrue(UrlUtils.isConfigurator(overrideUrl));
    assertTrue(UrlUtils.isConfigurator(categoryUrl));
}
/**
 * Folds the incoming stats into the extended (delta-enriched) snapshot.
 * Only one thread at a time performs the update; threads that fail to acquire
 * the lock simply return the last computed snapshot instead of blocking.
 */
public JobStatsExtended enrich(JobStats jobStats) {
    JobStats latestJobStats = getLatestJobStats(jobStats, previousJobStats);
    if (lock.tryLock()) {
        try {
            setFirstRelevantJobStats(latestJobStats);
            setJobStatsExtended(latestJobStats);
            setPreviousJobStats(latestJobStats);
        } finally {
            // Fix: unlock in a finally block. The original called unlock() inline,
            // so an exception from any setter would leave the lock held forever.
            lock.unlock();
        }
    }
    return jobStatsExtended;
}
@Test
void jobStatsAreKeptToDoCalculations() {
    JobStats firstJobStats = getJobStats(0L, 0L, 0L, 80L);
    JobStats secondJobStats = getJobStats(10L, 0L, 0L, 100L);
    JobStats thirdJobStats = getJobStats(10L, 0L, 0L, 105L);

    jobStatsEnricher.enrich(firstJobStats);
    jobStatsEnricher.enrich(secondJobStats);
    jobStatsEnricher.enrich(thirdJobStats);

    // The enricher must retain the most recent stats for delta calculations.
    JobStats jobStats = Whitebox.getInternalState(jobStatsEnricher, "previousJobStats");
    // isEqualToComparingFieldByField is deprecated in AssertJ; recursive
    // comparison is its documented replacement with identical semantics here.
    assertThat(jobStats).usingRecursiveComparison().isEqualTo(thirdJobStats);
}
@GetMapping(value = "/self")
@Secured(resource = Commons.NACOS_CORE_CONTEXT + "/cluster", action = ActionTypes.READ, signType = SignType.CONSOLE)
public RestResult<Member> self() {
    // Expose this node's own cluster-member descriptor.
    Member self = memberManager.getSelf();
    return RestResultUtils.success(self);
}
@Test
void testSelf() {
    // Arrange: the member manager reports a known Member instance.
    Member expected = new Member();
    Mockito.when(serverMemberManager.getSelf()).thenReturn(expected);

    // Act + assert: the controller wraps exactly that instance.
    RestResult<Member> result = nacosClusterController.self();
    assertEquals(expected, result.getData());
}
static public boolean createMissingParentDirectories(File file) { File parent = file.getParentFile(); if (parent == null) { // Parent directory not specified, therefore it's a request to // create nothing. Done! ;) return true; } // File.mkdirs() creates the parent directories only if they don't // already exist; and it's okay if they do. parent.mkdirs(); return parent.exists(); }
@Test
public void createParentDirIgnoresExistingDir() {
    String target = CoreTestConstants.OUTPUT_DIR_PREFIX + "/fu" + diff + "/testing.txt";
    File file = new File(target);
    cleanupList.add(file);
    // Fix: create only the *parent* directory. The original called file.mkdirs(),
    // which created "testing.txt" itself as a directory — not what a test named
    // "ignores existing dir" intends, and it pollutes later use of the path.
    file.getParentFile().mkdirs();
    assertTrue(file.getParentFile().exists());
    // The call must succeed (and be a no-op) when the parent already exists.
    assertTrue(FileUtil.createMissingParentDirectories(file));
}
@Override
public TenantPackageDO validTenantPackage(Long id) {
    TenantPackageDO pkg = tenantPackageMapper.selectById(id);
    // Unknown id: the package must exist to be usable.
    if (pkg == null) {
        throw exception(TENANT_PACKAGE_NOT_EXISTS);
    }
    // Disabled packages are rejected with the package name in the error.
    if (pkg.getStatus().equals(CommonStatusEnum.DISABLE.getStatus())) {
        throw exception(TENANT_PACKAGE_DISABLE, pkg.getName());
    }
    return pkg;
}
@Test public void testValidTenantPackage_success() { // mock 数据 TenantPackageDO dbTenantPackage = randomPojo(TenantPackageDO.class, o -> o.setStatus(CommonStatusEnum.ENABLE.getStatus())); tenantPackageMapper.insert(dbTenantPackage);// @Sql: 先插入出一条存在的数据 // 调用 TenantPackageDO result = tenantPackageService.validTenantPackage(dbTenantPackage.getId()); // 断言 assertPojoEquals(dbTenantPackage, result); }
public Img pressText(String pressText, Color color, Font font, int x, int y, float alpha) {
    // Convenience overload: wrap the raw coordinates in a Point and delegate.
    final Point position = new Point(x, y);
    return pressText(pressText, color, font, position, alpha);
}
// Manual/visual check: stamps red "版权所有" (bold 黑体, size 100) onto a local
// image at (0, 100) with full opacity and writes the result to disk.
// Disabled because it depends on machine-local files under d:/test.
@Test @Disabled public void pressTextTest() { Img.from(FileUtil.file("d:/test/617180969474805871.jpg")) .setPositionBaseCentre(false) .pressText("版权所有", Color.RED, // new Font("黑体", Font.BOLD, 100), // 0, // 100, // 1f) .write(FileUtil.file("d:/test/test2_result.png")); }
// Parses a Swagger JSON root into a DocInfo: one DocItem per path (using only the
// first HTTP method listed for each path), grouped into DocModules sorted by order.
// Upload endpoints are forced to POST. The title falls back to basePath when the
// "info" object is absent.
// NOTE(review): "getMuduleOrder" looks like a typo for "getModuleOrder" in the
// helper it calls — defined elsewhere, so it cannot be fixed from this block.
// NOTE(review): only the first method of each path is documented; presumably
// multi-method paths are not expected here — confirm with the Swagger source.
@Override public DocInfo parseJson(final JsonObject docRoot) { final String basePath = docRoot.get("basePath").getAsString(); final String title = Optional.ofNullable(docRoot.getAsJsonObject("info")).map(jsonObject -> jsonObject.get("title").getAsString()).orElse(basePath); final List<DocItem> docItems = new ArrayList<>(); JsonObject paths = docRoot.getAsJsonObject("paths"); if (Objects.isNull(paths)) { paths = new JsonObject(); } Set<String> pathNameSet = paths.keySet(); for (String apiPath : pathNameSet) { JsonObject pathInfo = paths.getAsJsonObject(apiPath); Collection<String> httpMethodList = getHttpMethods(pathInfo); Optional<String> first = httpMethodList.stream().findFirst(); if (first.isPresent()) { String method = first.get(); JsonObject docInfo = pathInfo.getAsJsonObject(method); docInfo.addProperty("real_req_path", apiPath); docInfo.addProperty("basePath", basePath); DocItem docItem = buildDocItem(docInfo, docRoot); if (Objects.isNull(docItem)) { continue; } if (docItem.isUploadRequest()) { docItem.setHttpMethodList(Sets.newHashSet("post")); } else { docItem.setHttpMethodList(httpMethodList); } docItems.add(docItem); } } docItems.sort(Comparator.comparing(DocItem::getApiOrder).thenComparing(DocItem::getName)); List<DocModule> docModuleList = docItems.stream() .collect(Collectors.groupingBy(DocItem::getModule)) .entrySet() .stream() .map(entry -> { List<DocItem> docItemList = entry.getValue(); DocModule docModule = new DocModule(); docModule.setModule(entry.getKey()); docModule.setDocItems(docItemList); docModule.setOrder(getMuduleOrder(docItemList)); return docModule; }) .sorted(Comparator.comparing(DocModule::getOrder)) .collect(Collectors.toList()); DocInfo docInfo = new DocInfo(); docInfo.setTitle(title); docInfo.setDocModuleList(docModuleList); return docInfo; }
@Test
public void testParseJson() {
    JsonObject docRoot = GsonUtils.getInstance().fromJson(DOC_INFO_JSON, JsonObject.class);
    docRoot.addProperty("basePath", "/" + "testClusterName");

    DocInfo docInfo = swaggerDocParser.parseJson(docRoot);

    // Fix: the original used the bare Java `assert` keyword, which is silently
    // skipped unless the JVM runs with -ea — the test could never fail.
    assertEquals("Order API", docInfo.getDocModuleList().get(0).getModule());
}
// Maps a (lat, lon) coordinate to the SRTM tile name "srtm_<lonIdx>_<latIdx>".
// Tiles are LAT_DEGREE degrees wide; longitude indices start at 1 from -180°,
// latitude indices start at 1 from +60° going south. The latInt-- correction
// pulls coordinates that land (within floating-point precision) exactly on a
// tile's northern edge back into the tile above.
// String.format("srtm_%02d_%02d", ...) was deliberately replaced with manual
// zero-padded concatenation because format() proved slow on this hot path.
protected String getFileName(double lat, double lon) { lon = 1 + (180 + lon) / LAT_DEGREE; int lonInt = (int) lon; lat = 1 + (60 - lat) / LAT_DEGREE; int latInt = (int) lat; if (Math.abs(latInt - lat) < invPrecision / LAT_DEGREE) latInt--; // replace String.format as it seems to be slow // String.format("srtm_%02d_%02d", lonInt, latInt); String str = "srtm_"; str += lonInt < 10 ? "0" : ""; str += lonInt; str += latInt < 10 ? "_0" : "_"; str += latInt; return str; }
// Table-driven check of the SRTM tile-name mapping: northern and southern
// hemispheres, negative longitudes, tile-edge coordinates (-11/-14/-15 all fall
// in tile 34_08), and a real-world coordinate near Cambridge, UK.
@Test public void testFileName() { assertEquals("srtm_36_02", instance.getFileName(52, -0.1)); assertEquals("srtm_35_02", instance.getFileName(50, -10)); assertEquals("srtm_36_23", instance.getFileName(-52, -0.1)); assertEquals("srtm_35_22", instance.getFileName(-50, -10)); assertEquals("srtm_39_03", instance.getFileName(49.9, 11.5)); assertEquals("srtm_34_08", instance.getFileName(20, -11)); assertEquals("srtm_34_08", instance.getFileName(20, -14)); assertEquals("srtm_34_08", instance.getFileName(20, -15)); assertEquals("srtm_37_02", instance.getFileName(52.1943832, 0.1363176)); }
/**
 * Authenticates the given credentials asynchronously. Missing fields fail fast
 * via the handler; the actual credential check runs on a worker executor so the
 * event loop is never blocked.
 */
@Override
public void authenticate(
    final JsonObject authInfo,
    final Handler<AsyncResult<User>> resultHandler
) {
    final String username = authInfo.getString("username");
    if (username == null) {
        resultHandler.handle(Future.failedFuture("authInfo missing 'username' field"));
        return;
    }

    final String password = authInfo.getString("password");
    if (password == null) {
        resultHandler.handle(Future.failedFuture("authInfo missing 'password' field"));
        return;
    }

    // ordered=false: login attempts need not be serialised on the worker pool.
    server.getWorkerExecutor().executeBlocking(
        promisedUser -> getUser(contextName, username, password, promisedUser),
        false,
        resultHandler
    );
}
// A user holding none of the allowed roles must still complete login
// "successfully" but end up unauthorized (verified by the helper), rather than
// receiving a hard authentication failure.
@Test public void shouldFailToAuthenticateWithNoRole() throws Exception { // Given: givenAllowedRoles("user"); givenUserRoles(); // When: authProvider.authenticate(authInfo, userHandler); // Then: verifyUnauthorizedSuccessfulLogin(); }
@Override
public boolean processArgument(final ShenyuRequest shenyuRequest, final Annotation annotation, final Object arg) {
    // Strings are used verbatim as the request body; anything else is serialised to JSON.
    final String body = arg instanceof String ? (String) arg : JsonUtils.toJson(arg);
    shenyuRequest.setBody(body);
    return true;
}
@Test
public void processArgumentNullTest() {
    final RequestBody body = mock(RequestBody.class);
    // An empty string argument must be set as the body verbatim.
    processor.processArgument(request, body, "");
    // Fix: expected value comes first — the original had assertEquals'
    // arguments reversed, yielding a misleading message on failure.
    assertEquals("", request.getBody());
}
// Converts an R2 RestRequest into a Netty full HTTP/1.1 request: copies the
// method, path (defaulting to "/" per RFC 2616 §5.1.2 when the URI has none),
// wraps the entity bytes without copying, sets Content-Length from the entity,
// and delegates header/cookie transfer to setHttpHeadersAndCookies.
public static HttpRequest toNettyRequest(RestRequest request) throws Exception { HttpMethod nettyMethod = HttpMethod.valueOf(request.getMethod()); URL url = new URL(request.getURI().toString()); String path = url.getFile(); // RFC 2616, section 5.1.2: // Note that the absolute path cannot be empty; if none is present in the original URI, // it MUST be given as "/" (the server root). if (path.isEmpty()) { path = "/"; } ByteBuf content = Unpooled.wrappedBuffer(request.getEntity().asByteBuffer()); HttpRequest nettyRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, nettyMethod, path, content); nettyRequest.headers().set(HttpConstants.CONTENT_LENGTH, request.getEntity().length()); setHttpHeadersAndCookies(request, url, nettyRequest); return nettyRequest; }
// Verifies the *stream* request conversion path: method, URI and protocol are
// carried over, the caller-supplied Content-Length is dropped (streaming uses
// chunked transfer instead — hence the assertNull), and Content-Type and
// cookies survive the conversion.
@Test public void testStreamToNettyRequest() throws Exception { StreamRequestBuilder streamRequestBuilder = new StreamRequestBuilder(new URI(ANY_URI)); streamRequestBuilder.setMethod("POST"); streamRequestBuilder.setHeader("Content-Length", Integer.toString(ANY_ENTITY.length())); streamRequestBuilder.setHeader("Content-Type", "application/json"); streamRequestBuilder.setCookies(Collections.singletonList(ANY_COOKIE)); StreamRequest streamRequest = streamRequestBuilder.build( EntityStreams.newEntityStream(new ByteStringWriter(ByteString.copy(ANY_ENTITY.getBytes())))); HttpRequest nettyRequest = NettyRequestAdapter.toNettyRequest(streamRequest); Assert.assertEquals(nettyRequest.uri(), "/foo/bar?q=baz"); Assert.assertEquals(nettyRequest.method(), HttpMethod.POST); Assert.assertEquals(nettyRequest.protocolVersion(), HttpVersion.HTTP_1_1); Assert.assertNull(nettyRequest.headers().get("Content-Length")); Assert.assertEquals(nettyRequest.headers().get("Content-Type"), "application/json"); Assert.assertEquals(nettyRequest.headers().get("Cookie"), ANY_COOKIE); }
// Returns a fresh iterator over every long value contained in this set's
// sequences, delegating the traversal to SequenceIterator.
public Iterator<Long> iterator() { return new SequenceIterator(); }
@Test
public void testIterator() {
    SequenceSet set = new SequenceSet();
    set.add(new Sequence(0, 2));
    set.add(new Sequence(4, 5));
    set.add(new Sequence(7));
    set.add(new Sequence(20, 21));

    // Every value of every contained range, in ascending order.
    long[] expected = {0, 1, 2, 4, 5, 7, 20, 21};
    int index = 0;
    Iterator<Long> iterator = set.iterator();
    while (iterator.hasNext()) {
        assertEquals(expected[index++], iterator.next().longValue());
    }
    // Fix: also assert exhaustion — the original passed even if the iterator
    // stopped early, since fewer iterations just meant fewer assertions.
    assertEquals(expected.length, index);
}
// Fetches the plugin's icon by sending the "get icon" request through the
// request helper; the success callback deserialises the response body into an
// Image via the v1 message converter.
@Override public Image getIcon(String pluginId) { return pluginRequestHelper.submitRequest(pluginId, SecretsPluginConstants.REQUEST_GET_PLUGIN_ICON, new DefaultPluginInteractionCallback<>() { @Override public Image onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) { return secretsMessageConverterV1.getImageFromResponseBody(responseBody); } }); }
// Stubs the plugin manager to return a base64-encoded PNG payload and verifies
// that getIcon issues the "get icon" request (with a null body) and decodes the
// content type and data from the response.
@Test void shouldTalkToPlugin_toGetIcon() { when(pluginManager.submitTo(eq(PLUGIN_ID), eq(SECRETS_EXTENSION), requestArgumentCaptor.capture())) .thenReturn(DefaultGoPluginApiResponse.success("{\"content_type\":\"image/png\",\"data\":\"Zm9vYmEK\"}")); final Image icon = secretsExtensionV1.getIcon(PLUGIN_ID); assertThat(icon.getContentType()).isEqualTo("image/png"); assertThat(icon.getData()).isEqualTo("Zm9vYmEK"); assertExtensionRequest(REQUEST_GET_PLUGIN_ICON, null); }
// Validates the end-transaction request (topic present, transaction id non-blank),
// maps the gRPC resolution enum onto the internal TransactionStatus (anything
// other than COMMIT/ROLLBACK stays UNKNOWN), forwards to the messaging processor,
// and maps a successful completion to an OK response. Any synchronous failure is
// surfaced as an exceptionally-completed future rather than a thrown exception.
// NOTE(review): request.getTopic().getName() is passed twice to endTransaction —
// presumably the two parameters are distinct (e.g. topic vs. broker/group) in the
// processor's signature; confirm this duplication is intentional.
public CompletableFuture<EndTransactionResponse> endTransaction(ProxyContext ctx, EndTransactionRequest request) { CompletableFuture<EndTransactionResponse> future = new CompletableFuture<>(); try { validateTopic(request.getTopic()); if (StringUtils.isBlank(request.getTransactionId())) { throw new GrpcProxyException(Code.INVALID_TRANSACTION_ID, "transaction id cannot be empty"); } TransactionStatus transactionStatus = TransactionStatus.UNKNOWN; TransactionResolution transactionResolution = request.getResolution(); switch (transactionResolution) { case COMMIT: transactionStatus = TransactionStatus.COMMIT; break; case ROLLBACK: transactionStatus = TransactionStatus.ROLLBACK; break; default: break; } future = this.messagingProcessor.endTransaction( ctx, request.getTopic().getName(), request.getTransactionId(), request.getMessageId(), request.getTopic().getName(), transactionStatus, request.getSource().equals(TransactionSource.SOURCE_SERVER_CHECK)) .thenApply(r -> EndTransactionResponse.newBuilder() .setStatus(ResponseBuilder.getInstance().buildStatus(Code.OK, Code.OK.name())) .build()); } catch (Throwable t) { future.completeExceptionally(t); } return future; }
// Parameterised-style test (resolution/source/transactionStatus/fromTransactionCheck
// are supplied by the enclosing fixture): stubs the messaging processor, sends an
// end-transaction request, and verifies the OK response plus the exact status and
// check-origin flag forwarded to the processor via argument captors.
@Test public void testEndTransaction() throws Throwable { ArgumentCaptor<TransactionStatus> transactionStatusCaptor = ArgumentCaptor.forClass(TransactionStatus.class); ArgumentCaptor<Boolean> fromTransactionCheckCaptor = ArgumentCaptor.forClass(Boolean.class); when(this.messagingProcessor.endTransaction(any(), any(), anyString(), anyString(), anyString(), transactionStatusCaptor.capture(), fromTransactionCheckCaptor.capture())).thenReturn(CompletableFuture.completedFuture(null)); EndTransactionResponse response = this.endTransactionActivity.endTransaction( createContext(), EndTransactionRequest.newBuilder() .setResolution(resolution) .setTopic(Resource.newBuilder().setName("topic").build()) .setMessageId(MessageClientIDSetter.createUniqID()) .setTransactionId(MessageClientIDSetter.createUniqID()) .setSource(source) .build() ).get(); assertEquals(Code.OK, response.getStatus().getCode()); assertEquals(transactionStatus, transactionStatusCaptor.getValue()); assertEquals(fromTransactionCheck, fromTransactionCheckCaptor.getValue()); }
protected void insertModelBefore(EpoxyModel<?> modelToInsert, EpoxyModel<?> modelToInsertBefore) {
    int insertionIndex = getModelPosition(modelToInsertBefore);
    if (insertionIndex == -1) {
        throw new IllegalStateException("Model is not added: " + modelToInsertBefore);
    }
    // Suppress list-change callbacks during the mutation so exactly one
    // item-inserted notification is dispatched, at the end.
    pauseModelListNotifications();
    models.add(insertionIndex, modelToInsert);
    resumeModelListNotifications();
    notifyItemInserted(insertionIndex);
}
// Inserting before a model that was never added must fail fast with
// IllegalStateException rather than silently appending.
@Test(expected = IllegalStateException.class) public void testInsertModelBeforeThrowsForInvalidModel() { testAdapter.insertModelBefore(new TestModel(), new TestModel()); }
// Visitor-pattern helper: dispatches this visitor to the given expression and
// returns the visit result.
private <T> T accept(Expression<T> expr) { return expr.accept(this); }
// Greater-than semantics: strictly greater is true, lesser and equal are false;
// also verifies a condition loaded from a JSON fixture evaluates the same way.
@Test public void testGreater() throws Exception { assertThat(Expr.Greater.create( Expr.NumberValue.create(2), Expr.NumberValue.create(1) ).accept(new BooleanNumberConditionsVisitor())) .isTrue(); assertThat(Expr.Greater.create( Expr.NumberValue.create(1), Expr.NumberValue.create(2) ).accept(new BooleanNumberConditionsVisitor())) .isFalse(); assertThat(Expr.Greater.create( Expr.NumberValue.create(2), Expr.NumberValue.create(2) ).accept(new BooleanNumberConditionsVisitor())) .isFalse(); assertThat(loadCondition("condition-greater.json").accept(new BooleanNumberConditionsVisitor())) .isTrue(); }
// Convenience overload: look up reservations for the given id and interval
// without filtering by user (null user delegates to the three-arg variant).
@Override public Set<ReservationAllocation> getReservations(ReservationId reservationID, ReservationInterval interval) { return getReservations(reservationID, interval, null); }
@Test public void testGetReservationsWithNoReservation() { Plan plan = new InMemoryPlan(queueMetrics, policy, agent, totalCapacity, 1L, resCalc, minAlloc, maxAlloc, planName, replanner, true, context); // Verify that get reservation returns no entries if no queries are made. ReservationInterval interval = new ReservationInterval(0, Long.MAX_VALUE); Set<ReservationAllocation> rAllocations = plan.getReservations(null, interval, ""); Assert.assertTrue(rAllocations.size() == 0); }
// Plain accessor for the backing configuration properties.
// NOTE(review): this exposes the mutable Properties object directly — callers
// can modify configuration state; confirm that is intended.
public Properties getProperties() { return properties; }
// Undefined keys resolve to null while configured keys ("hibernate.types.abc")
// return their configured value from the singleton Configuration.
@Test public void testHibernateProperties() { assertNull(Configuration.INSTANCE.getProperties().getProperty("hibernate.types.nothing")); assertEquals("def", Configuration.INSTANCE.getProperties().getProperty("hibernate.types.abc")); }
@Override
public ModuleState build() {
    ModuleState state = new ModuleState(Constants.SYS_MODULE);
    // Report standalone vs. cluster startup mode.
    boolean standalone = EnvUtil.getStandaloneMode();
    state.newState(Constants.STARTUP_MODE_STATE,
            standalone ? EnvUtil.STANDALONE_MODE_ALONE : EnvUtil.STANDALONE_MODE_CLUSTER);
    state.newState(Constants.FUNCTION_MODE_STATE, EnvUtil.getFunctionMode());
    state.newState(Constants.NACOS_VERSION, VersionUtils.version);
    state.newState(Constants.SERVER_PORT_STATE, EnvUtil.getPort());
    return state;
}
@Test
void testBuild() {
    ModuleState actual = new EnvModuleStateBuilder().build();
    assertEquals(Constants.SYS_MODULE, actual.getModuleName());
    assertEquals(EnvUtil.STANDALONE_MODE_ALONE, actual.getStates().get(Constants.STARTUP_MODE_STATE));
    // Fix: the original passed EnvUtil.FUNCTION_MODE_NAMING as the assertNull
    // *message* argument, which is almost certainly a mistake; use a
    // descriptive message instead.
    assertNull(actual.getStates().get(Constants.FUNCTION_MODE_STATE),
            "function mode should be unset by default");
    assertEquals(VersionUtils.version, actual.getStates().get(Constants.NACOS_VERSION));
}
// Builds the refactoring that wraps a DateFormat-style field in a ThreadLocal:
// rewrites the declared type to ThreadLocal<T>, wraps the initializer in
// ThreadLocal.withInitial(() -> ...), then scans the whole compilation unit and
// appends ".get()" to every identifier that resolves to the same symbol so all
// usages read through the ThreadLocal. The incoming `rename` fix is extended,
// not replaced.
private static SuggestedFix threadLocalFix( VariableTree tree, VisitorState state, VarSymbol sym, SuggestedFix rename) { SuggestedFix.Builder fix = rename.toBuilder() .replace( tree.getType(), String.format("ThreadLocal<%s>", state.getSourceForNode(tree.getType()))) .prefixWith(tree.getInitializer(), "ThreadLocal.withInitial(() -> ") .postfixWith(tree.getInitializer(), ")"); CompilationUnitTree unit = state.getPath().getCompilationUnit(); unit.accept( new TreeScanner<Void, Void>() { @Override public Void visitIdentifier(IdentifierTree tree, Void unused) { if (Objects.equals(ASTHelpers.getSymbol(tree), sym)) { fix.postfixWith(tree, ".get()"); } return null; } }, null); return fix.build(); }
// End-to-end refactoring check: a static final SimpleDateFormat field is
// rewritten to a ThreadLocal<DateFormat> initialised via withInitial, and the
// usage site gains a .get() call. Input/output sources must match exactly.
@Test public void threadLocalFix() { BugCheckerRefactoringTestHelper.newInstance(DateFormatConstant.class, getClass()) .addInputLines( "in/Test.java", "import java.text.SimpleDateFormat;", "import java.text.DateFormat;", "import java.util.Date;", "class Test {", " private static final DateFormat DATE_FORMAT =", " new SimpleDateFormat(\"yyyy-MM-dd HH:mm\");", " static String f(Date d) {", " return DATE_FORMAT.format(d);", " }", "}") .addOutputLines( "out/Test.java", "import java.text.SimpleDateFormat;", "import java.text.DateFormat;", "import java.util.Date;", "class Test {", " private static final ThreadLocal<DateFormat> dateFormat = ", " ThreadLocal.withInitial(() -> new SimpleDateFormat(\"yyyy-MM-dd HH:mm\"));", " static String f(Date d) {", " return dateFormat.get().format(d);", " }", "}") .doTest(); }
// Convenience overload: structural equivalence with the default SAME
// nullability policy (field order may differ, nullability must match).
public boolean equivalent(Schema other) { return equivalent(other, EquivalenceNullablePolicy.SAME); }
// Two schemas with the same fields (including nested row/array/map fields) in a
// different order are not equals() but must be equivalent().
@Test public void testEquivalent() { final Schema expectedNested1 = Schema.builder().addStringField("yard1").addInt64Field("yard2").build(); final Schema expectedSchema1 = Schema.builder() .addStringField("field1") .addInt64Field("field2") .addRowField("field3", expectedNested1) .addArrayField("field4", FieldType.row(expectedNested1)) .addMapField("field5", FieldType.STRING, FieldType.row(expectedNested1)) .build(); final Schema expectedNested2 = Schema.builder().addInt64Field("yard2").addStringField("yard1").build(); final Schema expectedSchema2 = Schema.builder() .addMapField("field5", FieldType.STRING, FieldType.row(expectedNested2)) .addArrayField("field4", FieldType.row(expectedNested2)) .addRowField("field3", expectedNested2) .addInt64Field("field2") .addStringField("field1") .build(); assertNotEquals(expectedSchema1, expectedSchema2); assertTrue(expectedSchema1.equivalent(expectedSchema2)); }
@Override
public Num calculate(BarSeries series, Position position) {
    // Gather the three component metrics, then combine them via the shared overload.
    Num ratio = profitLossRatioCriterion.calculate(series, position);
    Num totalPositions = numberOfPositionsCriterion.calculate(series, position);
    Num winningPositions = numberOfWinningPositionsCriterion.calculate(series, position);
    return calculate(series, ratio, winningPositions, totalPositions);
}
@Test
public void calculateProfitWithShortPositions() {
    // Two short positions over a falling series; both close profitably.
    MockBarSeries series = new MockBarSeries(numFunction, 160, 140, 120, 100, 80, 60);
    TradingRecord tradingRecord = new BaseTradingRecord(
            Trade.sellAt(0, series), Trade.buyAt(1, series),
            Trade.sellAt(2, series), Trade.buyAt(5, series));

    // Renamed from the misleading local name "avgLoss" — this is the criterion
    // under test, not an average-loss metric.
    AnalysisCriterion criterion = getCriterion();
    assertNumEquals(1.0, criterion.calculate(series, tradingRecord));
}
public static boolean equivalent(
    Expression left, Expression right, Types.StructType struct, boolean caseSensitive) {
  // Normalise NOT nodes away, bind each expression to the struct, then compare
  // the bound forms for semantic equivalence.
  return Binder.bind(struct, Expressions.rewriteNot(left), caseSensitive)
      .isEquivalentTo(
          Binder.bind(struct, Expressions.rewriteNot(right), caseSensitive));
}
// OR equivalence must be order-independent and tolerate adjacent-boundary
// rewrites: id < 20 ≡ id <= 19 and id >= 34 ≡ id > 33 (int vs long literals).
@Test public void testOrEquivalence() { assertThat( ExpressionUtil.equivalent( Expressions.or( Expressions.lessThan("id", 20), Expressions.greaterThanOrEqual("id", 34)), Expressions.or( Expressions.greaterThan("id", 33L), Expressions.lessThanOrEqual("id", 19L)), STRUCT, true)) .as("Should detect or equivalence in any order") .isTrue(); }
@SneakyThrows
@Override
public void write(Object o) {
    // This writer holds at most one object; a second write is rejected.
    if (current != null) {
        throwIllegalAddition(current, o.getClass());
    } else {
        current = o;
    }
}
// A second write must be rejected with an IllegalArgumentException naming both
// the held and the offending type.
// NOTE(review): the expected message asserts the JDK-internal class name
// java.util.ImmutableCollections$Map1 — this is fragile across JDK versions.
@Test void writeObjects() throws IOException { try (TypedObjectWriter writer = new TypedObjectWriter()){ writer.write(Map.of("a", "b")); IllegalArgumentException illegalArgumentException = Assertions.assertThrows(IllegalArgumentException.class, () -> writer.write(Map.of("c", "d"))); assertThat(illegalArgumentException.getMessage(), is("Tried to add java.util.ImmutableCollections$Map1 to java.util.ImmutableCollections$Map1")); } }
@Override
public boolean isIndexed(QueryContext queryContext) {
    Index index = queryContext.matchIndex(attributeName, QueryContext.IndexMatchHint.PREFER_ORDERED);
    // Only an ordered index can serve this predicate as an index prefix.
    if (index == null || !index.isOrdered()) {
        return false;
    }
    return expressionCanBeUsedAsIndexPrefix();
}
@Test
public void likePredicateIsNotIndexed_whenBitmapIndexIsUsed() {
    // A bitmap index is unordered, so LIKE must not treat it as usable.
    QueryContext context = mock(QueryContext.class);
    when(context.matchIndex("this", QueryContext.IndexMatchHint.PREFER_ORDERED))
            .thenReturn(createIndex(IndexType.BITMAP));

    assertFalse(new LikePredicate("this", "string%").isIndexed(context));
}
// Records the duration of a transaction-begin call into its timing sensor.
public void recordBeginTxn(long duration) { beginTxnTimeSensor.record(duration); }
// Recording a begin-transaction duration must update the total-time metric.
@Test public void shouldRecordTxBeginTime() { // When: producerMetrics.recordBeginTxn(METRIC_VALUE); // Then: assertMetricValue(TXN_BEGIN_TIME_TOTAL); }
@Override
public void init(ServletConfig config) throws ServletException {
    super.init(config);
    final ServletContext context = config.getServletContext();
    // A registry injected via the constructor takes precedence; otherwise it
    // must be available as a servlet-context attribute.
    if (null == registry) {
        final Object registryAttr = context.getAttribute(METRICS_REGISTRY);
        if (!(registryAttr instanceof MetricRegistry)) {
            throw new ServletException("Couldn't find a MetricRegistry instance.");
        }
        this.registry = (MetricRegistry) registryAttr;
    }
    this.allowedOrigin = context.getInitParameter(ALLOWED_ORIGIN);
    this.jsonpParamName = context.getInitParameter(CALLBACK_PARAM);
    setupMetricsModule(context);
}
// When a registry is supplied via the constructor, init() must never consult the
// servlet-context attribute — verified by the never() interaction check.
@Test public void constructorWithRegistryAsArgumentIsUsedInPreferenceOverServletConfig() throws Exception { final MetricRegistry metricRegistry = mock(MetricRegistry.class); final ServletContext servletContext = mock(ServletContext.class); final ServletConfig servletConfig = mock(ServletConfig.class); when(servletConfig.getServletContext()).thenReturn(servletContext); final MetricsServlet metricsServlet = new MetricsServlet(metricRegistry); metricsServlet.init(servletConfig); verify(servletConfig, times(1)).getServletContext(); verify(servletContext, never()).getAttribute(eq(MetricsServlet.METRICS_REGISTRY)); }
/**
 * Wraps the entity stream in a GZIPOutputStream when the response declares a
 * gzip content encoding, then lets the write proceed.
 */
@Override
public final void aroundWriteTo(WriterInterceptorContext context) throws IOException {
    final String contentEncoding = (String) context.getHeaders().getFirst(HttpHeaders.CONTENT_ENCODING);
    // Fix: content-coding names are case-insensitive (RFC 7231 §3.1.2.1); the
    // original equals() comparison would miss e.g. "GZIP" or "Gzip".
    if (contentEncoding != null
            && ("gzip".equalsIgnoreCase(contentEncoding) || "x-gzip".equalsIgnoreCase(contentEncoding))) {
        context.setOutputStream(new GZIPOutputStream(context.getOutputStream()));
    }
    context.proceed();
}
// A non-gzip Content-Encoding must leave the output stream unwrapped while the
// interceptor chain still proceeds.
@Test void otherEncodingWillNotAroundWrite() throws IOException, WebApplicationException { MultivaluedMap<String, Object> headers = new MultivaluedHashMap<>(); headers.add(HttpHeaders.CONTENT_ENCODING, "someOtherEnc"); WriterInterceptorContextMock context = new WriterInterceptorContextMock(headers); new ConfiguredGZipEncoder(true).aroundWriteTo(context); assertThat(context.getOutputStream()).isNotInstanceOf(GZIPOutputStream.class); assertThat(context.isProceedCalled()).isTrue(); }
// Persists the measure against its input component, downcasting to the
// concrete DefaultMeasure implementation expected by saveMeasure.
@Override public void store(Measure newMeasure) { saveMeasure(newMeasure.inputComponent(), (DefaultMeasure<?>) newMeasure); }
// Storing a syntax-highlighting measure must produce SYNTAX_HIGHLIGHTINGS
// component data for the file in the scanner report.
@Test public void should_save_highlighting() { DefaultInputFile file = new TestInputFileBuilder("foo", "src/Foo.php") .setContents("// comment").build(); DefaultHighlighting highlighting = new DefaultHighlighting(underTest).onFile(file).highlight(1, 0, 1, 1, TypeOfText.KEYWORD); underTest.store(highlighting); assertThat(reportWriter.hasComponentData(FileStructure.Domain.SYNTAX_HIGHLIGHTINGS, file.scannerId())).isTrue(); }
@Override
public void pre(SpanAdapter span, Exchange exchange, Endpoint endpoint) {
    super.pre(span, exchange, endpoint);

    // Each Kafka attribute is tagged only when present on the exchange.
    String key = exchange.getIn().getHeader(KEY, String.class);
    if (key != null) {
        span.setTag(KAFKA_KEY_TAG, key);
    }
    String partitionKey = exchange.getIn().getHeader(PARTITION_KEY, String.class);
    if (partitionKey != null) {
        span.setTag(KAFKA_PARTITION_KEY_TAG, partitionKey);
    }
    // Partition may arrive as an Integer header, so it goes through getValue.
    String partition = getValue(exchange, PARTITION, Integer.class);
    if (partition != null) {
        span.setTag(KAFKA_PARTITION_TAG, partition);
    }
    String offset = getValue(exchange, OFFSET, String.class);
    if (offset != null) {
        span.setTag(KAFKA_OFFSET_TAG, offset);
    }
}
// When key, offset, partition and partition-key all arrive as String headers,
// pre() must copy each of them verbatim into the corresponding span tag.
@Test public void testPreOffsetAndPartitionAsStringHeader() { String testKey = "TestKey"; String testOffset = "TestOffset"; String testPartition = "TestPartition"; String testPartitionKey = "TestPartitionKey"; Endpoint endpoint = Mockito.mock(Endpoint.class); Exchange exchange = Mockito.mock(Exchange.class); Message message = Mockito.mock(Message.class); Mockito.when(endpoint.getEndpointUri()).thenReturn("test"); Mockito.when(exchange.getIn()).thenReturn(message); Mockito.when(message.getHeader(KafkaSpanDecorator.KEY, String.class)).thenReturn(testKey); Mockito.when(message.getHeader(KafkaSpanDecorator.OFFSET, String.class)).thenReturn(testOffset); Mockito.when(message.getHeader(KafkaSpanDecorator.PARTITION, String.class)).thenReturn(testPartition); Mockito.when(message.getHeader(KafkaSpanDecorator.PARTITION_KEY, String.class)).thenReturn(testPartitionKey); SpanDecorator decorator = new KafkaSpanDecorator(); MockSpanAdapter span = new MockSpanAdapter(); decorator.pre(span, exchange, endpoint); assertEquals(testKey, span.tags().get(KafkaSpanDecorator.KAFKA_KEY_TAG)); assertEquals(testOffset, span.tags().get(KafkaSpanDecorator.KAFKA_OFFSET_TAG)); assertEquals(testPartition, span.tags().get(KafkaSpanDecorator.KAFKA_PARTITION_TAG)); assertEquals(testPartitionKey, span.tags().get(KafkaSpanDecorator.KAFKA_PARTITION_KEY_TAG)); }
/**
 * Deletes {@code fileName} under directory {@code path}.
 * Returns true only when the file existed and was successfully deleted.
 */
public static boolean deleteFile(String path, String fileName) {
    File target = Paths.get(path, fileName).toFile();
    // Short-circuit keeps the original semantics: a missing file reports false
    // without attempting deletion.
    return target.exists() && target.delete();
}
// First deletion of a fresh temp file succeeds; a second attempt on the
// now-missing file must report false.
@Test void testDeleteFile() throws IOException { File tmpFile = DiskUtils.createTmpFile(UUID.randomUUID().toString(), ".ut"); assertTrue(DiskUtils.deleteFile(tmpFile.getParent(), tmpFile.getName())); assertFalse(DiskUtils.deleteFile(tmpFile.getParent(), tmpFile.getName())); }
// Thin facade: forwards the broker-config update to the admin implementation,
// propagating all of its remoting/broker/client exceptions unchanged.
@Override public void updateBrokerConfig(String brokerAddr, Properties properties) throws RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException, UnsupportedEncodingException, InterruptedException, MQBrokerException, MQClientException { defaultMQAdminExtImpl.updateBrokerConfig(brokerAddr, properties); }
// NOTE(review): despite its name, this test only reads the broker config via
// getBrokerConfig() and never invokes updateBrokerConfig(); it asserts the
// expected (presumably stubbed) property values — confirm the intent.
@Test public void testUpdateBrokerConfig() throws InterruptedException, RemotingConnectException,
        UnsupportedEncodingException, RemotingTimeoutException, MQBrokerException,
        RemotingSendRequestException {
    Properties result = defaultMQAdminExt.getBrokerConfig("127.0.0.1:10911");
    assertThat(result.getProperty("maxMessageSize")).isEqualTo("5000000");
    assertThat(result.getProperty("flushDelayOffsetInterval")).isEqualTo("15000");
    assertThat(result.getProperty("serverSocketRcvBufSize")).isEqualTo("655350");
}
/**
 * Runs the scenario: executes each step in order, recording step results,
 * and always performs after-run cleanup so the parallel runner's countdown
 * latches are released even on a crash.
 */
@Override public void run() {
    try { // make sure we call afterRun() even on crashes
        // and operate countdown latches, else we may hang the parallel runner
        if (steps == null) {
            beforeRun();
        }
        if (skipped) {
            return;
        }
        int count = steps.size();
        int index = 0;
        // nextStepIndex() drives iteration (supports debug step-back rewinding)
        while ((index = nextStepIndex()) < count) {
            currentStep = steps.get(index);
            execute(currentStep);
            if (currentStepResult != null) { // can be null if debug step-back or hook skip
                result.addStepResult(currentStepResult);
            }
        }
    } catch (Exception e) {
        // keep the last partial result, then record the crash as a fake step result
        if (currentStepResult != null) {
            result.addStepResult(currentStepResult);
        }
        logError("scenario [run] failed\n" + StringUtils.throwableToString(e));
        currentStepResult = result.addFakeStepResult("scenario [run] failed", e);
    } finally {
        if (!skipped) {
            afterRun();
            if (isFailed() && engine.getConfig().isAbortSuiteOnFailure()) {
                featureRuntime.suite.abort();
            }
        }
        if (caller.isNone()) {
            logAppender.close(); // reclaim memory
        }
    }
}
// Embedded expressions inside XML (attribute and text content) that reference
// a later-defined variable must not abort evaluation: the final match still runs.
@Test void testXmlEmbeddedExpressionFailuresAreNotBlockers() {
    run(
        "def expected = <foo att='#(bar)'>#(bar)</foo>",
        "def actual = <foo att=\"test\">test</foo>",
        "def bar = 'test'",
        "match actual == expected"
    );
}
/**
 * Extracts the appId from a config, configfiles or notifications request.
 * For path-based endpoints the appId is the path segment following the known
 * prefix; for notification endpoints it comes from the "appId" parameter.
 * Returns {@code null} when the servlet path matches no known prefix.
 */
public String extractAppIdFromRequest(HttpServletRequest request) {
    final String servletPath = request.getServletPath();
    // NOTE: the check order is significant — presumably the configfiles JSON
    // prefix starts with the plain configfiles prefix, so it must be tested first.
    if (StringUtils.startsWith(servletPath, URL_CONFIGS_PREFIX)) {
        return StringUtils.substringBetween(servletPath, URL_CONFIGS_PREFIX, URL_SEPARATOR);
    }
    if (StringUtils.startsWith(servletPath, URL_CONFIGFILES_JSON_PREFIX)) {
        return StringUtils.substringBetween(servletPath, URL_CONFIGFILES_JSON_PREFIX, URL_SEPARATOR);
    }
    if (StringUtils.startsWith(servletPath, URL_CONFIGFILES_PREFIX)) {
        return StringUtils.substringBetween(servletPath, URL_CONFIGFILES_PREFIX, URL_SEPARATOR);
    }
    if (StringUtils.startsWith(servletPath, URL_NOTIFICATIONS_PREFIX)) {
        return request.getParameter("appId");
    }
    return null;
}
// For notification URLs the appId must come from the "appId" request
// parameter, not from the servlet path.
@Test public void testExtractAppIdFromRequest4() {
    when(request.getServletPath()).thenReturn("/notifications/v2");
    when(request.getParameter("appId")).thenReturn("someAppId");
    String appId = accessKeyUtil.extractAppIdFromRequest(request);
    assertThat(appId).isEqualTo("someAppId");
}
/**
 * Updates an existing OpenStack port in the network store and logs the change.
 *
 * @param osPort port to update; must be non-null with a non-empty id and network id
 */
@Override public void updatePort(Port osPort) {
    checkNotNull(osPort, ERR_NULL_PORT);
    checkArgument(!Strings.isNullOrEmpty(osPort.getId()), ERR_NULL_PORT_ID);
    checkArgument(!Strings.isNullOrEmpty(osPort.getNetworkId()), ERR_NULL_PORT_NET_ID);
    osNetworkStore.updatePort(osPort);
    log.info(String.format(MSG_PORT, osPort.getId(), MSG_UPDATED));
}
// Updating a port that was never created must be rejected by the store.
@Test(expected = IllegalArgumentException.class) public void testUpdateUnregisteredPort() {
    target.updatePort(PORT);
}
/**
 * Puts a row into the wrapped row set, waiting up to the given timeout.
 * Thin delegate to {@code rowSet.putRowWait}.
 *
 * @return whether the row was accepted within the timeout
 */
public boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ) {
    return rowSet.putRowWait( rowMeta, rowData, time, tu );
}
// The call must be delegated to the wrapped row set exactly once with
// identical arguments.
@Test public void testPutRowWait() {
    rowProducer.putRowWait( rowMeta, rowData, 1, TimeUnit.MILLISECONDS );
    verify( rowSet, times( 1 ) ).putRowWait( rowMeta, rowData, 1, TimeUnit.MILLISECONDS );
}
/**
 * Serializes a script definition into its binary format: a leading null
 * string byte, one entry per instruction (opcode plus an int, byte, or
 * string operand), then a trailer holding the instruction count, local and
 * stack counts, the switch table, and finally the switch-section length.
 */
public byte[] save(ScriptDefinition script) {
    int[] instructions = script.getInstructions();
    int[] intOperands = script.getIntOperands();
    String[] stringOperands = script.getStringOperands();
    Map<Integer, Integer>[] switches = script.getSwitches();
    OutputStream out = new OutputStream();
    out.writeByte(0); // null string
    for (int i = 0; i < instructions.length; ++i) {
        int opcode = instructions[i];
        out.writeShort(opcode);
        if (opcode == SCONST) {
            // string-constant instructions carry a string operand
            out.writeString(stringOperands[i]);
        } else if (opcode < 100 && opcode != RETURN && opcode != POP_INT && opcode != POP_STRING) {
            // low opcodes (minus the listed exceptions) take a 4-byte operand
            out.writeInt(intOperands[i]);
        } else {
            // all remaining opcodes take a 1-byte operand
            out.writeByte(intOperands[i]);
        }
    }
    // trailer: instruction count followed by local/stack sizes
    out.writeInt(instructions.length);
    out.writeShort(script.getLocalIntCount());
    out.writeShort(script.getLocalStringCount());
    out.writeShort(script.getIntStackCount());
    out.writeShort(script.getStringStackCount());
    int switchStart = out.getOffset();
    if (switches == null) {
        out.writeByte(0);
    } else {
        // one size-prefixed (value, jump) list per switch
        out.writeByte(switches.length);
        for (Map<Integer, Integer> s : switches) {
            out.writeShort(s.size());
            for (Entry<Integer, Integer> e : s.entrySet()) {
                out.writeInt(e.getKey());
                out.writeInt(e.getValue());
            }
        }
    }
    // switch-section length is written last so a loader can locate the section
    int switchLength = out.getOffset() - switchStart;
    out.writeShort(switchLength);
    return out.flip();
}
// Round-trips a script through assembler -> saver -> loader and verifies the
// reloaded definition equals the original.
@Test public void testSave() throws IOException {
    Instructions instructions = new Instructions();
    instructions.init();
    ScriptDefinition script =
        new Assembler(instructions).assemble(getClass().getResourceAsStream(SCRIPT_RESOURCE));
    byte[] saved = new ScriptSaver().save(script);
    ScriptDefinition loadedScript = new ScriptLoader().load(91, saved);
    assertEquals(script, loadedScript);
}
/**
 * Returns the port the HTTP server is bound to.
 *
 * @throws IllegalStateException if the server has not been initialized yet
 */
@VisibleForTesting int getPort() {
    Preconditions.checkState(httpServer != null, "Server has not been initialized.");
    return port;
}
// Starting a second reporter on the port already occupied by the running
// reporter must fail.
@Test void cannotStartTwoReportersOnSamePort() {
    assertThatThrownBy(
            () -> new PrometheusReporter(
                    Collections.singleton(reporter.getPort()).iterator()))
            .isInstanceOf(Exception.class);
}
@VisibleForTesting static DateRangeBucket buildDateRangeBuckets(TimeRange timeRange, long searchWithinMs, long executeEveryMs) { final ImmutableList.Builder<DateRange> ranges = ImmutableList.builder(); DateTime from = timeRange.getFrom(); DateTime to; do { // The smallest configurable unit is 1 sec. // By dividing it before casting we avoid a potential int overflow to = from.plusSeconds((int) (searchWithinMs / 1000)); ranges.add(DateRange.builder().from(from).to(to).build()); from = from.plusSeconds((int) executeEveryMs / 1000); } while (to.isBefore(timeRange.getTo())); return DateRangeBucket.builder().field("timestamp").ranges(ranges.build()).build(); }
// With a hop size smaller than the window size (sliding window), a time range
// of exactly one window must still produce a single date-range bucket.
@Test public void testDateRangeBucketWithSlidingWindow() {
    final long processingWindowSize = Duration.standardSeconds(3600).getMillis();
    final long processingHopSize = Duration.standardSeconds(60).getMillis();
    final DateTime now = DateTime.now(DateTimeZone.UTC);
    final DateTime from = now;
    final DateTime to = now.plusMillis((int) processingWindowSize);
    TimeRange timeRange = AbsoluteRange.create(from, to);
    final DateRangeBucket rangeBucket =
        PivotAggregationSearch.buildDateRangeBuckets(timeRange, processingWindowSize, processingHopSize);
    assertThat(rangeBucket.ranges()).containsExactly(
        DateRange.create(from, to)
    );
}
/**
 * Returns the given item, or logs the message and throws
 * {@link IllegalArgumentException} when the item is null.
 *
 * @param item    value to check
 * @param message message used both for the error log entry and the exception
 * @return the non-null item
 * @throws IllegalArgumentException if {@code item} is null
 */
public static <T> T nullIsIllegal(T item, String message) {
    if (item != null) {
        return item;
    }
    log.error(message);
    throw new IllegalArgumentException(message);
}
// nullIsIllegal must throw IllegalArgumentException when given a null item;
// the fail() call is unreachable on success.
@Test(expected = IllegalArgumentException.class) public void testNullIsIllegalThrow() {
    Tools.nullIsIllegal(null, "Not found!");
    fail("Should've thrown some thing");
}
/**
 * Iteratively visits a (possibly recursive) schema graph depth-first,
 * invoking the visitor's callbacks and honoring its control actions
 * (CONTINUE, SKIP_SIBLINGS, TERMINATE; SKIP_SUBTREE is invalid here).
 *
 * @return the visitor's accumulated result
 */
public static <T> T visit(final Schema start, final SchemaVisitor<T> visitor) {
    // Set of Visited Schemas
    IdentityHashMap<Schema, Schema> visited = new IdentityHashMap<>();
    // Stack that contains the Schemas to process and afterVisitNonTerminal
    // functions.
    // Deque<Either<Schema, Supplier<SchemaVisitorAction>>>
    // Using Either<...> has a cost we want to avoid...
    Deque<Object> dq = new ArrayDeque<>();
    dq.push(start);
    Object current;
    while ((current = dq.poll()) != null) {
        if (current instanceof Supplier) {
            // We are executing a non-terminal post visit.
            SchemaVisitor.SchemaVisitorAction action = ((Supplier<SchemaVisitor.SchemaVisitorAction>) current).get();
            switch (action) {
            case CONTINUE:
                break;
            case SKIP_SIBLINGS:
                // drop queued sibling schemas until the next post-visit marker
                while (dq.peek() instanceof Schema) {
                    dq.remove();
                }
                break;
            case TERMINATE:
                return visitor.get();
            case SKIP_SUBTREE:
            default:
                // SKIP_SUBTREE makes no sense after the subtree was already visited
                throw new UnsupportedOperationException("Invalid action " + action);
            }
        } else {
            Schema schema = (Schema) current;
            boolean terminate;
            if (visited.containsKey(schema)) {
                // already-seen schema (e.g. a recursive record): terminal visit only
                terminate = visitTerminal(visitor, schema, dq);
            } else {
                Schema.Type type = schema.getType();
                switch (type) {
                case ARRAY:
                    terminate = visitNonTerminal(visitor, schema, dq, Collections.singleton(schema.getElementType()));
                    visited.put(schema, schema);
                    break;
                case RECORD:
                    // fields are pushed in reverse so they pop in declaration order
                    terminate = visitNonTerminal(visitor, schema, dq, () -> schema.getFields().stream().map(Field::schema)
                        .collect(Collectors.toCollection(ArrayDeque::new)).descendingIterator());
                    visited.put(schema, schema);
                    break;
                case UNION:
                    terminate = visitNonTerminal(visitor, schema, dq, schema.getTypes());
                    visited.put(schema, schema);
                    break;
                case MAP:
                    terminate = visitNonTerminal(visitor, schema, dq, Collections.singleton(schema.getValueType()));
                    visited.put(schema, schema);
                    break;
                default:
                    // primitive / terminal schema
                    terminate = visitTerminal(visitor, schema, dq);
                    break;
                }
            }
            if (terminate) {
                return visitor.get();
            }
        }
    }
    return visitor.get();
}
// Returning SKIP_SUBTREE from a terminal visit is invalid and must raise
// UnsupportedOperationException.
@Test(expected = UnsupportedOperationException.class) public void testVisit10() {
    String s10 = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
        + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"ct2\", \"fields\": "
        + "[{\"name\": \"f11\", \"type\": \"int\"}]}},"
        + "{\"name\": \"f2\", \"type\": \"int\"}" + "]}";
    Schemas.visit(new Schema.Parser().parse(s10), new TestVisitor() {
        @Override
        public SchemaVisitorAction visitTerminal(Schema terminal) {
            return SchemaVisitorAction.SKIP_SUBTREE;
        }
    });
}
/**
 * Creates a name mapping from the given top-level mapped fields.
 *
 * @param fields mapped fields forming the root of the mapping
 * @return a NameMapping built over an immutable copy of the fields
 */
public static NameMapping of(MappedField... fields) {
    return new NameMapping(MappedFields.of(ImmutableList.copyOf(fields)));
}
// Two mapped fields sharing the same name must be rejected when the mapping
// is constructed.
@Test public void testFailsDuplicateName() {
    assertThatThrownBy(
            () -> new NameMapping(MappedFields.of(MappedField.of(1, "x"), MappedField.of(2, "x"))))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Multiple entries with same key: x=2 and x=1");
}
/**
 * Creates a retriever backed by the named docker credential helper,
 * converting the helper name into a path.
 */
public CredentialRetriever dockerCredentialHelper(String credentialHelper) {
    return dockerCredentialHelper(Paths.get(credentialHelper));
}
// The credential-helper retriever must pass the factory's environment map to
// the helper it creates and log which helper is being used.
@Test public void testDockerCredentialHelperWithEnvironment() throws CredentialRetrievalException {
    Map<String, String> environment = Collections.singletonMap("ENV_VARIABLE", "Value");
    CredentialRetrieverFactory credentialRetrieverFactory =
        createCredentialRetrieverFactory("registry", "repo", environment);
    Assert.assertEquals(
        Optional.of(FAKE_CREDENTIALS),
        credentialRetrieverFactory
            .dockerCredentialHelper(Paths.get("docker-credential-foo"))
            .retrieve());
    Mockito.verify(mockDockerCredentialHelperFactory)
        .create("registry", Paths.get("docker-credential-foo"), environment);
    Mockito.verify(mockLogger)
        .accept(
            LogEvent.lifecycle("Using credential helper docker-credential-foo for registry/repo"));
}
/**
 * Returns the number of keys in the database of the given cluster node
 * (Redis DBSIZE).
 */
@Override public Long dbSize(RedisClusterNode node) {
    return execute(node, RedisCommands.DBSIZE);
}
// DBSIZE on an empty master node must report zero keys.
@Test public void testDbSize() {
    RedisClusterNode master = getFirstMaster();
    Long size = connection.dbSize(master);
    assertThat(size).isZero();
}
/**
 * Returns whether the JDBC URL targets SQL Server, i.e. starts with the
 * "jdbc:sqlserver:" prefix (case-insensitive).
 */
@Override public boolean matchesJdbcUrl(String jdbcConnectionURL) {
    return StringUtils.startsWithIgnoreCase(jdbcConnectionURL, "jdbc:sqlserver:");
}
// A SQL Server URL must match; an HSQL URL must not.
@Test public void matchesJdbcURL() {
    assertThat(underTest.matchesJdbcUrl("jdbc:sqlserver://localhost:1433;databasename=sonar")).isTrue();
    assertThat(underTest.matchesJdbcUrl("jdbc:hsql:foo")).isFalse();
}
/**
 * Serializes the given value to its JSON string representation.
 */
@Udf public <T> String toJsonString(@UdfParameter final T input) {
    return toJson(input);
}
// A Connect Struct must serialize to a JSON object keyed by its field names.
@Test public void shouldSerializeStruct() {
    // When:
    final Schema schema = SchemaBuilder.struct()
        .field("id", Schema.INT32_SCHEMA)
        .field("name", Schema.STRING_SCHEMA)
        .build();
    final Struct struct = new Struct(schema);
    struct.put("id", 1);
    struct.put("name", "Alice");
    final String result = udf.toJsonString(struct);
    // Then:
    assertEquals("{\"id\":1,\"name\":\"Alice\"}", result);
}
/**
 * Negotiates the permessage-deflate extension from the server's handshake
 * response parameters. Returns the configured extension on success, or null
 * when the response is for another extension or cannot be accepted.
 */
@Override public WebSocketClientExtension handshakeExtension(WebSocketExtensionData extensionData) {
    if (!PERMESSAGE_DEFLATE_EXTENSION.equals(extensionData.name())) {
        return null;
    }
    boolean succeed = true;
    int clientWindowSize = MAX_WINDOW_SIZE;
    int serverWindowSize = MAX_WINDOW_SIZE;
    boolean serverNoContext = false;
    boolean clientNoContext = false;
    Iterator<Entry<String, String>> parametersIterator =
        extensionData.parameters().entrySet().iterator();
    while (succeed && parametersIterator.hasNext()) {
        Entry<String, String> parameter = parametersIterator.next();
        if (CLIENT_MAX_WINDOW.equalsIgnoreCase(parameter.getKey())) {
            // allowed client_window_size_bits
            if (allowClientWindowSize) {
                // NOTE(review): Integer.parseInt may throw NumberFormatException on a
                // malformed parameter value — confirm how callers handle that.
                clientWindowSize = Integer.parseInt(parameter.getValue());
                if (clientWindowSize > MAX_WINDOW_SIZE || clientWindowSize < MIN_WINDOW_SIZE) {
                    succeed = false;
                }
            } else {
                succeed = false;
            }
        } else if (SERVER_MAX_WINDOW.equalsIgnoreCase(parameter.getKey())) {
            // acknowledged server_window_size_bits
            serverWindowSize = Integer.parseInt(parameter.getValue());
            if (serverWindowSize > MAX_WINDOW_SIZE || serverWindowSize < MIN_WINDOW_SIZE) {
                succeed = false;
            }
        } else if (CLIENT_NO_CONTEXT.equalsIgnoreCase(parameter.getKey())) {
            // allowed client_no_context_takeover
            if (allowClientNoContext) {
                clientNoContext = true;
            } else {
                succeed = false;
            }
        } else if (SERVER_NO_CONTEXT.equalsIgnoreCase(parameter.getKey())) {
            // acknowledged server_no_context_takeover
            serverNoContext = true;
        } else {
            // unknown parameter
            succeed = false;
        }
    }
    // reject when the server ignored our no-context request, or acknowledged a
    // larger server window than we requested
    if ((requestedServerNoContext && !serverNoContext) || requestedServerWindowSize < serverWindowSize) {
        succeed = false;
    }
    if (succeed) {
        return new PermessageDeflateExtension(serverNoContext, serverWindowSize,
            clientNoContext, clientWindowSize, extensionFilterProvider);
    } else {
        return null;
    }
}
// Validates the server_max_window bounds check during negotiation.
@Test public void testParameterValidation() {
    WebSocketClientExtension extension;
    Map<String, String> parameters;
    PerMessageDeflateClientExtensionHandshaker handshaker =
        new PerMessageDeflateClientExtensionHandshaker(6, true, 15, true, false);
    parameters = new HashMap<String, String>();
    parameters.put(CLIENT_MAX_WINDOW, "15");
    parameters.put(SERVER_MAX_WINDOW, "8");
    extension = handshaker.handshakeExtension(new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, parameters));
    // server_max_window=8 is within the allowed range, so the handshake succeeds
    assertNotNull(extension);
    assertEquals(RSV1, extension.rsv());
    assertTrue(extension.newExtensionDecoder() instanceof PerMessageDeflateDecoder);
    assertTrue(extension.newExtensionEncoder() instanceof PerMessageDeflateEncoder);
    parameters = new HashMap<String, String>();
    parameters.put(CLIENT_MAX_WINDOW, "15");
    parameters.put(SERVER_MAX_WINDOW, "7");
    extension = handshaker.handshakeExtension(new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, parameters));
    // server_max_window=7 is below the minimum window size, so the offer is rejected
    assertNull(extension);
}
/**
 * Compares two collections of comparables, normalizing mixed numeric types
 * (e.g. Double vs BigDecimal) to a common numeric class before comparing.
 *
 * @return whether the collections are equal after numeric normalization
 */
public static boolean safeCollectionEquals(final Collection<Comparable<?>> sources, final Collection<Comparable<?>> targets) {
    final List<Comparable<?>> combined = new ArrayList<>(sources);
    combined.addAll(targets);
    final Optional<Class<?>> numericType = getTargetNumericType(combined);
    if (!numericType.isPresent()) {
        // no common numeric type: fall back to plain element-wise equality
        return sources.equals(targets);
    }
    final Class<?> targetClass = numericType.get();
    final List<Comparable<?>> normalizedSources = new ArrayList<>(sources.size());
    for (final Comparable<?> each : sources) {
        normalizedSources.add(parseNumberByClazz(each.toString(), targetClass));
    }
    final List<Comparable<?>> normalizedTargets = new ArrayList<>(targets.size());
    for (final Comparable<?> each : targets) {
        normalizedTargets.add(parseNumberByClazz(each.toString(), targetClass));
    }
    return normalizedSources.equals(normalizedTargets);
}
// Doubles and BigDecimals carrying the same numeric values must compare equal.
@Test void assertSafeCollectionEqualsForBigDecimal() {
    List<Comparable<?>> sources = Arrays.asList(10.01, 12.01);
    List<Comparable<?>> targets = Arrays.asList(BigDecimal.valueOf(10.01), BigDecimal.valueOf(12.01));
    assertTrue(SafeNumberOperationUtils.safeCollectionEquals(sources, targets));
}
/**
 * Returns the PValues determined to be keyed during pipeline traversal.
 *
 * @throws IllegalStateException if the pipeline has not been fully traversed yet
 */
@Override
public Set<PValue> getKeyedPValues() {
    checkState(
        finalized, "can't call getKeyedPValues before a Pipeline has been completely traversed");
    return keyedValues;
}
// The output of a GroupByKey (GroupByKeyOnly + GroupAlsoByWindow) must be
// reported as keyed by the traversal visitor.
@Test public void groupByKeyProducesKeyedOutput() {
    PCollection<KV<String, Iterable<Integer>>> keyed =
        p.apply(Create.of(KV.of("foo", 3)))
            .apply(new DirectGroupByKeyOnly<>())
            .apply(
                new DirectGroupAlsoByWindow<>(
                    WindowingStrategy.globalDefault(), WindowingStrategy.globalDefault()));
    p.traverseTopologically(visitor);
    assertThat(visitor.getKeyedPValues(), hasItem(keyed));
}
/**
 * Offloads a closed ledger to tiered (blob) storage on the scheduler thread
 * for that ledger: the data is streamed block-by-block through a multipart
 * upload, then an index block describing the data object is written. On any
 * failure the partial upload/objects are cleaned up and the returned future
 * is completed exceptionally.
 *
 * @param readHandle    closed, non-empty ledger to offload
 * @param uuid          unique id used to derive the blob keys
 * @param extraMetadata extra metadata stored on the blobs; must contain the
 *                      managed ledger name
 * @return future completed when both data and index blobs are written
 */
@Override public CompletableFuture<Void> offload(ReadHandle readHandle, UUID uuid, Map<String, String> extraMetadata) {
    final String managedLedgerName = extraMetadata.get(MANAGED_LEDGER_NAME);
    final String topicName = TopicName.fromPersistenceNamingEncoding(managedLedgerName);
    CompletableFuture<Void> promise = new CompletableFuture<>();
    scheduler.chooseThread(readHandle.getId()).execute(() -> {
        final BlobStore writeBlobStore = getBlobStore(config.getBlobStoreLocation());
        log.info("offload {} uuid {} extraMetadata {} to {} {}", readHandle.getId(), uuid, extraMetadata,
            config.getBlobStoreLocation(), writeBlobStore);
        // only closed ledgers with at least one confirmed entry may be offloaded
        if (readHandle.getLength() == 0 || !readHandle.isClosed() || readHandle.getLastAddConfirmed() < 0) {
            promise.completeExceptionally(
                new IllegalArgumentException("An empty or open ledger should never be offloaded"));
            return;
        }
        OffloadIndexBlockBuilder indexBuilder = OffloadIndexBlockBuilder.create()
            .withLedgerMetadata(readHandle.getLedgerMetadata())
            .withDataBlockHeaderLength(BlockAwareSegmentInputStreamImpl.getHeaderSize());
        String dataBlockKey = DataBlockUtils.dataBlockOffloadKey(readHandle.getId(), uuid);
        String indexBlockKey = DataBlockUtils.indexBlockOffloadKey(readHandle.getId(), uuid);
        log.info("ledger {} dataBlockKey {} indexBlockKey {}", readHandle.getId(), dataBlockKey, indexBlockKey);
        MultipartUpload mpu = null;
        List<MultipartPart> parts = Lists.newArrayList();
        // init multi part upload for data block.
        try {
            BlobBuilder blobBuilder = writeBlobStore.blobBuilder(dataBlockKey);
            Map<String, String> objectMetadata = new HashMap<>(userMetadata);
            objectMetadata.put("role", "data");
            if (extraMetadata != null) {
                objectMetadata.putAll(extraMetadata);
            }
            DataBlockUtils.addVersionInfo(blobBuilder, objectMetadata);
            Blob blob = blobBuilder.build();
            log.info("initiateMultipartUpload bucket {}, metadata {} ", config.getBucket(), blob.getMetadata());
            mpu = writeBlobStore.initiateMultipartUpload(config.getBucket(), blob.getMetadata(), new PutOptions());
        } catch (Throwable t) {
            promise.completeExceptionally(t);
            return;
        }
        long dataObjectLength = 0;
        // start multi part upload for data block.
        try {
            long startEntry = 0;
            int partId = 1;
            long start = System.nanoTime();
            long entryBytesWritten = 0;
            // upload one block (multipart part) per iteration until all confirmed
            // entries have been consumed
            while (startEntry <= readHandle.getLastAddConfirmed()) {
                int blockSize = BlockAwareSegmentInputStreamImpl
                    .calculateBlockSize(config.getMaxBlockSizeInBytes(), readHandle, startEntry, entryBytesWritten);
                try (BlockAwareSegmentInputStream blockStream = new BlockAwareSegmentInputStreamImpl(
                        readHandle, startEntry, blockSize, this.offloaderStats, managedLedgerName)) {
                    Payload partPayload = Payloads.newInputStreamPayload(blockStream);
                    partPayload.getContentMetadata().setContentLength((long) blockSize);
                    partPayload.getContentMetadata().setContentType("application/octet-stream");
                    parts.add(writeBlobStore.uploadMultipartPart(mpu, partId, partPayload));
                    log.debug("UploadMultipartPart. container: {}, blobName: {}, partId: {}, mpu: {}",
                        config.getBucket(), dataBlockKey, partId, mpu.id());
                    // record where this block starts so the index can locate it
                    indexBuilder.addBlock(startEntry, partId, blockSize);
                    if (blockStream.getEndEntryId() != -1) {
                        startEntry = blockStream.getEndEntryId() + 1;
                    } else {
                        // could not read entry from ledger.
                        break;
                    }
                    entryBytesWritten += blockStream.getBlockEntryBytesCount();
                    partId++;
                    this.offloaderStats.recordOffloadBytes(topicName, blockStream.getBlockEntryBytesCount());
                }
                dataObjectLength += blockSize;
            }
            String etag = writeBlobStore.completeMultipartUpload(mpu, parts);
            log.info("Ledger {}, upload finished, etag {}", readHandle.getId(), etag);
            // null marks the upload as completed so the catch block won't abort it
            mpu = null;
        } catch (Throwable t) {
            // abort the in-flight multipart upload (if any), record stats, fail the future
            try {
                if (mpu != null) {
                    writeBlobStore.abortMultipartUpload(mpu);
                }
            } catch (Throwable throwable) {
                log.error("Failed abortMultipartUpload in bucket - {} with key - {}, uploadId - {}.",
                    config.getBucket(), dataBlockKey, mpu.id(), throwable);
            }
            this.offloaderStats.recordWriteToStorageError(topicName);
            this.offloaderStats.recordOffloadError(topicName);
            promise.completeExceptionally(t);
            return;
        }
        // upload index block
        try (OffloadIndexBlock index = indexBuilder.withDataObjectLength(dataObjectLength).build();
             IndexInputStream indexStream = index.toStream()) {
            // write the index block
            BlobBuilder blobBuilder = writeBlobStore.blobBuilder(indexBlockKey);
            Map<String, String> objectMetadata = new HashMap<>(userMetadata);
            objectMetadata.put("role", "index");
            if (extraMetadata != null) {
                objectMetadata.putAll(extraMetadata);
            }
            DataBlockUtils.addVersionInfo(blobBuilder, objectMetadata);
            Payload indexPayload = Payloads.newInputStreamPayload(indexStream);
            indexPayload.getContentMetadata().setContentLength((long) indexStream.getStreamSize());
            indexPayload.getContentMetadata().setContentType("application/octet-stream");
            Blob blob = blobBuilder
                .payload(indexPayload)
                .contentLength((long) indexStream.getStreamSize())
                .build();
            writeBlobStore.putBlob(config.getBucket(), blob);
            promise.complete(null);
        } catch (Throwable t) {
            // index write failed: remove the already-written data blob so no
            // orphaned data object is left behind
            try {
                writeBlobStore.removeBlob(config.getBucket(), dataBlockKey);
            } catch (Throwable throwable) {
                log.error("Failed deleteObject in bucket - {} with key - {}.",
                    config.getBucket(), dataBlockKey, throwable);
            }
            this.offloaderStats.recordWriteToStorageError(topicName);
            this.offloaderStats.recordOffloadError(topicName);
            promise.completeExceptionally(t);
            return;
        }
    });
    return promise;
}
// When completeMultipartUpload throws, offload() must fail with that cause and
// leave neither the data blob nor the index blob behind in the bucket.
@Test public void testOffloadFailDataBlockUploadComplete() throws Exception {
    ReadHandle readHandle = buildReadHandle();
    UUID uuid = UUID.randomUUID();
    String failureString = "fail DataBlockUploadComplete";
    // mock throw exception when completeMultipartUpload
    try {
        BlobStore spiedBlobStore = mock(BlobStore.class, delegatesTo(blobStore));
        Mockito
            .doThrow(new RuntimeException(failureString))
            .when(spiedBlobStore).completeMultipartUpload(any(), any());
        Mockito
            .doNothing()
            .when(spiedBlobStore).abortMultipartUpload(any());
        BlobStoreManagedLedgerOffloader offloader = getOffloader(spiedBlobStore);
        offloader.offload(readHandle, uuid, new HashMap<>()).get();
        Assert.fail("Should throw exception for when completeMultipartUpload");
    } catch (Exception e) {
        // excepted
        Assert.assertTrue(e.getCause() instanceof RuntimeException);
        Assert.assertTrue(e.getCause().getMessage().contains(failureString));
        Assert.assertFalse(blobStore.blobExists(BUCKET, DataBlockUtils.dataBlockOffloadKey(readHandle.getId(), uuid)));
        Assert.assertFalse(blobStore.blobExists(BUCKET, DataBlockUtils.indexBlockOffloadKey(readHandle.getId(), uuid)));
    }
}
/**
 * Returns the underlying MongoDB collection wrapped by this instance.
 */
public DBCollection getDbCollection() {
    return dbCollection;
}
// The wrapped collection must be the same MongoDB collection obtained
// directly from the test database.
@Test void getDbCollection() {
    final var collection = jacksonCollection("simple", Simple.class);
    assertThat(collection.getDbCollection()).isEqualTo(
        mongoDBTestService.mongoConnection().getDatabase().getCollection("simple"));
}
/**
 * Computes the between-class / within-class sum-of-squares ratio of every
 * numeric column of the data frame, as a univariate feature score.
 *
 * @param data  the data frame
 * @param clazz name of the class-label column
 * @return one score per numeric feature column (the label column is excluded)
 * @throws UnsupportedOperationException if fewer than two classes are present
 */
public static SumSquaresRatio[] fit(DataFrame data, String clazz) {
    BaseVector<?, ?, ?> y = data.column(clazz);
    ClassLabels codec = ClassLabels.fit(y);
    if (codec.k < 2) {
        throw new UnsupportedOperationException("Invalid number of classes: " + codec.k);
    }
    int n = data.nrow();
    int k = codec.k;
    int[] nc = new int[k];           // per-class sample counts
    double[] condmu = new double[k]; // per-class (conditional) means, reused per column
    for (int i = 0; i < n; i++) {
        int yi = codec.y[i];
        nc[yi]++;
    }
    StructType schema = data.schema();
    // NOTE: condmu is shared mutable state between iterations, so this stream
    // relies on sequential execution — do not parallelize.
    return IntStream.range(0, schema.length()).mapToObj(j -> {
        StructField field = schema.field(j);
        if (field.isNumeric()) {
            BaseVector<?, ?, ?> xj = data.column(j);
            double mu = 0.0;
            Arrays.fill(condmu, 0.0);
            // accumulate the overall and per-class sums for this column
            for (int i = 0; i < n; i++) {
                int yi = codec.y[i];
                double xij = xj.getDouble(i);
                mu += xij;
                condmu[yi] += xij;
            }
            mu /= n;
            for (int i = 0; i < k; i++) {
                condmu[i] /= nc[i];
            }
            double wss = 0.0; // within-class sum of squares
            double bss = 0.0; // between-class sum of squares (summed per sample)
            for (int i = 0; i < n; i++) {
                int yi = codec.y[i];
                double xij = xj.getDouble(i);
                bss += MathEx.pow2(condmu[yi] - mu);
                wss += MathEx.pow2(xij - condmu[yi]);
            }
            return new SumSquaresRatio(field.name, bss / wss);
        } else {
            // non-numeric columns are skipped
            return null;
        }
    }).filter(s2n -> s2n != null && !s2n.feature.equals(clazz)).toArray(SumSquaresRatio[]::new);
}
// Drops the 121 lowest-scoring USPS pixel features, trains LDA on the rest
// and expects roughly 86% test accuracy.
@Test public void tesUSPS() {
    System.out.println("USPS");
    SumSquaresRatio[] score = SumSquaresRatio.fit(USPS.train, "class");
    Arrays.sort(score);
    String[] columns = Arrays.stream(score).limit(121).map(s -> s.feature).toArray(String[]::new);
    double[][] train = USPS.formula.x(USPS.train.drop(columns)).toArray();
    LDA lda = LDA.fit(train, USPS.y);
    double[][] test = USPS.formula.x(USPS.test.drop(columns)).toArray();
    int[] prediction = lda.predict(test);
    double accuracy = new Accuracy().score(USPS.testy, prediction);
    System.out.format("SSR %.2f%%%n", 100 * accuracy);
    assertEquals(0.86, accuracy, 1E-2);
}
/**
 * Returns the configuration errors collected for this object.
 */
@Override public ConfigErrors errors() {
    return errors;
}
// A label template with unbalanced truncation brackets ("${git[:7}") must
// produce a validation error on the LABEL_TEMPLATE attribute.
@Test public void shouldValidatePipelineLabelWithBrokenTruncationSyntax1() {
    String labelFormat = "pipeline-${COUNT}-${git[:7}-alpha";
    PipelineConfig pipelineConfig = createAndValidatePipelineLabel(labelFormat);
    String expectedLabelTemplate = "Invalid label 'pipeline-${COUNT}-${git[:7}-alpha'.";
    assertThat(pipelineConfig.errors().on(PipelineConfig.LABEL_TEMPLATE), startsWith(expectedLabelTemplate));
}
/**
 * Expands {@code txSet} in place with every transaction in {@code txPool}
 * that spends — directly or transitively — an output of a transaction
 * already in the set.
 *
 * @param txSet  set of transactions to expand with their dependents
 * @param txPool pool of candidate transactions to scan
 */
void addTransactionsDependingOn(Set<Transaction> txSet, Set<Transaction> txPool) {
    // Work queue of transactions whose dependents still need to be discovered.
    Deque<Transaction> txQueue = new ArrayDeque<>(txSet);
    // Guard set keyed by txid: prevents a transaction from being enqueued (and
    // re-scanned) twice. The previous implementation could re-enqueue an
    // already-processed transaction, causing redundant rescans of the pool.
    Set<Sha256Hash> seen = new HashSet<>();
    for (Transaction tx : txSet) {
        seen.add(tx.getTxId());
    }
    while (!txQueue.isEmpty()) {
        Transaction tx = txQueue.poll();
        for (Transaction anotherTx : txPool) {
            if (anotherTx.equals(tx)) continue;
            for (TransactionInput input : anotherTx.getInputs()) {
                // anotherTx spends an output of tx: it depends on the set
                if (input.getOutpoint().hash().equals(tx.getTxId()) && seen.add(anotherTx.getTxId())) {
                    txQueue.add(anotherTx);
                    txSet.add(anotherTx);
                }
            }
        }
    }
}
// Builds a chain where send1b spends send1's change and send1c spends
// send1b's; starting from send1 alone, the dependency walk must pull in all
// three transactions (and exclude the unrelated send2).
@Test public void testAddTransactionsDependingOn() throws Exception {
    sendMoneyToWallet(AbstractBlockChain.NewBlockType.BEST_CHAIN, valueOf(2, 0));
    Transaction send1 = Objects.requireNonNull(wallet.createSend(OTHER_ADDRESS, valueOf(1, 0), true));
    Transaction send2 = Objects.requireNonNull(wallet.createSend(OTHER_ADDRESS, valueOf(1, 20), true));
    wallet.commitTx(send1);
    Transaction send1b = Objects.requireNonNull(wallet.createSend(OTHER_ADDRESS, valueOf(0, 50), true));
    wallet.commitTx(send1b);
    Transaction send1c = Objects.requireNonNull(wallet.createSend(OTHER_ADDRESS, valueOf(0, 25), true));
    wallet.commitTx(send1c);
    wallet.commitTx(send2);
    Set<Transaction> txns = new HashSet<>();
    txns.add(send1);
    wallet.addTransactionsDependingOn(txns, wallet.getTransactions(true));
    assertEquals(3, txns.size());
    assertTrue(txns.contains(send1));
    assertTrue(txns.contains(send1b));
    assertTrue(txns.contains(send1c));
}
/**
 * Builds and sends the fetch requests for this poll cycle, delegating
 * success and failure handling to the dedicated handlers.
 */
@Override public PollResult poll(long currentTimeMs) {
    return pollInternal(
        prepareFetchRequests(),
        this::handleFetchSuccess,
        this::handleFetchFailure
    );
}
// Simulates a fetch completing before its handler is registered: a wakeup hook
// disconnects the selected read replica mid-poll, and a subsequent fetch must
// still be sent successfully.
@Test public void testFetchCompletedBeforeHandlerAdded() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    // complete one full fetch cycle to establish position and leader info
    sendFetches();
    client.prepareResponse(fullFetchResponse(tidp0, buildRecords(1L, 1, 1), Errors.NONE, 100L, 0));
    networkClientDelegate.poll(time.timer(0));
    fetchRecords();
    Metadata.LeaderAndEpoch leaderAndEpoch = subscriptions.position(tp0).currentLeader;
    assertTrue(leaderAndEpoch.leader.isPresent());
    Node readReplica = fetcher.selectReadReplica(tp0, leaderAndEpoch.leader.get(), time.milliseconds());
    AtomicBoolean wokenUp = new AtomicBoolean(false);
    // first wakeup disconnects the read replica while a poll is in flight
    client.setWakeupHook(() -> {
        if (!wokenUp.getAndSet(true)) {
            networkClientDelegate.disconnectAsync(readReplica);
            networkClientDelegate.poll(time.timer(0));
        }
    });
    assertEquals(1, sendFetches());
    networkClientDelegate.disconnectAsync(readReplica);
    networkClientDelegate.poll(time.timer(0));
    assertEquals(1, sendFetches());
}
/**
 * Resolves the identity provider addressed by the OAuth2 callback request and,
 * when found, lets it handle the request. When the resolver returns null it
 * has already written the error response itself.
 */
@Override public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) {
    IdentityProvider provider = resolveProviderOrHandleResponse(request, response, CALLBACK_PATH);
    if (provider != null) {
        handleProvider(request, response, provider);
    }
}
// When the provider throws UnauthorizedException, the filter must redirect to
// the context-relative unauthorized page and delete the OAuth redirection state.
@Test public void redirect_with_context_path_when_failing_because_of_UnauthorizedExceptionException() throws Exception {
    when(request.getContextPath()).thenReturn("/sonarqube");
    FailWithUnauthorizedExceptionIdProvider identityProvider = new FailWithUnauthorizedExceptionIdProvider();
    when(request.getRequestURI()).thenReturn("/sonarqube/oauth2/callback/" + identityProvider.getKey());
    identityProviderRepository.addIdentityProvider(identityProvider);
    underTest.doFilter(request, response, chain);
    verify(response).sendRedirect("/sonarqube/sessions/unauthorized");
    verify(oAuthRedirection).delete(request, response);
}
/**
 * Delivers a message to the endpoint by delegating to the current state
 * object, which enforces the before/after-delivery lifecycle rules.
 */
public void onMessage(Message message) {
    LOG.trace("Invoking MessageEndpoint.onMethod()");
    state.onMessage(this, message);
}
// jMock scenario: beforeDelivery succeeds, onMessage throws; the proxy must
// propagate the failure while afterDelivery still completes normally.
@Test(timeout = 60000) public void testOnMessageFailure() throws Exception {
    setupBeforeDeliverySuccessful();
    context.checking(new Expectations() {{
        oneOf (mockEndpointAndListener).onMessage(with(same(stubMessage)));
        will(throwException(new RuntimeException()));
    }});
    setupAfterDeliverySuccessful();
    doBeforeDeliveryExpectSuccess();
    try {
        endpointProxy.onMessage(stubMessage);
        fail("An exception should have been thrown");
    } catch (Exception e) {
        assertTrue(true);
    }
    doAfterDeliveryExpectSuccess();
}
/**
 * Resolves the given "host:port" string and checks the host, port and the
 * resolved IP address against the configured allow-lists.
 *
 * @param hostAndPort target address in "host:port" form
 * @return future completed with the resolved address, or failed with
 *         {@code TargetAddressDeniedException} when the target is malformed
 *         or not allowed
 */
public CompletableFuture<InetSocketAddress> resolveAndCheckTargetAddress(String hostAndPort) {
    int pos = hostAndPort.lastIndexOf(':');
    if (pos < 0) {
        // Previously a missing ':' escaped as an unchecked
        // StringIndexOutOfBoundsException; report it via the future like
        // every other validation failure.
        return FutureUtil.failedFuture(
                new TargetAddressDeniedException("Given address '" + hostAndPort + "' isn't in host:port form."));
    }
    String host = hostAndPort.substring(0, pos);
    int port;
    try {
        port = Integer.parseInt(hostAndPort.substring(pos + 1));
    } catch (NumberFormatException e) {
        // Previously a non-numeric port threw synchronously.
        return FutureUtil.failedFuture(
                new TargetAddressDeniedException("Given port in '" + hostAndPort + "' isn't a valid number."));
    }
    if (!isPortAllowed(port)) {
        return FutureUtil.failedFuture(
                new TargetAddressDeniedException("Given port in '" + hostAndPort + "' isn't allowed."));
    } else if (!isHostAllowed(host)) {
        return FutureUtil.failedFuture(
                new TargetAddressDeniedException("Given host in '" + hostAndPort + "' isn't allowed."));
    } else {
        return NettyFutureUtil.toCompletableFuture(
                inetSocketAddressResolver.resolve(InetSocketAddress.createUnresolved(host, port)))
                .thenCompose(resolvedAddress -> {
                    CompletableFuture<InetSocketAddress> result = new CompletableFuture<>();
                    // the resolved IP must also pass the allow-list check
                    if (isIPAddressAllowed(resolvedAddress)) {
                        result.complete(resolvedAddress);
                    } else {
                        result.completeExceptionally(new TargetAddressDeniedException(
                                "The IP address of the given host and port '" + hostAndPort + "' isn't allowed."));
                    }
                    return result;
                });
    }
}
// A target whose host, port and resolved IP all match the allow-lists must
// resolve to the expected socket address.
@Test public void shouldAllowValidInput() throws Exception {
    BrokerProxyValidator brokerProxyValidator = new BrokerProxyValidator(
        createMockedAddressResolver("1.2.3.4"),
        "myhost",
        "1.2.0.0/16",
        "6650");
    InetSocketAddress inetSocketAddress = brokerProxyValidator.resolveAndCheckTargetAddress("myhost:6650").get();
    assertNotNull(inetSocketAddress);
    assertEquals(inetSocketAddress.getAddress().getHostAddress(), "1.2.3.4");
    assertEquals(inetSocketAddress.getPort(), 6650);
}
/**
 * Returns the string for the given Unicode code point, or null when the
 * input is null or not a valid code point.
 */
@Udf
public String chr(@UdfParameter(description = "Decimal codepoint") final Integer decimalCode) {
    if (decimalCode == null || !Character.isValidCodePoint(decimalCode)) {
        return null;
    }
    return String.valueOf(Character.toChars(decimalCode));
}
// NOTE(review): the null is cast to String, so this targets a chr(String)
// overload not shown here — confirm that overload exists. A null input must
// yield a null result.
@Test public void shouldReturnNullForNullStringInput() {
    final String result = udf.chr((String) null);
    assertThat(result, is(nullValue()));
}
/**
 * Parses each address string into a {@code Member}.
 *
 * @param addresses address strings to parse
 * @return the parsed members, in iteration order of the input
 */
public static Collection<Member> multiParse(Collection<String> addresses) {
    final List<Member> members = new ArrayList<>(addresses.size());
    for (final String address : addresses) {
        members.add(singleParse(address));
    }
    return members;
}
// Both "host:port" and bare "host" forms must parse into members.
@Test void testMultiParse() {
    Collection<String> address = new HashSet<>();
    address.add("1.1.1.1:3306");
    address.add("1.1.1.1");
    Collection<Member> actual = MemberUtil.multiParse(address);
    assertEquals(2, actual.size());
}
/**
 * Parses the given XML content into a CruiseConfig for editing: computes the
 * content MD5, parses the XML, runs the config magic-loader, then records the
 * MD5 on the result and marks it as originating from the config file.
 */
public CruiseConfig deserializeConfig(String content) throws Exception {
    String md5 = md5Hex(content);
    // NOTE(review): getBytes() uses the platform default charset — presumably
    // the content is ASCII/UTF-8; confirm against the callers.
    Element element = parseInputStream(new ByteArrayInputStream(content.getBytes()));
    LOGGER.debug("[Config Save] Updating config cache with new XML");
    CruiseConfig configForEdit =
        classParser(element, BasicCruiseConfig.class, configCache, new GoCipher(), registry,
            new ConfigReferenceElements()).parse();
    setMd5(configForEdit, md5);
    configForEdit.setOrigins(new FileConfigOrigin());
    return configForEdit;
}
@Test void shouldLoadPipelineWithTimer() throws Exception { CruiseConfig config = xmlLoader.deserializeConfig(PIPELINE_WITH_TIMER); PipelineConfig pipelineConfig = config.pipelineConfigByName(new CaseInsensitiveString("pipeline")); assertThat(pipelineConfig.getTimer()).isEqualTo(new TimerConfig("0 15 10 ? * MON-FRI", false)); }
// Converts the request into a FileConfigDO, parses the storage-specific client config, defaults the
// record to non-master, persists it, and returns the generated id.
@Override public Long createFileConfig(FileConfigSaveReqVO createReqVO) { FileConfigDO fileConfig = FileConfigConvert.INSTANCE.convert(createReqVO) .setConfig(parseClientConfig(createReqVO.getStorage(), createReqVO.getConfig())) .setMaster(false); // 默认非 master fileConfigMapper.insert(fileConfig); return fileConfig.getId(); }
// Verifies creation persists the record with master=false, a parsed LocalFileClientConfig, and no eager cache entry.
@Test public void testCreateFileConfig_success() { // 准备参数 Map<String, Object> config = MapUtil.<String, Object>builder().put("basePath", "/yunai") .put("domain", "https://www.iocoder.cn").build(); FileConfigSaveReqVO reqVO = randomPojo(FileConfigSaveReqVO.class, o -> o.setStorage(FileStorageEnum.LOCAL.getStorage()).setConfig(config)) .setId(null); // 避免 id 被赋值 // 调用 Long fileConfigId = fileConfigService.createFileConfig(reqVO); // 断言 assertNotNull(fileConfigId); // 校验记录的属性是否正确 FileConfigDO fileConfig = fileConfigMapper.selectById(fileConfigId); assertPojoEquals(reqVO, fileConfig, "id", "config"); assertFalse(fileConfig.getMaster()); assertEquals("/yunai", ((LocalFileClientConfig) fileConfig.getConfig()).getBasePath()); assertEquals("https://www.iocoder.cn", ((LocalFileClientConfig) fileConfig.getConfig()).getDomain()); // 验证 cache assertNull(fileConfigService.getClientCache().getIfPresent(fileConfigId)); }
// Walks the class hierarchy from the given class up through its superclasses, folding each
// @CucumberOptions annotation into the builder (subclass options are applied first), then adds
// default feature path and glue only when none were specified.
public RuntimeOptionsBuilder parse(Class<?> clazz) { RuntimeOptionsBuilder args = new RuntimeOptionsBuilder(); for (Class<?> classWithOptions = clazz; hasSuperClass( classWithOptions); classWithOptions = classWithOptions.getSuperclass()) { CucumberOptions options = requireNonNull(optionsProvider).getOptions(classWithOptions); if (options != null) { addDryRun(options, args); addMonochrome(options, args); addTags(classWithOptions, options, args); addPlugins(options, args); addPublish(options, args); addName(options, args); addSnippets(options, args); addGlue(options, args); addFeatures(options, args); addObjectFactory(options, args); addUuidGenerator(options, args); } } addDefaultFeaturePathIfNoFeaturePathIsSpecified(args, clazz); addDefaultGlueIfNoOverridingGlueIsSpecified(args, clazz); return args; }
// Verifies a pre-set snippet type survives merging with annotation defaults.
@Test void default_snippet_type_should_not_override_existing_snippet_type() { RuntimeOptions options = new RuntimeOptionsBuilder().setSnippetType(SnippetType.CAMELCASE).build(); RuntimeOptions runtimeOptions = parser().parse(WithDefaultOptions.class).build(options); assertThat(runtimeOptions.getSnippetType(), is(equalTo(SnippetType.CAMELCASE))); }
// Prepares one fetch request per target node and sends them; success/failure callbacks re-acquire
// the Fetcher lock before updating state. Returns the number of requests sent.
public synchronized int sendFetches() { final Map<Node, FetchSessionHandler.FetchRequestData> fetchRequests = prepareFetchRequests(); sendFetchesInternal( fetchRequests, (fetchTarget, data, clientResponse) -> { synchronized (Fetcher.this) { handleFetchSuccess(fetchTarget, data, clientResponse); } }, (fetchTarget, data, error) -> { synchronized (Fetcher.this) { handleFetchFailure(fetchTarget, data, error); } }); return fetchRequests.size(); }
// Verifies an in-flight fetch is NOT discarded when a cooperative rebalance re-assigns the same partition.
@Test public void testFetchDuringCooperativeRebalance() { buildFetcher(); subscriptions.subscribe(singleton(topicName), Optional.empty()); subscriptions.assignFromSubscribed(singleton(tp0)); subscriptions.seek(tp0, 0); client.updateMetadata(RequestTestUtils.metadataUpdateWithIds( 1, singletonMap(topicName, 4), tp -> validLeaderEpoch, topicIds)); assertEquals(1, sendFetches()); // Now the cooperative rebalance happens and fetch positions are NOT cleared for unrevoked partitions subscriptions.assignFromSubscribed(singleton(tp0)); client.prepareResponse(fullFetchResponse(tidp0, records, Errors.NONE, 100L, 0)); consumerClient.poll(time.timer(0)); Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> fetchedRecords = fetchRecords(); // The active fetch should NOT be ignored since the position for tp0 is still valid assertEquals(1, fetchedRecords.size()); assertEquals(3, fetchedRecords.get(tp0).size()); }
// Returns an immutable snapshot of the message's fields; callers cannot mutate internal state.
public Map<String, Object> getFields() { return ImmutableMap.copyOf(fields); }
// Verifies the snapshot exposes the core fields (_id, message, source, timestamp) with the same values.
@Test public void testGetFields() throws Exception { final Map<String, Object> fields = message.getFields(); assertEquals(message.getId(), fields.get("_id")); assertEquals(message.getMessage(), fields.get("message")); assertEquals(message.getSource(), fields.get("source")); assertEquals(message.getField("timestamp"), fields.get("timestamp")); }
// Diffs the new config text against the existing items line by line: rejects duplicate keys up
// front, classifies each trimmed line as comment/blank/normal and records creates/updates, then
// deletes comment/blank lines that disappeared and any leftover key-value items.
@Override public ItemChangeSets resolve(long namespaceId, String configText, List<ItemDTO> baseItems) { Map<Integer, ItemDTO> oldLineNumMapItem = BeanUtils.mapByKey("lineNum", baseItems); Map<String, ItemDTO> oldKeyMapItem = BeanUtils.mapByKey("key", baseItems); //remove comment and blank item map. oldKeyMapItem.remove(""); String[] newItems = configText.split(ITEM_SEPARATOR); Set<String> repeatKeys = new HashSet<>(); if (isHasRepeatKey(newItems, repeatKeys)) { throw new BadRequestException("Config text has repeated keys: %s, please check your input.", repeatKeys); } ItemChangeSets changeSets = new ItemChangeSets(); Map<Integer, String> newLineNumMapItem = new HashMap<>();//use for delete blank and comment item int lineCounter = 1; for (String newItem : newItems) { newItem = newItem.trim(); newLineNumMapItem.put(lineCounter, newItem); ItemDTO oldItemByLine = oldLineNumMapItem.get(lineCounter); //comment item if (isCommentItem(newItem)) { handleCommentLine(namespaceId, oldItemByLine, newItem, lineCounter, changeSets); //blank item } else if (isBlankItem(newItem)) { handleBlankLine(namespaceId, oldItemByLine, lineCounter, changeSets); //normal item } else { handleNormalLine(namespaceId, oldKeyMapItem, newItem, lineCounter, changeSets); } lineCounter++; } deleteCommentAndBlankItem(oldLineNumMapItem, newLineNumMapItem, changeSets); deleteNormalKVItem(oldKeyMapItem, changeSets); return changeSets; }
// Verifies removing a blank line from the base items produces exactly one delete and one update.
@Test public void testDeleteBlankItem(){ ItemChangeSets changeSets = resolver.resolve(1, "#qqqq\na=b\nb=c", mockBaseItemWith2Key1Comment1Blank()); Assert.assertEquals(1, changeSets.getDeleteItems().size()); Assert.assertEquals(1, changeSets.getUpdateItems().size()); Assert.assertEquals(0, changeSets.getCreateItems().size()); }
public static String substitute(final String input, final String pattern, final String sub) { StringBuilder ret = new StringBuilder(input.length()); int start = 0; int index = -1; final int length = pattern.length(); while ((index = input.indexOf(pattern, start)) >= start) { ret.append(input.substring(start, index)); ret.append(sub); start = index + length; } ret.append(input.substring(start)); return ret.toString(); }
@Test public void testSub2() throws Exception { String input = "arg1=param1;param1"; String pattern = "param1"; String sub = "${value}"; assertEquals("arg1=${value};${value}", StringUtilities.substitute(input, pattern, sub)); }
// Offset fetch is not a supported operation for share groups; always throws.
@Override public void validateOffsetFetch( String memberId, int memberEpoch, long lastCommittedOffset ) { throw new UnsupportedOperationException("validateOffsetFetch is not supported for Share Groups."); }
// Verifies the share-group implementation rejects offset fetch validation unconditionally.
@Test public void testValidateOffsetFetch() { ShareGroup shareGroup = createShareGroup("group-foo"); assertThrows(UnsupportedOperationException.class, () -> shareGroup.validateOffsetFetch(null, -1, -1)); }
// Simple accessor for the node manager.
public NodeMgr getNodeMgr() { return nodeMgr; }
// Verifies modifying the current master node's address is rejected with DdlException.
@Test(expected = DdlException.class) public void testUpdateModifyCurrentMasterException() throws Exception { GlobalStateMgr globalStateMgr = mockGlobalStateMgr(); ModifyFrontendAddressClause clause = new ModifyFrontendAddressClause("test-address", "sandbox-fqdn"); // this case will occur [can not modify current master node] exception globalStateMgr.getNodeMgr().modifyFrontendHost(clause); }
Optional<String> getQueriesFile(final Map<String, String> properties) { if (queriesFile != null) { return Optional.of(queriesFile); } return Optional.ofNullable(properties.get(QUERIES_FILE_CONFIG)); }
@Test public void shouldHaveQueriesFileIfInProperties() { // Given: final Map<String, String> propsFile = ImmutableMap.of( ServerOptions.QUERIES_FILE_CONFIG, "/path/to/file" ); // Then: assertThat(serverOptions.getQueriesFile(propsFile), is(Optional.of("/path/to/file"))); }
// Runs both stages' tasks in parallel, then executes the BiConsumer as blocking work on the
// supplied executor once both results are available.
@Override public <U> ParSeqBasedCompletionStage<Void> thenAcceptBothAsync(CompletionStage<? extends U> other, BiConsumer<? super T, ? super U> action, Executor executor) { Task<U> that = getOrGenerateTaskFromStage(other); return nextStageByComposingTask(Task.par(_task, that).flatMap("thenAcceptBothAsync", (t, u) -> Task.blocking(() -> { action.accept(t, u); return null; }, executor))); }
// Verifies the action runs on the supplied executor's thread (checked via thread name) and completes.
@Test public void testThenAcceptBothAsync() throws Exception { CountDownLatch waitLatch = new CountDownLatch(1); CompletionStage<String> completionStage1 = createTestStage(TESTVALUE1); CompletionStage<String> completionStage2 = createTestStage(TESTVALUE2); finish(completionStage1.thenAcceptBothAsync(completionStage2, (a, b) -> { assertEquals(THREAD_NAME_VALUE, Thread.currentThread().getName()); waitLatch.countDown(); }, _mockExecutor)); assertTrue(waitLatch.await(1000, TimeUnit.MILLISECONDS)); }
// Recomputes each queued item's priority from its job's tile relative to the current map position.
static <T extends Job> void schedule(Collection<QueueItem<T>> queueItems, MapPosition mapPosition, int tileSize) { for (QueueItem<T> queueItem : queueItems) { queueItem.setPriority(calculatePriority(queueItem.object.tile, mapPosition, tileSize)); } }
// Verifies priority grows with distance from the map position and with zoom-level difference.
@Test public void scheduleTest() { for (int tileSize : TILE_SIZES) { Tile tile0 = new Tile(0, 0, (byte) 0, tileSize); Job job = new Job(tile0, false); QueueItem<Job> queueItem = new QueueItem<Job>(job); Assert.assertEquals(0, queueItem.getPriority(), 0); MapPosition mapPosition = new MapPosition(new LatLong(0, 0), (byte) 0); QueueItemScheduler.schedule(createCollection(queueItem), mapPosition, tileSize); Assert.assertEquals(0, queueItem.getPriority(), 0); mapPosition = new MapPosition(new LatLong(0, 180), (byte) 0); QueueItemScheduler.schedule(createCollection(queueItem), mapPosition, tileSize); int halfTileSize = tileSize / 2; Assert.assertEquals(halfTileSize, queueItem.getPriority(), 0); mapPosition = new MapPosition(new LatLong(0, -180), (byte) 0); QueueItemScheduler.schedule(createCollection(queueItem), mapPosition, tileSize); Assert.assertEquals(halfTileSize, queueItem.getPriority(), 0); mapPosition = new MapPosition(new LatLong(0, 0), (byte) 1); QueueItemScheduler.schedule(createCollection(queueItem), mapPosition, tileSize); double expectedPriority = Math.hypot(halfTileSize, halfTileSize) + QueueItemScheduler.PENALTY_PER_ZOOM_LEVEL * tileSize; Assert.assertEquals(expectedPriority, queueItem.getPriority(), 0); } }
public static String removePrefix(final String name) { return name.startsWith(PRE_FIX) ? name.substring(1) : name; }
@Test void removePrefix() { String ret = UriUtils.removePrefix("http"); assertEquals("http", ret); ret = UriUtils.removePrefix("/http"); assertEquals("http", ret); }
// Parses a FEEL time string: an explicit offset yields an OffsetTime, no zone info at all yields a
// LocalTime, and a named zone yields a ZoneTime that remembers whether seconds were present in the
// input. Null input and parse failures are reported as FEEL error results rather than exceptions.
public FEELFnResult<TemporalAccessor> invoke(@ParameterName("from") String val) { if ( val == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null")); } try { TemporalAccessor parsed = FEEL_TIME.parse(val); if (parsed.query(TemporalQueries.offset()) != null) { // it is an offset-zoned time, so I can know for certain an OffsetTime OffsetTime asOffSetTime = parsed.query(OffsetTime::from); return FEELFnResult.ofResult(asOffSetTime); } else if (parsed.query(TemporalQueries.zone()) == null) { // if it does not contain any zone information at all, then I know for certain is a local time. LocalTime asLocalTime = parsed.query(LocalTime::from); return FEELFnResult.ofResult(asLocalTime); } else if (parsed.query(TemporalQueries.zone()) != null) { boolean hasSeconds = timeStringWithSeconds(val); LocalTime asLocalTime = parsed.query(LocalTime::from); ZoneId zoneId = parsed.query(TemporalQueries.zone()); ZoneTime zoneTime = ZoneTime.of(asLocalTime, zoneId, hasSeconds); return FEELFnResult.ofResult(zoneTime); } return FEELFnResult.ofResult(parsed); } catch (DateTimeException e) { return manageDateTimeException(e, val); } }
// Verifies the numeric-parameters overload rejects infinite values with an error event.
@Test void invokeTimeUnitsParamsUnsupportedNumber() { FunctionTestUtil.assertResultError(timeFunction.invoke(Double.POSITIVE_INFINITY, 1, 1, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(Double.NEGATIVE_INFINITY, 1, 1, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(1, Double.POSITIVE_INFINITY, 1, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(1, Double.NEGATIVE_INFINITY, 1, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(1, 1, Double.POSITIVE_INFINITY, null), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(timeFunction.invoke(1, 1, Double.NEGATIVE_INFINITY, null), InvalidParametersEvent.class); }
@ApiOperation(value = "Delete widgets bundle (deleteWidgetsBundle)", notes = "Deletes the widget bundle. Referencing non-existing Widget Bundle Id will cause an error." + SYSTEM_OR_TENANT_AUTHORITY_PARAGRAPH) @PreAuthorize("hasAnyAuthority('SYS_ADMIN', 'TENANT_ADMIN')") @RequestMapping(value = "/widgetsBundle/{widgetsBundleId}", method = RequestMethod.DELETE) @ResponseStatus(value = HttpStatus.OK) public void deleteWidgetsBundle( @Parameter(description = WIDGET_BUNDLE_ID_PARAM_DESCRIPTION, required = true) @PathVariable("widgetsBundleId") String strWidgetsBundleId) throws ThingsboardException { checkParameter("widgetsBundleId", strWidgetsBundleId); WidgetsBundleId widgetsBundleId = new WidgetsBundleId(toUUID(strWidgetsBundleId)); WidgetsBundle widgetsBundle = checkWidgetsBundleId(widgetsBundleId, Operation.DELETE); tbWidgetsBundleService.delete(widgetsBundle, getCurrentUser()); }
@Test public void testDeleteWidgetsBundle() throws Exception { WidgetsBundle widgetsBundle = new WidgetsBundle(); widgetsBundle.setTitle("My widgets bundle"); WidgetsBundle savedWidgetsBundle = doPost("/api/widgetsBundle", widgetsBundle, WidgetsBundle.class); Mockito.reset(tbClusterService, auditLogService); doDelete("/api/widgetsBundle/" + savedWidgetsBundle.getId().getId().toString()) .andExpect(status().isOk()); String savedWidgetsBundleIdStr = savedWidgetsBundle.getId().getId().toString(); testNotifyEntityAllOneTime(savedWidgetsBundle, savedWidgetsBundle.getId(), savedWidgetsBundle.getId(), savedTenant.getId(), tenantAdmin.getCustomerId(), tenantAdmin.getId(), tenantAdmin.getEmail(), ActionType.DELETED); doGet("/api/widgetsBundle/" + savedWidgetsBundleIdStr) .andExpect(status().isNotFound()) .andExpect(statusReason(containsString(msgErrorNoFound("Widgets bundle", savedWidgetsBundleIdStr)))); }
// Intentional no-op: this logger implementation silently discards verbose messages.
@Override public void v(String tag, String message, Object... args) { }
// Verifies verbose calls produce no logged output.
@Test public void verboseNotLogged() { logger.v(tag, "Hello %s", "World"); assertNotLogged(); }
// Streaming wrapper: reads the entire input source, formats it, and writes the result to the sink.
// Not truly streaming yet (see TODO) — the whole input is read into memory for parsing.
public void formatSource(CharSource input, CharSink output) throws FormatterException, IOException { // TODO(cushon): proper support for streaming input/output. Input may // not be feasible (parsing) but output should be easier. output.write(formatSource(input.read())); }
// Verifies a trailing line comment with no internal whitespace is not wrapped; code wraps around it instead.
@Test public void onlyWrapLineCommentOnWhitespace() throws Exception { assertThat( new Formatter() .formatSource( "class T {\n" + " public static void main(String[] args) { // one_long_incredibly" + "_unbroken_sentence_moving_from_topic_to_topic_so_that_no-one_had_a" + "_chance_to_interrupt;\n" + " }\n" + "}\n")) .isEqualTo( "class T {\n" + " public static void main(\n" + " String[]\n" + " args) { // one_long_incredibly" + "_unbroken_sentence_moving_from_topic_to_topic_so_that_no-one_had_a" + "_chance_to_interrupt;\n" + " }\n" + "}\n"); }
// Returns the node at the given depth along the path from the common ancestor toward the begin node.
public NodeModel beginPathElement(int level) { return relativeNode(commonAncestor, beginPath, level); }
// Verifies that for siblings the element at depth 1 on the begin side is the begin node itself.
@Test public void beginPathElement(){ final NodeModel parent = root(); final NodeModel node1 = new NodeModel("node1", map); parent.insert(node1); final NodeModel node2 = new NodeModel("node2", map); parent.insert(node2); final NodeRelativePath nodeRelativePath = new NodeRelativePath(node1, node2); assertThat(nodeRelativePath.beginPathElement(1), equalTo(node1)); }
// Queue semantics per sharded key: first work for a key is EXECUTEd immediately; an identical id
// already queued is a DUPLICATE; work with the same cache token but an older work token is STALE;
// otherwise newer work with the same cache token evicts queued-but-not-active stale entries (budget
// is decremented for each eviction) and the incoming work is QUEUED for later processing.
synchronized ActivateWorkResult activateWorkForKey(ExecutableWork executableWork) { ShardedKey shardedKey = executableWork.work().getShardedKey(); Deque<ExecutableWork> workQueue = activeWork.getOrDefault(shardedKey, new ArrayDeque<>()); // This key does not have any work queued up on it. Create one, insert Work, and mark the work // to be executed. if (!activeWork.containsKey(shardedKey) || workQueue.isEmpty()) { workQueue.addLast(executableWork); activeWork.put(shardedKey, workQueue); incrementActiveWorkBudget(executableWork.work()); return ActivateWorkResult.EXECUTE; } // Check to see if we have this work token queued. Iterator<ExecutableWork> workIterator = workQueue.iterator(); while (workIterator.hasNext()) { ExecutableWork queuedWork = workIterator.next(); if (queuedWork.id().equals(executableWork.id())) { return ActivateWorkResult.DUPLICATE; } if (queuedWork.id().cacheToken() == executableWork.id().cacheToken()) { if (executableWork.id().workToken() > queuedWork.id().workToken()) { // Check to see if the queuedWork is active. We only want to remove it if it is NOT // currently active. if (!queuedWork.equals(workQueue.peek())) { workIterator.remove(); decrementActiveWorkBudget(queuedWork.work()); } // Continue here to possibly remove more non-active stale work that is queued. } else { return ActivateWorkResult.STALE; } } } // Queue the work for later processing. workQueue.addLast(executableWork); incrementActiveWorkBudget(executableWork.work()); return ActivateWorkResult.QUEUED; }
// Verifies that re-activating work with an identical id for the same key reports DUPLICATE.
@Test public void testActivateWorkForKey_DUPLICATE() { long workToken = 10L; ShardedKey shardedKey = shardedKey("someKey", 1L); // ActivateWork with the same shardedKey, and the same workTokens. activeWorkState.activateWorkForKey(createWork(createWorkItem(workToken, 1L, shardedKey))); ActivateWorkResult activateWorkResult = activeWorkState.activateWorkForKey(createWork(createWorkItem(workToken, 1L, shardedKey))); assertEquals(ActivateWorkResult.DUPLICATE, activateWorkResult); }
// Transactional delegate: removes the product row by id via the repository.
@Override @Transactional public void deleteProduct(Integer id) { this.productRepository.deleteById(id); }
// Verifies exactly one deleteById call reaches the repository and nothing else.
@Test void deleteProduct_DeletesProduct() { // given var productId = 1; // when this.service.deleteProduct(productId); // then verify(this.productRepository).deleteById(productId); verifyNoMoreInteractions(this.productRepository); }
// Delegates comment stripping to a per-call Parser over the line.
static String strip(final String line) { return new Parser(line).parse(); }
// Verifies a "--" comment is removed up to (but not including) the newline.
@Test public void shouldTerminateCommentAtNewLine() { // Given: final String line = "some multi-line -- this is a comment\n" + "statement"; // Then: assertThat(CommentStripper.strip(line), is("some multi-line\nstatement")); }
// Tokenizes the URI element and parses it; any tokens remaining after a complete parse indicate
// malformed input and raise a syntax exception naming the first excess token.
public static Object parse(String element) throws PathSegment.PathSegmentSyntaxException { Queue<Token> tokens = tokenizeElement(element); Object result = parseElement(tokens); if (!tokens.isEmpty()) { throw new PathSegment.PathSegmentSyntaxException("tokens left over after parsing; first excess token: " + tokens.peek().toErrorString() ); } return result; }
// Data-driven check that each decodable string parses to its expected object.
@Test(dataProvider = "basicDecodable") public void testBasicDecoding(String decodable, Object expectedObj) throws PathSegment.PathSegmentSyntaxException { Object actualObj = URIElementParser.parse(decodable); Assert.assertEquals(actualObj, expectedObj); }
// Convenience overload: delegates to the materialized variant using the grouped stream's serdes.
@Override public KTable<K, V> reduce(final Reducer<V> reducer) { return reduce(reducer, Materialized.with(keySerde, valueSerde)); }
// Verifies null-key/value records are skipped with a warning rather than failing the topology.
@Test public void shouldLogAndMeasureSkipsInReduce() { groupedStream.reduce( MockReducer.STRING_ADDER, Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("reduce") .withKeySerde(Serdes.String()) .withValueSerde(Serdes.String()) ); try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(KStreamReduce.class); final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) { processData(driver); assertThat( appender.getMessages(), hasItem("Skipping record due to null key or value. topic=[topic] partition=[0] " + "offset=[6]") ); } }
// Proto equality assertion with three branches: same message class but different descriptors (a
// DynamicMessage pitfall) gets a dedicated failure message; a non-message or different-descriptor
// expected value falls back to plain Object equality; otherwise a field-by-field diff is produced
// and reported on mismatch.
@Override public void isEqualTo(@Nullable Object expected) { if (sameClassMessagesWithDifferentDescriptors(actual, expected)) { // This can happen with DynamicMessages, and it's very confusing if they both have the // same string. failWithoutActual( simpleFact("Not true that messages compare equal; they have different descriptors."), fact("expected", expected), fact("with descriptor", ((Message) expected).getDescriptorForType()), fact("but was", actual), fact("with descriptor", actual.getDescriptorForType())); } else if (notMessagesWithSameDescriptor(actual, expected)) { super.isEqualTo(expected); } else { DiffResult diffResult = makeDifferencer((Message) expected).diffMessages(actual, (Message) expected); if (!diffResult.isMatched()) { failWithoutActual( simpleFact( "Not true that messages compare equal.\n" + diffResult.printToString(config.reportMismatchesOnly()))); } } }
// Verifies a generated message and an equivalent DynamicMessage with the same descriptor compare equal both ways.
@Test public void testDifferentClasses() throws InvalidProtocolBufferException { Message message = parse("o_int: 3"); DynamicMessage dynamicMessage = DynamicMessage.parseFrom(message.getDescriptorForType(), message.toByteString()); expectThat(message).isEqualTo(dynamicMessage); expectThat(dynamicMessage).isEqualTo(message); }
// Delegates expression resolution to the internal Visitor with no initial context.
public Object resolve(final Expression expression) { return new Visitor().process(expression, null); }
// Verifies an unparseable timestamp literal surfaces a KsqlException with the expected format hint.
@Test public void shouldThrowIfCannotParseTimestamp() { // Given: final SqlType type = SqlTypes.TIMESTAMP; final Expression exp = new StringLiteral("abc"); // When: final KsqlException e = assertThrows( KsqlException.class, () -> new GenericExpressionResolver(type, FIELD_NAME, registry, config, "insert value", false).resolve(exp)); // Then: assertThat(e.getMessage(), containsString("Timestamp format must be yyyy-mm-ddThh:mm:ss[.S]")); }
// Convenience overload: reads all bytes from the stream, closing it afterwards (close=true).
public static byte[] readBytes(InputStream in) throws IORuntimeException { return readBytes(in, true); }
// Verifies the length-limited overload reads exactly the requested number of bytes.
@Test public void readBytesWithLengthTest() { // 读取固定长度 final int limit = RandomUtil.randomInt(22807); final byte[] bytes = IoUtil.readBytes(ResourceUtil.getStream("hutool.jpg"), limit); assertEquals(limit, bytes.length); }
@Override public AbstractWALEvent decode(final ByteBuffer data, final BaseLogSequenceNumber logSequenceNumber) { AbstractWALEvent result; byte[] bytes = new byte[data.remaining()]; data.get(bytes); String dataText = new String(bytes, StandardCharsets.UTF_8); if (decodeWithTX) { result = decodeDataWithTX(dataText); } else { result = decodeDataIgnoreTX(dataText); } result.setLogSequenceNumber(logSequenceNumber); return result; }
@Test void assertDecodeWithTsquery() { MppTableData tableData = new MppTableData(); tableData.setTableName("public.test"); tableData.setOpType("INSERT"); tableData.setColumnsName(new String[]{"data"}); tableData.setColumnsType(new String[]{"tsquery"}); tableData.setColumnsVal(new String[]{"'''fff'' | ''faa'''"}); ByteBuffer data = ByteBuffer.wrap(JsonUtils.toJsonString(tableData).getBytes()); WriteRowEvent actual = (WriteRowEvent) new MppdbDecodingPlugin(null, false, false).decode(data, logSequenceNumber); Object byteaObj = actual.getAfterRow().get(0); assertThat(byteaObj.toString(), is("'fff' | 'faa'")); }
public String getFormattedMessage() { if (formattedMessage != null) { return formattedMessage; } if (argumentArray != null) { formattedMessage = MessageFormatter.arrayFormat(message, argumentArray).getMessage(); } else { formattedMessage = message; } return formattedMessage; }
@Test public void testNoFormattingWithoutArgs() { String message = "testNoFormatting"; Throwable throwable = null; Object[] argArray = null; LoggingEvent event = new LoggingEvent("", logger, Level.INFO, message, throwable, argArray); assertNull(event.formattedMessage); assertEquals(message, event.getFormattedMessage()); }
// Simple accessor for the channel's unique id.
@Override public final ChannelId id() { return id; }
// Verifies a plain channel gets a DefaultChannelId assigned.
@Test public void ensureDefaultChannelId() { TestChannel channel = new TestChannel(); final ChannelId channelId = channel.id(); assertTrue(channelId instanceof DefaultChannelId); }
@Override public Capabilities fromDTO(CapabilitiesDTO capabilitiesDTO) { return new Capabilities(capabilitiesDTO.supportsPluginStatusReport(), capabilitiesDTO.supportsClusterStatusReport(), capabilitiesDTO.supportsAgentStatusReport()); }
@Test public void fromDTO_shouldConvertToCapabilitiesFromCapabilitiesDTO() { when(capabilitiesDTO.supportsPluginStatusReport()).thenReturn(false); when(capabilitiesDTO.supportsAgentStatusReport()).thenReturn(false); assertFalse(capabilitiesConverter.fromDTO(capabilitiesDTO).supportsPluginStatusReport()); assertFalse(capabilitiesConverter.fromDTO(capabilitiesDTO).supportsAgentStatusReport()); when(capabilitiesDTO.supportsPluginStatusReport()).thenReturn(true); when(capabilitiesDTO.supportsAgentStatusReport()).thenReturn(true); assertTrue(capabilitiesConverter.fromDTO(capabilitiesDTO).supportsPluginStatusReport()); assertTrue(capabilitiesConverter.fromDTO(capabilitiesDTO).supportsAgentStatusReport()); when(capabilitiesDTO.supportsPluginStatusReport()).thenReturn(false); when(capabilitiesDTO.supportsAgentStatusReport()).thenReturn(true); assertFalse(capabilitiesConverter.fromDTO(capabilitiesDTO).supportsPluginStatusReport()); assertTrue(capabilitiesConverter.fromDTO(capabilitiesDTO).supportsAgentStatusReport()); when(capabilitiesDTO.supportsPluginStatusReport()).thenReturn(true); when(capabilitiesDTO.supportsAgentStatusReport()).thenReturn(false); assertTrue(capabilitiesConverter.fromDTO(capabilitiesDTO).supportsPluginStatusReport()); assertFalse(capabilitiesConverter.fromDTO(capabilitiesDTO).supportsAgentStatusReport()); }
// Servlet handler for registering/unregistering monitored applications: validates that either URLs
// or aggregated apps are supplied and that URLs are http(s); dispatches on the "action" parameter
// (unregisterNode removes nodes, URLs add a monitored app, otherwise an aggregation app is added);
// then redirects with a confirmation message. Config and response-stream failures are wrapped in
// IllegalStateException with localized messages.
private void addCollectorApplication(HttpServletRequest req, HttpServletResponse resp) throws IOException { final String appName = req.getParameter("appName"); final String appUrls = req.getParameter("appUrls"); final String action = req.getParameter("action"); final String[] aggregatedApps = req.getParameterValues("aggregatedApps"); try { if (appName == null || appUrls == null && aggregatedApps == null) { throw new IllegalArgumentException(I18N.getString("donnees_manquantes")); } if (appUrls != null && !appUrls.startsWith("http://") && !appUrls.startsWith("https://")) { throw new IllegalArgumentException(I18N.getString("urls_format")); } final CollectorController collectorController = new CollectorController( collectorServer); if ("unregisterNode".equals(action)) { collectorController.removeCollectorApplicationNodes(appName, appUrls); LOGGER.info("monitored application node removed: " + appName + ", url: " + appUrls); } else if (appUrls != null) { collectorController.addCollectorApplication(appName, appUrls); LOGGER.info("monitored application added: " + appName); LOGGER.info("urls of the monitored application: " + appUrls); } else { assert aggregatedApps != null; collectorController.addCollectorAggregationApplication(appName, List.of(aggregatedApps)); LOGGER.info("aggregation application added: " + appName); LOGGER.info("aggregated applications of the aggregation application: " + List.of(aggregatedApps)); } CollectorController.showAlertAndRedirectTo(resp, I18N.getFormattedString("application_ajoutee", appName), "?application=" + appName); } catch (final FileNotFoundException e) { final String message = I18N.getString("monitoring_configure"); throw new IllegalStateException(message + '\n' + e, e); } catch (final StreamCorruptedException e) { final String message = I18N.getFormattedString("reponse_non_comprise", appUrls); throw new IllegalStateException(message + '\n' + e, e); } }
// Smoke test: registers and then removes a collector application via the static helpers.
@Test public void testAddCollectorApplication() throws IOException { CollectorServlet.addCollectorApplication("test", "http://localhost:8090/test"); CollectorServlet.removeCollectorApplication("test"); }
// Splits the element's label around the first case-insensitive occurrence of the query and formats
// before/match/after through the localized bundle (which highlights the match).
// NOTE(review): assumes the query occurs in the label — indexOf returning -1 would throw; verify callers filter first.
@Override protected String toHtmlDisplay(Element element, String query) { String label = element.getLabel(); int index = label.toLowerCase().indexOf(query.toLowerCase()); String before = label.substring(0, index); String match = label.substring(index, index + query.length()); String after = label.substring(index + query.length()); return NbBundle.getMessage(FuzzyElementLabelSearchProvider.class, "FuzzyElementLabelSearchProvider.result", before, match, after); }
// Verifies a match at the end of the label is wrapped in bold markup.
@Test public void testHtmlLast() { Mockito.when(node.getLabel()).thenReturn("foobar"); Assert.assertTrue( new FuzzyElementLabelSearchProvider().toHtmlDisplay(node, "bar").contains("foo<b>bar</b>")); }
// Delegates to the change notifier, which tracks the currently subscribed services.
@Override public List<ServiceInfo> getSubscribeServices() { return changeNotifier.getSubscribeServices(); }
// Verifies the call is forwarded to the change notifier exactly once.
@Test void testGetSubscribeServices() { //when client.getSubscribeServices(); //then verify(changeNotifier, times(1)).getSubscribeServices(); }
// Convenience factory: wraps the single implementation in a DefaultFailoverProxyProvider and
// delegates to the provider-based overload.
public static <T> Object create(Class<T> iface, T implementation, RetryPolicy policy) { return RetryProxy.create(iface, new DefaultFailoverProxyProvider<T>(iface, implementation), policy); }
// Verifies the retry policy's FAIL decision stops retries on AccessControlException after one consult.
@Test public void testNoRetryOnAccessControlException() throws Exception { RetryPolicy policy = mock(RetryPolicy.class); RetryPolicy realPolicy = RetryPolicies.failoverOnNetworkException(5); setupMockPolicy(policy, realPolicy); UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create( UnreliableInterface.class, unreliableImpl, policy); try { unreliable.failsWithAccessControlExceptionEightTimes(); fail("Should fail"); } catch (AccessControlException e) { // expected verify(policy, times(1)).shouldRetry(any(Exception.class), anyInt(), anyInt(), anyBoolean()); assertEquals(RetryDecision.FAIL, caughtRetryAction.action); } }
@SuppressWarnings("unchecked") @Override public <T extends Statement> ConfiguredStatement<T> inject( final ConfiguredStatement<T> statement ) { if (!(statement.getStatement() instanceof CreateSource) && !(statement.getStatement() instanceof CreateAsSelect)) { return statement; } try { if (statement.getStatement() instanceof CreateSource) { final ConfiguredStatement<CreateSource> createStatement = (ConfiguredStatement<CreateSource>) statement; return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement); } else { final ConfiguredStatement<CreateAsSelect> createStatement = (ConfiguredStatement<CreateAsSelect>) statement; return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse( createStatement); } } catch (final KsqlStatementException e) { throw e; } catch (final KsqlException e) { throw new KsqlStatementException( ErrorMessageUtil.buildErrorMessage(e), statement.getMaskedStatementText(), e.getCause()); } }
@Test public void shouldReturnStatementUnchangedIfCsAlreadyHasSchemas() { // Given: givenKeyAndValueInferenceSupported(); when(cs.getElements()).thenReturn(SOME_KEY_AND_VALUE_ELEMENTS_STREAM); // When: final ConfiguredStatement<?> result = injector.inject(csStatement); // Then: assertThat(result, is(sameInstance(csStatement))); }
public static boolean isEmpty( CharSequence val ) { return val == null || val.length() == 0; }
@Test public void testIsEmptyObjectArray() { assertTrue( Utils.isEmpty( (Object[]) null ) ); assertTrue( Utils.isEmpty( new Object[] {} ) ); assertFalse( Utils.isEmpty( new Object[] { "test" } ) ); }
public static String idOf(String entityUuid) { requireNonNull(entityUuid, "entityUuid can't be null"); return ID_PREFIX + entityUuid; }
@Test public void idOf_returns_argument_with_a_prefix() { String s = randomAlphabetic(12); assertThat(AuthorizationDoc.idOf(s)).isEqualTo("auth_" + s); }