focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Renders this IPv6 header as a human-readable string for logging and
 * debugging. Numeric fields are emitted via their wrapper toString methods
 * and the raw address bytes via {@link Arrays#toString}.
 */
@Override
public String toString() {
    return toStringHelper(getClass())
            .add("version", Byte.toString(version))
            .add("trafficClass", Byte.toString(trafficClass))
            .add("flowLabel", Integer.toString(flowLabel))
            .add("payloadLength", Short.toString(payloadLength))
            .add("nextHeader", Byte.toString(nextHeader))
            .add("hopLimit", Byte.toString(hopLimit))
            .add("sourceAddress", Arrays.toString(sourceAddress))
            .add("destinationAddress", Arrays.toString(destinationAddress))
            .toString();
}
// Verifies that toString() on a deserialized IPv6 packet reports the expected
// header fields (version, trafficClass, flowLabel, nextHeader, hopLimit).
// Note: negative byte values render via Byte.toString, so 0x93 appears as -109
// on both sides of the comparison.
@Test public void testToStringIPv6() throws Exception { IPv6 ipv6 = deserializer.deserialize(bytePacket, 0, bytePacket.length); String str = ipv6.toString(); assertTrue(StringUtils.contains(str, "version=" + (byte) 6)); assertTrue(StringUtils.contains(str, "trafficClass=" + (byte) 0x93)); assertTrue(StringUtils.contains(str, "flowLabel=" + 0x13579)); assertTrue(StringUtils.contains(str, "nextHeader=" + PROTOCOL_UDP)); assertTrue(StringUtils.contains(str, "hopLimit=" + (byte) 32)); // TODO: test IPv6 source and destination address }
// REST endpoint: updates a view by id. Forces the path id onto the DTO,
// checks the caller's permission, validates integrity, persists, and records
// a recent-activity entry typed by whether the view is a dashboard or search.
// Throws ForbiddenException when the user may not edit the view.
@PUT @Path("{id}") @ApiOperation("Update view") @AuditEvent(type = ViewsAuditEventTypes.VIEW_UPDATE) public ViewDTO update(@ApiParam(name = "id") @PathParam("id") @NotEmpty String id, @ApiParam @Valid ViewDTO dto, @Context SearchUser searchUser) { final ViewDTO updatedDTO = dto.toBuilder().id(id).build(); if (!searchUser.canUpdateView(updatedDTO)) { throw new ForbiddenException("Not allowed to edit " + summarize(updatedDTO) + "."); } validateIntegrity(updatedDTO, searchUser, false); var result = dbService.update(updatedDTO); recentActivityService.update(result.id(), result.type().equals(ViewDTO.Type.DASHBOARD) ? GRNTypes.DASHBOARD : GRNTypes.SEARCH, searchUser); return result; }
// Verifies that updating a search succeeds when a hidden (invisible) filter
// was already referenced before the update — i.e. pre-existing invisible
// filters do not block the update — and that the service receives the DTO.
@Test public void updatesSearchSuccessfullyIfInvisibleFilterWasPresentBefore() { final ViewService viewService = mockViewService(TEST_SEARCH_VIEW); final var dto = ViewDTO.builder().searchId("1").title("2").state(new HashMap<>()).build(); when(viewService.update(any())).thenReturn(dto); final ViewsResource viewsResource = createViewsResource( viewService, mock(StartPageService.class), mock(RecentActivityService.class), mock(ClusterEventBus.class), referencedFiltersHelperWithIDs(Collections.singleton("<<Hidden filter, but not added by this update>>")), searchFilterVisibilityChecker(Collections.singletonList("<<Hidden filter, but not added by this update>>")), EMPTY_VIEW_RESOLVERS, SEARCH ); viewsResource.update(VIEW_ID, TEST_SEARCH_VIEW, SEARCH_USER); verify(viewService).update(TEST_SEARCH_VIEW); }
// Computes the relative path from canonicalBaseFile to
// canonicalFileToRelativize. If the two paths have different roots (e.g.
// different Windows drives), no relative path exists and the target's
// canonical path is returned. Otherwise: count the common leading path
// components, emit one ".." per remaining base component, then append the
// remaining target components. Returns "." when the paths are identical.
static String getRelativeFileInternal(File canonicalBaseFile, File canonicalFileToRelativize) { List<String> basePath = getPathComponents(canonicalBaseFile); List<String> pathToRelativize = getPathComponents(canonicalFileToRelativize); //if the roots aren't the same (i.e. different drives on a windows machine), we can't construct a relative //path from one to the other, so just return the canonical file if (!basePath.get(0).equals(pathToRelativize.get(0))) { return canonicalFileToRelativize.getPath(); } int commonDirs; StringBuilder sb = new StringBuilder(); for (commonDirs=1; commonDirs<basePath.size() && commonDirs<pathToRelativize.size(); commonDirs++) { if (!basePath.get(commonDirs).equals(pathToRelativize.get(commonDirs))) { break; } } boolean first = true; for (int i=commonDirs; i<basePath.size(); i++) { if (!first) { sb.append(File.separatorChar); } else { first = false; } sb.append(".."); } first = true; for (int i=commonDirs; i<pathToRelativize.size(); i++) { if (first) { if (sb.length() != 0) { sb.append(File.separatorChar); } first = false; } else { sb.append(File.separatorChar); } sb.append(pathToRelativize.get(i)); } if (sb.length() == 0) { return "."; } return sb.toString(); }
/**
 * When base and target resolve to the same directory, the relative path must
 * be ".".
 */
@Test
public void pathUtilTest4() {
    File[] roots = File.listRoots();
    File basePath = new File(roots[0] + "some" + File.separatorChar + "dir");
    File relativePath = new File(roots[0] + "some" + File.separatorChar + "dir");
    String path = PathUtil.getRelativeFileInternal(basePath, relativePath);
    // Fixed: JUnit's assertEquals takes (expected, actual); the original had
    // them swapped, which produces a misleading message on failure.
    Assert.assertEquals(".", path);
}
/**
 * Returns a {@link Read} transform that decodes each Pub/Sub message payload
 * as a UTF-8 string, using {@link StringUtf8Coder} for downstream encoding.
 */
public static Read<String> readStrings() {
    return Read.newBuilder(
            (PubsubMessage message) ->
                new String(message.getPayload(), StandardCharsets.UTF_8))
        .setCoder(StringUtf8Coder.of())
        .build();
}
// Verifies that a subscription supplied via a StaticValueProvider is stored
// on the Read transform, is accessible, and resolves to the same path.
@Test public void testValueProviderSubscription() { StaticValueProvider<String> provider = StaticValueProvider.of("projects/project/subscriptions/subscription"); Read<String> pubsubRead = PubsubIO.readStrings().fromSubscription(provider); Pipeline.create().apply(pubsubRead); assertThat(pubsubRead.getSubscriptionProvider(), not(nullValue())); assertThat(pubsubRead.getSubscriptionProvider().isAccessible(), is(true)); assertThat(pubsubRead.getSubscriptionProvider().get().asPath(), equalTo(provider.get())); }
/**
 * Collects each incoming committable message; the actual commit is deferred
 * until checkpoint completion.
 *
 * @param element stream record carrying the committable message
 * @throws Exception if the collector rejects the message
 */
@Override
public void processElement(StreamRecord<CommittableMessage<CommT>> element) throws Exception {
    final CommittableMessage<CommT> message = element.getValue();
    committableCollector.addMessage(message);
}
// Verifies the committer waits for ALL committables announced by a
// checkpoint's summary (2 expected) before committing: after the first
// committable and checkpoint 1 nothing is committed; once the second arrives
// and checkpoint 2 completes, both values are committed.
@Test void testWaitForCommittablesOfLatestCheckpointBeforeCommitting() throws Exception { final MockCommitter committer = new MockCommitter(); final OneInputStreamOperatorTestHarness<CommittableMessage<Integer>, Void> testHarness = createTestHarness(committer); testHarness.open(); testHarness.processElement(new StreamRecord<>(new CommittableSummary<>(1, 1, 1L, 2, 0, 0))); testHarness.processElement(new StreamRecord<>(new CommittableWithLineage<>(1, 1L, 1))); testHarness.notifyOfCompletedCheckpoint(1); assertThat(testHarness.getOutput()).isEmpty(); // Not committed because incomplete assertThat(committer.committed).isEmpty(); testHarness.processElement(new StreamRecord<>(new CommittableWithLineage<>(2, 1L, 1))); testHarness.notifyOfCompletedCheckpoint(2); assertThat(testHarness.getOutput()).isEmpty(); assertThat(committer.committed).containsExactly(1, 2); testHarness.close(); }
// Maintenance task: permanently removes jobs in the DELETED state whose age
// exceeds the configured retention (permanentlyDeleteDeletedJobsAfter),
// logging how many were purged.
@Override protected void runTask() { LOGGER.trace("Looking for deleted jobs that can be deleted permanently..."); int totalAmountOfPermanentlyDeletedJobs = storageProvider.deleteJobsPermanently(StateName.DELETED, now().minus(backgroundJobServerConfiguration().getPermanentlyDeleteDeletedJobsAfter())); LOGGER.debug("Found {} deleted jobs that were permanently deleted as part of JobRunr maintenance", totalAmountOfPermanentlyDeletedJobs); }
// Verifies the task passes the configured retention (10 days here) through
// to the storage provider, allowing a small clock-skew tolerance of 5s.
@Test void testTaskTakesIntoAccountConfiguration() { runTask(task); verify(storageProvider).deleteJobsPermanently(eq(DELETED), assertArg(x -> assertThat(x).isCloseTo(now().minus(Duration.ofDays(10)), within(5, SECONDS)))); }
// Point lookup against a Kafka Streams state store via IQv2 KeyQuery.
// Optionally bounds the query at a given Position for consistency. A failed
// partition result raises a MaterializationException (some failures may be
// retriable in future); a null result yields an empty iterator with the
// store position; otherwise a single Row is returned with the value's
// timestamp. Known exception types are rethrown as-is, anything else is
// wrapped in a MaterializationException.
@Override public KsMaterializedQueryResult<Row> get( final GenericKey key, final int partition, final Optional<Position> position ) { try { final KeyQuery<GenericKey, ValueAndTimestamp<GenericRow>> query = KeyQuery.withKey(key); StateQueryRequest<ValueAndTimestamp<GenericRow>> request = inStore(stateStore.getStateStoreName()) .withQuery(query) .withPartitions(ImmutableSet.of(partition)); if (position.isPresent()) { request = request.withPositionBound(PositionBound.at(position.get())); } final StateQueryResult<ValueAndTimestamp<GenericRow>> result = stateStore.getKafkaStreams().query(request); final QueryResult<ValueAndTimestamp<GenericRow>> queryResult = result.getPartitionResults().get(partition); // Some of these failures are retriable, and in the future, we may want to retry // locally before throwing. if (queryResult.isFailure()) { throw failedQueryException(queryResult); } else if (queryResult.getResult() == null) { return KsMaterializedQueryResult.rowIteratorWithPosition( Collections.emptyIterator(), queryResult.getPosition()); } else { final ValueAndTimestamp<GenericRow> row = queryResult.getResult(); return KsMaterializedQueryResult.rowIteratorWithPosition( ImmutableList.of(Row.of(stateStore.schema(), key, row.value(), row.timestamp())) .iterator(), queryResult.getPosition()); } } catch (final NotUpToBoundException | MaterializationException e) { throw e; } catch (final Exception e) { throw new MaterializationException("Failed to get value from materialized table", e); } }
// Verifies a lower-bounded range lookup issues a RangeQuery with the key as
// lower bound and no upper bound.
// NOTE(review): the paired focal method issues a KeyQuery, while this test
// asserts a RangeQuery and calls table.get(PARTITION, A_KEY, null) — this
// appears to target a different (range-scan) table variant; confirm the
// pairing, and that passing a literal null (rather than an empty Optional /
// absent bound) is intended by that overload.
@Test public void shouldRangeQueryWithCorrectParams_lowerBound() { // Given: when(kafkaStreams.query(any())).thenReturn(getIteratorResult()); // When: table.get(PARTITION, A_KEY, null); // Then: verify(kafkaStreams).query(queryTypeCaptor.capture()); StateQueryRequest request = queryTypeCaptor.getValue(); assertThat(request.getQuery(), instanceOf(RangeQuery.class)); RangeQuery rangeQuery = (RangeQuery)request.getQuery(); assertThat(rangeQuery.getLowerBound(), is(Optional.of(A_KEY))); assertThat(rangeQuery.getUpperBound(), is(Optional.empty())); }
// Retry loop: invokes the delegate until it succeeds, the retry policy
// declines (rethrowing the last BackgroundException), or the operation is
// cancelled — in which case a ConnectionCanceledException is thrown.
@Override public T call() throws BackgroundException { while(!cancel.isCanceled()) { try { return delegate.call(); } catch(BackgroundException e) { if(!this.retry(e, listener, cancel)) { throw e; } // Try again } } throw new ConnectionCanceledException(); }
// Verifies the retry semantics: with retry count 1, a delegate that always
// throws ConnectionRefusedException is invoked exactly twice (initial call +
// one retry) before the exception propagates.
@Test public void testCall() throws Exception { final AtomicInteger count = new AtomicInteger(); final DefaultRetryCallable<Void> c = new DefaultRetryCallable<>(new Host(new TestProtocol(Scheme.file)), 1, 0, new BackgroundExceptionCallable<Void>() { @Override public Void call() throws BackgroundException { count.incrementAndGet(); throw new ConnectionRefusedException("d", new SocketException()); } }, new DisabledProgressListener(), new TransferBackgroundActionState(new TransferStatus())); try { c.call(); fail(); } catch(ConnectionRefusedException e) { // Expected } assertEquals(2, count.get()); }
/**
 * Records a successful login for the given admin user: stores the client IP
 * and stamps the login time with the current {@link LocalDateTime}.
 *
 * @param id      id of the admin user to update
 * @param loginIp IP address the user logged in from
 */
@Override
public void updateUserLogin(Long id, String loginIp) {
    // Partial update: only the key plus the two login fields are populated.
    AdminUserDO update = new AdminUserDO()
            .setId(id)
            .setLoginIp(loginIp)
            .setLoginDate(LocalDateTime.now());
    userMapper.updateById(update);
}
// Verifies updateUserLogin persists the new login IP and sets a non-null
// login date on a user whose loginDate was initially null.
@Test public void testUpdateUserLogin() { // mock data AdminUserDO user = randomAdminUserDO(o -> o.setLoginDate(null)); userMapper.insert(user); // prepare arguments Long id = user.getId(); String loginIp = randomString(); // invoke userService.updateUserLogin(id, loginIp); // assert AdminUserDO dbUser = userMapper.selectById(id); assertEquals(loginIp, dbUser.getLoginIp()); assertNotNull(dbUser.getLoginDate()); }
/**
 * Returns the current writer index of this buffer.
 *
 * @return the writer index
 */
@Override
public int writerIndex() {
    return writerIndex;
}
/**
 * A negative writer index must be rejected with IndexOutOfBoundsException.
 */
@Test
public void writerIndexBoundaryCheck1() {
    // Idiom: Executable is a functional interface, so a lambda replaces the
    // verbose anonymous class (JUnit 5 already requires Java 8+).
    assertThrows(IndexOutOfBoundsException.class, () -> buffer.writerIndex(-1));
}
// Synchronous facade over tryInitAsync: blocks on the async result and
// returns whether the generator was initialized with the given start value
// and allocation size.
@Override public boolean tryInit(long value, long allocationSize) { return get(tryInitAsync(value, allocationSize)); }
// Verifies that copying an initialized id generator to a new name creates
// the expected keys, visible from both connected database handles.
@Test public void testCopy() { testTwoDatabase((r1, r2) -> { RIdGenerator generator = r1.getIdGenerator("test"); generator.tryInit(12, 2931); generator.copy("test1", 1); assertThat(r1.getKeys().count()).isEqualTo(2); assertThat(r2.getKeys().count()).isEqualTo(2); }); }
/**
 * Returns a {@link DoFnInvoker} for the given {@link DoFn}, produced by the
 * ByteBuddy-based invoker factory singleton.
 */
public static <InputT, OutputT> DoFnInvoker<InputT, OutputT> invokerFor(
    DoFn<InputT, OutputT> fn) {
  ByteBuddyDoFnInvokerFactory factory = ByteBuddyDoFnInvokerFactory.only();
  return factory.newByteBuddyInvoker(fn);
}
// Verifies the generated invoker routes invokeOnWindowExpiration to the
// DoFn's @OnWindowExpiration method, supplying the window parameter.
@Test public void testOnWindowExpirationWithParam() { class MockFn extends DoFn<String, String> { @ProcessElement public void process(ProcessContext c) {} @OnWindowExpiration public void onWindowExpiration(BoundedWindow window) {} } MockFn fn = mock(MockFn.class); DoFnInvoker<String, String> invoker = DoFnInvokers.invokerFor(fn); invoker.invokeOnWindowExpiration(mockArgumentProvider); verify(fn).onWindowExpiration(mockWindow); }
/**
 * Computes the Expected Shortfall of a single closed position from its log
 * returns at the configured confidence level. A null position, or one that
 * is not fully closed (missing entry or exit), yields zero.
 */
@Override
public Num calculate(BarSeries series, Position position) {
    boolean closed = position != null
            && position.getEntry() != null
            && position.getExit() != null;
    if (!closed) {
        return series.zero();
    }
    Returns logReturns = new Returns(series, position, Returns.ReturnType.LOG);
    return calculateES(logReturns, confidence);
}
// Verifies ES on a strictly losing long position (monotonically decreasing
// prices 100..1) against a precomputed expected value.
@Test public void calculateOnlyWithLossPosition() { // regularly decreasing prices List<Double> prices = IntStream.rangeClosed(1, 100) .asDoubleStream() .boxed() .sorted(Collections.reverseOrder()) .collect(Collectors.toList()); series = new MockBarSeries(numFunction, prices); Position position = new Position(Trade.buyAt(series.getBeginIndex(), series), Trade.sellAt(series.getEndIndex(), series)); AnalysisCriterion esCriterion = getCriterion(); assertNumEquals(numOf(-0.35835189384561106), esCriterion.calculate(series, position)); }
// REST endpoint: deregisters (deletes) a service identified by namespace,
// group and name, then publishes a deregistration trace event. Secured for
// WRITE and rate-limited via TpsControl.
@DeleteMapping() @TpsControl(pointName = "NamingServiceDeregister", name = "HttpNamingServiceDeregister") @Secured(action = ActionTypes.WRITE) public Result<String> remove( @RequestParam(value = "namespaceId", defaultValue = Constants.DEFAULT_NAMESPACE_ID) String namespaceId, @RequestParam("serviceName") String serviceName, @RequestParam(value = "groupName", defaultValue = Constants.DEFAULT_GROUP) String groupName) throws Exception { serviceOperatorV2.delete(Service.newService(namespaceId, groupName, serviceName)); NotifyCenter.publishEvent( new DeregisterServiceTraceEvent(System.currentTimeMillis(), namespaceId, groupName, serviceName)); return Result.success("ok"); }
// Verifies remove() delegates deletion to serviceOperatorV2 with the
// expected Service identity and returns a successful "ok" result.
@Test void testRemove() throws Exception { Result<String> actual = serviceController.remove(Constants.DEFAULT_NAMESPACE_ID, "service", Constants.DEFAULT_GROUP); verify(serviceOperatorV2).delete(Service.newService(Constants.DEFAULT_NAMESPACE_ID, Constants.DEFAULT_GROUP, "service")); assertEquals("ok", actual.getData()); assertEquals(ErrorCode.SUCCESS.getCode(), actual.getCode()); }
// Normalizes a notify map keyed by service name. For plain service names
// (containing neither ':' nor '/'), the group and version found in each
// URL's query string are folded into the key as "group/name:version" (the
// params themselves are left in the query string — see the commented-out
// removals). Entries whose key already carries group/version markers are
// passed through unchanged. Null or empty input is returned as-is.
public static Map<String, Map<String, String>> revertNotify(Map<String, Map<String, String>> notify) { if (notify != null && notify.size() > 0) { Map<String, Map<String, String>> newNotify = new HashMap<>(); for (Map.Entry<String, Map<String, String>> entry : notify.entrySet()) { String serviceName = entry.getKey(); Map<String, String> serviceUrls = entry.getValue(); if (StringUtils.isNotContains(serviceName, ':') && StringUtils.isNotContains(serviceName, '/')) { if (CollectionUtils.isNotEmptyMap(serviceUrls)) { for (Map.Entry<String, String> entry2 : serviceUrls.entrySet()) { String url = entry2.getKey(); String query = entry2.getValue(); Map<String, String> params = StringUtils.parseQueryString(query); String group = params.get(GROUP_KEY); String version = params.get(VERSION_KEY); // params.remove("group"); // params.remove("version"); String name = serviceName; if (StringUtils.isNotEmpty(group)) { name = group + "/" + name; } if (StringUtils.isNotEmpty(version)) { name = name + ":" + version; } Map<String, String> newUrls = newNotify.computeIfAbsent(name, k -> new HashMap<>()); newUrls.put(url, StringUtils.toQueryString(params)); } } } else { newNotify.put(serviceName, serviceUrls); } } return newNotify; } return notify; }
// Verifies that a key already carrying group/version ("perf/...:1.0.0") is
// passed through revertNotify unchanged.
// NOTE(review): the expected map reuses the same `service` instance that was
// placed in the input (and the second put is a no-op duplicate) — the
// assertion still compares by content, but independent maps would be less
// fragile.
@Test void testRevertNotify2() { String key = "perf/dubbo.test.api.HelloService:1.0.0"; Map<String, Map<String, String>> notify = new HashMap<String, Map<String, String>>(); Map<String, String> service = new HashMap<String, String>(); service.put("dubbo://127.0.0.1:20880/com.xxx.XxxService", "group=perf&version=1.0.0"); notify.put(key, service); Map<String, Map<String, String>> newRegister = UrlUtils.revertNotify(notify); Map<String, Map<String, String>> expectedRegister = new HashMap<String, Map<String, String>>(); service.put("dubbo://127.0.0.1:20880/com.xxx.XxxService", "group=perf&version=1.0.0"); expectedRegister.put("perf/dubbo.test.api.HelloService:1.0.0", service); assertEquals(expectedRegister, newRegister); }
/**
 * Creates a builder seeded from the given impact map, after validating that
 * the map satisfies the required shape.
 *
 * @param map severity-to-count impact map
 * @return a builder initialized with the map's values
 */
public static ImpactMeasureBuilder fromMap(Map<String, Long> map) {
    checkImpactMap(map);
    return new ImpactMeasureBuilder(map);
}
// Verifies fromMap rejects a map lacking the mandatory "total" key with an
// IllegalArgumentException and the expected message.
@Test void fromMap_whenMissingField_shouldThrowException() { Map<String, Long> map = Map.of(); assertThatThrownBy(() -> ImpactMeasureBuilder.fromMap(map)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Map must contain a total key"); }
/**
 * Batch-deletes mock request records by id.
 *
 * @param ids non-empty list of non-blank record ids
 * @return success result carrying the number of deleted records
 */
@DeleteMapping("/batchDelete")
public ShenyuAdminResult batchDelete(@RequestBody @NotEmpty final List<@NotBlank String> ids) {
    final Integer deletedCount = mockRequestRecordService.batchDelete(ids);
    return ShenyuAdminResult.success(ShenyuResultMessage.DELETE_SUCCESS, deletedCount);
}
// Verifies the /mock/batchDelete endpoint accepts a JSON id list, returns
// HTTP 200, and echoes the delete-success message.
@Test public void testBatchDelete() throws Exception { given(mockRequestRecordService.batchDelete(any())).willReturn(1); this.mockMvc.perform(MockMvcRequestBuilders.delete("/mock/batchDelete") .contentType(MediaType.APPLICATION_JSON) .content(GsonUtils.getInstance().toJson(Lists.newArrayList("1")))) .andExpect(status().isOk()) .andExpect(jsonPath("$.message", is(ShenyuResultMessage.DELETE_SUCCESS))) .andReturn(); }
/**
 * If the configured address binds to a wildcard ("any local") address,
 * substitutes the supplied default host while preserving the configured
 * port; otherwise returns the configured address unchanged.
 *
 * @param configuredAddress host:port string from configuration
 * @param defaultHost host to use in place of a wildcard address
 */
@VisibleForTesting
static String substituteForWildcardAddress(String configuredAddress, String defaultHost) {
    InetSocketAddress sockAddr = NetUtils.createSocketAddr(configuredAddress);
    InetAddress resolved = sockAddr.getAddress();
    boolean isWildcard = resolved != null && resolved.isAnyLocalAddress();
    return isWildcard
            ? defaultHost + ":" + sockAddr.getPort()
            : configuredAddress;
}
// Verifies wildcard substitution: 0.0.0.0 is replaced by the default host
// (port preserved) while a concrete loopback address is left untouched.
@Test public void testSubstituteForWildcardAddress() throws IOException { assertEquals("foo:12345", DFSUtil.substituteForWildcardAddress("0.0.0.0:12345", "foo")); assertEquals("127.0.0.1:12345", DFSUtil.substituteForWildcardAddress("127.0.0.1:12345", "foo")); }
/**
 * Serializes the attribute groups to XML using the default enclosing tag
 * ({@code XML_TAG}).
 *
 * @param attributesMap group name -> (key -> value) attribute map
 * @return the XML representation
 */
public static String getAttributesXml(Map<String, Map<String, String>> attributesMap) {
    return getAttributesXml(attributesMap, XML_TAG);
}
// Verifies the single-argument overload delegates with the default XML tag:
// output is non-null and contains the tag, the group, the key and the value.
// Static mocking forwards both overloads to their real implementations.
@Test public void testGetAttributesXml_DefaultTag() { try ( MockedStatic<AttributesUtil> attributesUtilMockedStatic = mockStatic( AttributesUtil.class ) ) { attributesUtilMockedStatic.when( () -> AttributesUtil.getAttributesXml( anyMap() ) ).thenCallRealMethod(); attributesUtilMockedStatic.when( () -> AttributesUtil.getAttributesXml( anyMap(), anyString() ) ).thenCallRealMethod(); Map<String, String> attributesGroup = new HashMap<>(); Map<String, Map<String, String>> attributesMap = new HashMap<>(); attributesGroup.put( A_KEY, A_VALUE ); attributesMap.put( A_GROUP, attributesGroup ); String attributesXml = AttributesUtil.getAttributesXml( attributesMap ); assertNotNull( attributesXml ); // The default tag was used assertTrue( attributesXml.contains( AttributesUtil.XML_TAG ) ); // The group is present assertTrue( attributesXml.contains( A_GROUP ) ); // Both Key and Value are present assertTrue( attributesXml.contains( A_KEY ) ); assertTrue( attributesXml.contains( A_VALUE ) ); } }
public static void main(String[] args) { // create the alchemist shop with the potions var alchemistShop = new AlchemistShop(); // a brave visitor enters the alchemist shop and drinks all the potions alchemistShop.drinkPotions(); }
// Smoke test: the application entry point must run to completion without
// throwing.
@Test void shouldExecuteApplicationWithoutException() { assertDoesNotThrow(() -> App.main(new String[]{})); }
public TopicName createTopic(String topicName) { checkArgument(!topicName.isEmpty(), "topicName can not be empty"); checkIsUsable(); TopicName name = getTopicName(topicName); return createTopicInternal(name); }
// Verifies createTopic returns a non-null topic and passes a TopicName to
// the admin client whose project matches and whose topic id is the test id
// plus a 17-digit timestamp suffix and the requested name.
@Test public void testCreateTopicShouldCreate() { when(topicAdminClient.createTopic(any(TopicName.class))).thenReturn(topic); TopicName createTopic = testManager.createTopic("topic-name"); assertThat(createTopic).isNotNull(); verify(topicAdminClient).createTopic(topicNameCaptor.capture()); TopicName actualTopicName = topicNameCaptor.getValue(); assertThat(actualTopicName.getProject()).isEqualTo(PROJECT_ID); assertThat(actualTopicName.getTopic()).matches(TEST_ID + "-\\d{17}-topic-name"); }
/**
 * Translates a Hive {@link SearchArgument} into an Iceberg filter
 * {@link Expression} by walking its expression tree and leaves.
 */
public static Expression generateFilterExpression(SearchArgument sarg) {
    return translate(sarg.getExpression(), sarg.getLeaves());
}
/**
 * An AND of two equality leaves must translate to an Iceberg And expression
 * whose operator and operand operators match the expected expression.
 */
@Test
public void testAndOperand() {
    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
    SearchArgument arg = builder
            .startAnd()
            .equals("salary", PredicateLeaf.Type.LONG, 3000L)
            .equals("salary", PredicateLeaf.Type.LONG, 4000L)
            .end().build();

    And expected = (And) Expressions
            .and(Expressions.equal("salary", 3000L), Expressions.equal("salary", 4000L));
    And actual = (And) HiveIcebergFilterFactory.generateFilterExpression(arg);

    // Fixed: assertEquals takes (expected, actual); the original passed them
    // reversed, which produces misleading failure messages.
    assertEquals(expected.op(), actual.op());
    assertEquals(expected.left().op(), actual.left().op());
    assertEquals(expected.right().op(), actual.right().op());
}
/**
 * Lists the names of the child nodes under the given ZooKeeper path.
 *
 * @param key the ZooKeeper node path
 * @return the child node names
 * @throws ShenyuException wrapping any underlying Curator failure
 */
public List<String> getChildren(final String key) {
    try {
        return client.getChildren().forPath(key);
    } catch (Exception e) {
        // Normalize every Curator/ZooKeeper error into the project exception.
        throw new ShenyuException(e);
    }
}
// Verifies both paths of getChildren: an unstubbed client throws (wrapped as
// ShenyuException), and a stubbed client returning an empty child list
// yields an empty result.
@Test void getChildren() throws Exception { assertThrows(ShenyuException.class, () -> client.getChildren("/test")); GetChildrenBuilder getChildrenBuilder = mock(GetChildrenBuilder.class); when(curatorFramework.getChildren()).thenReturn(getChildrenBuilder); when(getChildrenBuilder.forPath(anyString())).thenReturn(new ArrayList<>()); List<String> children = client.getChildren("/test"); assertEquals(0, children.size()); }
// A key is present iff its stored value differs from the map's sentinel
// initialValue.
// NOTE(review): this uses != rather than equals — correct only when values
// are primitives or canonical/interned instances (e.g. a primitive-
// specialized map); confirm against the enclosing class's value type.
public boolean containsKey(final K key) { return initialValue != get(key); }
// Verifies containsKey returns false for a key that was never inserted.
@Test void shouldNotContainKeyOfAMissingKey() { assertFalse(map.containsKey(1)); }
/**
 * Builds the combined partition struct type across every partition spec the
 * table has ever had, projecting all known partition field ids.
 *
 * @param table the table whose specs are combined
 * @return the unified partition struct type
 */
public static StructType partitionType(Table table) {
    Collection<PartitionSpec> allSpecs = table.specs().values();
    return buildPartitionProjectionType("table partition", allSpecs, allFieldIds(allSpecs));
}
// Verifies v1 behavior when a partition field is removed and re-added: v1
// replaces dropped fields with void transforms instead of deleting them, so
// the combined partition type contains both the renamed old field
// (data_1000) and the re-added field (data).
@Test public void testPartitionTypeWithAddingBackSamePartitionFieldInV1Table() { TestTables.TestTable table = TestTables.create(tableDir, "test", SCHEMA, BY_DATA_SPEC, V1_FORMAT_VERSION); table.updateSpec().removeField("data").commit(); table.updateSpec().addField("data").commit(); // in v1, we use void transforms instead of dropping partition fields StructType expectedType = StructType.of( NestedField.optional(1000, "data_1000", Types.StringType.get()), NestedField.optional(1001, "data", Types.StringType.get())); StructType actualType = Partitioning.partitionType(table); assertThat(actualType).isEqualTo(expectedType); }
// SYS_ADMIN-only endpoint: sends a test email to the current user using the
// mail settings passed in the request body. For OAuth2-enabled settings it
// copies the previously stored refresh token (failing if none was ever
// generated); otherwise, if no password was supplied, it falls back to the
// stored password. Only acts when the settings key is "mail".
@ApiOperation(value = "Send test email (sendTestMail)", notes = "Attempts to send test email to the System Administrator User using Mail Settings provided as a parameter. " + "You may change the 'To' email in the user profile of the System Administrator. " + SYSTEM_AUTHORITY_PARAGRAPH) @PreAuthorize("hasAuthority('SYS_ADMIN')") @RequestMapping(value = "/settings/testMail", method = RequestMethod.POST) public void sendTestMail( @Parameter(description = "A JSON value representing the Mail Settings.") @RequestBody AdminSettings adminSettings) throws ThingsboardException { accessControlService.checkPermission(getCurrentUser(), Resource.ADMIN_SETTINGS, Operation.READ); adminSettings = checkNotNull(adminSettings); if (adminSettings.getKey().equals("mail")) { if (adminSettings.getJsonValue().has("enableOauth2") && adminSettings.getJsonValue().get("enableOauth2").asBoolean()) { AdminSettings mailSettings = checkNotNull(adminSettingsService.findAdminSettingsByKey(TenantId.SYS_TENANT_ID, "mail")); JsonNode refreshToken = mailSettings.getJsonValue().get("refreshToken"); if (refreshToken == null) { throw new ThingsboardException("Refresh token was not generated. Please, generate refresh token.", ThingsboardErrorCode.GENERAL); } ObjectNode settings = (ObjectNode) adminSettings.getJsonValue(); settings.put("refreshToken", refreshToken.asText()); } else { if (!adminSettings.getJsonValue().has("password")) { AdminSettings mailSettings = checkNotNull(adminSettingsService.findAdminSettingsByKey(TenantId.SYS_TENANT_ID, "mail")); ((ObjectNode) adminSettings.getJsonValue()).put("password", mailSettings.getJsonValue().get("password").asText()); } } String email = getCurrentUser().getEmail(); mailService.sendTestMail(adminSettings.getJsonValue(), email); } }
// Verifies that a test-mail request to an unreachable/unauthorized SMTP host
// with a short timeout produces a 5xx error while still invoking the mail
// service with the submitted settings.
@Test public void testSendTestMailTimeout() throws Exception { loginSysAdmin(); AdminSettings adminSettings = doGet("/api/admin/settings/mail", AdminSettings.class); ObjectNode objectNode = JacksonUtil.fromString(adminSettings.getJsonValue().toString(), ObjectNode.class); objectNode.put("smtpHost", "mail.gandi.net"); objectNode.put("timeout", 1_000); objectNode.put("username", "username"); objectNode.put("password", "password"); adminSettings.setJsonValue(objectNode); doPost("/api/admin/settings/testMail", adminSettings).andExpect(status().is5xxServerError()); Mockito.verify(mailService).sendTestMail(Mockito.any(), Mockito.anyString()); }
// Pure delegation: builds a key Serde by forwarding every argument to
// createInner unchanged.
@Override public Serde<GenericKey> create( final FormatInfo format, final PersistenceSchema schema, final KsqlConfig ksqlConfig, final Supplier<SchemaRegistryClient> schemaRegistryClientFactory, final String loggerNamePrefix, final ProcessingLogContext processingLogContext, final Optional<TrackedCallback> tracker ) { return createInner( format, schema, ksqlConfig, schemaRegistryClientFactory, loggerNamePrefix, processingLogContext, tracker ); }
// Verifies that creating a windowed key serde delegates to the inner factory
// with the windowed flag set to true.
@Test public void shouldCreateInnerSerdeWindowed() { // When: factory .create(format, TIMED_WND, schema, config, srClientFactory, LOGGER_PREFIX, processingLogCxt, Optional.empty()); // Then: verify(innerFactory).createFormatSerde("Key", format, schema, config, srClientFactory, true); }
/**
 * Computes a maximum-cardinality matching of the given bipartite graph using
 * the Hopcroft–Karp algorithm.
 *
 * @param graph edges from left-side vertices to right-side vertices
 * @return an immutable bimap pairing matched vertices
 */
static <U, V> ImmutableBiMap<U, V> maximumCardinalityBipartiteMatching(Multimap<U, V> graph) {
    return HopcroftKarp.overBipartiteGraph(graph).perform();
}
// Verifies the matcher is null-hostile: a null right-hand-side vertex must
// trigger a NullPointerException. The try/fail/catch form is used; the
// `unused` variable name intentionally suppresses unused-result warnings.
@Test public void maximumCardinalityBipartiteMatching_failsWithNullRhs() { ListMultimap<String, String> edges = LinkedListMultimap.create(); edges.put("L1", null); try { BiMap<String, String> unused = maximumCardinalityBipartiteMatching(edges); fail("Should have thrown."); } catch (NullPointerException expected) { } }
/**
 * Asynchronously fetches the details of the given job from the cluster's
 * REST endpoint.
 *
 * @param jobId id of the job to query
 * @return future completing with the job's details
 */
public CompletableFuture<JobDetailsInfo> getJobDetails(JobID jobId) {
    final JobMessageParameters parameters = new JobMessageParameters();
    parameters.jobPathParameter.resolve(jobId);
    return sendRequest(JobDetailsHeaders.getInstance(), parameters);
}
// Round-trip test: serves a handcrafted JobDetailsInfo (with a known
// SlotSharingGroupId) from a test REST handler and verifies the client's
// getJobDetails preserves that id through serialization and back.
@Test void testJobDetailsContainsSlotSharingGroupId() throws Exception { final IOMetricsInfo jobVertexMetrics = new IOMetricsInfo(0, false, 0, false, 0, false, 0, false, 0, 0, 0); SlotSharingGroupId slotSharingGroupId = new SlotSharingGroupId(); final Collection<JobDetailsInfo.JobVertexDetailsInfo> jobVertexDetailsInfos = Collections.singletonList( new JobDetailsInfo.JobVertexDetailsInfo( new JobVertexID(), slotSharingGroupId, "jobVertex1", 2, 1, ExecutionState.RUNNING, 1, 2, 1, Collections.singletonMap(ExecutionState.RUNNING, 0), jobVertexMetrics)); final JobDetailsInfo jobDetailsInfo = new JobDetailsInfo( jobId, "foobar", false, JobStatus.RUNNING, JobType.STREAMING, 1, 2, 1, 2, 10, Collections.singletonMap(JobStatus.RUNNING, 1L), jobVertexDetailsInfos, Collections.singletonMap(ExecutionState.RUNNING, 1), new JobPlanInfo.RawJson("{\"id\":\"1234\"}")); final TestJobDetailsInfoHandler jobDetailsInfoHandler = new TestJobDetailsInfoHandler(jobDetailsInfo); try (TestRestServerEndpoint restServerEndpoint = createRestServerEndpoint(jobDetailsInfoHandler)) { try (RestClusterClient<?> restClusterClient = createRestClusterClient(restServerEndpoint.getServerAddress().getPort())) { final CompletableFuture<JobDetailsInfo> jobDetailsInfoFuture = restClusterClient.getJobDetails(jobId); Collection<JobDetailsInfo.JobVertexDetailsInfo> jobVertexInfos = jobDetailsInfoFuture.get().getJobVertexInfos(); assertThat(jobVertexInfos).hasSize(1); assertThat(jobVertexInfos.iterator().next().getSlotSharingGroupId()) .isEqualTo(slotSharingGroupId); } } }
/**
 * Converts a scalar operator into a {@link Predicate} by visiting it with
 * this converter; a null operator converts to null.
 */
public Predicate convert(ScalarOperator operator) {
    if (operator == null) {
        return null;
    }
    return operator.accept(this, null);
}
// Verifies a <= binary predicate converts to a LeafPredicate carrying the
// LessOrEqual function and the literal 5.
@Test public void testLessEq() { ConstantOperator value = ConstantOperator.createInt(5); ScalarOperator op = new BinaryPredicateOperator(BinaryType.LE, F0, value); Predicate result = CONVERTER.convert(op); Assert.assertTrue(result instanceof LeafPredicate); LeafPredicate leafPredicate = (LeafPredicate) result; Assert.assertTrue(leafPredicate.function() instanceof LessOrEqual); Assert.assertEquals(5, leafPredicate.literals().get(0)); }
// Caches a registered instance so it can be re-registered (redone) after a
// reconnect. Keyed by the grouped service name; the put is guarded by the
// registeredInstances monitor to serialize with concurrent redo processing.
public void cacheInstanceForRedo(String serviceName, String groupName, Instance instance) { String key = NamingUtils.getGroupedName(serviceName, groupName); InstanceRedoData redoData = InstanceRedoData.build(serviceName, groupName, instance); synchronized (registeredInstances) { registeredInstances.put(key, redoData); } }
// Verifies the list overload caches a BatchInstanceRedoData entry carrying
// the service, group and instance list, initially neither registered nor
// unregistering.
@Test void testCacheInstanceForRedoByBatchInstanceRedoData() { ConcurrentMap<String, InstanceRedoData> registeredInstances = getInstanceRedoDataMap(); assertTrue(registeredInstances.isEmpty()); Instance instance = new Instance(); List<Instance> instanceList = new ArrayList<>(); instanceList.add(instance); redoService.cacheInstanceForRedo(SERVICE, GROUP, instanceList); assertFalse(registeredInstances.isEmpty()); BatchInstanceRedoData actual = (BatchInstanceRedoData) registeredInstances.entrySet().iterator().next() .getValue(); assertEquals(SERVICE, actual.getServiceName()); assertEquals(GROUP, actual.getGroupName()); assertEquals(instanceList, actual.getInstances()); assertFalse(actual.isRegistered()); assertFalse(actual.isUnregistering()); }
// Exports the service. Fast-exits if already exported; otherwise prepares or
// starts the module deployer (depending on external lifecycle management),
// then under the instance lock: refreshes config if needed and, when export
// is enabled, either schedules a delayed export, exports with MANUAL_REGISTER
// when delay == -1 and the manual-register property is set, or exports with
// the caller-supplied register type. Double-checked `exported` guard avoids
// duplicate export.
@Override public void export(RegisterTypeEnum registerType) { if (this.exported) { return; } if (getScopeModel().isLifeCycleManagedExternally()) { // prepare model for reference getScopeModel().getDeployer().prepare(); } else { // ensure start module, compatible with old api usage getScopeModel().getDeployer().start(); } synchronized (this) { if (this.exported) { return; } if (!this.isRefreshed()) { this.refresh(); } if (this.shouldExport()) { this.init(); if (shouldDelay()) { // should register if delay export doDelayExport(); } else if (Integer.valueOf(-1).equals(getDelay()) && Boolean.parseBoolean(ConfigurationUtils.getProperty( getScopeModel(), CommonConstants.DUBBO_MANUAL_REGISTER_KEY, "false"))) { // should not register by default doExport(RegisterTypeEnum.MANUAL_REGISTER); } else { doExport(registerType); } } } }
// Verifies export() rejects a MethodConfig whose ArgumentConfig is left
// completely unconfigured (invalid argument index) with an
// IllegalArgumentException.
@Test void testMethodConfigWithInvalidArgumentConfig() { Assertions.assertThrows(IllegalArgumentException.class, () -> { ServiceConfig<DemoServiceImpl> service = new ServiceConfig<>(); service.setInterface(DemoService.class); service.setRef(new DemoServiceImpl()); service.setProtocol(new ProtocolConfig() { { setName("dubbo"); } }); MethodConfig methodConfig = new MethodConfig(); methodConfig.setName("sayName"); // invalid argument index. methodConfig.setArguments(Lists.newArrayList(new ArgumentConfig() { { // unset config. } })); service.setMethods(Lists.newArrayList(methodConfig)); service.export(); }); }
// One polling cycle: resets per-poll bookkeeping (shutdown task, pending
// exchange count), fetches messages from SQS via the polling task, marks the
// consumer ready once SQS has responded, converts the messages into
// exchanges and hands them to batch processing, returning the batch size.
@Override protected int poll() throws Exception { // must reset for each poll shutdownRunningTask = null; pendingExchanges = 0; List<software.amazon.awssdk.services.sqs.model.Message> messages = pollingTask.call(); // okay we have some response from aws so lets mark the consumer as ready forceConsumerAsReady(); Queue<Exchange> exchanges = createExchanges(messages); return processBatch(CastUtils.cast(exchanges)); }
// Verifies that configuring sortAttributeName="All" does not add a dedicated
// sort attribute to the receive request: the request still asks only for the
// standard system attributes and one message, and no exchanges are produced.
@Test void shouldIgnoreSortingByAllAttribute() throws Exception { // given configuration.setSortAttributeName("All"); try (var tested = createConsumer(-1)) { // when var polledMessagesCount = tested.poll(); // then var expectedRequest = expectedReceiveRequestBuilder() .messageSystemAttributeNames(List.of(SENT_TIMESTAMP, MESSAGE_GROUP_ID)) .maxNumberOfMessages(1) .build(); assertThat(polledMessagesCount).isZero(); assertThat(receivedExchanges).isEmpty(); assertThat(sqsClientMock.getReceiveRequests()).containsExactlyInAnyOrder(expectedRequest); assertThat(sqsClientMock.getQueues()).isEmpty(); } }
/**
 * Resets the cached nonce to the sentinel value -1, forcing the next
 * transaction to fetch a fresh nonce. Synchronized to coordinate with other
 * nonce accessors.
 */
public synchronized void clearNonce() {
    nonce = BigInteger.valueOf(-1);
}
/**
 * clearNonce() must reset the cached nonce to the sentinel value -1.
 */
@Test
void clearNonce() throws IOException {
    fastRawTransactionManager.setNonce(BigInteger.valueOf(42));
    fastRawTransactionManager.clearNonce();
    BigInteger currentNonce = fastRawTransactionManager.getCurrentNonce();
    // Fixed: assertEquals takes (expected, actual); the original passed them
    // reversed, which produces misleading failure messages.
    assertEquals(BigInteger.valueOf(-1), currentNonce);
}
/**
 * Evaluates a routing condition against the request value.
 *
 * <p>Returns {@code false} when the condition is null or has a blank
 * operator, and when the real data is blank — unless the resolved judge is
 * the blank-predicate itself, which legitimately matches blank input.
 *
 * @param conditionData the configured condition (operator + reference data)
 * @param realData      the actual request value being tested
 * @return whether the condition matches
 */
public static Boolean judge(final ConditionData conditionData, final String realData) {
    if (Objects.isNull(conditionData) || StringUtils.isBlank(conditionData.getOperator())) {
        return false;
    }
    final PredicateJudge judge = newInstance(conditionData.getOperator());
    if (StringUtils.isBlank(realData) && !(judge instanceof BlankPredicateJudge)) {
        return false;
    }
    return judge.judge(conditionData, realData);
}
// Verifies the IS_BLANK operator: matches null and empty strings, rejects
// non-blank input — exercising the blank-predicate exception to the usual
// blank-data short-circuit.
@Test public void testIsBlankJudge() { conditionData.setOperator(OperatorEnum.IS_BLANK.getAlias()); assertTrue(PredicateJudgeFactory.judge(conditionData, null)); assertTrue(PredicateJudgeFactory.judge(conditionData, "")); assertFalse(PredicateJudgeFactory.judge(conditionData, "test")); }
// Handles a status message from a receiver: delegates to processStatusMessage,
// additionally flagging whether the sender's group tag matches this flow
// control's tag, and returns the updated sender limit.
public long onStatusMessage( final StatusMessageFlyweight flyweight, final InetSocketAddress receiverAddress, final long senderLimit, final int initialTermId, final int positionBitsToShift, final long timeNs) { return processStatusMessage( flyweight, senderLimit, initialTermId, positionBitsToShift, timeNs, matchesTag(flyweight)); }
// With tagged flow control and no minimum group size, untagged receivers drive the
// limit until a tagged (in-group) receiver appears; afterwards the group member's
// position wins even when an untagged receiver reports a higher offset.
@Test
void shouldReturnLastWindowWhenUntilReceiversAreInGroupWithNoMinSize() {
    final UdpChannel channelGroupSizeThree = UdpChannel.parse(
        "aeron:udp?endpoint=224.20.30.39:24326|interface=localhost|fc=tagged,g:123");

    flowControl.initialize(
        newContext(), countersManager, channelGroupSizeThree, 0, 0, 0, 0, 0);

    final long groupTag = 123L;
    final long senderLimit = 5000L;
    final int termOffset0 = 10_000;
    final int termOffset1 = 9_999;

    assertEquals(termOffset0 + WINDOW_LENGTH,
        onStatusMessage(flowControl, 0, termOffset0, senderLimit, null));

    assertEquals(
        termOffset1 + WINDOW_LENGTH,
        onStatusMessage(flowControl, 1, termOffset1, senderLimit, groupTag));

    assertEquals(termOffset1 + WINDOW_LENGTH,
        onStatusMessage(flowControl, 0, termOffset0, senderLimit, null));
}
/**
 * Loads the workflow definition for the given workflow id and version.
 * Inline workflow ids are materialized directly; all other ids are resolved from
 * the stored Maestro workflow for the requested version and converted.
 *
 * @param workflowId the workflow id (possibly an inline id)
 * @param version    the requested version (e.g. "latest", "active", or a number
 *                   — exact accepted tokens determined by the internal lookup)
 * @return the resolved workflow definition
 */
public WorkflowDefinition getWorkflowDefinition(String workflowId, String version) {
    if (IdHelper.isInlineWorkflowId(workflowId)) {
        return getInlineWorkflowDefinitionInternal(workflowId);
    } else {
        return getMaestroWorkflowInternal(workflowId, version).toDefinition();
    }
}
// Inserting one workflow definition and resolving it via the version aliases
// "latest", "default", "active" and the explicit "1" must all return the same
// stored definition with version id 1.
@Test
public void testGetWorkflowDefinition() throws Exception {
    WorkflowDefinition wfd = loadWorkflow(TEST_WORKFLOW_ID1);
    assertEquals(TEST_WORKFLOW_ID1, wfd.getWorkflow().getId());
    workflowDao.addWorkflowDefinition(wfd, wfd.getPropertiesSnapshot().extractProperties());
    assertNotNull(wfd.getInternalId());

    WorkflowDefinition definition = workflowDao.getWorkflowDefinition(wfd.getWorkflow().getId(), "latest");
    assertNotNull(definition.getInternalId());
    assertEquals(wfd, definition);
    assertEquals(TEST_WORKFLOW_ID1, definition.getMetadata().getWorkflowId());
    assertEquals(1L, definition.getMetadata().getWorkflowVersionId().longValue());

    definition = workflowDao.getWorkflowDefinition(wfd.getWorkflow().getId(), "default");
    assertNotNull(definition.getInternalId());
    assertEquals(wfd, definition);
    assertEquals(TEST_WORKFLOW_ID1, definition.getMetadata().getWorkflowId());
    assertEquals(1L, definition.getMetadata().getWorkflowVersionId().longValue());

    definition = workflowDao.getWorkflowDefinition(wfd.getWorkflow().getId(), "active");
    assertNotNull(definition.getInternalId());
    assertEquals(wfd, definition);
    assertEquals(TEST_WORKFLOW_ID1, definition.getMetadata().getWorkflowId());
    assertEquals(1L, definition.getMetadata().getWorkflowVersionId().longValue());

    definition = workflowDao.getWorkflowDefinition(wfd.getWorkflow().getId(), "1");
    assertNotNull(definition.getInternalId());
    assertEquals(wfd, definition);
    assertEquals(TEST_WORKFLOW_ID1, definition.getMetadata().getWorkflowId());
    assertEquals(1L, definition.getMetadata().getWorkflowVersionId().longValue());
}
/**
 * Validates that the given int is strictly positive.
 *
 * @param i    the value to validate
 * @param name the parameter name used in the failure message
 * @return the validated value
 * @throws IllegalArgumentException if {@code i} is zero or negative
 */
public static int checkPositive(int i, String name) {
    // String concatenation happens only on the failure path.
    if (i > INT_ZERO) {
        return i;
    }
    throw new IllegalArgumentException(name + " : " + i + " (expected: > 0)");
}
// Exercises the double overload of checkPositive: positive passes, zero and
// negative must raise IllegalArgumentException.
@Test
public void testCheckPositiveDoubleString() {
    Exception actualEx = null;
    try {
        ObjectUtil.checkPositive(POS_ONE_DOUBLE, NUM_POS_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNull(actualEx, TEST_RESULT_NULLEX_NOK);

    actualEx = null;
    try {
        ObjectUtil.checkPositive(ZERO_DOUBLE, NUM_ZERO_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
    assertTrue(actualEx instanceof IllegalArgumentException, TEST_RESULT_EXTYPE_NOK);

    actualEx = null;
    try {
        ObjectUtil.checkPositive(NEG_ONE_DOUBLE, NUM_NEG_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
    assertTrue(actualEx instanceof IllegalArgumentException, TEST_RESULT_EXTYPE_NOK);
}
@Override public RestLiResponseData<BatchGetResponseEnvelope> buildRestLiResponseData(Request request, RoutingResult routingResult, Object result, Map<String, String> headers, List<HttpCookie> cookies) { @SuppressWarnings({ "unchecked" }) /* constrained by signature of {@link com.linkedin.restli.server.resources.CollectionResource#batchGet(java.util.Set)} */ final Map<Object, RecordTemplate> entities = (Map<Object, RecordTemplate>) result; Map<Object, HttpStatus> statuses = Collections.emptyMap(); Map<Object, RestLiServiceException> serviceErrors = Collections.emptyMap(); if (result instanceof BatchResult) { @SuppressWarnings({ "unchecked" }) /* constrained by signature of {@link com.linkedin.restli.server.resources.CollectionResource#batchGet(java.util.Set)} */ final BatchResult<Object, RecordTemplate> batchResult = (BatchResult<Object, RecordTemplate>) result; statuses = batchResult.getStatuses(); serviceErrors = batchResult.getErrors(); } try { if (statuses.containsKey(null)) { throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR, "Unexpected null encountered. Null key inside of a Map returned by the resource method: " + routingResult .getResourceMethod()); } } catch (NullPointerException e) { // Some map implementations will throw an NPE if they do not support null keys. // In this case it is OK to swallow this exception and proceed. } TimingContextUtil.beginTiming(routingResult.getContext().getRawRequestContext(), FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key()); Map<Object, BatchResponseEntry> batchResult = new HashMap<>(entities.size() + serviceErrors.size()); for (Map.Entry<Object, RecordTemplate> entity : entities.entrySet()) { if (entity.getKey() == null) { throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR, "Unexpected null encountered. 
Null key inside of a Map returned by the resource method: " + routingResult .getResourceMethod()); } Object finalKey = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(entity.getKey(), routingResult); DataMap rawData = entity.getValue().data(); if (routingResult.getContext().isFillInDefaultsRequested()) { rawData = (DataMap) ResponseUtils.fillInDataDefault(entity.getValue().schema(), rawData); } final DataMap projectedData = RestUtils.projectFields(rawData, routingResult.getContext()); AnyRecord anyRecord = new AnyRecord(projectedData); batchResult.put(finalKey, new BatchResponseEntry(statuses.get(entity.getKey()), anyRecord)); } TimingContextUtil.endTiming(routingResult.getContext().getRawRequestContext(), FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key()); for (Map.Entry<Object, RestLiServiceException> entity : serviceErrors.entrySet()) { if (entity.getKey() == null || entity.getValue() == null) { throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR, "Unexpected null encountered. Null key inside of a Map returned by the resource method: " + routingResult .getResourceMethod()); } Object finalKey = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(entity.getKey(), routingResult); batchResult.put(finalKey, new BatchResponseEntry(statuses.get(entity.getKey()), entity.getValue())); } final Map<Object, RestLiServiceException> contextErrors = routingResult.getContext().getBatchKeyErrors(); for (Map.Entry<Object, RestLiServiceException> entry : contextErrors.entrySet()) { Object finalKey = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(entry.getKey(), routingResult); batchResult.put(finalKey, new BatchResponseEntry(statuses.get(entry.getKey()), entry.getValue())); } return new RestLiResponseDataImpl<>(new BatchGetResponseEnvelope(HttpStatus.S_200_OK, batchResult), headers, cookies); }
// With a positive mask on "fruitsField", the batch-get response must contain only
// that field for the returned entity.
@Test
public void testProjectionInBuildRestliResponseData() {
    MaskTree maskTree = new MaskTree();
    maskTree.addOperation(new PathSpec("fruitsField"), MaskOperation.POSITIVE_MASK_OP);

    ServerResourceContext mockContext = EasyMock.createMock(ServerResourceContext.class);
    EasyMock.expect(mockContext.hasParameter(RestConstants.ALT_KEY_PARAM)).andReturn(false);
    EasyMock.expect(mockContext.getProjectionMode()).andReturn(ProjectionMode.AUTOMATIC);
    EasyMock.expect(mockContext.getProjectionMask()).andReturn(maskTree);
    EasyMock.expect(mockContext.getBatchKeyErrors()).andReturn(Collections.emptyMap()).once();
    EasyMock.expect(mockContext.getRawRequestContext()).andReturn(new RequestContext()).anyTimes();
    EasyMock.expect(mockContext.getAlwaysProjectedFields()).andReturn(Collections.emptySet()).anyTimes();
    EasyMock.expect(mockContext.isFillInDefaultsRequested()).andReturn(false).anyTimes();
    EasyMock.replay(mockContext);
    ResourceMethodDescriptor mockDescriptor = getMockResourceMethodDescriptor(null);
    RoutingResult routingResult = new RoutingResult(mockContext, mockDescriptor);

    Map<Integer, Foo> results = new HashMap<>();
    Foo value = new Foo().setStringField("value").setFruitsField(Fruits.APPLE);
    results.put(1, value);

    BatchGetResponseBuilder responseBuilder = new BatchGetResponseBuilder(new ErrorResponseBuilder());
    RestLiResponseData<BatchGetResponseEnvelope> responseData =
        responseBuilder.buildRestLiResponseData(null, routingResult, results,
            Collections.emptyMap(), Collections.emptyList());
    RecordTemplate record = responseData.getResponseEnvelope().getBatchResponseMap().get(1).getRecord();
    // Only the masked field must survive projection.
    Assert.assertEquals(record.data().size(), 1);
    Assert.assertEquals(record.data().get("fruitsField"), Fruits.APPLE.toString());

    EasyMock.verify(mockContext);
}
/**
 * Determines whether this version matches the given version on the first three
 * version parts, with every fourth-and-later shared part of this version sorting
 * strictly below (case-insensitive) the other's, and with part counts differing
 * by fewer than three.
 *
 * @param version the version to compare against; {@code null} never matches
 * @return {@code true} when the versions match under the rule above
 */
public boolean matchesAtLeastThreeLevels(DependencyVersion version) {
    if (version == null) {
        return false;
    }
    // Lengths differing by three or more parts can never match.
    if (Math.abs(this.versionParts.size() - version.versionParts.size()) >= 3) {
        return false;
    }
    final int common = Math.min(this.versionParts.size(), version.versionParts.size());
    for (int i = 0; i < common; i++) {
        final String mine = this.versionParts.get(i);
        final String theirs = version.getVersionParts().get(i);
        if (i >= 3) {
            // Beyond the third level this version must sort strictly lower.
            if (mine.compareToIgnoreCase(theirs) >= 0) {
                return false;
            }
        } else if (!mine.equals(theirs)) {
            return false;
        }
    }
    return true;
}
// Matching requires equality on the first three parts and, where present, a
// strictly larger fourth part on the argument; part counts differing by 3+ fail.
@Test
public void testMatchesAtLeastThreeLevels() {
    DependencyVersion instance = new DependencyVersion("2.3.16.3");
    DependencyVersion version = new DependencyVersion("2.3.16.4");
    // true tests
    assertEquals(true, instance.matchesAtLeastThreeLevels(version));
    version = new DependencyVersion("2.3");
    assertEquals(true, instance.matchesAtLeastThreeLevels(version));

    // false tests
    version = new DependencyVersion("2.3.16.1");
    assertEquals(false, instance.matchesAtLeastThreeLevels(version));
    version = new DependencyVersion("2");
    assertEquals(false, instance.matchesAtLeastThreeLevels(version));
}
// Registers this collector with the default Prometheus registry and returns it
// for chaining.
public JmxCollector register() {
    return register(PrometheusRegistry.defaultRegistry);
}
// Only beans matching the whitelisted ObjectName patterns may be exported; other
// domains must be absent.
@Test
public void testWhitelist() throws Exception {
    JmxCollector jc = new JmxCollector(
        "\n---\nwhitelistObjectNames:\n- java.lang:*\n- java.lang:*\n- org.apache.cassandra.concurrent:*"
            .replace('`', '"'))
        .register(prometheusRegistry);

    // Test what should and shouldn't be present.
    assertNotNull(
        getSampleValue(
            "java_lang_OperatingSystem_ProcessCpuTime", new String[] {}, new String[] {}));
    assertNotNull(
        getSampleValue(
            "org_apache_cassandra_concurrent_CONSISTENCY_MANAGER_ActiveCount",
            new String[] {},
            new String[] {}));

    assertNull(
        getSampleValue(
            "org_apache_cassandra_metrics_Compaction_Value",
            new String[] {"name"},
            new String[] {"CompletedTasks"}));
    assertNull(
        getSampleValue(
            "hadoop_DataNode_replaceBlockOpMinTime",
            new String[] {"name"},
            new String[] {"DataNodeActivity-ams-hdd001-50010"}));
}
/**
 * Decides whether the given HTTP status should be reported as a failure.
 *
 * <p>Resolution order: an explicitly configured status list wins; otherwise the
 * configured series list is consulted (with an optional carve-out for 500);
 * if neither is configured, a built-in default status set is used.
 *
 * @param httpStatus the response status; {@code null} is never reported
 * @return {@code true} when the status counts as a reportable failure
 */
static boolean apply(@Nullable HttpStatus httpStatus) {
    if (Objects.isNull(httpStatus)) {
        return false;
    }
    RpcEnhancementReporterProperties reportProperties;
    try {
        reportProperties = ApplicationContextAwareUtils.getApplicationContext()
                .getBean(RpcEnhancementReporterProperties.class);
    } catch (BeansException e) {
        // Fall back to defaults when the properties bean cannot be resolved.
        LOG.error("get RpcEnhancementReporterProperties bean err", e);
        reportProperties = new RpcEnhancementReporterProperties();
    }
    // statuses > series
    List<HttpStatus> status = reportProperties.getStatuses();

    if (status.isEmpty()) {
        List<HttpStatus.Series> series = reportProperties.getSeries();

        // Check INTERNAL_SERVER_ERROR (500) status.
        if (reportProperties.isIgnoreInternalServerError() && Objects.equals(httpStatus, INTERNAL_SERVER_ERROR)) {
            return false;
        }

        if (series.isEmpty()) {
            return HTTP_STATUSES.contains(httpStatus);
        }
        return series.contains(httpStatus.series());
    }

    // Use the user-specified fuse status code.
    return status.contains(httpStatus);
}
// With ignoreInternalServerError disabled and no explicit statuses, 5xx statuses
// (including 500) must be reported while 200 is not.
@Test
public void testApplyWithoutIgnoreInternalServerError() {
    RpcEnhancementReporterProperties properties = new RpcEnhancementReporterProperties();

    // Mock Condition
    properties.getStatuses().clear();
    properties.setIgnoreInternalServerError(false);

    ApplicationContext applicationContext = mock(ApplicationContext.class);
    doReturn(properties)
            .when(applicationContext).getBean(RpcEnhancementReporterProperties.class);
    mockedApplicationContextAwareUtils.when(ApplicationContextAwareUtils::getApplicationContext)
            .thenReturn(applicationContext);

    // Assert
    assertThat(PolarisEnhancedPluginUtils.apply(HttpStatus.OK)).isEqualTo(false);
    assertThat(PolarisEnhancedPluginUtils.apply(HttpStatus.INTERNAL_SERVER_ERROR)).isEqualTo(true);
    assertThat(PolarisEnhancedPluginUtils.apply(HttpStatus.BAD_GATEWAY)).isEqualTo(true);
}
/**
 * Renders the documentation for a plugin class using the "task" template, with
 * the documentation object converted to a template model map.
 */
public static <T> String render(ClassPluginDocumentation<T> classPluginDocumentation) throws IOException {
    return render("task", JacksonMapper.toMap(classPluginDocumentation));
}
// Rendering the docs for the (deprecated) Echo task must include its name and
// the deprecation notice.
@SuppressWarnings({"unchecked", "deprecation"})
@Test
void echo() throws IOException {
    PluginScanner pluginScanner = new PluginScanner(ClassPluginDocumentationTest.class.getClassLoader());
    RegisteredPlugin scan = pluginScanner.scan();
    Class<Echo> bash = scan.findClass(Echo.class.getName()).orElseThrow();

    ClassPluginDocumentation<? extends Task> doc =
        ClassPluginDocumentation.of(jsonSchemaGenerator, scan, bash, Task.class);

    String render = DocumentationGenerator.render(doc);

    assertThat(render, containsString("Echo"));
    assertThat(render, containsString("Deprecated"));
}
// Runs the wrapped health-check task through the interceptor chain. Failures are
// logged (info level, with stack trace) rather than propagated so the scheduling
// thread is never killed by a single bad task.
@Override
public void run() {
    try {
        interceptorChain.doInterceptor(task);
    } catch (Exception e) {
        Loggers.SRV_LOG.info("Interceptor health check task {} failed", task.getTaskId(), e);
    }
}
// An unhealthy-but-not-expired instance must survive the health-check task run.
@Test
void testRunUnhealthyInstanceWithoutExpire() {
    injectInstance(false, 0);
    taskWrapper.run();
    assertFalse(client.getAllInstancePublishInfo().isEmpty());
}
public static <T> T[] checkNonEmpty(T[] array, String name) { //No String concatenation for check if (checkNotNull(array, name).length == 0) { throw new IllegalArgumentException("Param '" + name + "' must not be empty"); } return array; }
// String overload of checkNonEmpty: null -> NPE, empty -> IAE, non-empty and
// whitespace-only strings pass.
@Test
public void testCheckNonEmptyStringString() {
    Exception actualEx = null;
    try {
        ObjectUtil.checkNonEmpty((String) NULL_OBJECT, NULL_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
    assertTrue(actualEx instanceof NullPointerException, TEST_RESULT_EXTYPE_NOK);

    actualEx = null;
    try {
        ObjectUtil.checkNonEmpty((String) NON_NULL_OBJECT, NON_NULL_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNull(actualEx, TEST_RESULT_NULLEX_NOK);

    actualEx = null;
    try {
        ObjectUtil.checkNonEmpty((String) NON_NULL_EMPTY_STRING, NON_NULL_EMPTY_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
    assertTrue(actualEx instanceof IllegalArgumentException, TEST_RESULT_EXTYPE_NOK);

    actualEx = null;
    try {
        ObjectUtil.checkNonEmpty((String) NON_NULL_WHITESPACE_STRING, NON_NULL_EMPTY_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNull(actualEx, TEST_RESULT_NULLEX_NOK);
}
/**
 * Verifies that the given personal access token can list repositories on the
 * Bitbucket Server instance; throws (via doGet's response checking) when the
 * call fails or the payload cannot be parsed.
 */
public void validateReadPermission(String serverUrl, String personalAccessToken) {
    HttpUrl url = buildUrl(serverUrl, "/rest/api/1.0/repos");
    doGet(personalAccessToken, url, body -> buildGson().fromJson(body, RepositoryList.class));
}
// A 200 response with a valid repository payload must validate without throwing.
@Test
public void validate_read_permission_success() {
    server.enqueue(new MockResponse().setResponseCode(200)
        .setBody(REPOS_BODY));

    underTest.validateReadPermission(server.url("/").toString(), "token");
}
// Best-effort existence check for a ZooKeeper path. Client exceptions are
// deliberately swallowed and reported as "does not exist" — callers treat
// connectivity problems the same as an absent node.
@Override
public boolean checkExists(String path) {
    try {
        if (client.checkExists().forPath(path) != null) {
            return true;
        }
    } catch (Exception ignored) {
        // intentionally ignored: any failure is treated as absence
    }
    return false;
}
// An ephemeral node created with content must be visible and readable afterwards.
@Test
void testCreateContent4Temp() {
    String path = "/curatorTest4CrContent/content.data";
    String content = "createContentTest";
    curatorClient.delete(path);
    assertThat(curatorClient.checkExists(path), is(false));
    assertNull(curatorClient.getContent(path));

    curatorClient.createOrUpdate(path, content, true);
    assertThat(curatorClient.checkExists(path), is(true));
    assertEquals(curatorClient.getContent(path), content);
}
// Exposes the static configuration definition for this predicate/transform.
@Override
public ConfigDef config() {
    return CONFIG_DEF;
}
// The "name" config must accept a non-empty value and reject an empty string
// with a validation error.
@Test
public void testConfig() {
    HasHeaderKey<SourceRecord> predicate = new HasHeaderKey<>();
    predicate.config().validate(Collections.singletonMap("name", "foo"));

    List<ConfigValue> configs = predicate.config().validate(Collections.singletonMap("name", ""));
    assertEquals(singletonList("Invalid value  for configuration name: String must be non-empty"),
        configs.get(0).errorMessages());
}
/**
 * Applies the given stock deltas (spu id -> increment, may be negative),
 * updating each SPU row in turn inside a single transaction.
 *
 * @param stockIncrCounts map of SPU id to stock increment
 */
@Override
@Transactional(rollbackFor = Exception.class)
public void updateSpuStock(Map<Long, Integer> stockIncrCounts) {
    for (Map.Entry<Long, Integer> delta : stockIncrCounts.entrySet()) {
        productSpuMapper.updateStock(delta.getKey(), delta.getValue());
    }
}
// Two SPUs with stock 20 and 30 receive deltas +10 and -20; the resulting stocks
// must be 30 and 10.
@Test
public void testUpdateSpuStock() {
    // Prepare parameters
    Map<Long, Integer> stockIncrCounts = MapUtil.builder(1L, 10).put(2L, -20).build();
    // Mock data
    productSpuMapper.insert(randomPojo(ProductSpuDO.class, o -> {
        o.setCategoryId(generateId());
        o.setBrandId(generateId());
        o.setDeliveryTemplateId(generateId());
        o.setSort(RandomUtil.randomInt(1, 100)); // constrain the sort range
        o.setGiveIntegral(generaInt()); // constrain to positive integers
        o.setVirtualSalesCount(generaInt()); // constrain to positive integers
        o.setPrice(generaInt()); // constrain to positive integers
        o.setMarketPrice(generaInt()); // constrain to positive integers
        o.setCostPrice(generaInt()); // constrain to positive integers
        o.setStock(generaInt()); // constrain to positive integers
        o.setGiveIntegral(generaInt()); // constrain to positive integers
        o.setSalesCount(generaInt()); // constrain to positive integers
        o.setBrowseCount(generaInt()); // constrain to positive integers
        o.setId(1L).setStock(20);
    }));
    productSpuMapper.insert(randomPojo(ProductSpuDO.class, o -> {
        o.setCategoryId(generateId());
        o.setBrandId(generateId());
        o.setDeliveryTemplateId(generateId());
        o.setSort(RandomUtil.randomInt(1, 100)); // constrain the sort range
        o.setGiveIntegral(generaInt()); // constrain to positive integers
        o.setVirtualSalesCount(generaInt()); // constrain to positive integers
        o.setPrice(generaInt()); // constrain to positive integers
        o.setMarketPrice(generaInt()); // constrain to positive integers
        o.setCostPrice(generaInt()); // constrain to positive integers
        o.setStock(generaInt()); // constrain to positive integers
        o.setGiveIntegral(generaInt()); // constrain to positive integers
        o.setSalesCount(generaInt()); // constrain to positive integers
        o.setBrowseCount(generaInt()); // constrain to positive integers
        o.setId(2L).setStock(30);
    }));
    // Invoke
    productSpuService.updateSpuStock(stockIncrCounts);
    // Assert
    assertEquals(productSpuService.getSpu(1L).getStock(), 30);
    assertEquals(productSpuService.getSpu(2L).getStock(), 10);
}
// RangeSet is immutable: all mutating Collection operations are rejected.
@Override
public boolean retainAll(Collection<?> c) {
    throw new UnsupportedOperationException("RangeSet is immutable");
}
// retainAll on the immutable RangeSet must throw UnsupportedOperationException.
@Test(expected = UnsupportedOperationException.class)
public void retainAll() throws Exception {
    RangeSet rs = new RangeSet(4);
    RangeSet rs2 = new RangeSet(5);
    rs.retainAll(rs2);
}
// Static factory wrapping the given Retry in a reactive transformer.
public static <T> RetryTransformer<T> of(Retry retry) {
    return new RetryTransformer<>(retry);
}
// A Single that always fails must be retried to exhaustion (3 attempts per
// subscription, two subscriptions = 6 calls) and surface the original error.
@Test
public void returnOnErrorUsingSingle() throws InterruptedException {
    RetryConfig config = retryConfig();
    Retry retry = Retry.of("testName", config);
    given(helloWorldService.returnHelloWorld())
        .willThrow(new HelloWorldException());

    Single.fromCallable(helloWorldService::returnHelloWorld)
        .compose(RetryTransformer.of(retry))
        .test()
        .await()
        .assertError(HelloWorldException.class)
        .assertNotComplete();
    Single.fromCallable(helloWorldService::returnHelloWorld)
        .compose(RetryTransformer.of(retry))
        .test()
        .await()
        .assertError(HelloWorldException.class)
        .assertNotComplete();

    then(helloWorldService).should(times(6)).returnHelloWorld();
    Retry.Metrics metrics = retry.getMetrics();
    assertThat(metrics.getNumberOfFailedCallsWithRetryAttempt()).isEqualTo(2);
    assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isZero();
}
/**
 * Returns the absolute value of {@code n}, except that
 * {@link Integer#MIN_VALUE} — which has no positive counterpart in two's
 * complement — maps to 0 instead of overflowing.
 */
public static int abs(int n) {
    if (n == Integer.MIN_VALUE) {
        return 0;
    }
    return Math.abs(n);
}
// Utils.abs maps Integer.MIN_VALUE to 0 and otherwise behaves like Math.abs.
@Test
public void testAbs() {
    assertEquals(0, Utils.abs(Integer.MIN_VALUE));
    assertEquals(10, Utils.abs(-10));
    assertEquals(10, Utils.abs(10));
    assertEquals(0, Utils.abs(0));
    assertEquals(1, Utils.abs(-1));
}
/**
 * Relaxed (best-effort) poll from the growable MPSC queue: returns the next
 * element or {@code null} without the stronger progress guarantees of poll().
 *
 * <p>NOTE(review): this is lock-free producer/consumer code — the exact
 * load/store ordering (lv*/so* accessors) and the JUMP sentinel protocol for
 * hopping to the next backing array are semantically load-bearing; do not
 * reorder statements.
 */
@SuppressWarnings({"CastCanBeRemovedNarrowingVariableType", "unchecked"})
public E relaxedPoll() {
    final E[] buffer = consumerBuffer;
    final long index = consumerIndex;
    final long mask = consumerMask;

    final long offset = modifiedCalcElementOffset(index, mask);
    Object e = lvElement(buffer, offset); // LoadLoad
    if (e == null) {
        // Nothing visible at the consumer position; relaxed poll gives up here.
        return null;
    }
    if (e == JUMP) {
        // Sentinel: the element lives in the next (resized) buffer.
        final E[] nextBuffer = getNextBuffer(buffer, mask);
        return newBufferPoll(nextBuffer, index);
    }
    // Clear the slot and publish the advanced consumer index (+2 per slot —
    // presumably the low bit is reserved by the encoding; confirm in the class docs).
    soElement(buffer, offset, null);
    soConsumerIndex(this, index + 2);
    return (E) e;
}
// Polling a populated queue must return an element and shrink the size by one.
@Test(dataProvider = "populated")
public void relaxedPoll_whenPopulated(MpscGrowableArrayQueue<Integer> queue) {
    assertThat(queue.relaxedPoll()).isNotNull();
    assertThat(queue).hasSize(POPULATED_SIZE - 1);
}
/**
 * Removes expired timeline log data under {@code dirpath}.
 *
 * <p>For every valid cluster-timestamp directory: first clean expired app log
 * dirs inside it, then delete the timestamp directory itself only when it holds
 * no remaining app log dirs AND it is older than the retention window.
 */
@InterfaceAudience.Private
@VisibleForTesting
void cleanLogs(Path dirpath, long retainMillis) throws IOException {
    long now = Time.now();
    RemoteIterator<FileStatus> iter = list(dirpath);
    while (iter.hasNext()) {
        FileStatus stat = iter.next();
        if (isValidClusterTimeStampDir(stat)) {
            Path clusterTimeStampPath = stat.getPath();
            MutableBoolean appLogDirPresent = new MutableBoolean(false);
            // Sets the flag when any app log dir survives the cleanup pass.
            cleanAppLogDir(clusterTimeStampPath, retainMillis, appLogDirPresent);
            if (appLogDirPresent.isFalse()
                    && (now - stat.getModificationTime() > retainMillis)) {
                deleteDir(clusterTimeStampPath);
            }
        }
    }
}
// cleanLogs removes app log dirs past retention, empty timestamp dirs, and — on
// a second pass — timestamp dirs that became empty; unrelated dirs are untouched.
@Test
void testCleanBuckets() throws Exception {
    // Cluster-timestamp dir with app log dirs
    Path clusterTimeStampDir1 = new Path(testDoneDirPath,
        Long.toString(sampleAppIds.get(0).getClusterTimestamp()));
    Path appDir1 = new Path(new Path(new Path(
        clusterTimeStampDir1, "0000"), "000"), sampleAppIds.get(0).toString());
    Path appDir2 = new Path(new Path(new Path(
        clusterTimeStampDir1, "0000"), "001"), sampleAppIds.get(1).toString());
    Path appDir3 = new Path(new Path(new Path(
        clusterTimeStampDir1, "0000"), "002"), sampleAppIds.get(2).toString());
    Path appDir4 = new Path(new Path(new Path(
        clusterTimeStampDir1, "0001"), "000"), sampleAppIds.get(3).toString());
    // Cluster-timestamp dir with no app log dirs
    Path clusterTimeStampDir2 = new Path(testDoneDirPath, "1235");
    // Irrelevant (non-timestamp) dir — must never be cleaned
    Path clusterTimeStampDir3 = new Path(testDoneDirPath, "irrevelant");
    Path appDir5 = new Path(new Path(new Path(
        clusterTimeStampDir3, "0000"), "000"), sampleAppIds.get(4).toString());
    fs.mkdirs(appDir1);
    fs.mkdirs(appDir2);
    fs.mkdirs(appDir3);
    fs.mkdirs(appDir4);
    fs.mkdirs(clusterTimeStampDir2);
    fs.mkdirs(appDir5);
    // Ensure everything ages past the 1-second retention used below.
    Thread.sleep(2000);
    store.cleanLogs(testDoneDirPath, 1000);
    // The timestamp dir is removed only once no app log dir remains inside it.
    assertTrue(fs.exists(clusterTimeStampDir1));
    assertFalse(fs.exists(appDir1));
    assertFalse(fs.exists(appDir2));
    assertFalse(fs.exists(appDir3));
    assertFalse(fs.exists(appDir4));
    assertFalse(fs.exists(clusterTimeStampDir2));
    assertTrue(fs.exists(appDir5));
    store.cleanLogs(testDoneDirPath, 1000);
    assertFalse(fs.exists(clusterTimeStampDir1));
}
// Validates both configured default serde formats (key and value) up front so a
// bad format fails server startup instead of the first query.
@VisibleForTesting
static void validateDefaultTopicFormats(final KsqlConfig config) {
    validateTopicFormat(config, KsqlConfig.KSQL_DEFAULT_KEY_FORMAT_CONFIG, "key");
    validateTopicFormat(config, KsqlConfig.KSQL_DEFAULT_VALUE_FORMAT_CONFIG, "value");
}
// An unknown default key format must be rejected with a KsqlException naming the
// offending config and value.
@Test
public void shouldFailOnInvalidDefaultKeyFormat() {
    // Given:
    final KsqlConfig config = configWith(ImmutableMap.of(
        KsqlConfig.KSQL_DEFAULT_KEY_FORMAT_CONFIG, "bad"
    ));

    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> KsqlServerMain.validateDefaultTopicFormats(config)
    );

    // Then:
    assertThat(e.getMessage(), containsString(
        "Invalid value for config '" + KsqlConfig.KSQL_DEFAULT_KEY_FORMAT_CONFIG + "': bad"));
}
@Override @CacheEvict(cacheNames = RedisKeyConstants.NOTIFY_TEMPLATE, allEntries = true) // allEntries 清空所有缓存,因为 id 不是直接的缓存 code,不好清理 public void deleteNotifyTemplate(Long id) { // 校验存在 validateNotifyTemplateExists(id); // 删除 notifyTemplateMapper.deleteById(id); }
// Deleting a non-existent template id must raise NOTIFY_TEMPLATE_NOT_EXISTS.
@Test
public void testDeleteNotifyTemplate_notExists() {
    // Prepare parameters
    Long id = randomLongId();

    // Invoke, and assert the expected exception
    assertServiceException(() -> notifyTemplateService.deleteNotifyTemplate(id),
        NOTIFY_TEMPLATE_NOT_EXISTS);
}
/**
 * Validates the configured artifacts directory: it must be non-empty and must
 * not resolve to the server's working directory (sandbox root), since artifacts
 * written there would collide with the server's own files.
 *
 * @throws RuntimeException when the directory is empty or points at the sandbox root
 */
@Override
public void validate(CruiseConfig cruiseConfig) {
    ServerConfig serverConfig = cruiseConfig.server();
    String artifactDir = serverConfig.artifactsDir();

    if (isEmpty(artifactDir)) {
        throw new RuntimeException("Please provide a not empty value for artifactsdir");
    }

    // "." and any path that resolves to the current working directory are equivalent
    // to the sandbox root.
    if (StringUtils.equals(".", artifactDir)
            || new File("").getAbsolutePath().equals(new File(artifactDir).getAbsolutePath())) {
        throw new RuntimeException("artifactsdir should not point to the root of sand box ["
            + new File(artifactDir).getAbsolutePath() + "]");
    }
}
// An artifacts dir of "." must be rejected — it would make the server check out
// into its own sandbox root.
@Test
public void shouldThrowExceptionWhenUserProvidesDot() throws Exception {
    CruiseConfig cruiseConfig = new BasicCruiseConfig();
    cruiseConfig.setServerConfig(new ServerConfig(".", null));

    ArtifactDirValidator dirValidator = new ArtifactDirValidator();
    try {
        dirValidator.validate(cruiseConfig);
        fail("should throw exception, see dot will make server check out the repository in the wrong place.");
    } catch (Exception e) {
        // expected: validation rejects "."
    }
}
/**
 * Activates the first active http and https proxy from Maven settings by setting
 * the corresponding JVM proxy system properties — but only for protocols whose
 * proxy properties are not already set on the JVM. Encrypted proxy credentials
 * are decrypted first; decryption errors abort the build.
 *
 * @throws MojoExecutionException when proxy credentials cannot be decrypted
 */
static void activateHttpAndHttpsProxies(Settings settings, SettingsDecrypter decrypter)
    throws MojoExecutionException {
    List<Proxy> proxies = new ArrayList<>(2);
    for (String protocol : ImmutableList.of("http", "https")) {
        // Respect proxy settings already supplied on the command line / JVM.
        if (areProxyPropertiesSet(protocol)) {
            continue;
        }
        settings.getProxies().stream()
            .filter(Proxy::isActive)
            .filter(proxy -> protocol.equals(proxy.getProtocol()))
            .findFirst()
            .ifPresent(proxies::add);
    }

    if (proxies.isEmpty()) {
        return;
    }

    SettingsDecryptionRequest request = new DefaultSettingsDecryptionRequest().setProxies(proxies);
    SettingsDecryptionResult result = decrypter.decrypt(request);

    for (SettingsProblem problem : result.getProblems()) {
        if (problem.getSeverity() == SettingsProblem.Severity.ERROR
                || problem.getSeverity() == SettingsProblem.Severity.FATAL) {
            throw new MojoExecutionException(
                "Unable to decrypt proxy info from settings.xml: " + problem);
        }
    }

    result.getProxies().forEach(MavenSettingsProxyProvider::setProxyProperties);
}
// Encrypted proxy passwords in settings.xml must be decrypted into the JVM proxy
// system properties.
@Test
public void testActivateHttpAndHttpsProxies_encryptedProxy() throws MojoExecutionException {
    MavenSettingsProxyProvider.activateHttpAndHttpsProxies(
        mixedProxyEncryptedSettings, settingsDecrypter);

    Assert.assertEquals("password1", System.getProperty("http.proxyPassword"));
    Assert.assertEquals("password2", System.getProperty("https.proxyPassword"));
}
// Convenience overload: wraps the raw reference string and delegates to the
// FileReference-based check.
boolean hasFile(String fileReference) {
    return hasFile(new FileReference(fileReference));
}
// An unknown file reference must report as absent.
@Test
public void requireThatNonExistingFileIsNotFound() {
    assertFalse(fileServer.hasFile("12x"));
}
// Sends PING to the given cluster node and blocks for its reply
// (expected "PONG" on a healthy node).
@Override
public String ping(RedisClusterNode node) {
    RedisClient entry = getEntry(node);
    RFuture<String> f = executorService.readAsync(entry, LongCodec.INSTANCE, RedisCommands.PING);
    return syncFuture(f);
}
// PING against a cluster master must answer "PONG".
@Test
public void testClusterPing() {
    testInCluster(connection -> {
        RedisClusterNode master = getFirstMaster(connection);
        String res = connection.ping(master);
        assertThat(res).isEqualTo("PONG");
    });
}
/**
 * Builds the template model for a custom notification message: the event data
 * plus the notification type and external URI, with double quotes in string
 * values backslash-escaped (presumably so values can be embedded safely in the
 * JSON message template — confirm against the template renderer).
 */
@VisibleForTesting
Map<String, Object> getCustomMessageModel(EventNotificationContext ctx, String type,
                                          List<MessageSummary> backlog, DateTimeZone timeZone) {
    final EventNotificationModelData modelData = EventNotificationModelData.of(ctx, backlog);
    LOG.debug("Custom message model: {}", modelData);

    final Map<String, Object> objectMap =
        objectMapperProvider.getForTimeZone(timeZone).convertValue(modelData, TypeReferences.MAP_STRING_OBJECT);
    objectMap.put("type", type);
    objectMap.put("http_external_uri", this.httpExternalUri);
    final Map<String, Object> escapedModelMap = new HashMap<>();
    objectMap.forEach((k, v) -> {
        if (v instanceof String str) {
            // Escape embedded double quotes; non-string values pass through untouched.
            escapedModelMap.put(k, str.replace("\"", "\\\""));
        } else {
            escapedModelMap.put(k, v);
        }
    });
    LOG.debug("Finalized model map: {}", escapedModelMap);

    return escapedModelMap;
}
// Double quotes in the event definition description must come out of the model
// backslash-escaped.
@Test
public void testEscapedQuotes() {
    if (eventNotificationContext.eventDefinition().isPresent()) {
        EventDefinitionDto definition = eventNotificationContext.eventDefinition().get();
        definition = definition.toBuilder().description("A Description with \"Double Quotes\"").build();
        eventNotificationContext = eventNotificationContext.toBuilder().eventDefinition(definition).build();
    }
    List<MessageSummary> messageSummaries = generateMessageSummaries(50);
    Map<String, Object> customMessageModel =
        teamsEventNotification.getCustomMessageModel(eventNotificationContext,
            notificationConfig.type(), messageSummaries, DateTimeZone.UTC);
    assertThat(customMessageModel.get("event_definition_description"))
        .isEqualTo("A Description with \\\"Double Quotes\\\"");
}
/**
 * Splits a group key of the form {@code dataId+group[+tenant]} into its parts.
 * '+' separates fields; the escapes {@code %2B} and {@code %25} decode to '+'
 * and '%' respectively.
 *
 * @return {@code [dataId, group, tenant]} — tenant is {@code null} when absent
 * @throws IllegalArgumentException on a malformed key (extra '+' separators,
 *         unknown escape, or empty group)
 * @throws StringIndexOutOfBoundsException when a '%' appears without two
 *         following characters (charAt(++i) runs past the end)
 */
public static String[] parseKey(String groupKey) {
    StringBuilder sb = new StringBuilder();
    String dataId = null;
    String group = null;
    String tenant = null;

    for (int i = 0; i < groupKey.length(); ++i) {
        char c = groupKey.charAt(i);
        if ('+' == c) {
            // Unescaped '+' terminates the current field: first dataId, then group.
            if (null == dataId) {
                dataId = sb.toString();
                sb.setLength(0);
            } else if (null == group) {
                group = sb.toString();
                sb.setLength(0);
            } else {
                throw new IllegalArgumentException("invalid groupkey:" + groupKey);
            }
        } else if ('%' == c) {
            // Decode the two-character escape sequence.
            char next = groupKey.charAt(++i);
            char nextnext = groupKey.charAt(++i);
            if ('2' == next && 'B' == nextnext) {
                sb.append('+');
            } else if ('2' == next && '5' == nextnext) {
                sb.append('%');
            } else {
                throw new IllegalArgumentException("invalid groupkey:" + groupKey);
            }
        } else {
            sb.append(c);
        }
    }

    // Whatever remains is either the group (two-part key) or the tenant (three-part key).
    if (StringUtils.isBlank(group)) {
        group = sb.toString();
    } else {
        tenant = sb.toString();
    }

    if (group.length() == 0) {
        throw new IllegalArgumentException("invalid groupkey:" + groupKey);
    }

    return new String[] {dataId, group, tenant};
}
// A trailing '%' with no escape characters after it must overrun the string
// (charAt past the end) and throw StringIndexOutOfBoundsException.
@Test
void testParseKeyForInvalidStringIndexOutOfBoundsException() {
    assertThrows(StringIndexOutOfBoundsException.class, () -> {
        GroupKey.parseKey("++%");
        // Method is not expected to return due to exception thrown
    });
    // Method is not expected to return due to exception thrown
}
@Override public void createNetwork(KubevirtNetwork network) { checkNotNull(network, ERR_NULL_NETWORK); checkArgument(!Strings.isNullOrEmpty(network.networkId()), ERR_NULL_NETWORK_ID); networkStore.createNetwork(network); log.info(String.format(MSG_NETWORK, network.name(), MSG_CREATED)); }
// Creating a null network must fail fast with NullPointerException.
@Test(expected = NullPointerException.class)
public void testCreateNullNetwork() {
    target.createNetwork(null);
}
// Zooms in one level, animated by default (delegates to zoomIn(boolean)).
@Override
public void zoomIn() {
    zoomIn(true);
}
// zoomIn increments the zoom level and saturates at Byte.MAX_VALUE.
@Test
public void zoomInTest() {
    MapViewPosition mapViewPosition = new MapViewPosition(new DisplayModel());
    Assert.assertEquals(0, mapViewPosition.getZoomLevel());
    mapViewPosition.zoomIn();
    Assert.assertEquals((byte) 1, mapViewPosition.getZoomLevel());
    mapViewPosition.setZoomLevel(Byte.MAX_VALUE);
    Assert.assertEquals(Byte.MAX_VALUE, mapViewPosition.getZoomLevel());
    mapViewPosition.zoomIn();
    Assert.assertEquals(Byte.MAX_VALUE, mapViewPosition.getZoomLevel());
}
/**
 * Decodes a binary-Slime-encoded predicate from the given buffer.
 *
 * @param buf the encoded bytes; must not be {@code null}
 * @return the decoded predicate
 */
public static Predicate decode(byte[] buf) {
    Objects.requireNonNull(buf, "buf");
    Slime slime = com.yahoo.slime.BinaryFormat.decode(buf);
    return decode(slime.get());
}
// Decoding an empty buffer must fail with an UnsupportedOperationException whose
// message is "0" (raised by the underlying Slime decoder).
@Test
void requireThatDecodeEmptyThrows() {
    try {
        BinaryFormat.decode(new byte[0]);
        fail();
    } catch (UnsupportedOperationException e) {
        assertEquals("0", e.getMessage());
    }
}
// Accessor for the configured maximum congestion window (in bytes —
// TODO confirm units against the constructor that computes maxCwnd).
int maxCongestionWindow() {
    return maxCwnd;
}
// With a small term length, the max congestion window must be derived from it
// (half the term length, expressed in MTUs).
@Test
void shouldSetWindowLengthFromTermLength() {
    final int smallTermLength = 8192;
    final CubicCongestionControl cubicCongestionControl = new CubicCongestionControl(
        0, channelWithWindow, 0, 0, smallTermLength, MTU_LENGTH, null, null,
        nanoClock, context, countersManager);

    assertEquals(smallTermLength / 2 / MTU_LENGTH, cubicCongestionControl.maxCongestionWindow());
}
/**
 * Fans out a getResourceTypeInfo request to all sub-clusters concurrently and
 * merges the responses. A null request, or any sub-cluster failure, is counted
 * in the router metrics, audit-logged, and rethrown.
 */
@Override
public GetAllResourceTypeInfoResponse getResourceTypeInfo(
    GetAllResourceTypeInfoRequest request) throws YarnException, IOException {
    if (request == null) {
        routerMetrics.incrResourceTypeInfoFailedRetrieved();
        String msg = "Missing getResourceTypeInfo request.";
        RouterAuditLogger.logFailure(user.getShortUserName(), GET_RESOURCETYPEINFO, UNKNOWN,
            TARGET_CLIENT_RM_SERVICE, msg);
        RouterServerUtil.logAndThrowException(msg, null);
    }
    long startTime = clock.getTime();
    ClientMethod remoteMethod = new ClientMethod("getResourceTypeInfo",
        new Class[] {GetAllResourceTypeInfoRequest.class}, new Object[] {request});
    Collection<GetAllResourceTypeInfoResponse> listResourceTypeInfo;
    try {
        listResourceTypeInfo = invokeConcurrent(remoteMethod, GetAllResourceTypeInfoResponse.class);
    } catch (Exception ex) {
        routerMetrics.incrResourceTypeInfoFailedRetrieved();
        String msg = "Unable to get all resource type info node due to exception.";
        LOG.error(msg, ex);
        RouterAuditLogger.logFailure(user.getShortUserName(), GET_RESOURCETYPEINFO, UNKNOWN,
            TARGET_CLIENT_RM_SERVICE, msg);
        throw ex;
    }
    long stopTime = clock.getTime();
    routerMetrics.succeededGetResourceTypeInfoRetrieved(stopTime - startTime);
    RouterAuditLogger.logSuccess(user.getShortUserName(), GET_RESOURCETYPEINFO,
        TARGET_CLIENT_RM_SERVICE);
    // Merge the GetAllResourceTypeInfoResponse
    return RouterYarnClientUtils.mergeResourceTypes(listResourceTypeInfo);
}
@Test
public void testGetResourceTypeInfoRequest() throws Exception {
  LOG.info("Test FederationClientInterceptor : Get Resource TypeInfo request.");
  // null request: must be rejected with the dedicated error message.
  LambdaTestUtils.intercept(YarnException.class, "Missing getResourceTypeInfo request.",
      () -> interceptor.getResourceTypeInfo(null));
  // normal request.
  GetAllResourceTypeInfoResponse response =
      interceptor.getResourceTypeInfo(GetAllResourceTypeInfoRequest.newInstance());
  // Merged result from the sub-clusters is expected to contain two resource types.
  Assert.assertEquals(2, response.getResourceTypeInfo().size());
}
/**
 * Parses an OSGi Export-Package attribute into a list of exports.
 * Fails (via the parsing context) when the attribute is empty or contains trailing input.
 */
public static List<ExportPackages.Export> parseExports(String exportAttribute) {
    ParsingContext context = new ParsingContext(exportAttribute.trim());
    List<ExportPackages.Export> parsed = parseExportPackage(context);
    if (parsed.isEmpty()) {
        context.fail("Expected a list of exports");
    } else if (!context.atEnd()) {
        context.fail("Exports not fully processed");
    }
    return parsed;
}
@SuppressWarnings("unchecked")
@Test
void require_that_multiple_parameters_for_a_package_is_parsed_correctly() {
    // Two ';'-separated parameters on a single exported package must both be captured.
    List<Export> exports =
        ExportPackageParser.parseExports("exported.package;version=\"1.2.3.sample\";param2=true");
    assertEquals(1, exports.size());
    Export export = exports.get(0);
    assertTrue(export.getParameters().containsAll(
        List.of(versionParameter, new Parameter("param2", "true"))));
}
/**
 * Fetches a single GitLab project by numeric id via the REST API.
 *
 * @param gitlabUrl base API URL of the GitLab instance
 * @param pat personal access token, sent via the PRIVATE-TOKEN header
 * @param gitlabProjectId numeric project id
 * @return the deserialized project
 * @throws IllegalArgumentException if the payload is not valid JSON
 * @throws IllegalStateException on I/O failure talking to GitLab
 */
public Project getProject(String gitlabUrl, String pat, Long gitlabProjectId) {
  String url = format("%s/projects/%s", gitlabUrl, gitlabProjectId);
  LOG.debug("get project : [{}]", url);
  Request request = new Request.Builder()
    .addHeader(PRIVATE_TOKEN, pat)
    .get()
    .url(url)
    .build();
  try (Response response = client.newCall(request).execute()) {
    checkResponseIsSuccessful(response);
    String body = response.body().string();
    LOG.trace("loading project payload result : [{}]", body);
    return new GsonBuilder().create().fromJson(body, Project.class);
  } catch (JsonSyntaxException e) {
    // Fix: keep the original parse failure as the cause instead of discarding it,
    // so the offending payload problem stays diagnosable from the stack trace.
    throw new IllegalArgumentException(
      "Could not parse GitLab answer to retrieve a project. Got a non-json payload as result.", e);
  } catch (IOException e) {
    logException(url, e);
    throw new IllegalStateException(e.getMessage(), e);
  }
}
@Test
public void fail_get_project_with_unexpected_io_exception_with_detailed_log() throws IOException {
  // Shutting the mock server down forces a connection failure on the next call.
  server.shutdown();
  assertThatThrownBy(() -> underTest.getProject(gitlabUrl, "token", 0L))
    .isInstanceOf(IllegalStateException.class)
    .hasMessageContaining("Failed to connect to");
  // The failure must also be logged with the full target URL for supportability.
  assertThat(logTester.logs(Level.INFO).get(0))
    .contains("Gitlab API call to [" + server.url("/projects/0") + "] "
      + "failed with error message : [Failed to connect to " + server.getHostName());
}
/**
 * Builds a map from external resource name to its configured amount.
 * Resources without an amount, or with a non-positive amount, are skipped with a warning.
 *
 * @param config the Flink configuration to read resource amounts from
 * @return map of resource name to amount; empty if no external resources are configured
 */
@VisibleForTesting
static Map<String, Long> getExternalResourceAmountMap(Configuration config) {
    final Set<String> resourceSet = getExternalResourceSet(config);
    if (resourceSet.isEmpty()) {
        return Collections.emptyMap();
    }
    final Map<String, Long> externalResourceAmountMap = new HashMap<>();
    for (String resourceName : resourceSet) {
        // The amount option key is derived per resource name and has no default.
        final ConfigOption<Long> amountOption =
                key(ExternalResourceOptions.getAmountConfigOptionForResource(resourceName))
                        .longType()
                        .noDefaultValue();
        final Optional<Long> amountOpt = config.getOptional(amountOption);
        if (!amountOpt.isPresent()) {
            LOG.warn(
                    "The amount of the {} should be configured. Will ignore that resource.",
                    resourceName);
        } else if (amountOpt.get() <= 0) {
            // Fix: the placeholder arguments were swapped (amount was printed where the
            // resource name belonged); message reworded to match the corrected order.
            LOG.warn(
                    "The amount of the {} should be positive, but is {}. Will ignore that resource.",
                    resourceName,
                    amountOpt.get());
        } else {
            externalResourceAmountMap.put(resourceName, amountOpt.get());
        }
    }
    return externalResourceAmountMap;
}
@Test
public void testGetExternalResourceAmountMap() {
    // Configure a single external resource with a valid (positive) amount.
    final Configuration config = new Configuration();
    config.set(
            ExternalResourceOptions.EXTERNAL_RESOURCE_LIST,
            Collections.singletonList(RESOURCE_NAME_1));
    config.setLong(
            ExternalResourceOptions.getAmountConfigOptionForResource(RESOURCE_NAME_1),
            RESOURCE_AMOUNT_1);
    final Map<String, Long> externalResourceAmountMap =
            ExternalResourceUtils.getExternalResourceAmountMap(config);
    // Exactly that resource, with exactly that amount, must be present.
    assertThat(externalResourceAmountMap.size(), is(1));
    assertTrue(externalResourceAmountMap.containsKey(RESOURCE_NAME_1));
    assertThat(externalResourceAmountMap.get(RESOURCE_NAME_1), is(RESOURCE_AMOUNT_1));
}
/**
 * Lazily returns the process-wide singleton using double-checked locking.
 *
 * <p>NOTE(review): double-checked locking is only correct under the Java Memory Model
 * if the {@code instance} field is declared {@code volatile} — the field declaration
 * is not visible here; confirm it.
 */
static ZookeeperRegisterServiceImpl getInstance() {
    if (instance == null) {
        synchronized (ZookeeperRegisterServiceImpl.class) {
            if (instance == null) {
                instance = new ZookeeperRegisterServiceImpl();
            }
        }
    }
    return instance;
}
@Test
public void getInstance() {
    // Every call to the accessor must hand back the one shared singleton.
    final ZookeeperRegisterServiceImpl resolved = ZookeeperRegisterServiceImpl.getInstance();
    Assertions.assertEquals(resolved, service);
}
/**
 * Query statistics are not tracked for replicated maps.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public long getQueryCount() {
    throw new UnsupportedOperationException("Queries on replicated maps are not supported.");
}
// Replicated maps do not support queries, so the stats accessor must reject the call.
@Test(expected = UnsupportedOperationException.class)
public void testQueryCount() {
    localReplicatedMapStats.getQueryCount();
}
/**
 * Returns the set of property names this format supports.
 * The set is an {@link ImmutableSet}, so exposing the internal reference is safe —
 * hence the SpotBugs EI_EXPOSE_REP suppression.
 */
@Override
@SuppressFBWarnings(value = "EI_EXPOSE_REP")
public ImmutableSet<String> getSupportedProperties() {
    return SUPPORTED_PROPERTIES;
}
@Test
public void shouldGetSupportedProperties() {
    // Given: a properties instance with no overrides
    final AvroProperties properties = new AvroProperties(ImmutableMap.of());
    // When:
    final ImmutableSet<String> supportedProperties = properties.getSupportedProperties();
    // Then: the accessor exposes exactly the class-level constant.
    assertThat(supportedProperties, is(AvroProperties.SUPPORTED_PROPERTIES));
}
/**
 * Walks an "extra directory" on disk and builds a layer configuration for its contents,
 * applying include/exclude glob filters and per-path permissions.
 *
 * @param sourceDirectory local directory whose files become layer entries
 * @param targetDirectory absolute path in the container the files are placed under
 * @param includes glob patterns (relative to sourceDirectory) to include; empty means "all"
 * @param excludes glob patterns to exclude; applied before includes
 * @param extraDirectoryPermissions map of glob pattern to file permissions
 * @param modificationTimeProvider supplies the modification time recorded per entry
 * @return the built layer
 * @throws IOException if walking the directory fails
 */
public static FileEntriesLayer extraDirectoryLayerConfiguration(
    Path sourceDirectory,
    AbsoluteUnixPath targetDirectory,
    List<String> includes,
    List<String> excludes,
    Map<String, FilePermissions> extraDirectoryPermissions,
    ModificationTimeProvider modificationTimeProvider)
    throws IOException {
  FileEntriesLayer.Builder builder =
      FileEntriesLayer.builder().setName(LayerType.EXTRA_FILES.getName());
  // Pre-compile permission globs once; LinkedHashMap preserves declaration order
  // so earlier patterns take precedence when matching.
  Map<PathMatcher, FilePermissions> permissionsPathMatchers = new LinkedHashMap<>();
  for (Map.Entry<String, FilePermissions> entry : extraDirectoryPermissions.entrySet()) {
    permissionsPathMatchers.put(
        FileSystems.getDefault().getPathMatcher(GLOB_PREFIX + entry.getKey()), entry.getValue());
  }
  DirectoryWalker walker = new DirectoryWalker(sourceDirectory).filterRoot();
  // add exclusion filters
  excludes.stream()
      .map(pattern -> FileSystems.getDefault().getPathMatcher(GLOB_PREFIX + pattern))
      .forEach(
          pathMatcher ->
              walker.filter(path -> !pathMatcher.matches(sourceDirectory.relativize(path))));
  // add an inclusion filter: a path is kept if it matches ANY include pattern
  // (the matchers are OR-ed together before being installed as a single filter).
  includes.stream()
      .map(pattern -> FileSystems.getDefault().getPathMatcher(GLOB_PREFIX + pattern))
      .map(
          pathMatcher ->
              (Predicate<Path>) path -> pathMatcher.matches(sourceDirectory.relativize(path)))
      .reduce((matches1, matches2) -> matches1.or(matches2))
      .ifPresent(walker::filter);
  // walk the source tree and add layer entries
  walker.walk(
      localPath -> {
        AbsoluteUnixPath pathOnContainer =
            targetDirectory.resolve(sourceDirectory.relativize(localPath));
        Instant modificationTime = modificationTimeProvider.get(localPath, pathOnContainer);
        Optional<FilePermissions> permissions =
            determinePermissions(pathOnContainer, extraDirectoryPermissions, permissionsPathMatchers);
        if (permissions.isPresent()) {
          builder.addEntry(localPath, pathOnContainer, permissions.get(), modificationTime);
        } else {
          builder.addEntry(localPath, pathOnContainer, modificationTime);
        }
      });
  return builder.build();
}
@Test
public void testExtraDirectoryLayerConfiguration_includes()
    throws URISyntaxException, IOException {
  Path extraFilesDirectory = Paths.get(Resources.getResource("core/layer").toURI());
  // Include patterns are OR-ed: "**/bar" picks a/b/bar and "**/*a*" picks c/cat.
  FileEntriesLayer layerConfiguration =
      JavaContainerBuilderHelper.extraDirectoryLayerConfiguration(
          extraFilesDirectory,
          AbsoluteUnixPath.get("/"),
          Arrays.asList("**/bar", "**/*a*"),
          Collections.emptyList(),
          Collections.emptyMap(),
          (ignored1, ignored2) -> Instant.EPOCH);
  assertThat(layerConfiguration.getEntries())
      .comparingElementsUsing(SOURCE_FILE_OF)
      .containsExactly(
          extraFilesDirectory.resolve("a/b/bar"), extraFilesDirectory.resolve("c/cat"));
}
/**
 * Handles the AUTH_UPDATE_ACL admin command: decodes the ACL payload from the request
 * body and applies it through the authorization metadata manager.
 *
 * @param ctx the Netty channel context of the admin connection
 * @param request the remoting command carrying an UpdateAclRequestHeader and an AclInfo body
 * @return SUCCESS response, or an auth error response mapped by handleAuthException
 * @throws RemotingCommandException if the custom header cannot be decoded
 */
private RemotingCommand updateAcl(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    RemotingCommand response = RemotingCommand.createResponseCommand(null);
    UpdateAclRequestHeader requestHeader = request.decodeCommandCustomHeader(UpdateAclRequestHeader.class);
    Subject subject = Subject.of(requestHeader.getSubject());
    AclInfo aclInfo = RemotingSerializable.decode(request.getBody(), AclInfo.class);
    // An ACL without any policy is meaningless; reject it up front.
    if (aclInfo == null || CollectionUtils.isEmpty(aclInfo.getPolicies())) {
        throw new AuthorizationException("The body of acl is null");
    }
    Acl acl = AclConverter.convertAcl(aclInfo);
    // Fall back to the subject from the request header when the body omitted it.
    if (acl != null && acl.getSubject() == null) {
        acl.setSubject(subject);
    }
    // The async update is joined so the response code is settled before returning;
    // failures are translated into an auth-error response rather than propagated.
    this.brokerController.getAuthorizationMetadataManager().updateAcl(acl)
        .thenAccept(nil -> response.setCode(ResponseCode.SUCCESS))
        .exceptionally(ex -> {
            LOGGER.error("update acl for {} error", requestHeader.getSubject(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
@Test
public void testUpdateAcl() throws RemotingCommandException {
    // Stub the metadata manager so the update completes successfully.
    when(authorizationMetadataManager.updateAcl(any(Acl.class)))
        .thenReturn(CompletableFuture.completedFuture(null));
    UpdateAclRequestHeader updateAclRequestHeader = new UpdateAclRequestHeader();
    updateAclRequestHeader.setSubject("User:abc");
    RemotingCommand request =
        RemotingCommand.createRequestCommand(RequestCode.AUTH_UPDATE_ACL, updateAclRequestHeader);
    request.setVersion(441);
    request.addExtField("AccessKey", "rocketmq");
    request.makeCustomHeaderToNet();
    // Body carries the ACL: one PUB policy on all topics from one source IP.
    AclInfo aclInfo = AclInfo.of("User:abc", Arrays.asList("Topic:*"), Arrays.asList("PUB"),
        Arrays.asList("192.168.0.1"), "Grant");
    request.setBody(JSON.toJSONBytes(aclInfo));
    RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request);
    assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
}
/**
 * Returns the attribute for the given key, creating it if absent or previously removed.
 * Lock-free: retries a copy-on-write CAS over the sorted attribute array until it wins.
 *
 * @param key the attribute key; must not be null
 * @return the existing live attribute, or a freshly inserted one
 */
@SuppressWarnings("unchecked")
@Override
public <T> Attribute<T> attr(AttributeKey<T> key) {
    ObjectUtil.checkNotNull(key, "key");
    DefaultAttribute newAttribute = null;
    for (;;) {
        // Re-read the volatile array on every retry; a concurrent writer may have replaced it.
        final DefaultAttribute[] attributes = this.attributes;
        final int index = searchAttributeByKey(attributes, key);
        final DefaultAttribute[] newAttributes;
        if (index >= 0) {
            final DefaultAttribute attribute = attributes[index];
            assert attribute.key() == key;
            if (!attribute.isRemoved()) {
                return attribute;
            }
            // let's try replace the removed attribute with a new one
            // (the lazily-created newAttribute is reused across retries).
            if (newAttribute == null) {
                newAttribute = new DefaultAttribute<T>(this, key);
            }
            final int count = attributes.length;
            newAttributes = Arrays.copyOf(attributes, count);
            newAttributes[index] = newAttribute;
        } else {
            if (newAttribute == null) {
                newAttribute = new DefaultAttribute<T>(this, key);
            }
            final int count = attributes.length;
            // Insert while keeping the array sorted so binary search stays valid.
            newAttributes = new DefaultAttribute[count + 1];
            orderedCopyOnInsert(attributes, count, newAttributes, newAttribute);
        }
        // Publish the new array; on CAS failure another thread changed it — loop and retry.
        if (ATTRIBUTES_UPDATER.compareAndSet(this, attributes, newAttributes)) {
            return newAttribute;
        }
    }
}
@Test
public void testGetSetString() {
    final AttributeKey<String> key = AttributeKey.valueOf("Nothing");
    final Attribute<String> attribute = map.attr(key);
    // Repeated lookups of the same key must yield the identical attribute instance.
    assertSame(attribute, map.attr(key));
    // The first setIfAbsent wins...
    attribute.setIfAbsent("Whoohoo");
    assertSame("Whoohoo", attribute.get());
    // ...and a second setIfAbsent must not replace the stored value.
    attribute.setIfAbsent("What");
    assertNotSame("What", attribute.get());
    // Removal clears the value entirely.
    attribute.remove();
    assertNull(attribute.get());
}
/**
 * Resolves a JDBC-backed table, caching the instance per (db, table) key.
 * Returns null (and logs a warning) when the table has no columns or the JDBC call fails.
 *
 * @param dbName the remote database (schema) name
 * @param tblName the remote table name
 * @return the table, or null if it cannot be resolved
 */
@Override
public Table getTable(String dbName, String tblName) {
    JDBCTableName jdbcTable = new JDBCTableName(null, dbName, tblName);
    return tableInstanceCache.get(jdbcTable, k -> {
        // try-with-resources guarantees the pooled connection is returned on every path.
        try (Connection connection = getConnection()) {
            ResultSet columnSet = schemaResolver.getColumns(connection, dbName, tblName);
            List<Column> fullSchema = schemaResolver.convertToSRTable(columnSet);
            List<Column> partitionColumns = Lists.newArrayList();
            // Partition metadata is only fetched when the driver/dialect supports it.
            if (schemaResolver.isSupportPartitionInformation()) {
                partitionColumns = listPartitionColumns(dbName, tblName, fullSchema);
            }
            if (fullSchema.isEmpty()) {
                return null;
            }
            // Table ids are persisted so the same table keeps its id across reloads.
            Integer tableId = tableIdCache.getPersistentCache(jdbcTable,
                j -> ConnectorTableId.CONNECTOR_ID_GENERATOR.getNextId().asInt());
            return schemaResolver.getTable(tableId, tblName, fullSchema,
                partitionColumns, dbName, catalogName, properties);
        } catch (SQLException | DdlException e) {
            LOG.warn("get table for JDBC catalog fail!", e);
            return null;
        }
    });
}
@Test
public void testGetTableWithPartition() throws SQLException {
    // JMockit expectations: make the driver report a "partitions" metadata table so the
    // schema resolver takes the partition-aware path.
    new Expectations() {
        {
            preparedStatement.executeQuery();
            result = partitionsResult;
            minTimes = 0;
            partitionsInfoTablesResult = new MockResultSet("partitions");
            partitionsInfoTablesResult.addColumn("TABLE_NAME", Arrays.asList("partitions"));
            connection.getMetaData().getTables(anyString, null, null, null);
            result = partitionsInfoTablesResult;
            minTimes = 0;
        }
    };
    try {
        JDBCMetadata jdbcMetadata = new JDBCMetadata(properties, "catalog", dataSource);
        Table table = jdbcMetadata.getTable("test", "tbl1");
        // The resolved table must be JDBC-backed and carry its partition columns.
        Assert.assertTrue(table instanceof JDBCTable);
        Assert.assertFalse(table.getPartitionColumns().isEmpty());
    } catch (Exception e) {
        System.out.println(e.getMessage());
        Assert.fail();
    }
}
/**
 * Sends a RST_STREAM for the given stream id, routing streams unknown to this
 * connection through the dedicated unknown-stream path.
 */
@Override
public ChannelFuture resetStream(final ChannelHandlerContext ctx, int streamId, long errorCode,
                                 ChannelPromise promise) {
    final Http2Stream stream = connection().stream(streamId);
    return stream == null
            ? resetUnknownStream(ctx, streamId, errorCode, promise.unvoid())
            : resetStream(ctx, stream, errorCode, promise);
}
@Test
public void writeMultipleRstFramesForSameStream() throws Exception {
    handler = newHandler();
    when(stream.id()).thenReturn(STREAM_ID);
    // Model the "reset already sent" stream state with a mutable flag so the second
    // resetStream call observes the effect of the first.
    final AtomicBoolean resetSent = new AtomicBoolean();
    when(stream.resetSent()).then(new Answer<Http2Stream>() {
        @Override
        public Http2Stream answer(InvocationOnMock invocationOnMock) {
            resetSent.set(true);
            return stream;
        }
    });
    when(stream.isResetSent()).then(new Answer<Boolean>() {
        @Override
        public Boolean answer(InvocationOnMock invocationOnMock) {
            return resetSent.get();
        }
    });
    // The frame writer completes the promise it is handed, as the real writer would.
    when(frameWriter.writeRstStream(eq(ctx), eq(STREAM_ID), anyLong(), any(ChannelPromise.class)))
        .then(new Answer<ChannelFuture>() {
            @Override
            public ChannelFuture answer(InvocationOnMock invocationOnMock) throws Throwable {
                ChannelPromise promise = invocationOnMock.getArgument(3);
                return promise.setSuccess();
            }
        });
    ChannelPromise promise = new DefaultChannelPromise(channel, ImmediateEventExecutor.INSTANCE);
    final ChannelPromise promise2 = new DefaultChannelPromise(channel, ImmediateEventExecutor.INSTANCE);
    // Issue the second RST from the completion of the first to exercise back-to-back resets.
    promise.addListener(new ChannelFutureListener() {
        @Override
        public void operationComplete(ChannelFuture future) {
            handler.resetStream(ctx, STREAM_ID, STREAM_CLOSED.code(), promise2);
        }
    });
    handler.resetStream(ctx, STREAM_ID, CANCEL.code(), promise);
    // Only ONE frame must reach the wire; the duplicate reset still succeeds locally.
    verify(frameWriter).writeRstStream(eq(ctx), eq(STREAM_ID), anyLong(), any(ChannelPromise.class));
    assertTrue(promise.isSuccess());
    assertTrue(promise2.isSuccess());
}
/**
 * Reports JobRunr health: UP when the background job server is disabled or running,
 * DOWN when it is enabled but stopped.
 */
@Override
public Health health() {
    final Health.Builder builder = Health.unknown();
    if (!jobRunrProperties.getBackgroundJobServer().isEnabled()) {
        // A deliberately disabled server is a valid configuration, so report UP.
        builder.up().withDetail("backgroundJobServer", "disabled");
        return builder.build();
    }
    final BackgroundJobServer backgroundJobServer = backgroundJobServerProvider.getObject();
    if (backgroundJobServer.isRunning()) {
        builder.up()
                .withDetail("backgroundJobServer", "enabled")
                .withDetail("backgroundJobServerStatus", "running");
    } else {
        builder.down()
                .withDetail("backgroundJobServer", "enabled")
                .withDetail("backgroundJobServerStatus", "stopped");
    }
    return builder.build();
}
@Test
void givenEnabledBackgroundJobServerAndBackgroundJobServerStopped_ThenHealthIsDown() {
    // Enabled-but-stopped is the only combination that must report DOWN.
    when(backgroundJobServerProperties.isEnabled()).thenReturn(true);
    when(backgroundJobServer.isRunning()).thenReturn(false);
    assertThat(jobRunrHealthIndicator.health().getStatus()).isEqualTo(Status.DOWN);
}
/**
 * Builds the Kafka Streams stream-stream join for the given plan node.
 *
 * <p>RIGHT joins are executed by swapping the sides and issuing a left join; the
 * schemas and internal formats are swapped accordingly before serde construction.
 *
 * @param left holder of the left input stream
 * @param right holder of the right input stream
 * @param join the join plan node (type, windows, formats, key column)
 * @param buildContext runtime context used to build serdes
 * @param streamJoinedFactory factory for the StreamJoined config
 * @return a holder wrapping the joined stream and its output schema
 */
@SuppressWarnings("deprecation")
public static <K> KStreamHolder<K> build(
    final KStreamHolder<K> left,
    final KStreamHolder<K> right,
    final StreamStreamJoin<K> join,
    final RuntimeBuildContext buildContext,
    final StreamJoinedFactory streamJoinedFactory) {
  final QueryContext queryContext = join.getProperties().getQueryContext();
  final QueryContext.Stacker stacker = QueryContext.Stacker.of(queryContext);
  final LogicalSchema leftSchema;
  final LogicalSchema rightSchema;
  final Formats rightFormats;
  final Formats leftFormats;
  // RIGHT join: swap sides so it can run as a left join below.
  if (join.getJoinType().equals(RIGHT)) {
    leftFormats = join.getRightInternalFormats();
    rightFormats = join.getLeftInternalFormats();
    leftSchema = right.getSchema();
    rightSchema = left.getSchema();
  } else {
    leftFormats = join.getLeftInternalFormats();
    rightFormats = join.getRightInternalFormats();
    leftSchema = left.getSchema();
    rightSchema = right.getSchema();
  }
  final PhysicalSchema leftPhysicalSchema = PhysicalSchema.from(
      leftSchema,
      leftFormats.getKeyFeatures(),
      leftFormats.getValueFeatures()
  );
  final Serde<GenericRow> leftSerde = buildContext.buildValueSerde(
      leftFormats.getValueFormat(),
      leftPhysicalSchema,
      stacker.push(LEFT_SERDE_CTX).getQueryContext()
  );
  final PhysicalSchema rightPhysicalSchema = PhysicalSchema.from(
      rightSchema,
      rightFormats.getKeyFeatures(),
      rightFormats.getValueFeatures()
  );
  final Serde<GenericRow> rightSerde = buildContext.buildValueSerde(
      rightFormats.getValueFormat(),
      rightPhysicalSchema,
      stacker.push(RIGHT_SERDE_CTX).getQueryContext()
  );
  // Key serde is always built from the (possibly swapped) left side's key format.
  final Serde<K> keySerde = left.getExecutionKeyFactory().buildKeySerde(
      leftFormats.getKeyFormat(),
      leftPhysicalSchema,
      queryContext
  );
  final StreamJoined<K, GenericRow, GenericRow> joined = streamJoinedFactory.create(
      keySerde,
      leftSerde,
      rightSerde,
      StreamsUtil.buildOpName(queryContext),
      StreamsUtil.buildOpName(queryContext)
  );
  final JoinParams joinParams = JoinParamsFactory
      .create(join.getKeyColName(), leftSchema, rightSchema);
  JoinWindows joinWindows;
  // Grace, as optional, helps to identify if a user specified the GRACE PERIOD syntax in the
  // join window. If specified, then we'll call the new KStreams API ofTimeDifferenceAndGrace()
  // which enables the "spurious" results bugfix with left/outer joins (see KAFKA-10847).
  if (join.getGraceMillis().isPresent()) {
    joinWindows = JoinWindows.ofTimeDifferenceAndGrace(
        join.getBeforeMillis(),
        join.getGraceMillis().get());
  } else {
    joinWindows = JoinWindows.of(join.getBeforeMillis());
  }
  joinWindows = joinWindows.after(join.getAfterMillis());
  final KStream<K, GenericRow> result;
  switch (join.getJoinType()) {
    case LEFT:
      result = left.getStream().leftJoin(
          right.getStream(), joinParams.getJoiner(), joinWindows, joined);
      break;
    case RIGHT:
      // Sides were swapped above, so a left join on the swapped inputs implements RIGHT.
      result = right.getStream().leftJoin(
          left.getStream(), joinParams.getJoiner(), joinWindows, joined);
      break;
    case OUTER:
      result = left.getStream().outerJoin(
          right.getStream(), joinParams.getJoiner(), joinWindows, joined);
      break;
    case INNER:
      result = left.getStream().join(
          right.getStream(), joinParams.getJoiner(), joinWindows, joined);
      break;
    default:
      throw new IllegalStateException("invalid join type");
  }
  return left.withStream(result, joinParams.getSchema());
}
@Test
public void shouldBuildKeySerdeCorrectly() {
  // Given: an inner join keyed on the left key column
  givenInnerJoin(L_KEY);
  // When:
  join.build(planBuilder, planInfo);
  // Then: the key serde is built from the LEFT side's key format and physical schema.
  verify(executionKeyFactory).buildKeySerde(LEFT_FMT.getKeyFormat(), LEFT_PHYSICAL, CTX);
}
@Override public boolean retryRequest( HttpRequest request, IOException exception, int execCount, HttpContext context) { if (execCount > maxRetries) { // Do not retry if over max retries return false; } if (nonRetriableExceptions.contains(exception.getClass())) { return false; } else { for (Class<? extends IOException> rejectException : nonRetriableExceptions) { if (rejectException.isInstance(exception)) { return false; } } } if (request instanceof CancellableDependency && ((CancellableDependency) request).isCancelled()) { return false; } // Retry if the request is considered idempotent return Method.isIdempotent(request.getMethod()); }
@Test
public void noRetryOnUnknownHost() {
    // DNS resolution failures are configured as non-retriable.
    final HttpGet request = new HttpGet("/");
    final boolean shouldRetry = retryStrategy.retryRequest(request, new UnknownHostException(), 1, null);
    assertThat(shouldRetry).isFalse();
}
/**
 * Parses a connection URL into a {@link Host}, falling back to the default scheme
 * when the URL carries no recognizable protocol prefix.
 *
 * @param url the URL to parse
 * @return the parsed host
 * @throws HostParserException if the URL is malformed or lacks a required hostname
 */
public Host get(final String url) throws HostParserException {
    final StringReader reader = new StringReader(url);
    final Protocol parsedProtocol, protocol;
    // Use the protocol found in the URL if any, otherwise the configured default scheme.
    if((parsedProtocol = findProtocol(reader, factory)) != null) {
        protocol = parsedProtocol;
    }
    else {
        protocol = defaultScheme;
    }
    // Decorates parse failures with the protocol that was detected (may be null).
    final Consumer<HostParserException> parsedProtocolDecorator = e -> e.withProtocol(parsedProtocol);
    final Host host = new Host(protocol);
    final URITypes uriType = findURIType(reader);
    if(uriType == URITypes.Undefined) {
        // scheme:
        // Bare scheme is only acceptable when the protocol supplies a default hostname.
        if(StringUtils.isBlank(protocol.getDefaultHostname())) {
            throw decorate(new HostParserException(String.format("Missing hostname in URI %s", url)), parsedProtocolDecorator);
        }
        return host;
    }
    if(uriType == URITypes.Authority) {
        // Protocols with a fixed hostname treat the authority part as a rootless path instead.
        if(host.getProtocol().isHostnameConfigurable()) {
            parseAuthority(reader, host, parsedProtocolDecorator);
        }
        else {
            parseRootless(reader, host, parsedProtocolDecorator);
        }
    }
    else if(uriType == URITypes.Rootless) {
        parseRootless(reader, host, parsedProtocolDecorator);
    }
    else if(uriType == URITypes.Absolute) {
        parseAbsolute(reader, host, parsedProtocolDecorator);
    }
    if(log.isDebugEnabled()) {
        log.debug(String.format("Parsed %s as %s", url, host));
    }
    return host;
}
@Test
public void parseNonConfigurableEmptyURL() throws HostParserException {
    // For a protocol with a fixed hostname and path, parsing a bare scheme URL
    // must fall back to the protocol's defaults.
    final Host host = new HostParser(new ProtocolFactory(Collections.singleton(new TestProtocol(Scheme.https) {
        @Override
        public String getDefaultHostname() {
            return "host";
        }

        @Override
        public String getDefaultPath() {
            return "/default-path";
        }

        @Override
        public boolean isHostnameConfigurable() {
            return false;
        }

        @Override
        public boolean isPathConfigurable() {
            return false;
        }
    }))).get("https://");
    assertEquals("host", host.getHostname());
    assertEquals("/default-path", host.getDefaultPath());
}
/**
 * Checks whether a DNS name is covered by a pattern, case-insensitively.
 *
 * <p>A pattern of the form {@code *.domain} covers {@code domain} itself and any of its
 * subdomains. Any other pattern only covers an exact (case-insensitive) match.
 *
 * @param name the DNS name to check; must not be null or empty
 * @param pattern the pattern (optionally starting with {@code *.}); must not be null or empty
 * @return true if the name is covered by the pattern
 * @throws IllegalArgumentException if either argument is null or empty
 */
public static boolean isNameCoveredByPattern( String name, String pattern )
{
    if ( name == null || name.isEmpty() || pattern == null || pattern.isEmpty() )
    {
        throw new IllegalArgumentException( "Arguments cannot be null or empty." );
    }

    final String needle = name.toLowerCase();
    final String hayStack = pattern.toLowerCase();

    if ( needle.equals( hayStack ))
    {
        return true;
    }

    if ( hayStack.startsWith( "*." ) )
    {
        final String domain = hayStack.substring( 2 );
        // Fix: a plain suffix match would also accept names that merely end with the
        // same characters (e.g. "fooxmpp.example.org" for "*.xmpp.example.org").
        // Require either the bare domain itself or a match on a full label boundary.
        return needle.equals( domain ) || needle.endsWith( "." + domain );
    }
    return false;
}
@Test
public void testNameCoverageSubdomainWithWildcardOfSameDomain() throws Exception
{
    // setup
    // By design here, a wildcard pattern also covers the bare domain itself.
    final String name = "xmpp.example.org";
    final String pattern = "*.xmpp.example.org";

    // do magic
    final boolean result = DNSUtil.isNameCoveredByPattern( name, pattern );

    // verify
    assertTrue( result );
}
/** Renders the parsed node chain into its final string form. */
public String transform() throws ScanException {
    final StringBuilder buffer = new StringBuilder();
    compileNode(node, buffer, new Stack<Node>());
    return buffer.toString();
}
@Test
public void LOGBACK_1101() throws ScanException {
    // A brace group that is not a variable reference must be passed through verbatim.
    final String input = "a: {y}";
    final NodeToStringTransformer transformer =
            new NodeToStringTransformer(makeNode(input), propertyContainer0);
    Assertions.assertEquals("a: {y}", transformer.transform());
}
/** Evaluates a jq expression against a JSON string and returns the result as text. */
public static String jqString(String value, String expression) {
    // Delegate to the generic jq helper, converting the matched node to its text form.
    return H2Functions.jq(value, expression, node -> node.asText());
}
@Test
public void jqString() {
    // Simple object access returns the value as plain text.
    String jqString = H2Functions.jqString("{\"a\": \"b\"}", ".a");
    assertThat(jqString, is("b"));

    // on arrays, it will use the first element
    jqString = H2Functions.jqString("{\"labels\":[{\"key\":\"a\",\"value\":\"aValue\"},{\"key\":\"b\",\"value\":\"bValue\"}]}", ".labels[].value");
    assertThat(jqString, is("aValue"));
}
/**
 * Telnet "invoke" command: parses {@code [Service.]method(jsonArgs)}, locates a matching
 * provider method (possibly via a prior "select" interaction for overloaded methods),
 * invokes it reflectively and renders the JSON result plus elapsed time.
 *
 * @param commandContext the telnet command context (carries the remote channel)
 * @param args the raw command arguments; args[0] is the invocation expression
 * @return a human-readable result or error message for the telnet session
 */
@Override
public String execute(CommandContext commandContext, String[] args) {
    if (ArrayUtils.isEmpty(args)) {
        return "Please input method name, eg: \r\ninvoke xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})\r\n"
                + "invoke XxxService.xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})\r\n"
                + "invoke com.xxx.XxxService.xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})";
    }
    Channel channel = commandContext.getRemote();
    // Service may have been pre-selected with the "cd" command and stored on the channel.
    String service = channel.attr(ChangeTelnet.SERVICE_KEY) != null
            ? channel.attr(ChangeTelnet.SERVICE_KEY).get()
            : null;
    String message = args[0];
    int i = message.indexOf("(");
    if (i < 0 || !message.endsWith(")")) {
        return "Invalid parameters, format: service.method(args)";
    }
    // Split "Service.method(arg, ...)" into method name and raw argument text.
    String method = message.substring(0, i).trim();
    String param = message.substring(i + 1, message.length() - 1).trim();
    i = method.lastIndexOf(".");
    if (i >= 0) {
        // Explicit service name in the expression overrides the "cd" selection.
        service = method.substring(0, i).trim();
        method = method.substring(i + 1).trim();
    }
    if (StringUtils.isEmpty(service)) {
        return "If you want to invoke like [invoke sayHello(\"xxxx\")], please execute cd command first,"
                + " or you can execute it like [invoke IHelloService.sayHello(\"xxxx\")]";
    }
    // Arguments are parsed as a JSON array by wrapping the raw text in brackets.
    List<Object> list;
    try {
        list = JsonUtils.toJavaList("[" + param + "]", Object.class);
    } catch (Throwable t) {
        return "Invalid json argument, cause: " + t.getMessage();
    }
    StringBuilder buf = new StringBuilder();
    Method invokeMethod = null;
    ProviderModel selectedProvider = null;
    if (isInvokedSelectCommand(channel)) {
        // A previous ambiguous invoke was resolved with "select"; reuse that choice.
        selectedProvider = channel.attr(INVOKE_METHOD_PROVIDER_KEY).get();
        invokeMethod = channel.attr(SelectTelnet.SELECT_METHOD_KEY).get();
    } else {
        for (ProviderModel provider : frameworkModel.getServiceRepository().allProviderModels()) {
            if (!isServiceMatch(service, provider)) {
                continue;
            }
            selectedProvider = provider;
            // First narrow by name and parameter count, then by argument compatibility.
            List<Method> methodList = findSameSignatureMethod(provider.getAllMethods(), method, list);
            if (CollectionUtils.isEmpty(methodList)) {
                break;
            }
            if (methodList.size() == 1) {
                invokeMethod = methodList.get(0);
            } else {
                List<Method> matchMethods = findMatchMethods(methodList, list);
                if (CollectionUtils.isEmpty(matchMethods)) {
                    break;
                }
                if (matchMethods.size() == 1) {
                    invokeMethod = matchMethods.get(0);
                } else {
                    // exist overridden method: stash the candidates on the channel and
                    // ask the user to pick one via the "select" command.
                    channel.attr(INVOKE_METHOD_PROVIDER_KEY).set(provider);
                    channel.attr(INVOKE_METHOD_LIST_KEY).set(matchMethods);
                    channel.attr(INVOKE_MESSAGE_KEY).set(message);
                    printSelectMessage(buf, matchMethods);
                    return buf.toString();
                }
            }
            break;
        }
    }
    if (!StringUtils.isEmpty(service)) {
        buf.append("Use default service ").append(service).append('.');
    }
    if (selectedProvider == null) {
        buf.append("\r\nNo such service ").append(service);
        return buf.toString();
    }
    if (invokeMethod == null) {
        buf.append("\r\nNo such method ")
                .append(method)
                .append(" in service ")
                .append(service);
        return buf.toString();
    }
    try {
        // Convert the generic JSON values to the method's declared parameter types.
        Object[] array = realize(list.toArray(), invokeMethod.getParameterTypes(), invokeMethod.getGenericParameterTypes());
        long start = System.currentTimeMillis();
        AppResponse result = new AppResponse();
        try {
            Object o = invokeMethod.invoke(selectedProvider.getServiceInstance(), array);
            boolean setValueDone = false;
            // Async providers publish the real result through an internal future.
            if (RpcContext.getServerAttachment().isAsyncStarted()) {
                AsyncContext asyncContext = RpcContext.getServerAttachment().getAsyncContext();
                if (asyncContext instanceof AsyncContextImpl) {
                    CompletableFuture<Object> internalFuture = ((AsyncContextImpl) asyncContext).getInternalFuture();
                    result.setValue(internalFuture.get());
                    setValueDone = true;
                }
            }
            if (!setValueDone) {
                result.setValue(o);
            }
        } catch (Throwable t) {
            result.setException(t);
            // Restore the interrupt flag if the invocation was interrupted.
            if (t instanceof InterruptedException) {
                Thread.currentThread().interrupt();
            }
        } finally {
            RpcContext.removeContext();
        }
        long end = System.currentTimeMillis();
        buf.append("\r\nresult: ");
        buf.append(JsonUtils.toJson(result.recreate()));
        buf.append("\r\nelapsed: ");
        buf.append(end - start);
        buf.append(" ms.");
    } catch (Throwable t) {
        return "Failed to invoke method " + invokeMethod.getName() + ", cause: "
                + StringUtils.toString(t);
    }
    return buf.toString();
}
@Test
void testInvokeMultiJsonParamMethod() throws RemotingException {
    // Pre-select the service on the channel (as the "cd" command would) and clear
    // any pending "select" state.
    defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY).set(DemoService.class.getName());
    defaultAttributeMap.attr(SelectTelnet.SELECT_KEY).set(null);
    given(mockChannel.attr(ChangeTelnet.SERVICE_KEY))
            .willReturn(defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY));
    given(mockChannel.attr(SelectTelnet.SELECT_KEY)).willReturn(defaultAttributeMap.attr(SelectTelnet.SELECT_KEY));
    registerProvider(DemoService.class.getName(), new DemoServiceImpl(), DemoService.class);
    // Two JSON objects must be deserialized into two separate method parameters.
    String param = "{\"name\":\"Dubbo\",\"age\":8},{\"name\":\"Apache\",\"age\":20}";
    String result = invoke.execute(mockCommandContext, new String[] {"getPerson(" + param + ")"});
    assertTrue(result.contains("result: 28"));
    // Clean up the channel attributes so other tests are unaffected.
    defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY).remove();
    defaultAttributeMap.attr(SelectTelnet.SELECT_KEY).remove();
}
/**
 * Deletes files, directories and buckets from Backblaze B2 in two passes:
 * first all contained objects (hiding files or deleting specific versions),
 * then the buckets themselves.
 *
 * @param files map of paths (and their transfer status) to delete
 * @param prompt password callback (unused here)
 * @param callback notified before each deletion
 * @throws BackgroundException on mapped B2 API or I/O failures
 */
@Override
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
    // Pass 1: delete contents; buckets are skipped and handled in pass 2 below.
    for(Path file : files.keySet()) {
        if(containerService.isContainer(file)) {
            continue;
        }
        callback.delete(file);
        if(file.getType().contains(Path.Type.upload)) {
            // In-progress multipart (large file) upload: cancel its uploaded parts.
            new B2LargeUploadPartService(session, fileid).delete(file.attributes().getVersionId());
        }
        else {
            if(file.isDirectory()) {
                // Delete /.bzEmpty if any
                final String placeholder;
                try {
                    placeholder = fileid.getVersionId(file);
                }
                catch(NotfoundException e) {
                    // Directory has no placeholder object; nothing to delete.
                    log.warn(String.format("Ignore failure %s deleting placeholder file for %s", e, file));
                    continue;
                }
                if(null == placeholder) {
                    continue;
                }
                try {
                    session.getClient().deleteFileVersion(containerService.getKey(file), placeholder);
                }
                catch(B2ApiException e) {
                    log.warn(String.format("Ignore failure %s deleting placeholder file for %s", e.getMessage(), file));
                }
                catch(IOException e) {
                    throw new DefaultIOExceptionMappingService().map("Cannot delete {0}", e, file);
                }
            }
            else if(file.isFile()) {
                try {
                    if(!versioning.isEnabled() || null == file.attributes().getVersionId()) {
                        // Add hide marker
                        if(log.isDebugEnabled()) {
                            log.debug(String.format("Add hide marker %s of %s", file.attributes().getVersionId(), file));
                        }
                        try {
                            session.getClient().hideFile(fileid.getVersionId(containerService.getContainer(file)), containerService.getKey(file));
                        }
                        catch(B2ApiException e) {
                            // Already hidden is not an error for this operation.
                            if("already_hidden".equalsIgnoreCase(e.getCode())) {
                                log.warn(String.format("Ignore failure %s hiding file %s already hidden", e.getMessage(), file));
                            }
                            else {
                                throw e;
                            }
                        }
                    }
                    else {
                        // Delete specific version
                        if(log.isDebugEnabled()) {
                            log.debug(String.format("Delete version %s of %s", file.attributes().getVersionId(), file));
                        }
                        session.getClient().deleteFileVersion(containerService.getKey(file), file.attributes().getVersionId());
                    }
                }
                catch(B2ApiException e) {
                    throw new B2ExceptionMappingService(fileid).map("Cannot delete {0}", e, file);
                }
                catch(IOException e) {
                    throw new DefaultIOExceptionMappingService().map("Cannot delete {0}", e, file);
                }
            }
            // Invalidate the cached file id for the deleted path.
            fileid.cache(file, null);
        }
    }
    // Pass 2: delete the (now empty) buckets.
    for(Path file : files.keySet()) {
        try {
            if(containerService.isContainer(file)) {
                callback.delete(file);
                // Finally delete bucket itself
                session.getClient().deleteBucket(fileid.getVersionId(file));
            }
        }
        catch(B2ApiException e) {
            throw new B2ExceptionMappingService(fileid).map("Cannot delete {0}", e, file);
        }
        catch(IOException e) {
            throw new DefaultIOExceptionMappingService().map("Cannot delete {0}", e, file);
        }
    }
}
// Deleting a file in a bucket that does not exist must surface a NotfoundException.
@Test(expected = NotfoundException.class)
public void testDeleteNotFound() throws Exception {
    final Path bucket = new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path test = new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new B2DeleteFeature(session, new B2VersionIdProvider(session)).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
@Timed
@Path("/{destination}")
@PUT
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@ManagedAsync
@Operation(
    summary = "Send a message",
    description = """
        Deliver a message to a single recipient. May be authenticated or unauthenticated; if unauthenticated,
        an unidentifed-access key or group-send endorsement token must be provided, unless the message is a story.
        """)
@ApiResponse(responseCode="200", description="Message was successfully sent", useReturnTypeSchema=true)
@ApiResponse(
    responseCode="401",
    description="The message is not a story and the authorization, unauthorized access key, or group send endorsement token is missing or incorrect")
@ApiResponse(
    responseCode="404",
    description="The message is not a story and some the recipient service ID does not correspond to a registered Signal user")
@ApiResponse(
    responseCode = "409",
    description = "Incorrect set of devices supplied for recipient",
    content = @Content(schema = @Schema(implementation = AccountMismatchedDevices[].class)))
@ApiResponse(
    responseCode = "410",
    description = "Mismatched registration ids supplied for some recipient devices",
    content = @Content(schema = @Schema(implementation = AccountStaleDevices[].class)))
public Response sendMessage(@ReadOnly @Auth Optional<AuthenticatedDevice> source,
    @Parameter(description="The recipient's unidentified access key")
    @HeaderParam(HeaderUtils.UNIDENTIFIED_ACCESS_KEY) Optional<Anonymous> accessKey,
    @Parameter(description="A group send endorsement token covering the recipient. Must not be combined with `Unidentified-Access-Key` or set on a story message.")
    @HeaderParam(HeaderUtils.GROUP_SEND_TOKEN) @Nullable GroupSendTokenHeader groupSendToken,
    @HeaderParam(HttpHeaders.USER_AGENT) String userAgent,
    // NOTE(review): this description reads like it belongs on an "online" flag, not the
    // destination path parameter — possibly mis-assigned; confirm against the OpenAPI output.
    @Parameter(description="If true, deliver the message only to recipients that are online when it is sent")
    @PathParam("destination") ServiceIdentifier destinationIdentifier,
    @Parameter(description="If true, the message is a story; access tokens are not checked and sending to nonexistent recipients is permitted")
    @QueryParam("story") boolean isStory,
    @Parameter(description="The encrypted message payloads for each recipient device")
    @NotNull @Valid IncomingMessageList messages,
    @Context ContainerRequestContext context) throws RateLimitExceededException {

  final Sample sample = Timer.start();
  try {
    // A request must carry at least one form of authorization unless it is a story.
    if (source.isEmpty() && accessKey.isEmpty() && groupSendToken == null && !isStory) {
      throw new WebApplicationException(Response.Status.UNAUTHORIZED);
    }
    // Group send tokens are mutually exclusive with every other auth mechanism and with stories.
    if (groupSendToken != null) {
      if (!source.isEmpty() || !accessKey.isEmpty()) {
        throw new BadRequestException("Group send endorsement tokens should not be combined with other authentication");
      } else if (isStory) {
        throw new BadRequestException("Group send endorsement tokens should not be sent for story messages");
      }
    }

    // Classify the sender for metrics: self-send (sync), identified, or unidentified.
    final String senderType;
    if (source.isPresent()) {
      if (source.get().getAccount().isIdentifiedBy(destinationIdentifier)) {
        senderType = SENDER_TYPE_SELF;
      } else {
        senderType = SENDER_TYPE_IDENTIFIED;
      }
    } else {
      senderType = SENDER_TYPE_UNIDENTIFIED;
    }

    // A sync message is a message an account sends to itself (to fan out to its other devices).
    boolean isSyncMessage = source.isPresent() && source.get().getAccount().isIdentifiedBy(destinationIdentifier);
    // Sync messages may not target the sender's phone-number identity.
    if (isSyncMessage && destinationIdentifier.identityType() == IdentityType.PNI) {
      throw new WebApplicationException(Status.FORBIDDEN);
    }

    // For sync messages the destination is the sender's own account; otherwise look it up.
    Optional<Account> destination;
    if (!isSyncMessage) {
      destination = accountsManager.getByServiceIdentifier(destinationIdentifier);
    } else {
      destination = source.map(AuthenticatedDevice::getAccount);
    }

    // Spam check may short-circuit the request with its own response.
    final Optional<Response> spamCheck = spamChecker.checkForSpam(
        context, source.map(AuthenticatedDevice::getAccount), destination);
    if (spamCheck.isPresent()) {
      return spamCheck.get();
    }

    // Only identified senders get a report-spam token attached to their messages.
    final Optional<byte[]> spamReportToken = switch (senderType) {
      case SENDER_TYPE_IDENTIFIED ->
          reportSpamTokenProvider.makeReportSpamToken(context, source.get(), destination);
      default -> Optional.empty();
    };

    // Validate each payload and accumulate the total content size for rate limiting.
    int totalContentLength = 0;
    for (final IncomingMessage message : messages.messages()) {
      int contentLength = 0;
      if (StringUtils.isNotEmpty(message.content())) {
        contentLength += message.content().length();
      }
      validateContentLength(contentLength, false, userAgent);
      validateEnvelopeType(message.type(), userAgent);
      totalContentLength += contentLength;
    }

    try {
      rateLimiters.getInboundMessageBytes().validate(destinationIdentifier.uuid(), totalContentLength);
    } catch (final RateLimitExceededException e) {
      // The byte limit is only enforced when the dynamic configuration says so;
      // otherwise the violation is merely recorded in the estimator.
      if (dynamicConfigurationManager.getConfiguration().getInboundMessageByteLimitConfiguration().enforceInboundLimit()) {
        messageByteLimitEstimator.add(destinationIdentifier.uuid().toString());
        throw e;
      }
    }

    try {
      if (isStory) {
        // Stories will be checked by the client; we bypass access checks here for stories.
      } else if (groupSendToken != null) {
        checkGroupSendToken(List.of(destinationIdentifier.toLibsignal()), groupSendToken);
        if (destination.isEmpty()) {
          throw new NotFoundException();
        }
      } else {
        OptionalAccess.verify(source.map(AuthenticatedDevice::getAccount), accessKey, destination,
            destinationIdentifier);
      }

      // A sync fan-out is needed when an identified sender has more than one linked device.
      boolean needsSync = !isSyncMessage && source.isPresent() && source.get().getAccount().getDevices().size() > 1;

      // We return 200 when stories are sent to a non-existent account. Since story sends bypass OptionalAccess.verify
      // we leak information about whether a destination UUID exists if we return any other code (e.g. 404) from
      // these requests.
      if (isStory && destination.isEmpty()) {
        return Response.ok(new SendMessageResponse(needsSync)).build();
      }

      // if destination is empty we would either throw an exception in OptionalAccess.verify when isStory is false
      // or else return a 200 response when isStory is true.
      assert destination.isPresent();

      if (source.isPresent() && !isSyncMessage) {
        checkMessageRateLimit(source.get(), destination.get(), userAgent);
      }
      if (isStory) {
        rateLimiters.getStoriesLimiter().validate(destination.get().getUuid());
      }

      // For a sync message, the authenticated device does not receive its own message.
      final Set<Byte> excludedDeviceIds;
      if (isSyncMessage) {
        excludedDeviceIds = Set.of(source.get().getAuthenticatedDevice().getId());
      } else {
        excludedDeviceIds = Collections.emptySet();
      }

      // Every destination device must be covered, and each supplied registration id must match.
      DestinationDeviceValidator.validateCompleteDeviceList(destination.get(),
          messages.messages().stream().map(IncomingMessage::destinationDeviceId).collect(Collectors.toSet()),
          excludedDeviceIds);
      DestinationDeviceValidator.validateRegistrationIds(destination.get(),
          messages.messages(),
          IncomingMessage::destinationDeviceId,
          IncomingMessage::destinationRegistrationId,
          destination.get().getPhoneNumberIdentifier().equals(destinationIdentifier.uuid()));

      // Classify the auth mechanism actually used, for the metrics tag below.
      final String authType;
      if (SENDER_TYPE_IDENTIFIED.equals(senderType)) {
        authType = AUTH_TYPE_IDENTIFIED;
      } else if (isStory) {
        authType = AUTH_TYPE_STORY;
      } else if (groupSendToken != null) {
        authType = AUTH_TYPE_GROUP_SEND_TOKEN;
      } else {
        authType = AUTH_TYPE_ACCESS_KEY;
      }

      final List<Tag> tags = List.of(UserAgentTagUtil.getPlatformTag(userAgent),
          Tag.of(ENDPOINT_TYPE_TAG_NAME, ENDPOINT_TYPE_SINGLE),
          Tag.of(EPHEMERAL_TAG_NAME, String.valueOf(messages.online())),
          Tag.of(SENDER_TYPE_TAG_NAME, senderType),
          Tag.of(AUTH_TYPE_TAG_NAME, authType),
          Tag.of(IDENTITY_TYPE_TAG_NAME, destinationIdentifier.identityType().name()));

      // Deliver one envelope per destination device that is actually present on the account.
      for (IncomingMessage incomingMessage : messages.messages()) {
        Optional<Device> destinationDevice = destination.get().getDevice(incomingMessage.destinationDeviceId());
        if (destinationDevice.isPresent()) {
          Metrics.counter(SENT_MESSAGE_COUNTER_NAME, tags).increment();
          sendIndividualMessage(
              source,
              destination.get(),
              destinationDevice.get(),
              destinationIdentifier,
              messages.timestamp(),
              messages.online(),
              isStory,
              messages.urgent(),
              incomingMessage,
              userAgent,
              spamReportToken);
        }
      }

      return Response.ok(new SendMessageResponse(needsSync)).build();
    } catch (MismatchedDevicesException e) {
      // 409: the supplied device list does not match the account's devices.
      throw new WebApplicationException(Response.status(409)
          .type(MediaType.APPLICATION_JSON_TYPE)
          .entity(new MismatchedDevices(e.getMissingDevices(), e.getExtraDevices()))
          .build());
    } catch (StaleDevicesException e) {
      // 410: registration ids for some devices are out of date.
      throw new WebApplicationException(Response.status(410)
          .type(MediaType.APPLICATION_JSON)
          .entity(new StaleDevices(e.getStaleDevices()))
          .build());
    }
  } finally {
    sample.stop(SEND_MESSAGE_LATENCY_TIMER);
  }
}
@Test void testMultiRecipientMessageWithGroupSendEndorsements() throws Exception { final List<Recipient> recipients = List.of( new Recipient(SINGLE_DEVICE_ACI_ID, SINGLE_DEVICE_ID1, SINGLE_DEVICE_REG_ID1, new byte[48]), new Recipient(MULTI_DEVICE_ACI_ID, MULTI_DEVICE_ID1, MULTI_DEVICE_REG_ID1, new byte[48]), new Recipient(MULTI_DEVICE_ACI_ID, MULTI_DEVICE_ID2, MULTI_DEVICE_REG_ID2, new byte[48]), new Recipient(MULTI_DEVICE_ACI_ID, MULTI_DEVICE_ID3, MULTI_DEVICE_REG_ID3, new byte[48])); // initialize our binary payload and create an input stream byte[] buffer = new byte[2048]; InputStream stream = initializeMultiPayload(recipients, buffer, true); clock.pin(Instant.parse("2024-04-09T12:00:00.00Z")); try (final Response response = resources .getJerseyTest() .target("/v1/messages/multi_recipient") .queryParam("online", true) .queryParam("ts", 1663798405641L) .queryParam("story", false) .queryParam("urgent", false) .request() .header(HttpHeaders.USER_AGENT, "FIXME") .header(HeaderUtils.GROUP_SEND_TOKEN, AuthHelper.validGroupSendTokenHeader( serverSecretParams, List.of(SINGLE_DEVICE_ACI_ID, MULTI_DEVICE_ACI_ID), Instant.parse("2024-04-10T00:00:00.00Z"))) .put(Entity.entity(stream, MultiRecipientMessageProvider.MEDIA_TYPE))) { assertThat("Unexpected response", response.getStatus(), is(equalTo(200))); verify(messageSender, exactly(4)) .sendMessage( any(), any(), argThat(env -> !env.hasSourceUuid() && !env.hasSourceDevice()), eq(true)); SendMultiRecipientMessageResponse smrmr = response.readEntity(SendMultiRecipientMessageResponse.class); assertThat(smrmr.uuids404(), is(empty())); } }
public void unregisterDashboard(String id) { removeGrantsForTarget(grnRegistry.newGRN(GRNTypes.DASHBOARD, id)); }
@Test void unregisterDashboard() { entityOwnershipService.unregisterDashboard("1234"); assertGrantRemoval(GRNTypes.DASHBOARD, "1234"); }
@Override public Iterable<Token> tokenize(String input, Language language, StemMode stemMode, boolean removeAccents) { if (input.isEmpty()) return List.of(); List<Token> tokens = textToTokens(input, analyzerFactory.getAnalyzer(language, stemMode, removeAccents)); log.log(Level.FINEST, () -> "Tokenized '" + language + "' text='" + input + "' into: n=" + tokens.size() + ", tokens=" + tokens); return tokens; }
@Test public void testAnalyzerConfiguration() { String languageCode = Language.ENGLISH.languageCode(); LuceneAnalysisConfig enConfig = new LuceneAnalysisConfig.Builder() .configDir(Optional.of(FileReference.mockFileReferenceForUnitTesting(new File(".")))) .analysis( Map.of(languageCode, new LuceneAnalysisConfig .Analysis .Builder() .tokenFilters(List.of( new LuceneAnalysisConfig .Analysis .TokenFilters .Builder() .name("englishMinimalStem"), new LuceneAnalysisConfig .Analysis .TokenFilters .Builder() .name("uppercase")))) ).build(); LuceneLinguistics linguistics = new LuceneLinguistics(enConfig, new ComponentRegistry<>()); Iterable<Token> tokens = linguistics .getTokenizer() .tokenize("Dogs and cats", Language.ENGLISH, StemMode.ALL, false); assertEquals(List.of("DOG", "AND", "CAT"), tokenStrings(tokens)); }
public static boolean isValidRootUrl(String url) { UrlValidator validator = new CustomUrlValidator(); return validator.isValid(url); }
@Test public void fragmentIsForbidden() { // this url will be used as a root url and so will be concatenated with other part, fragment part is not allowed assertFalse(UrlHelper.isValidRootUrl("http://jenkins#fragment")); assertFalse(UrlHelper.isValidRootUrl("http://jenkins.com#fragment")); }
public static String initCacheDir(String namespace, NacosClientProperties properties) { String jmSnapshotPath = properties.getProperty(JM_SNAPSHOT_PATH_PROPERTY); String namingCacheRegistryDir = ""; if (properties.getProperty(PropertyKeyConst.NAMING_CACHE_REGISTRY_DIR) != null) { namingCacheRegistryDir = File.separator + properties.getProperty(PropertyKeyConst.NAMING_CACHE_REGISTRY_DIR); } if (!StringUtils.isBlank(jmSnapshotPath)) { cacheDir = jmSnapshotPath + File.separator + FILE_PATH_NACOS + namingCacheRegistryDir + File.separator + FILE_PATH_NAMING + File.separator + namespace; } else { cacheDir = properties.getProperty(USER_HOME_PROPERTY) + File.separator + FILE_PATH_NACOS + namingCacheRegistryDir + File.separator + FILE_PATH_NAMING + File.separator + namespace; } return cacheDir; }
@Test void testInitCacheDirWithJmSnapshotPathRootAndWithoutCache() { System.setProperty("JM.SNAPSHOT.PATH", "/home/snapshot"); String actual = CacheDirUtil.initCacheDir("test", NacosClientProperties.PROTOTYPE.derive()); assertEquals("/home/snapshot/nacos/naming/test", actual); }
protected int calculateConcurency() { final int customLimit = filterConcurrencyCustom.get(); return customLimit != DEFAULT_FILTER_CONCURRENCY_LIMIT ? customLimit : filterConcurrencyDefault.get(); }
@Test void validateFilterSpecificConcurrencyLimitOverride() { config.setProperty("zuul.filter.concurrency.limit.default", 7000); config.setProperty("zuul.ConcInboundFilter.in.concurrency.limit", 4300); final int[] limit = {0}; class ConcInboundFilter extends BaseFilter { @Override public Observable applyAsync(ZuulMessage input) { limit[0] = calculateConcurency(); return Observable.just("Done"); } @Override public FilterType filterType() { return FilterType.INBOUND; } @Override public boolean shouldFilter(ZuulMessage msg) { return true; } } new ConcInboundFilter().applyAsync(new ZuulMessageImpl(new SessionContext(), new Headers())); Truth.assertThat(limit[0]).isEqualTo(4300); }
public void reset() { currentPosition = 0; elementIndex = -1; nextElement = 0; advance(); }
@Test public void returnsValues() { GapEncodedVariableLengthIntegerReader reader = reader(1, 10, 100, 105, 107, 200); assertValues(reader, 1, 10, 100, 105, 107, 200); reader.reset(); assertValues(reader, 1, 10, 100, 105, 107, 200); }
@Override public Health health() { Map<String, Health> healths = circuitBreakerRegistry.getAllCircuitBreakers().stream() .filter(this::isRegisterHealthIndicator) .collect(Collectors.toMap(CircuitBreaker::getName, this::mapBackendMonitorState)); Status status = this.statusAggregator.getAggregateStatus(healths.values().stream().map(Health::getStatus).collect(Collectors.toSet())); return Health.status(status).withDetails(healths).build(); }
@Test
public void healthMetricsAndConfig() {
    // given: a registry containing a single circuit breaker with health reporting enabled
    CircuitBreakerConfig config = mock(CircuitBreakerConfig.class);
    CircuitBreakerRegistry registry = mock(CircuitBreakerRegistry.class);
    CircuitBreaker.Metrics metrics = mock(CircuitBreaker.Metrics.class);
    CircuitBreaker circuitBreaker = mock(CircuitBreaker.class);
    CircuitBreakerConfigurationProperties.InstanceProperties instanceProperties =
        mock(CircuitBreakerConfigurationProperties.InstanceProperties.class);
    CircuitBreakerConfigurationProperties circuitBreakerProperties =
        mock(CircuitBreakerConfigurationProperties.class);
    CircuitBreakersHealthIndicator healthIndicator =
        new CircuitBreakersHealthIndicator(registry, circuitBreakerProperties, new SimpleStatusAggregator());

    // when: metrics stay within their thresholds (failure 20% < 30%, slow 20% < 50%)
    when(config.getFailureRateThreshold()).thenReturn(30f);
    when(metrics.getFailureRate()).thenReturn(20f);
    when(metrics.getSlowCallRate()).thenReturn(20f);
    when(config.getSlowCallRateThreshold()).thenReturn(50f);
    when(metrics.getNumberOfBufferedCalls()).thenReturn(100);
    when(metrics.getNumberOfFailedCalls()).thenReturn(20);
    when(metrics.getNumberOfSlowCalls()).thenReturn(20);
    when(metrics.getNumberOfNotPermittedCalls()).thenReturn(0L);
    when(registry.getAllCircuitBreakers()).thenReturn(Set.of(circuitBreaker));
    when(circuitBreaker.getName()).thenReturn("test");
    when(circuitBreakerProperties.findCircuitBreakerProperties("test"))
        .thenReturn(Optional.of(instanceProperties));
    when(instanceProperties.getRegisterHealthIndicator()).thenReturn(true);
    when(instanceProperties.getAllowHealthIndicatorToFail()).thenReturn(true);
    when(circuitBreaker.getMetrics()).thenReturn(metrics);
    when(circuitBreaker.getCircuitBreakerConfig()).thenReturn(config);
    when(circuitBreaker.getState()).thenReturn(CLOSED, OPEN, HALF_OPEN, CLOSED);

    // then: the aggregate status is UP and the per-breaker detail is a nested Health
    // exposing both the observed metrics and the configured thresholds
    Health health = healthIndicator.health();

    then(health.getStatus()).isEqualTo(Status.UP);
    then(health.getDetails()).containsKey("test");
    then(health.getDetails().get("test")).isInstanceOf(Health.class);
    then(((Health) health.getDetails().get("test")).getDetails())
        .contains(
            entry("failureRate", "20.0%"),
            entry("slowCallRate", "20.0%"),
            entry("slowCallRateThreshold", "50.0%"),
            entry("failureRateThreshold", "30.0%"),
            entry("bufferedCalls", 100),
            entry("slowCalls", 20),
            entry("failedCalls", 20),
            entry("notPermittedCalls", 0L)
        );
}
public FEELFnResult<List> invoke(@ParameterName( "list" ) List list, @ParameterName( "position" ) BigDecimal position, @ParameterName( "newItem" ) Object newItem) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } if ( position == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", "cannot be null")); } if ( position.intValue() == 0 ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", "cannot be zero (parameter 'position' is 1-based)")); } if ( position.abs().intValue() > list.size() ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", "inconsistent with 'list' size")); } // spec requires us to return a new list final List<Object> result = new ArrayList<>( list ); if( position.intValue() > 0 ) { result.add( position.intValue() - 1, newItem ); } else { result.add( list.size() + position.intValue(), newItem ); } return FEELFnResult.ofResult( result ); }
@Test void invokePositionNull() { FunctionTestUtil.assertResultError(insertBeforeFunction.invoke(Collections.emptyList(), null, new Object()), InvalidParametersEvent.class); }
public static String resolveUrlWithEmbedPlayer(final String apiUrl) throws IOException, ReCaptchaException { final String response = NewPipe.getDownloader().get("https://w.soundcloud.com/player/?url=" + Utils.encodeUrlUtf8(apiUrl), SoundCloud.getLocalization()).responseBody(); return Jsoup.parse(response).select("link[rel=\"canonical\"]").first() .attr("abs:href"); }
@Test void resolveUrlWithEmbedPlayerTest() throws Exception { assertEquals("https://soundcloud.com/trapcity", SoundcloudParsingHelper.resolveUrlWithEmbedPlayer("https://api.soundcloud.com/users/26057743")); assertEquals("https://soundcloud.com/nocopyrightsounds", SoundcloudParsingHelper.resolveUrlWithEmbedPlayer("https://api.soundcloud.com/users/16069159")); assertEquals("https://soundcloud.com/trapcity", SoundcloudParsingHelper.resolveUrlWithEmbedPlayer("https://api-v2.soundcloud.com/users/26057743")); assertEquals("https://soundcloud.com/nocopyrightsounds", SoundcloudParsingHelper.resolveUrlWithEmbedPlayer("https://api-v2.soundcloud.com/users/16069159")); }
@Override public List<ValueMetaInterface> getValueMetaList() { List<ValueMetaInterface> copy; lock.readLock().lock(); try { copy = new ArrayList<>( valueMetaList ); } finally { lock.readLock().unlock(); } // kept for backward compatibility return Collections.unmodifiableList( copy ); }
@Test public void testGetValueMetaList() { List<ValueMetaInterface> list = rowMeta.getValueMetaList(); assertTrue( list.contains( string ) ); assertTrue( list.contains( integer ) ); assertTrue( list.contains( date ) ); }
@Override public String description() { return "OffsetDateTime.timeLineOrder()"; }
@Test void should_have_description() { assertThat(comparator.description()).isEqualTo("OffsetDateTime.timeLineOrder()"); }
@Override public Set<MappedFieldTypeDTO> fieldTypesByStreamIds(Collection<String> streamIds, TimeRange timeRange) { final Set<String> indexSets = streamService.indexSetIdsByIds(streamIds); final Set<String> indexNames = this.indexLookup.indexNamesForStreamsInTimeRange(ImmutableSet.copyOf(streamIds), timeRange); final Set<FieldTypeDTO> fieldTypeDTOs = this.indexFieldTypesService.findForIndexSets(indexSets) .stream() .filter(fieldTypes -> indexNames.contains(fieldTypes.indexName())) .flatMap(fieldTypes -> fieldTypes.fields().stream()) .filter(fieldTypeDTO -> !streamAwareFieldTypes || !Collections.disjoint(fieldTypeDTO.streams(), streamIds)) .collect(Collectors.toSet()); return mergeCompoundFieldTypes(fieldTypeDTOs.stream() .map(this::mapPhysicalFieldType)); }
@Test public void fieldsOfSameTypeDoNotReturnCompoundTypeIfPropertiesAreDifferent() { final List<IndexFieldTypesDTO> fieldTypes = ImmutableList.of( createIndexTypes( "deadbeef", "testIndex", FieldTypeDTO.builder().fieldName("field1").physicalType("keyword").streams(Set.of("stream1")).build() ), createIndexTypes( "affeaffe", "testIndex2", FieldTypeDTO.builder().fieldName("field1").physicalType("text").streams(Set.of("stream1")).build() ) ); when(indexFieldTypesService.findForIndexSets(Collections.singleton("indexSetId"))).thenReturn(fieldTypes); when(indexLookup.indexNamesForStreamsInTimeRange(Collections.singleton("stream1"), RelativeRange.allTime())).thenReturn(ImmutableSet.of("testIndex", "testIndex2")); final Set<MappedFieldTypeDTO> result = this.mappedFieldTypesService.fieldTypesByStreamIds(Collections.singleton("stream1"), RelativeRange.allTime()); assertThat(result).containsExactlyInAnyOrder( MappedFieldTypeDTO.create("field1", FieldTypes.Type.createType("string", ImmutableSet.of())) ); }