focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Reports whether the current operating system identifies itself as macOS ("OS X").
// Relies on the sibling getOS() accessor for the raw OS name.
public static boolean isOSX() {
    final String normalizedOsName = getOS().toUpperCase();
    return normalizedOsName.contains( "OS X" );
}
// Cross-checks Const.isOSX() against commons-lang's SystemUtils.IS_OS_MAC_OSX on the current JVM.
@Test public void testIsOSX() { assertEquals( SystemUtils.IS_OS_MAC_OSX, Const.isOSX() ); }
// Spring BeanPostProcessor hook: delegates to wrapper(bean); the wrapping criteria
// (which bean types get enhanced) live in that sibling helper — see its definition.
@Override public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { return wrapper(bean); }
// Covers three wrapping paths of postProcessBeforeInitialization: a plain Object is left
// untouched, a feign Client is wrapped into an EnhancedFeignClient, and a
// FeignBlockingLoadBalancerClient keeps its type but gets its delegate wrapped.
// NOTE(review): the stubbed BeanFactory/ObjectProvider mocks supply the collaborators
// the processor resolves lazily.
@Test public void testPostProcessBeforeInitialization() { BeanFactory beanFactory = mock(BeanFactory.class); doAnswer(invocation -> { Class<?> clazz = invocation.getArgument(0); if (clazz.equals(BlockingLoadBalancerClient.class)) { return mock(BlockingLoadBalancerClient.class); } if (clazz.equals(LoadBalancerProperties.class)) { return mock(LoadBalancerProperties.class); } if (clazz.equals(LoadBalancerClientFactory.class)) { return mock(LoadBalancerClientFactory.class); } return null; }).when(beanFactory).getBean(any(Class.class)); ObjectProvider objectProvider = mock(ObjectProvider.class); doReturn(new PolarisLoadBalancerFeignRequestTransformer()).when(objectProvider).getObject(); doReturn(objectProvider).when(beanFactory).getBeanProvider(LoadBalancerFeignRequestTransformer.class); enhancedFeignBeanPostProcessor.setBeanFactory(beanFactory); // isNeedWrap(bean) == false Object bean1 = new Object(); Object bean = enhancedFeignBeanPostProcessor.postProcessBeforeInitialization(bean1, "bean1"); assertThat(bean).isNotInstanceOfAny(EnhancedFeignClient.class); // bean instanceOf Client.class Client bean2 = mock(Client.class); bean = enhancedFeignBeanPostProcessor.postProcessBeforeInitialization(bean2, "bean2"); assertThat(bean).isInstanceOf(EnhancedFeignClient.class); // bean instanceOf FeignBlockingLoadBalancerClient.class FeignBlockingLoadBalancerClient bean4 = mock(FeignBlockingLoadBalancerClient.class); doReturn(mock(Client.class)).when(bean4).getDelegate(); bean = enhancedFeignBeanPostProcessor.postProcessBeforeInitialization(bean4, "bean4"); assertThat(bean).isInstanceOf(FeignBlockingLoadBalancerClient.class); assertThat(((FeignBlockingLoadBalancerClient) bean).getDelegate()).isInstanceOf(EnhancedFeignClient.class); }
// Builds a GrpcUpstream for host:port with the default weight and the current timestamp.
// The upstream's status flag is true only when port is non-null AND host is non-blank.
public static GrpcUpstream buildDefaultGrpcUpstream(final String host, final Integer port) { return GrpcUpstream.builder().upstreamUrl(buildUrl(host, port)) .weight(DEFAULT_WEIGHT).timestamp(System.currentTimeMillis()) .status(Objects.nonNull(port) && StringUtils.isNotBlank(host)) .build(); }
// Verifies the built upstream's URL is "HOST:PORT" and that no protocol is set by default.
@Test public void buildDefaultGrpcUpstream() { GrpcUpstream grpcUpstream = CommonUpstreamUtils.buildDefaultGrpcUpstream(HOST, PORT); Assert.assertNotNull(grpcUpstream); Assert.assertEquals(HOST + ":" + PORT, grpcUpstream.getUpstreamUrl()); Assert.assertNull(grpcUpstream.getProtocol()); }
// Intentional no-op: this implementation keeps no buffered state visible here to flush.
@Override public void flush() { }
// Smoke test: flush() on the no-op implementation must simply not throw.
@Test public void flush() { mSensorsAPI.flush(); }
// Resolves the data source names referenced by sharding rules.
// Falls back to the full dataSourceNames argument when (a) no table rules are configured
// at all, or (b) any (non-auto) table rule lacks actualDataNodes — in either case the
// node expressions cannot pin down a subset. Otherwise unions the names extracted from
// each table and auto-table rule via the single-rule getDataSourceNames overloads,
// preserving first-seen order with a LinkedHashSet.
private Collection<String> getDataSourceNames(final Collection<ShardingTableRuleConfiguration> tableRuleConfigs, final Collection<ShardingAutoTableRuleConfiguration> autoTableRuleConfigs, final Collection<String> dataSourceNames) { if (tableRuleConfigs.isEmpty() && autoTableRuleConfigs.isEmpty()) { return dataSourceNames; } if (tableRuleConfigs.stream().map(ShardingTableRuleConfiguration::getActualDataNodes).anyMatch(each -> null == each || each.isEmpty())) { return dataSourceNames; } Collection<String> result = new LinkedHashSet<>(); tableRuleConfigs.forEach(each -> result.addAll(getDataSourceNames(each))); autoTableRuleConfigs.forEach(each -> result.addAll(getDataSourceNames(each))); return result; }
// With a single table rule over "ds_${0..1}.table_${0..2}", the rule's data sources
// must resolve to exactly ds_0 and ds_1 (inline expression expansion).
@Test void assertGetDataSourceNamesWithoutShardingAutoTables() { ShardingRuleConfiguration shardingRuleConfig = new ShardingRuleConfiguration(); ShardingTableRuleConfiguration shardingTableRuleConfig = new ShardingTableRuleConfiguration("LOGIC_TABLE", "ds_${0..1}.table_${0..2}"); shardingRuleConfig.getTables().add(shardingTableRuleConfig); ShardingRule shardingRule = new ShardingRule(shardingRuleConfig, createDataSources(), mock(ComputeNodeInstanceContext.class)); assertThat(shardingRule.getDataSourceNames(), is(new LinkedHashSet<>(Arrays.asList("ds_0", "ds_1")))); }
// Returns the list mapped to {@code key}, creating and installing an empty
// LinkedList on first access so callers can append without a containsKey dance.
static <K, V> List<V> getOrCreateListValue(Map<K, List<V>> map, K key) {
    List<V> existing = map.get(key);
    if (existing == null) {
        existing = new LinkedList<>();
        map.put(key, existing);
    }
    return existing;
}
// Checks that the same live list is returned on repeated lookups (mutations visible)
// and that a fresh empty list is created for an unseen key.
@Test public void testGetOrCreateListValue() { Map<String, List<String>> map = new HashMap<>(); List<String> fooList = KafkaAdminClient.getOrCreateListValue(map, "foo"); assertNotNull(fooList); fooList.add("a"); fooList.add("b"); List<String> fooList2 = KafkaAdminClient.getOrCreateListValue(map, "foo"); assertEquals(fooList, fooList2); assertTrue(fooList2.contains("a")); assertTrue(fooList2.contains("b")); List<String> barList = KafkaAdminClient.getOrCreateListValue(map, "bar"); assertNotNull(barList); assertTrue(barList.isEmpty()); }
// Resolves profile-specific Nacos config-data resources: loads NacosConfigProperties for
// this resolver context, registers them (and the config manager) in the bootstrap context
// if absent, then maps the location + profiles onto concrete resources.
@Override public List<NacosConfigDataResource> resolveProfileSpecific( ConfigDataLocationResolverContext resolverContext, ConfigDataLocation location, Profiles profiles) throws ConfigDataLocationNotFoundException { NacosConfigProperties properties = loadProperties(resolverContext); ConfigurableBootstrapContext bootstrapContext = resolverContext .getBootstrapContext(); bootstrapContext.registerIfAbsent(NacosConfigProperties.class, InstanceSupplier.of(properties)); registerConfigManager(properties, bootstrapContext); return loadConfigDataResources(location, profiles, properties); }
// When no NacosConfigProperties are registered yet, resolving "nacos:test.yml" must
// create fresh properties (bootstrapContext.get is never hit, verified with times(0))
// and yield one resource with the default group and the requested dataId.
@Test void whenNoneInBootstrapContext_thenCreateNewConfigClientProperties() { when(bootstrapContext.isRegistered(eq(NacosConfigProperties.class))) .thenReturn(false); when(bootstrapContext.get(eq(NacosConfigProperties.class))) .thenReturn(new NacosConfigProperties()); List<NacosConfigDataResource> resources = this.resolver.resolveProfileSpecific( context, ConfigDataLocation.of("nacos:test.yml"), mock(Profiles.class)); assertThat(resources).hasSize(1); verify(bootstrapContext, times(0)).get(eq(NacosConfigProperties.class)); NacosConfigDataResource resource = resources.get(0); assertThat(resource.getConfig().getGroup()).isEqualTo("DEFAULT_GROUP"); assertThat(resource.getConfig().getDataId()).isEqualTo("test.yml"); }
// Validates the user-configured platforms against a non-manifest-list base image.
// Rejects multi-platform builds outright, and rejects a single configured platform that
// disagrees with the base image's os/architecture — except for the implicit amd64/linux
// default, which is skipped to avoid false alarms (see inline comment).
// The base image is identified by its tar path when present, else its image reference.
static void checkManifestPlatform( BuildContext buildContext, ContainerConfigurationTemplate containerConfig) throws PlatformNotFoundInBaseImageException { Optional<Path> path = buildContext.getBaseImageConfiguration().getTarPath(); String baseImageName = path.map(Path::toString) .orElse(buildContext.getBaseImageConfiguration().getImage().toString()); Set<Platform> platforms = buildContext.getContainerConfiguration().getPlatforms(); Verify.verify(!platforms.isEmpty()); if (platforms.size() != 1) { String msg = String.format( "cannot build for multiple platforms since the base image '%s' is not a manifest list.", baseImageName); throw new PlatformNotFoundInBaseImageException(msg); } else { Platform platform = platforms.iterator().next(); if (!platform.getArchitecture().equals(containerConfig.getArchitecture()) || !platform.getOs().equals(containerConfig.getOs())) { // Unfortunately, "platforms" has amd64/linux by default even if the user didn't explicitly // configure it. Skip reporting to suppress false alarm. if (!(platform.getArchitecture().equals("amd64") && platform.getOs().equals("linux"))) { String msg = String.format( "the configured platform (%s/%s) doesn't match the platform (%s/%s) of the base image (%s)", platform.getArchitecture(), platform.getOs(), containerConfig.getArchitecture(), containerConfig.getOs(), baseImageName); throw new PlatformNotFoundInBaseImageException(msg); } } } }
// Two configured platforms against a non-manifest-list base image must raise
// PlatformNotFoundInBaseImageException with the "multiple platforms" message.
@Test public void testCheckManifestPlatform_multiplePlatformsConfigured() { Mockito.when(containerConfig.getPlatforms()) .thenReturn(ImmutableSet.of(new Platform("amd64", "linux"), new Platform("arch", "os"))); Exception ex = assertThrows( PlatformNotFoundInBaseImageException.class, () -> PlatformChecker.checkManifestPlatform( buildContext, new ContainerConfigurationTemplate())); assertThat(ex) .hasMessageThat() .isEqualTo( "cannot build for multiple platforms since the base image 'scratch' is not a manifest list."); }
// Extracts job IDs from this parser's input source; subclasses define the format.
// May throw IOException while reading the underlying source.
abstract List<String> parseJobID() throws IOException;
// Parses a recorded streaming-job stderr file and expects exactly one job ID.
@Test
public void testParseStreaming() throws IOException {
    String errFileName = "src/test/data/status/streaming";
    JarJobIDParser jarJobIDParser = new JarJobIDParser(errFileName, new Configuration());
    List<String> jobs = jarJobIDParser.parseJobID();
    // JUnit's assertEquals takes (expected, actual); the original had the arguments
    // swapped, which yields a misleading failure message on regression.
    Assert.assertEquals(1, jobs.size());
}
// ksqlDB UDF: keeps the first numChars characters of input unmasked and delegates the
// masking of the remainder to doMask with a default-rules Masker.
@Udf(description = "Returns a masked version of the input string. All characters except for the" + " first n will be replaced according to the default masking rules.") @SuppressWarnings("MethodMayBeStatic") // Invoked via reflection public String mask( @UdfParameter("input STRING to be masked") final String input, @UdfParameter("number of characters to keep unmasked at the start") final int numChars ) { return doMask(new Masker(), input, numChars); }
// A keep-count larger than the input length must leave the string unchanged.
@Test public void shouldNotMaskAnyCharsIfLengthTooLong() { final String result = udf.mask("AbCd#$123xy Z", 999); assertThat(result, is("AbCd#$123xy Z")); }
// Overall migration progress in whole percent: 100 when there are no indices (done by
// definition, and avoids division by zero); otherwise the mean of per-index progress,
// computed at 4 decimal places then scaled by 10^2 to a percentage.
// NOTE(review): indices whose progress() is null are filtered from the sum but still
// counted in the divisor, so they effectively contribute 0% — confirm that is intended.
@JsonProperty("progress") public int progress() { if (indices.isEmpty()) { return 100; // avoid division by zero. No indices == migration is immediately done } final BigDecimal sum = indices.stream() .filter(i -> i.progress() != null) .map(RemoteReindexMigration::indexProgress) .reduce(BigDecimal.ZERO, BigDecimal::add); return sum.divide(BigDecimal.valueOf(indices.size()), 4, RoundingMode.HALF_UP).scaleByPowerOfTen(2).intValue(); }
// A single FINISHED index must report 100% overall progress.
@Test void testProgressOneIndex() { final RemoteReindexMigration migration = withIndices( index("one", RemoteReindexingMigrationAdapter.Status.FINISHED) ); Assertions.assertThat(migration.progress()).isEqualTo(100); }
// Camel operation for eth_getFilterChanges: resolves the filter id from the message
// header (falling back to the endpoint configuration), issues the request, and — unless
// checkForError flagged an error response — sets the returned logs as the message body.
@InvokeOnHeader(Web3jConstants.ETH_GET_FILTER_CHANGES) void ethGetFilterChanges(Message message) throws IOException { BigInteger filterId = message.getHeader(Web3jConstants.FILTER_ID, configuration::getFilterId, BigInteger.class); Request<?, EthLog> request = web3j.ethGetFilterChanges(filterId); setRequestId(message, request); EthLog response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getLogs()); } }
// Verifies the ETH_GET_FILTER_CHANGES operation copies the (stubbed, empty) log list
// from the web3j response into the exchange body.
@Test
public void ethGetFilterChangesTest() throws Exception {
    EthLog response = Mockito.mock(EthLog.class);
    Mockito.when(mockWeb3j.ethGetFilterChanges(any())).thenReturn(request);
    Mockito.when(request.send()).thenReturn(response);
    // Collections.emptyList() is the type-safe replacement for the raw-typed
    // Collections.EMPTY_LIST constant the original used.
    Mockito.when(response.getLogs()).thenReturn(Collections.emptyList());
    Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.ETH_GET_FILTER_CHANGES);
    template.send(exchange);
    List body = exchange.getIn().getBody(List.class);
    assertTrue(body.isEmpty());
}
// Factory: returns a cache view scoped to the given computation id.
public ForComputation forComputation(String computation) { return new ForComputation(computation); }
// Exercises cache isolation across sharding keys: two shards of "key1" and one of
// "key2" must not see each other's state; a persisted value stays visible to later
// views of the same (key, shard) at advanced work/cache tokens.
@Test public void testMultipleShardsOfKey() throws Exception { TestStateTag tag = new TestStateTag("tag1"); WindmillStateCache.ForKeyAndFamily key1CacheShard1 = cache .forComputation(COMPUTATION) .forKey(computationKey(COMPUTATION, "key1", 1), 0L, 0L) .forFamily(STATE_FAMILY); WindmillStateCache.ForKeyAndFamily key1CacheShard2 = cache .forComputation(COMPUTATION) .forKey(computationKey(COMPUTATION, "key1", 2), 0L, 0L) .forFamily(STATE_FAMILY); WindmillStateCache.ForKeyAndFamily key2CacheShard1 = cache .forComputation(COMPUTATION) .forKey(computationKey(COMPUTATION, "key2", 1), 0L, 0L) .forFamily(STATE_FAMILY); TestState state1 = new TestState("g1"); key1CacheShard1.put(StateNamespaces.global(), tag, state1, 2); key1CacheShard1.persist(); assertEquals(Optional.of(state1), key1CacheShard1.get(StateNamespaces.global(), tag)); key1CacheShard1 = cache .forComputation(COMPUTATION) .forKey(computationKey(COMPUTATION, "key1", 1), 0L, 1L) .forFamily(STATE_FAMILY); assertEquals(Optional.of(state1), key1CacheShard1.get(StateNamespaces.global(), tag)); assertEquals(Optional.empty(), key1CacheShard2.get(StateNamespaces.global(), tag)); assertEquals(Optional.empty(), key2CacheShard1.get(StateNamespaces.global(), tag)); TestState state2 = new TestState("g2"); key1CacheShard2.put(StateNamespaces.global(), tag, state2, 2); assertEquals(Optional.of(state2), key1CacheShard2.get(StateNamespaces.global(), tag)); key1CacheShard2.persist(); key1CacheShard2 = cache .forComputation(COMPUTATION) .forKey(computationKey(COMPUTATION, "key1", 2), 0L, 20L) .forFamily(STATE_FAMILY); assertEquals(Optional.of(state2), key1CacheShard2.get(StateNamespaces.global(), tag)); assertEquals(Optional.of(state1), key1CacheShard1.get(StateNamespaces.global(), tag)); assertEquals(Optional.empty(), key2CacheShard1.get(StateNamespaces.global(), tag)); }
// Enqueues the committer event for asynchronous processing; blocks if the queue is full.
@Override
public void handle(CommitterEvent event) {
    try {
        eventQueue.put(event);
    } catch (InterruptedException e) {
        // Restore the interrupt status before translating to an unchecked exception
        // so callers (and the surrounding thread pool) can still observe the interrupt.
        Thread.currentThread().interrupt();
        throw new YarnRuntimeException(e);
    }
}
// Drives a job-commit through CommitterEventHandler with a committer stubbed to throw:
// expects a JobCommitFailedEvent, the start-commit and end-commit-failure marker files
// present, the success marker absent, and exactly one commitJob invocation.
@Test public void testFailure() throws Exception { AppContext mockContext = mock(AppContext.class); OutputCommitter mockCommitter = mock(OutputCommitter.class); Clock mockClock = mock(Clock.class); CommitterEventHandler handler = new CommitterEventHandler(mockContext, mockCommitter, new TestingRMHeartbeatHandler()); YarnConfiguration conf = new YarnConfiguration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); JobContext mockJobContext = mock(JobContext.class); ApplicationAttemptId attemptid = ApplicationAttemptId.fromString("appattempt_1234567890000_0001_0"); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(attemptid.getApplicationId())); WaitForItHandler waitForItHandler = new WaitForItHandler(); when(mockContext.getApplicationID()).thenReturn(attemptid.getApplicationId()); when(mockContext.getApplicationAttemptId()).thenReturn(attemptid); when(mockContext.getEventHandler()).thenReturn(waitForItHandler); when(mockContext.getClock()).thenReturn(mockClock); doThrow(new YarnRuntimeException("Intentional Failure")).when(mockCommitter) .commitJob(any(JobContext.class)); handler.init(conf); handler.start(); try { handler.handle(new CommitterJobCommitEvent(jobId, mockJobContext)); String user = UserGroupInformation.getCurrentUser().getShortUserName(); Path startCommitFile = MRApps.getStartJobCommitFile(conf, user, jobId); Path endCommitSuccessFile = MRApps.getEndJobCommitSuccessFile(conf, user, jobId); Path endCommitFailureFile = MRApps.getEndJobCommitFailureFile(conf, user, jobId); Event e = waitForItHandler.getAndClearEvent(); assertNotNull(e); assertTrue(e instanceof JobCommitFailedEvent); FileSystem fs = FileSystem.get(conf); assertTrue(fs.exists(startCommitFile)); assertFalse(fs.exists(endCommitSuccessFile)); assertTrue(fs.exists(endCommitFailureFile)); verify(mockCommitter).commitJob(any(JobContext.class)); } finally { handler.stop(); } }
// Returns one page of certificates, sorted ascending by their activeUntil date.
public Page<Certificate> getAllCertificates(int pageIndex, int pageSize) { return certificateRepository.findAll(PageRequest.of(pageIndex, pageSize, Sort.by(Sort.Direction.ASC, "activeUntil"))); }
// Stubs the repository for page (1, 10) sorted by activeUntil and verifies the service
// forwards exactly that PageRequest and returns the stubbed page unchanged.
@Test public void getAllCertificates() { when(certificateRepositoryMock.findAll(PageRequest.of(1, 10, Sort.by(Sort.Direction.ASC, "activeUntil")))).thenReturn(getPageCertificates()); Page<Certificate> result = certificateServiceMock.getAllCertificates(1, 10); verify(certificateRepositoryMock, times(1)).findAll(PageRequest.of(1, 10, Sort.by(Sort.Direction.ASC, "activeUntil"))); assertNotNull(result); assertEquals(1, result.getTotalPages()); assertEquals(2, result.getContent().size()); }
// Parses a Maven-style version range such as "[1.0,2.0)" into a VersionRange.
// '[' / ']' mark inclusive endpoints, '(' / ')' exclusive; an empty endpoint means
// unbounded on that side (Version.minimum()/maximum()) and is forced exclusive.
// Throws IllegalArgumentException when min is not strictly less than max; structural
// validation of the string happens first in validateRangeString.
public static VersionRange parse(String rangeString) { validateRangeString(rangeString); Inclusiveness minVersionInclusiveness = rangeString.startsWith("[") ? Inclusiveness.INCLUSIVE : Inclusiveness.EXCLUSIVE; Inclusiveness maxVersionInclusiveness = rangeString.endsWith("]") ? Inclusiveness.INCLUSIVE : Inclusiveness.EXCLUSIVE; int commaIndex = rangeString.indexOf(','); String minVersionString = rangeString.substring(1, commaIndex).trim(); Version minVersion; if (minVersionString.isEmpty()) { minVersionInclusiveness = Inclusiveness.EXCLUSIVE; minVersion = Version.minimum(); } else { minVersion = Version.fromString(minVersionString); } String maxVersionString = rangeString.substring(commaIndex + 1, rangeString.length() - 1).trim(); Version maxVersion; if (maxVersionString.isEmpty()) { maxVersionInclusiveness = Inclusiveness.EXCLUSIVE; maxVersion = Version.maximum(); } else { maxVersion = Version.fromString(maxVersionString); } if (!minVersion.isLessThan(maxVersion)) { throw new IllegalArgumentException( String.format( "Min version in range must be less than max version in range, got '%s'", rangeString)); } return builder() .setMinVersion(minVersion) .setMinVersionInclusiveness(minVersionInclusiveness) .setMaxVersion(maxVersion) .setMaxVersionInclusiveness(maxVersionInclusiveness) .build(); }
// A range string missing its opening bracket must be rejected with the exact
// validation message from validateRangeString.
@Test public void parse_withRangeNotStartingWithParenthesis_throwsIllegalArgumentException() { IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> VersionRange.parse(",1.0]")); assertThat(exception) .hasMessageThat() .isEqualTo("Version range must start with '[' or '(', got ',1.0]'"); }
// Convenience overload: resolves the classpath folder for the class (plus optional
// sub-folders) via toFolder, then delegates to the folder-based overload.
public Stream<Path> listAllChildrenOnClasspath(Class<?> clazz, String... subFolder) { return listAllChildrenOnClasspath(toFolder(clazz, subFolder)); }
// Lists siblings of Test.class inside the JUnit jar and expects known entries,
// demonstrating that classpath listing works for jar-packaged resources.
@Test void canListChildrenInJar() { try(ClassPathResourceProvider resourceProvider = new ClassPathResourceProvider()) { final Stream<String> folderItems = resourceProvider .listAllChildrenOnClasspath(Test.class) .map(path -> path.getFileName().toString()); assertThat(folderItems).contains("Test.class", "Tags.class"); } }
// mTLS authorization filter. Order of checks matters:
// 1) no client certificate -> 401 Unauthorized;
// 2) legacy mode -> grant READ+WRITE with no client-id matching and pass through;
// 3) request maps to no required permission -> 403 Forbidden;
// 4) collect all allowed clients whose permission set covers the required permission
//    AND whose certificate list contains the presented leaf cert (certs.get(0));
//    no match -> 403, otherwise attach the matched client ids and the union of their
//    permissions to the request and pass through.
@Override protected Optional<ErrorResponse> filter(DiscFilterRequest req) { var certs = req.getClientCertificateChain(); log.fine(() -> "Certificate chain contains %d elements".formatted(certs.size())); if (certs.isEmpty()) { log.fine("Missing client certificate"); return Optional.of(new ErrorResponse(Response.Status.UNAUTHORIZED, "Unauthorized")); } if (legacyMode) { log.fine("Legacy mode validation complete"); ClientPrincipal.attachToRequest(req, Set.of(), Set.of(READ, WRITE)); return Optional.empty(); } var permission = Permission.getRequiredPermission(req).orElse(null); if (permission == null) return Optional.of(new ErrorResponse(Response.Status.FORBIDDEN, "Forbidden")); var clientCert = certs.get(0); var clientIds = new TreeSet<String>(); var permissions = new TreeSet<Permission>(); for (Client c : allowedClients) { if (!c.permissions().contains(permission)) continue; if (!c.certificates().contains(clientCert)) continue; clientIds.add(c.id()); permissions.addAll(c.permissions()); } if (clientIds.isEmpty()) return Optional.of(new ErrorResponse(Response.Status.FORBIDDEN, "Forbidden")); ClientPrincipal.attachToRequest(req, clientIds, permissions); return Optional.empty(); }
// A POST with a cert whose client lacks the required permission must yield 403.
@Test void fails_on_client_with_invalid_permissions() { var req = FilterTestUtils.newRequestBuilder() .withMethod(Method.POST) .withClientCertificate(SEARCH_CERT) .build(); var responseHandler = new MockResponseHandler(); newFilterWithClientsConfig().filter(req, responseHandler); assertNotNull(responseHandler.getResponse()); assertEquals(FORBIDDEN, responseHandler.getResponse().getStatus()); }
// InputStream-style read: returns the next byte as an unsigned int in [0, 255]
// (the & 0xFF strips sign extension), or -1 once position reaches length.
public int read() { int b = -1; if (position < length) { b = buffer.getByte(offset + position) & 0xFF; ++position; } return b; }
// A stored byte of -1 (0xFF) must come back from read() as the positive int 255,
// not a sign-extended negative value.
@Test
void shouldCorrectlyConvertBytesToPositiveIntegers() {
    final byte[] data = { (byte)-1, 0 };
    final DirectBuffer buffer = new UnsafeBuffer(data);
    final DirectBufferInputStream inputStream = new DirectBufferInputStream(buffer);
    // assertEquals takes (expected, actual); the original had them swapped,
    // which inverts the roles in the failure message.
    assertEquals(255, inputStream.read());
}
// A lock may be taken when the guard is effectively free (lease expired or not locked)
// or when the given owner is entitled to unlock it (re-entrant acquisition).
// Throws NullPointerException for a null ownerId via Preconditions.checkNotNull.
public boolean allowsLock(UUID ownerId) { Preconditions.checkNotNull(ownerId); boolean notLocked = isLeaseExpired() || !isLocked(); return notLocked || allowsUnlock(ownerId); }
// Null owner id must be rejected with NullPointerException (JUnit4 expected-exception style).
@Test(expected = NullPointerException.class) public void testAllowsLock_nullTransactionId() { LockGuard stateLock = LockGuard.NOT_LOCKED; stateLock.allowsLock(null); }
// Returns every dependency from other classes that targets this class, looked up in
// the pre-built reverse-dependency index.
@PublicAPI(usage = ACCESS) public Set<Dependency> getDirectDependenciesToSelf() { return reverseDependencies.getDirectDependenciesTo(this); }
// Imports a referencing/referenced pair and checks the referenced class reports the
// incoming reference dependencies from the referencing class.
@Test public void direct_dependencies_to_self_by_references() { JavaClass javaClass = importClasses(AReferencingB.class, BReferencedByA.class).get(BReferencedByA.class); assertReferencesFromAToB(javaClass.getDirectDependenciesToSelf()); }
// Static factory for the "app namespace missing" 400 error; the appId and namespaceName
// are interpolated into the message by the BadRequestException format constructor.
public static BadRequestException appNamespaceNotExists(String appId, String namespaceName) { return new BadRequestException("appNamespace not exist for appId:%s namespaceName:%s", appId, namespaceName); }
// Verifies the formatted message interpolates both the appId and namespace name.
@Test public void testAppNamespaceNotExists() { BadRequestException appNamespaceNotExists = BadRequestException.appNamespaceNotExists(appId, namespaceName); assertEquals("appNamespace not exist for appId:app-1001 namespaceName:application", appNamespaceNotExists.getMessage()); }
// Parses the textual query-parameter value into its Boolean representation
// ("true" case-insensitively -> TRUE, anything else -> FALSE).
@Override
public Boolean convertStringToValue(final String value) {
    final boolean parsed = Boolean.parseBoolean(value);
    return parsed;
}
// Exercises convertStringToValue for both boolean literals. The original test body
// invoked convertValueToString, leaving the method named in the test uncovered.
@Test
void testConvertStringToValue() {
    assertThat(allowNonRestoredStateQueryParameter.convertStringToValue("false"))
            .isEqualTo(false);
    assertThat(allowNonRestoredStateQueryParameter.convertStringToValue("true"))
            .isEqualTo(true);
}
// Chooses the integer-compression mode for values in [min, max].
// Check order matters: a non-negative range that fits in 30 bits prefers the
// positive-only encoding; otherwise a signed range within 29 bits still compresses;
// anything wider is stored uncompressed.
public static Mode compressionMode(int min, int max) {
    if (min >= 0 && max < (1 << 30)) {
        return Mode.COMPRESSED_POSITIVE;
    }
    if (min > (-1 << 29) && max < (1 << 29)) {
        return Mode.COMPRESSED;
    }
    return Mode.NONE;
}
// Boundary sweep of compressionMode: out-of-range extremes -> NONE, signed 29-bit
// ranges -> COMPRESSED, non-negative 30-bit ranges -> COMPRESSED_POSITIVE.
@Test public void testMode() { assertEquals(Mode.NONE, IntegerCompressor.compressionMode(-0x80000000, 0x00000000)); assertEquals(Mode.NONE, IntegerCompressor.compressionMode(-0x20000000, 0x00000000)); assertEquals(Mode.NONE, IntegerCompressor.compressionMode(-0x00000001, 0x3fffffff)); assertEquals(Mode.COMPRESSED, IntegerCompressor.compressionMode(-0x1fffffff, 0x1fffffff)); assertEquals(Mode.COMPRESSED, IntegerCompressor.compressionMode(-0x1fffffff, -0x1fffffff)); assertEquals(Mode.COMPRESSED, IntegerCompressor.compressionMode(-0x1fffffff, 0x00000000)); assertEquals(Mode.COMPRESSED, IntegerCompressor.compressionMode(-0x00000001, 0x1fffffff)); assertEquals(Mode.COMPRESSED_POSITIVE, IntegerCompressor.compressionMode( 0x00000000, 0x00000000)); assertEquals(Mode.COMPRESSED_POSITIVE, IntegerCompressor.compressionMode( 0x00000000, 0x3fffffff)); assertEquals(Mode.COMPRESSED_POSITIVE, IntegerCompressor.compressionMode( 0x3fffffff, 0x3fffffff)); }
// Simple accessor for the column's identifier.
public ColumnId getColumnId() { return columnId; }
// Deserializes a minimal JSON column and checks the name was mapped into the ColumnId.
@Test public void testColumnDeserialization() { String str = "{\"name\": \"test\"}"; Column column = GsonUtils.GSON.fromJson(str, Column.class); Assert.assertEquals("test", column.getColumnId().getId()); }
// One-shot Mongo migration: rewrites aggregation widgets' "chart_colors" from a
// field->color map into a list of {field_name, chart_color} documents. Skips entirely
// if MigrationCompleted is already recorded; only views that actually contained a
// chart_colors map are written back, and their ids are reported in MigrationCompleted.
// NOTE(review): the log line below appears split across two lines by extraction of
// this snippet — the break inside the string literal is preserved byte-for-byte here.
@Override public void upgrade() { if (clusterConfigService.get(MigrationCompleted.class) != null) { LOG.debug("Migration already completed."); return; } final Set<String> viewIds = new HashSet<>(); final FindIterable<Document> documents = viewsCollection.find(); boolean viewMigrated; for (final Document view : documents) { viewMigrated = false; final Document states = view.get("state", Document.class); for (Map.Entry<String, Object> obj : states.entrySet()) { final Document state = (Document) obj.getValue(); if (state.get("widgets") instanceof List) { @SuppressWarnings("unchecked") final List<Document> widgets = (List) state.get("widgets"); for (final Document widget : widgets) { final String type = widget.getString("type"); if (type.equals("aggregation")) { final Document config = widget.get("config", Document.class); final Document formatSettings = config.get("formatting_settings", Document.class); if (formatSettings == null) { continue; } final Object charColorsObj = formatSettings.get("chart_colors"); if (charColorsObj == null) { continue; } viewMigrated = true; @SuppressWarnings({"unchecked", "rawtypes"}) final Map<String, String> chartColors = (Map) charColorsObj; List<Document> chartColorSettings = chartColors.entrySet().stream().map(entry -> { final Document chartColorFieldSetting = new Document(); chartColorFieldSetting.put("field_name", entry.getKey()); chartColorFieldSetting.put("chart_color", entry.getValue()); return chartColorFieldSetting; }).collect(Collectors.toList()); formatSettings.put("chart_colors", chartColorSettings); config.put("formatting_settings", formatSettings); widget.put("config", config); } } if (viewMigrated) { state.put("widgets", widgets); } } } if (viewMigrated) { viewsCollection.updateOne(new BasicDBObject("_id", view.getObjectId("_id")), new Document("$set", view)); final String viewId = view.getObjectId("_id").toString(); viewIds.add(viewId); } } LOG.info("Migration completed. 
{} views where migrated.", viewIds.size()); clusterConfigService.write(V20190127111728_MigrateWidgetFormatSettings.MigrationCompleted.create( viewIds.size(), viewIds)); }
// Runs the chart-color migration against a fixture view holding four color mappings
// and asserts each mapping became a {field_name, chart_color} document in order.
@Test @MongoDBFixtures("V20190127111728_MigrateWidgetFormatSettings_withMultipleColorMappings.json") public void testMigrationWithMultipleChartColorMapping() { final BasicDBObject dbQuery1 = new BasicDBObject(); dbQuery1.put("_id", new ObjectId("5e2ee372b22d7970576b2eb3")); final MongoCollection<Document> collection = mongoDB.mongoConnection() .getMongoDatabase() .getCollection("views"); migration.upgrade(); final FindIterable<Document> views = collection.find(dbQuery1); final Document view1 = views.first(); @SuppressWarnings("unchecked") final List<Document> widgets1 = (List) view1.get("state", Document.class).get("2c67cc0f-c62e-47c1-8b70-e3198925e6bc", Document.class).get("widgets"); assertThat(widgets1.size()).isEqualTo(2); Set<Document> aggregationWidgets =widgets1.stream().filter(w -> w.getString("type") .equals("aggregation")).collect(Collectors.toSet()); assertThat(aggregationWidgets.size()).isEqualTo(1); final Document aggregationWidget = aggregationWidgets.iterator().next(); final Document config = aggregationWidget.get("config", Document.class); final Document formattingSettings = config.get("formatting_settings", Document.class); @SuppressWarnings("unchecked") final List<Document> chartColors = (List) formattingSettings.get("chart_colors", List.class); assertThat(chartColors.size()).isEqualTo(4); final Document chartColor1 = chartColors.get(0); assertThat(chartColor1.getString("field_name")).isEqualTo("count()"); assertThat(chartColor1.getString("chart_color")).isEqualTo("#e91e63"); final Document chartColor2 = chartColors.get(1); assertThat(chartColor2.getString("field_name")).isEqualTo("avg(fields)"); assertThat(chartColor2.getString("chart_color")).isEqualTo("#e81e63"); final Document chartColor3 = chartColors.get(2); assertThat(chartColor3.getString("field_name")).isEqualTo("mean(man)"); assertThat(chartColor3.getString("chart_color")).isEqualTo("#e91f63"); final Document chartColor4 = chartColors.get(3); 
assertThat(chartColor4.getString("field_name")).isEqualTo("total(win)"); assertThat(chartColor4.getString("chart_color")).isEqualTo("#e91fff"); }
// Builds the query-endpoint resolver used by the Eureka client: a composite that
// prefers the local (applications-based) resolver and falls back to the remote one
// only when the local result is empty. The composite is wrapped in zone-affinity
// ordering for the instance's own zone and refreshed asynchronously with the
// transport-config pool size / refresh interval / warm-up timeout.
static ClosableResolver<AwsEndpoint> compositeQueryResolver( final ClusterResolver<AwsEndpoint> remoteResolver, final ClusterResolver<AwsEndpoint> localResolver, final EurekaClientConfig clientConfig, final EurekaTransportConfig transportConfig, final InstanceInfo myInstanceInfo, final EndpointRandomizer randomizer) { String[] availZones = clientConfig.getAvailabilityZones(clientConfig.getRegion()); String myZone = InstanceInfo.getZone(availZones, myInstanceInfo); ClusterResolver<AwsEndpoint> compositeResolver = new ClusterResolver<AwsEndpoint>() { @Override public String getRegion() { return clientConfig.getRegion(); } @Override public List<AwsEndpoint> getClusterEndpoints() { List<AwsEndpoint> result = localResolver.getClusterEndpoints(); if (result.isEmpty()) { result = remoteResolver.getClusterEndpoints(); } return result; } }; return new AsyncResolver<>( EurekaClientNames.QUERY, new ZoneAffinityClusterResolver(compositeResolver, myZone, true, randomizer), transportConfig.getAsyncExecutorThreadPoolSize(), transportConfig.getAsyncResolverRefreshIntervalMs(), transportConfig.getAsyncResolverWarmUpTimeoutMs() ); }
// End-to-end check of the composite resolver fallback: the applications source returns
// null on the first poll (so the remote resolver is consulted once), then real data on
// subsequent polls (so later lookups are served locally and the remote count stays at 1).
// The resolver is shut down in the finally block to stop its refresh task.
@Test public void testCanonicalResolver() throws Exception { when(clientConfig.getEurekaServerURLContext()).thenReturn("context"); when(clientConfig.getRegion()).thenReturn("region"); when(transportConfig.getAsyncExecutorThreadPoolSize()).thenReturn(3); when(transportConfig.getAsyncResolverRefreshIntervalMs()).thenReturn(400); when(transportConfig.getAsyncResolverWarmUpTimeoutMs()).thenReturn(400); Applications applications = InstanceInfoGenerator.newBuilder(5, "eurekaRead", "someOther").build().toApplications(); String vipAddress = applications.getRegisteredApplications("eurekaRead").getInstances().get(0).getVIPAddress(); ApplicationsResolver.ApplicationsSource applicationsSource = mock(ApplicationsResolver.ApplicationsSource.class); when(applicationsSource.getApplications(anyInt(), eq(TimeUnit.SECONDS))) .thenReturn(null) // first time .thenReturn(applications); // subsequent times EurekaHttpClientFactory remoteResolverClientFactory = mock(EurekaHttpClientFactory.class); EurekaHttpClient httpClient = mock(EurekaHttpClient.class); when(remoteResolverClientFactory.newClient()).thenReturn(httpClient); when(httpClient.getVip(vipAddress)).thenReturn(EurekaHttpResponse.anEurekaHttpResponse(200, applications).build()); EurekaHttpResolver remoteResolver = spy(new TestEurekaHttpResolver(clientConfig, transportConfig, remoteResolverClientFactory, vipAddress)); when(transportConfig.getReadClusterVip()).thenReturn(vipAddress); ApplicationsResolver localResolver = spy(new ApplicationsResolver( clientConfig, transportConfig, applicationsSource, transportConfig.getReadClusterVip())); ClosableResolver resolver = null; try { resolver = EurekaHttpClients.compositeQueryResolver( remoteResolver, localResolver, clientConfig, transportConfig, applicationInfoManager.getInfo(), randomizer ); List endpoints = resolver.getClusterEndpoints(); assertThat(endpoints.size(), equalTo(applications.getInstancesByVirtualHostName(vipAddress).size())); verify(remoteResolver, 
times(1)).getClusterEndpoints(); verify(localResolver, times(1)).getClusterEndpoints(); // wait for the second cycle that hits the app source verify(applicationsSource, timeout(3000).times(2)).getApplications(anyInt(), eq(TimeUnit.SECONDS)); endpoints = resolver.getClusterEndpoints(); assertThat(endpoints.size(), equalTo(applications.getInstancesByVirtualHostName(vipAddress).size())); verify(remoteResolver, times(1)).getClusterEndpoints(); verify(localResolver, times(2)).getClusterEndpoints(); } finally { if (resolver != null) { resolver.shutdown(); } } }
// Inclusive-end variant of range: [startIncl, endIncl] delegated as the half-open
// range [startIncl, endIncl + 1).
// NOTE(review): endIncl == Integer.MAX_VALUE would overflow the + 1 — presumably
// callers never pass it; confirm against range()'s contract.
public static IntArrayList rangeClosed(int startIncl, int endIncl) { return range(startIncl, endIncl + 1); }
// Covers an ascending range, a negative range, and the single-element case (start == end).
@Test public void testRangeClosed() { assertEquals(from(3, 4, 5, 6, 7), ArrayUtil.rangeClosed(3, 7)); assertEquals(from(-3, -2, -1), ArrayUtil.rangeClosed(-3, -1)); assertEquals(from(5), ArrayUtil.rangeClosed(5, 5)); }
// Returns a slimmed copy of a LocatedFileStatus: each block location is re-wrapped in a
// plain BlockLocation (dropping any heavier subclass state, e.g. HdfsBlockLocation's
// located block — see the companion test). Directories, empty files, and non-located
// statuses are returned unchanged.
public static FileStatus shrinkStatus(FileStatus origStat) { if (origStat.isDirectory() || origStat.getLen() == 0 || !(origStat instanceof LocatedFileStatus)) { return origStat; } else { BlockLocation[] blockLocations = ((LocatedFileStatus)origStat).getBlockLocations(); BlockLocation[] locs = new BlockLocation[blockLocations.length]; int i = 0; for (BlockLocation location : blockLocations) { locs[i++] = new BlockLocation(location); } LocatedFileStatus newStat = new LocatedFileStatus(origStat, locs); return newStat; } }
// For every located file in the mock FS: the shrunk status equals the original, its
// locations are plain BlockLocation instances, and every per-location field survives
// the copy; also asserts at least one file with locations was actually exercised.
@Test public void testShrinkStatus() throws IOException { Configuration conf = getConfiguration(); MockFileSystem mockFs = (MockFileSystem) new Path("test:///").getFileSystem(conf); Path dir1 = new Path("test:/a1"); RemoteIterator<LocatedFileStatus> statuses = mockFs.listLocatedStatus(dir1); boolean verified = false; while (statuses.hasNext()) { LocatedFileStatus orig = statuses.next(); LocatedFileStatus shrink = (LocatedFileStatus)FileInputFormat.shrinkStatus(orig); Assert.assertTrue(orig.equals(shrink)); if (shrink.getBlockLocations() != null) { Assert.assertEquals(orig.getBlockLocations().length, shrink.getBlockLocations().length); for (int i = 0; i < shrink.getBlockLocations().length; i++) { verified = true; BlockLocation location = shrink.getBlockLocations()[i]; BlockLocation actual = orig.getBlockLocations()[i]; Assert.assertNotNull(((HdfsBlockLocation)actual).getLocatedBlock()); Assert.assertEquals(BlockLocation.class.getName(), location.getClass().getName()); Assert.assertArrayEquals(actual.getHosts(), location.getHosts()); Assert.assertArrayEquals(actual.getCachedHosts(), location.getCachedHosts()); Assert.assertArrayEquals(actual.getStorageIds(), location.getStorageIds()); Assert.assertArrayEquals(actual.getStorageTypes(), location.getStorageTypes()); Assert.assertArrayEquals(actual.getTopologyPaths(), location.getTopologyPaths()); Assert.assertArrayEquals(actual.getNames(), location.getNames()); Assert.assertEquals(actual.getLength(), location.getLength()); Assert.assertEquals(actual.getOffset(), location.getOffset()); Assert.assertEquals(actual.isCorrupt(), location.isCorrupt()); } } else { Assert.assertTrue(orig.getBlockLocations() == null); } } Assert.assertTrue(verified); }
/**
 * Compares this tag with another object. Two tags are equal when both their
 * keys and their values are equal (null-safe).
 *
 * <p>Fixes a chained else-if bug in the previous version: when both keys were
 * {@code null}, the value comparison was never reached, so tags with a null
 * key and different values incorrectly compared equal.
 *
 * @param obj the object to compare against
 * @return {@code true} if {@code obj} is a {@link Tag} with equal key and value
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof Tag)) {
        return false;
    }
    Tag other = (Tag) obj;
    // Null-safe key comparison; key and value must BOTH match.
    boolean keyEqual = (this.key == null) ? (other.key == null) : this.key.equals(other.key);
    if (!keyEqual) {
        return false;
    }
    return (this.value == null) ? (other.value == null) : this.value.equals(other.value);
}
/**
 * Tag equality must consider both key and value: identical pairs are equal;
 * differing value, differing key, a non-Tag object, and null are all unequal.
 */
@Test
public void equalsTest() {
    Tag tag1 = new Tag(KEY, VALUE);
    Tag tag2 = new Tag(KEY, VALUE);
    Tag tag3 = new Tag(KEY, KEY);     // same key, different value
    Tag tag4 = new Tag(VALUE, VALUE); // different key
    TestUtils.equalsTest(tag1, tag2);
    TestUtils.notEqualsTest(tag1, tag3);
    TestUtils.notEqualsTest(tag1, tag4);
    TestUtils.notEqualsTest(tag1, new Object());
    TestUtils.notEqualsTest(tag1, null);
}
/**
 * Converts a Spring messaging {@code Message} into a RocketMQ message.
 * String payloads are encoded with the given charset, byte[] payloads pass
 * through unchanged, and any other payload is serialized to JSON first and
 * then encoded.
 *
 * @param objectMapper used to JSON-serialize non-String, non-byte[] payloads
 * @param charset charset name used to encode textual payloads
 * @param destination the RocketMQ destination (topic[:tag])
 * @param message the Spring message to convert
 * @return the wrapped RocketMQ message
 * @deprecated retained for backward compatibility
 */
@Deprecated
public static org.apache.rocketmq.common.message.Message convertToRocketMessage(
        ObjectMapper objectMapper, String charset, String destination,
        org.springframework.messaging.Message message) {
    final Object payload = message.getPayload();
    final byte[] body;
    if (payload instanceof byte[]) {
        body = (byte[]) message.getPayload();
    } else if (payload instanceof String) {
        body = ((String) payload).getBytes(Charset.forName(charset));
    } else {
        // Fall back to JSON serialization for arbitrary payload objects.
        try {
            body = objectMapper.writeValueAsString(payload).getBytes(Charset.forName(charset));
        } catch (Exception e) {
            throw new RuntimeException("convert to RocketMQ message failed.", e);
        }
    }
    return getAndWrapMessage(destination, message.getHeaders(), body);
}
/**
 * Verifies that String payloads are encoded to bytes and byte[] payloads are
 * carried over unchanged into the RocketMQ message body.
 */
@Test public void testPayload() { String charset = "UTF-8"; String destination = "test-topic"; Message msgWithStringPayload = MessageBuilder.withPayload("test").build(); org.apache.rocketmq.common.message.Message rocketMsg1 = RocketMQUtil.convertToRocketMessage(objectMapper, charset, destination, msgWithStringPayload); Message msgWithBytePayload = MessageBuilder.withPayload("test".getBytes()).build(); org.apache.rocketmq.common.message.Message rocketMsg2 = RocketMQUtil.convertToRocketMessage(objectMapper, charset, destination, msgWithBytePayload); assertTrue(Arrays.equals(((String) msgWithStringPayload.getPayload()).getBytes(), rocketMsg1.getBody())); assertTrue(Arrays.equals((byte[]) msgWithBytePayload.getPayload(), rocketMsg2.getBody())); }
/**
 * Renders this DTO as {@code AuthPathApplyDTO{appName='...', path='...'}}.
 *
 * @return a human-readable representation of this object
 */
@Override
public String toString() {
    StringBuilder text = new StringBuilder("AuthPathApplyDTO{");
    text.append("appName='").append(appName).append('\'');
    text.append(", path='").append(path).append('\'');
    text.append('}');
    return text.toString();
}
/**
 * toString must render the fixture DTO in the documented
 * {@code AuthPathApplyDTO{appName='...', path='...'}} format.
 */
@Test
public void testToString() {
    assertEquals("AuthPathApplyDTO{appName='shenyu', path='/'}",
        authPathApplyDTOUnderTest.toString());
}
/**
 * Strips a single leading slash from a resource path.
 *
 * <p>Also guards against empty input: the previous version called
 * {@code charAt(0)} unconditionally and threw
 * {@link StringIndexOutOfBoundsException} for {@code ""}.
 *
 * @param rawPath the raw resource path; must not be null
 * @return the path without its leading '/', or the path unchanged
 */
private static String getResourcePath(String rawPath) {
    // startsWith is safe on the empty string, unlike charAt(0).
    if (rawPath.startsWith("/")) {
        return rawPath.substring(1);
    }
    return rawPath;
}
/**
 * End-to-end check of builder-spec generation for a collection resource IDL:
 * partitions the generated specs into root/sub-root/finder/action/rest-method
 * builders, then asserts counts, method names, parent/child wiring, query and
 * path-key binding methods, and per-method annotations for both the root
 * resource and its sub-resource.
 */
@Test public void testCollectionResource() throws Exception { String idl = moduleDir + FS + IDLS_DIR + FS + "testCollection.restspec.json"; Set<BuilderSpec> builderSpecs = generateBuilderSpec(new String[] {idl}); Assert.assertNotNull(builderSpecs); Assert.assertTrue(builderSpecs.size() == 15); List<String> expectedMethods = Arrays.asList("actionAnotherAction", "actionSomeAction", "actionVoidAction", "batchGet", "create", "delete", "findBySearch", "get", "getAll", "partialUpdate", "update"); List<String> actualMethods = new ArrayList<>(); CollectionRootBuilderSpec rootBuilder = null; CollectionRootBuilderSpec subRootBuilder = null; FinderBuilderSpec finderBuilder = null; List<ActionBuilderSpec> actionBuilders = new ArrayList<>(); List<RestMethodBuilderSpec> basicMethodBuilders = new ArrayList<>(); for (BuilderSpec spec : builderSpecs) { if (spec instanceof RootBuilderSpec) { Assert.assertTrue(spec instanceof CollectionRootBuilderSpec); CollectionRootBuilderSpec collSpec = (CollectionRootBuilderSpec)spec; if (collSpec.getResourcePath().indexOf('/') >= 0 ) { subRootBuilder = collSpec; } else { rootBuilder = collSpec; } } else if (spec instanceof FinderBuilderSpec) { finderBuilder = (FinderBuilderSpec) spec; } else if (spec instanceof ActionBuilderSpec) { actionBuilders.add((ActionBuilderSpec) spec); } else if (spec instanceof RestMethodBuilderSpec) { basicMethodBuilders.add((RestMethodBuilderSpec) spec); } else { Assert.fail("There should not be any other builder spec generated!"); } } // assert sub resource root builder spec Assert.assertNotNull(subRootBuilder); Assert.assertEquals(subRootBuilder.getSourceIdlName(), idl); Assert.assertEquals(subRootBuilder.getResourcePath(), "testCollection/{testCollectionId}/testCollectionSub"); Assert.assertEquals(subRootBuilder.getParentRootBuilder(), rootBuilder); Assert.assertNotNull(subRootBuilder.getRestMethods()); Assert.assertTrue(subRootBuilder.getRestMethods().size() == 2); 
Assert.assertTrue(subRootBuilder.getFinders().isEmpty()); Assert.assertTrue(subRootBuilder.getResourceActions().isEmpty()); Assert.assertTrue(subRootBuilder.getEntityActions().isEmpty()); Assert.assertTrue(subRootBuilder.getSubresources().isEmpty()); // assert root builder spec Assert.assertNotNull(rootBuilder); Assert.assertEquals(rootBuilder.getSourceIdlName(), idl); Assert.assertEquals(rootBuilder.getResourcePath(), "testCollection"); Assert.assertNotNull(rootBuilder.getRestMethods()); Assert.assertTrue(rootBuilder.getRestMethods().size() == 7); for (RootBuilderMethodSpec method : rootBuilder.getRestMethods()) { actualMethods.add(method.getName()); Assert.assertEquals(method.getReturn().getRootBuilderMethod(), method); } Assert.assertNotNull(rootBuilder.getFinders()); Assert.assertTrue(rootBuilder.getFinders().size() == 1); actualMethods.add(rootBuilder.getFinders().get(0).getName()); Assert.assertNotNull(rootBuilder.getResourceActions()); Assert.assertTrue(rootBuilder.getResourceActions().size() == 1); actualMethods.add(rootBuilder.getResourceActions().get(0).getName()); Assert.assertNotNull(rootBuilder.getEntityActions()); Assert.assertTrue(rootBuilder.getEntityActions().size() == 2); actualMethods.add(rootBuilder.getEntityActions().get(0).getName()); actualMethods.add(rootBuilder.getEntityActions().get(1).getName()); Assert.assertNotNull(rootBuilder.getSubresources()); Assert.assertTrue(rootBuilder.getSubresources().size() == 1); Collections.sort(actualMethods); Assert.assertEquals(actualMethods, expectedMethods); // assert finder builder spec Assert.assertNotNull(finderBuilder); Assert.assertEquals("search", finderBuilder.getFinderName()); Assert.assertNotNull(finderBuilder.getQueryParamMethods()); Assert.assertTrue(finderBuilder.hasBindingMethods()); Assert.assertEquals(finderBuilder.getMetadataType().getFullName(), "com.linkedin.restli.tools.test.TestRecord"); Assert.assertTrue(finderBuilder.getQueryParamMethods().size() == 1); QueryParamBindingMethodSpec 
finderQuery = finderBuilder.getQueryParamMethods().get(0); Assert.assertEquals(finderQuery.getParamName(), "tone"); Assert.assertEquals(finderQuery.getMethodName(), "toneParam"); Assert.assertEquals(finderQuery.getArgType().getFullName(), "com.linkedin.restli.tools.test.TestEnum"); Assert.assertFalse(finderQuery.isNeedAddParamMethod()); Assert.assertTrue(finderQuery.isOptional()); // assert action builder spec Assert.assertNotNull(actionBuilders); Assert.assertTrue(actionBuilders.size() == 3); for (ActionBuilderSpec spec : actionBuilders) { Assert.assertTrue(spec.getActionName().equals("someAction") || spec.getActionName().equals("anotherAction") || spec.getActionName().equals("voidAction")); if (spec.getActionName().equals("voidAction")) { Assert.assertFalse(spec.hasBindingMethods()); } else { Assert.assertTrue(spec.hasBindingMethods()); } } // assert get method builder query method Assert.assertNotNull(basicMethodBuilders); Assert.assertTrue(basicMethodBuilders.size() == 9); // 7 for root resource, 2 for sub resource for (RestMethodBuilderSpec spec : basicMethodBuilders) { if (spec.getResourceMethod() == ResourceMethod.GET) { Assert.assertNotNull(spec.getQueryParamMethods()); Assert.assertTrue(spec.getQueryParamMethods().size() == 1); Assert.assertTrue(spec.hasBindingMethods()); QueryParamBindingMethodSpec getQuery = spec.getQueryParamMethods().get(0); Assert.assertEquals(getQuery.getParamName(), "message"); Assert.assertEquals(getQuery.getMethodName(), "messageParam"); Assert.assertEquals(getQuery.getArgType().getSchema().getType(), DataSchema.Type.STRING); Assert.assertFalse(getQuery.isNeedAddParamMethod()); Assert.assertTrue(getQuery.isOptional()); if (spec.getResource().getName().equals("testCollection")) { DataMap expected = new DataMap(); expected.put("someAnnotation", new DataMap()); Assert.assertEquals(spec.getAnnotations(), expected); } } else if (spec.getResourceMethod() == ResourceMethod.DELETE && spec.getClassName().startsWith("TestCollectionSub")) { 
// sub resource delete method should have path keys List<PathKeyBindingMethodSpec> pathKeys = spec.getPathKeyMethods(); Assert.assertNotNull(pathKeys); Assert.assertTrue(pathKeys.size() == 1); Assert.assertTrue(spec.hasBindingMethods()); PathKeyBindingMethodSpec pathKeyMethod = pathKeys.get(0); Assert.assertEquals(pathKeyMethod.getPathKey(), "testCollectionId"); Assert.assertEquals(pathKeyMethod.getMethodName(), "testCollectionIdKey"); Assert.assertEquals(pathKeyMethod.getArgType().getSchema().getType(), DataSchema.Type.LONG); } else if (spec.getResourceMethod() == ResourceMethod.CREATE) { Assert.assertEquals(spec.getQueryParamMethods().size(), 1); Assert.assertTrue(spec.hasBindingMethods()); QueryParamBindingMethodSpec queryParam = spec.getQueryParamMethods().get(0); Assert.assertEquals(queryParam.getParamName(), "isNullId"); Assert.assertEquals(queryParam.isOptional(), true); DataMap expected = new DataMap(); expected.put("someOtherAnnotation", new DataMap()); Assert.assertEquals(spec.getAnnotations(), expected); } } }
/**
 * Expands a composite {@link DataType} into a {@link ResolvedSchema}, handling
 * both modern field-based types and legacy structured types.
 *
 * @param dataType the type to expand
 * @return the expanded schema
 * @throws IllegalArgumentException if the type is not composite
 */
public static ResolvedSchema expandCompositeTypeToSchema(DataType dataType) {
    if (dataType instanceof FieldsDataType) {
        return expandCompositeType((FieldsDataType) dataType);
    }
    // Legacy path: a structured type carried through the old TypeInformation bridge.
    boolean isLegacyStructured = dataType.getLogicalType() instanceof LegacyTypeInformationType
            && dataType.getLogicalType().getTypeRoot() == STRUCTURED_TYPE;
    if (isLegacyStructured) {
        return expandLegacyCompositeType(dataType);
    }
    throw new IllegalArgumentException("Expected a composite type");
}
/**
 * Expanding an atomic (non-composite) type must be rejected with
 * {@link IllegalArgumentException}.
 */
@Test
void testExpandThrowExceptionOnAtomicType() {
    assertThatThrownBy(() -> DataTypeUtils.expandCompositeTypeToSchema(DataTypes.TIMESTAMP()))
        .isInstanceOf(IllegalArgumentException.class);
}
/**
 * Applies the first active http and https proxy from Maven settings to the
 * JVM's proxy system properties, decrypting credentials first.
 *
 * <p>Protocols whose proxy system properties are already set are skipped, so
 * explicit JVM/user configuration wins over settings.xml.
 *
 * @param settings Maven settings possibly containing proxy definitions
 * @param decrypter decrypts encrypted proxy credentials
 * @throws MojoExecutionException if decryption reports an ERROR or FATAL problem
 */
static void activateHttpAndHttpsProxies(Settings settings, SettingsDecrypter decrypter)
    throws MojoExecutionException {
  // At most one proxy per protocol (http, https).
  List<Proxy> proxies = new ArrayList<>(2);
  for (String protocol : ImmutableList.of("http", "https")) {
    if (areProxyPropertiesSet(protocol)) {
      continue;
    }
    settings.getProxies().stream()
        .filter(Proxy::isActive)
        .filter(proxy -> protocol.equals(proxy.getProtocol()))
        .findFirst()
        .ifPresent(proxies::add);
  }
  if (proxies.isEmpty()) {
    return;
  }
  SettingsDecryptionRequest request = new DefaultSettingsDecryptionRequest().setProxies(proxies);
  SettingsDecryptionResult result = decrypter.decrypt(request);
  // Fail fast on serious decryption problems; warnings are tolerated.
  for (SettingsProblem problem : result.getProblems()) {
    if (problem.getSeverity() == SettingsProblem.Severity.ERROR
        || problem.getSeverity() == SettingsProblem.Severity.FATAL) {
      throw new MojoExecutionException(
          "Unable to decrypt proxy info from settings.xml: " + problem);
    }
  }
  result.getProxies().forEach(MavenSettingsProxyProvider::setProxyProperties);
}
/**
 * Encrypted proxy settings plus a decrypter that cannot produce a valid
 * result must surface as a MojoExecutionException with the decryption error
 * message. NOTE(review): presumably the "empty" decrypter reports an
 * ERROR-severity problem — confirm against the fixture.
 */
@Test
public void testActivateHttpAndHttpsProxies_emptySettingsDecrypter() {
    try {
        MavenSettingsProxyProvider.activateHttpAndHttpsProxies(
            mixedProxyEncryptedSettings, emptySettingsDecrypter);
        Assert.fail();
    } catch (MojoExecutionException ex) {
        MatcherAssert.assertThat(
            ex.getMessage(),
            CoreMatchers.startsWith("Unable to decrypt proxy info from settings.xml:"));
    }
}
/**
 * Lists all database names by delegating to the Hive metastore operations layer.
 *
 * @return the database names reported by {@code hmsOps}
 */
@Override
public List<String> listDbNames() {
    return hmsOps.getAllDatabaseNames();
}
/**
 * listDbNames must surface the metastore's databases, both via the metadata
 * facade and via a query-level cache wrapper.
 */
@Test
public void testListDbNames() {
    List<String> databaseNames = hudiMetadata.listDbNames();
    Assert.assertEquals(Lists.newArrayList("db1", "db2"), databaseNames);
    CachingHiveMetastore queryLevelCache = CachingHiveMetastore.createQueryLevelInstance(cachingHiveMetastore, 100);
    Assert.assertEquals(Lists.newArrayList("db1", "db2"), queryLevelCache.getAllDatabaseNames());
}
/**
 * Executes an API request synchronously by delegating to the underlying client.
 *
 * @param request the request to send
 * @param <T> the concrete request type
 * @param <R> the response type paired with the request
 * @return the typed API response
 */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    return api.send(request);
}
/**
 * Sends a message, then deletes it by id and expects an OK response.
 * NOTE(review): integration test — depends on a live bot/chat.
 */
@Test
public void deleteMessage() {
    Message message = bot.execute(new SendMessage(chatId, "message for delete")).message();
    BaseResponse response = bot.execute(new DeleteMessage(chatId, message.messageId()));
    assertTrue(response.isOk());
}
/**
 * Splits {@code value} on the given regex and trims each element.
 * Null or empty input yields an empty immutable list.
 *
 * @param value the raw string, possibly null
 * @param regex the pattern to split on
 * @return the trimmed elements, or an empty list
 */
@VisibleForTesting
static List<String> stringToList(String value, String regex) {
    if (value != null && !value.isEmpty()) {
        return Arrays.stream(value.split(regex))
            .map(String::trim)
            .collect(Collectors.toList());
    }
    return ImmutableList.of();
}
/**
 * Covers stringToList: null/empty inputs, trimming, plain-comma splitting
 * (which cuts inside parentheses) and the paren-aware regex that keeps
 * transforms like "bucket(id, 4)" intact.
 */
@Test
public void testStringToList() {
    List<String> result = IcebergSinkConfig.stringToList(null, ",");
    assertThat(result).isEmpty();
    result = IcebergSinkConfig.stringToList("", ",");
    assertThat(result).isEmpty();
    result = IcebergSinkConfig.stringToList("one ", ",");
    assertThat(result).contains("one");
    result = IcebergSinkConfig.stringToList("one, two", ",");
    assertThat(result).contains("one", "two");
    // A bare comma split cuts inside the parentheses.
    result = IcebergSinkConfig.stringToList("bucket(id, 4)", ",");
    assertThat(result).contains("bucket(id", "4)");
    result = IcebergSinkConfig.stringToList("bucket(id, 4)", IcebergSinkConfig.COMMA_NO_PARENS_REGEX);
    assertThat(result).contains("bucket(id, 4)");
    result = IcebergSinkConfig.stringToList(
        "bucket(id, 4), type", IcebergSinkConfig.COMMA_NO_PARENS_REGEX);
    assertThat(result).contains("bucket(id, 4)", "type");
}
/**
 * Gets or creates the sensor for a store-level metric, scoped to the current
 * thread, task, and store.
 *
 * @param taskId the task that owns the store
 * @param storeName the state store name
 * @param sensorSuffix suffix identifying the specific metric
 * @param recordingLevel level at which the sensor records
 * @param parents optional parent sensors to roll values up into
 * @return the existing or newly registered sensor
 */
public final Sensor storeLevelSensor(final String taskId,
                                     final String storeName,
                                     final String sensorSuffix,
                                     final RecordingLevel recordingLevel,
                                     final Sensor... parents) {
    // The prefix embeds the current thread's name, confining each key to one thread.
    final String sensorPrefix = storeSensorPrefix(Thread.currentThread().getName(), taskId, storeName);
    // since the keys in the map storeLevelSensors contain the name of the current thread and threads only
    // access keys in which their name is contained, the value in the maps do not need to be thread safe
    // and we can use a LinkedList here.
    // TODO: In future, we could use thread local maps since each thread will exclusively access the set of keys
    // that contain its name. Similar is true for the other metric levels. Thread-level metrics need some
    // special attention, since they are created before the thread is constructed. The creation of those
    // metrics could be moved into the run() method of the thread.
    return getSensors(storeLevelSensors, sensorSuffix, sensorPrefix, recordingLevel, parents);
}
/**
 * storeLevelSensor must return the already-registered sensor instead of
 * creating a new one.
 */
@Test
public void shouldGetExistingStoreLevelSensor() {
    final Metrics metrics = mock(Metrics.class);
    final RecordingLevel recordingLevel = RecordingLevel.INFO;
    setupGetExistingSensorTest(metrics);
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, CLIENT_ID, VERSION, time);
    final Sensor actualSensor = streamsMetrics.storeLevelSensor(
        TASK_ID1,
        STORE_NAME1,
        SENSOR_NAME_1,
        recordingLevel
    );
    assertThat(actualSensor, is(equalToObject(sensor)));
}
@Override public Plugin create(final PluginWrapper pluginWrapper) { String pluginClassName = pluginWrapper.getDescriptor().getPluginClass(); log.debug("Create instance for plugin '{}'", pluginClassName); Class<?> pluginClass; try { pluginClass = pluginWrapper.getPluginClassLoader().loadClass(pluginClassName); } catch (ClassNotFoundException e) { log.error(e.getMessage(), e); return null; } // once we have the class, we can do some checks on it to ensure // that it is a valid implementation of a plugin. int modifiers = pluginClass.getModifiers(); if (Modifier.isAbstract(modifiers) || Modifier.isInterface(modifiers) || (!Plugin.class.isAssignableFrom(pluginClass))) { log.error("The plugin class '{}' is not valid", pluginClassName); return null; } return createInstance(pluginClass, pluginWrapper); }
/**
 * create must return null when the descriptor's class compiles but does not
 * implement Plugin.
 */
@Test
public void testCreateFail() {
    PluginDescriptor pluginDescriptor = mock(PluginDescriptor.class);
    JavaFileObject object = JavaSources.compile(FailTestPlugin);
    String pluginClassName = JavaFileObjectUtils.getClassName(object);
    when(pluginDescriptor.getPluginClass()).thenReturn(pluginClassName);
    PluginWrapper pluginWrapper = mock(PluginWrapper.class);
    when(pluginWrapper.getDescriptor()).thenReturn(pluginDescriptor);
    JavaFileObjectClassLoader classLoader = new JavaFileObjectClassLoader();
    classLoader.load(FailTestPlugin);
    when(pluginWrapper.getPluginClassLoader()).thenReturn(classLoader);
    PluginFactory pluginFactory = new DefaultPluginFactory();
    Plugin plugin = pluginFactory.create(pluginWrapper);
    assertNull(plugin);
}
/**
 * Authenticates a backup user from a credential presentation and a signature
 * over it. Looks up the stored authentication data for the presentation's
 * backup id; if none exists, a deliberately invalid public key is substituted
 * so signature verification fails (and a failure metric is recorded).
 *
 * @param presentation the backup credential presentation
 * @param signature signature over the presentation to verify against the stored key
 * @return future completing with the authenticated user, or exceptionally when
 *         verification fails (verifySignature presumably throws — TODO confirm)
 */
public CompletableFuture<AuthenticatedBackupUser> authenticateBackupUser(
    final BackupAuthCredentialPresentation presentation,
    final byte[] signature) {
  final PresentationSignatureVerifier signatureVerifier = verifyPresentation(presentation);
  return backupsDb
      .retrieveAuthenticationData(presentation.getBackupId())
      .thenApply(optionalAuthenticationData -> {
        final BackupsDb.AuthenticationData authenticationData = optionalAuthenticationData
            .orElseGet(() -> {
              Metrics.counter(ZK_AUTHN_COUNTER_NAME,
                      SUCCESS_TAG_NAME, String.valueOf(false),
                      FAILURE_REASON_TAG_NAME, "missing_public_key")
                  .increment();
              // There was no stored public key, use a bunk public key so that validation will fail
              return new BackupsDb.AuthenticationData(INVALID_PUBLIC_KEY, null, null);
            });
        return new AuthenticatedBackupUser(
            presentation.getBackupId(),
            signatureVerifier.verifySignature(signature, authenticationData.publicKey()),
            authenticationData.backupDir(), authenticationData.mediaDir());
      })
      .thenApply(result -> {
        // Only reached on success; failures complete the future exceptionally.
        Metrics.counter(ZK_AUTHN_COUNTER_NAME, SUCCESS_TAG_NAME, String.valueOf(true)).increment();
        return result;
      });
}
/**
 * With no stored public key for the backup id, authentication must fail with
 * UNAUTHENTICATED (verification runs against the substituted bunk key).
 */
@Test
public void unknownPublicKey() throws VerificationFailedException {
    final BackupAuthCredentialPresentation presentation = backupAuthTestUtil.getPresentation(
        BackupLevel.MESSAGES, backupKey, aci);
    final ECKeyPair keyPair = Curve.generateKeyPair();
    final byte[] signature = keyPair.getPrivateKey().calculateSignature(presentation.serialize());
    // haven't set a public key yet
    assertThat(CompletableFutureTestUtil.assertFailsWithCause(
        StatusRuntimeException.class,
        backupManager.authenticateBackupUser(presentation, signature))
        .getStatus().getCode())
        .isEqualTo(Status.UNAUTHENTICATED.getCode());
}
/**
 * Deserializes {@link TransportSecurityOptions} from a JSON string.
 *
 * @param json the configuration as JSON text
 * @return the parsed options
 */
public static TransportSecurityOptions fromJson(String json) {
    byte[] rawBytes = json.getBytes(StandardCharsets.UTF_8);
    TransportSecurityOptionsJsonSerializer serializer = new TransportSecurityOptionsJsonSerializer();
    return serializer.deserialize(new ByteArrayInputStream(rawBytes));
}
/**
 * Round-trips the reference config file through fromJson and compares the
 * result with the expected options fixture.
 */
@Test
void can_read_options_from_json() throws IOException {
    String tlsJson = Files.readString(TEST_CONFIG_FILE);
    TransportSecurityOptions actualOptions = TransportSecurityOptions.fromJson(tlsJson);
    assertEquals(OPTIONS, actualOptions);
}
/**
 * Registers the estimate-pending-compaction-bytes RocksDB gauge on the
 * Streams metrics registry for the given metric context.
 *
 * @param streamsMetrics registry the metric is added to
 * @param metricContext identifies the store the metric belongs to
 * @param valueProvider supplies the current value whenever the gauge is read
 */
public static void addEstimatePendingCompactionBytesMetric(final StreamsMetricsImpl streamsMetrics,
                                                           final RocksDBMetricContext metricContext,
                                                           final Gauge<BigInteger> valueProvider) {
    addMutableMetric(
        streamsMetrics,
        metricContext,
        valueProvider,
        ESTIMATED_BYTES_OF_PENDING_COMPACTION,
        ESTIMATED_BYTES_OF_PENDING_COMPACTION_DESCRIPTION
    );
}
/**
 * The pending-compaction-bytes gauge must be registered with the expected
 * metric name and description.
 */
@Test
public void shouldAddEstimatePendingCompactionBytesMetric() {
    final String name = "estimate-pending-compaction-bytes";
    final String description = "Estimated total number of bytes a compaction needs to rewrite on disk to get all levels down to under target size";
    runAndVerifyMutableMetric(
        name,
        description,
        () -> RocksDBMetrics.addEstimatePendingCompactionBytesMetric(streamsMetrics, ROCKSDB_METRIC_CONTEXT, VALUE_PROVIDER)
    );
}
/**
 * Removes the shard root directory of every physical partition of the table.
 * Partitions sharing the same storage path are only removed once.
 *
 * <p>Change: the catch block now passes the exception object to the logger so
 * the stack trace is preserved, instead of logging only {@code e.getMessage()}.
 *
 * @return {@code true} if every shard root directory was removed (partitions
 *         without shard info are skipped); {@code false} if any removal or
 *         shard-info lookup failed
 */
public boolean cleanTable() {
    boolean allRemoved = true;
    // Deduplicate storage paths shared by multiple partitions.
    Set<String> removedPaths = new HashSet<>();
    for (PhysicalPartition partition : table.getAllPhysicalPartitions()) {
        try {
            WarehouseManager manager = GlobalStateMgr.getCurrentState().getWarehouseMgr();
            Warehouse warehouse = manager.getBackgroundWarehouse();
            ShardInfo shardInfo = LakeTableHelper.getAssociatedShardInfo(partition, warehouse.getId()).orElse(null);
            if (shardInfo == null || removedPaths.contains(shardInfo.getFilePath().getFullPath())) {
                continue;
            }
            removedPaths.add(shardInfo.getFilePath().getFullPath());
            if (!LakeTableHelper.removeShardRootDirectory(shardInfo)) {
                allRemoved = false;
            }
        } catch (StarClientException e) {
            // Pass the exception as the last argument so SLF4J-style loggers emit
            // the full stack trace, not just the message.
            LOG.warn("Fail to get shard info of partition {}", partition.getId(), e);
            allRemoved = false;
        }
    }
    return allRemoved;
}
/**
 * When shard-info lookup raises StarClientException for a partition,
 * cleanTable must swallow it per-partition and report overall failure by
 * returning false.
 */
@Test public void testGetShardInfoFailed(@Mocked LakeTable table, @Mocked PhysicalPartition partition, @Mocked MaterializedIndex index, @Mocked LakeTablet tablet, @Mocked LakeService lakeService) throws StarClientException { LakeTableCleaner cleaner = new LakeTableCleaner(table); new MockUp<WarehouseManager>() { @Mock public Warehouse getWarehouse(String warehouseName) { return new DefaultWarehouse(WarehouseManager.DEFAULT_WAREHOUSE_ID, WarehouseManager.DEFAULT_WAREHOUSE_NAME); } @Mock public Warehouse getWarehouse(long warehouseId) { return new DefaultWarehouse(WarehouseManager.DEFAULT_WAREHOUSE_ID, WarehouseManager.DEFAULT_WAREHOUSE_NAME); } }; new Expectations() { { table.getAllPhysicalPartitions(); result = Lists.newArrayList(partition); minTimes = 1; maxTimes = 1; partition.getMaterializedIndices(MaterializedIndex.IndexExtState.ALL); result = Lists.newArrayList(index); minTimes = 1; maxTimes = 1; index.getTablets(); result = Lists.newArrayList(tablet); minTimes = 1; maxTimes = 1; } }; Assert.assertFalse(cleaner.cleanTable()); }
/**
 * Converts a logical schema into the canonical string form of the equivalent
 * Protobuf schema, going through the Connect schema representation.
 *
 * @param schema the logical schema to convert
 * @return the canonical Protobuf schema text
 */
@VisibleForTesting
static String logicalToProtoSchema(final LogicalSchema schema) {
    final ConnectSchema asConnect = ConnectSchemas.columnsToConnectSchema(schema.columns());
    final ProtobufData protobufData = new ProtobufData(new ProtobufDataConfig(ImmutableMap.of()));
    return protobufData.fromConnectSchema(asConnect).canonicalString();
}
/**
 * Builds a logical schema exercising every interesting type (struct key,
 * decimal, bytes, arrays/maps, nested structs, temporal types, header column)
 * and asserts the generated Protobuf schema text matches the expected
 * canonical form character for character.
 */
@Test public void shouldConvertComplexLogicalSchemaToProtobufSchema() { // Given: final LogicalSchema schema = LogicalSchema.builder() .keyColumn(ColumnName.of("K"), SqlTypes.struct() .field("F1", SqlTypes.array(SqlTypes.STRING)) .build()) .valueColumn(ColumnName.of("STR"), SqlTypes.STRING) .valueColumn(ColumnName.of("LONG"), SqlTypes.BIGINT) .valueColumn(ColumnName.of("DEC"), SqlTypes.decimal(4, 2)) .valueColumn(ColumnName.of("BYTES_"), SqlTypes.BYTES) .valueColumn(ColumnName.of("ARRAY"), SqlTypes.array(SqlTypes.STRING)) .valueColumn(ColumnName.of("MAP"), SqlTypes.map(SqlTypes.STRING, SqlTypes.STRING)) .valueColumn(ColumnName.of("STRUCT"), SqlTypes.struct().field("F1", SqlTypes.INTEGER).build()) .valueColumn(ColumnName.of("COMPLEX"), SqlTypes.struct() .field("DECIMAL", SqlTypes.decimal(2, 1)) .field("STRUCT", SqlTypes.struct() .field("F1", SqlTypes.STRING) .field("F2", SqlTypes.INTEGER) .build()) .field("ARRAY_STRUCT", SqlTypes.array(SqlTypes.struct().field("F1", SqlTypes.STRING).build())) .field("ARRAY_MAP", SqlTypes.array(SqlTypes.map(SqlTypes.STRING, SqlTypes.INTEGER))) .field("MAP_ARRAY", SqlTypes.map(SqlTypes.STRING, SqlTypes.array(SqlTypes.STRING))) .field("MAP_MAP", SqlTypes.map(SqlTypes.STRING, SqlTypes.map(SqlTypes.STRING, SqlTypes.INTEGER) )) .field("MAP_STRUCT", SqlTypes.map(SqlTypes.STRING, SqlTypes.struct().field("F1", SqlTypes.STRING).build() )) .build() ) .valueColumn(ColumnName.of("TIMESTAMP"), SqlTypes.TIMESTAMP) .valueColumn(ColumnName.of("DATE"), SqlTypes.DATE) .valueColumn(ColumnName.of("TIME"), SqlTypes.TIME) .headerColumn(ColumnName.of("HEAD"), Optional.of("h0")) .build(); final String expectedProtoSchemaString = "syntax = \"proto3\";\n" + "\n" + "import \"confluent/type/decimal.proto\";\n" + "import \"google/protobuf/timestamp.proto\";\n" + "import \"google/type/date.proto\";\n" + "import \"google/type/timeofday.proto\";\n" + "\n" + "message ConnectDefault1 {\n" + " ConnectDefault2 K = 1;\n" + " string STR = 2;\n" + " int64 LONG = 3;\n" + " 
confluent.type.Decimal DEC = 4 [(confluent.field_meta) = {\n" + " params: [\n" + " {\n" + " value: \"4\",\n" + " key: \"precision\"\n" + " },\n" + " {\n" + " value: \"2\",\n" + " key: \"scale\"\n" + " }\n" + " ]\n" + " }];\n" + " bytes BYTES_ = 5;\n" + " repeated string ARRAY = 6;\n" + " repeated ConnectDefault3Entry MAP = 7;\n" + " ConnectDefault4 STRUCT = 8;\n" + " ConnectDefault5 COMPLEX = 9;\n" + " google.protobuf.Timestamp TIMESTAMP = 10;\n" + " google.type.Date DATE = 11;\n" + " google.type.TimeOfDay TIME = 12;\n" + " bytes HEAD = 13;\n" + "\n" + " message ConnectDefault2 {\n" + " repeated string F1 = 1;\n" + " }\n" + " message ConnectDefault3Entry {\n" + " string key = 1;\n" + " string value = 2;\n" + " }\n" + " message ConnectDefault4 {\n" + " int32 F1 = 1;\n" + " }\n" + " message ConnectDefault5 {\n" + " confluent.type.Decimal DECIMAL = 1 [(confluent.field_meta) = {\n" + " params: [\n" + " {\n" + " value: \"2\",\n" + " key: \"precision\"\n" + " },\n" + " {\n" + " value: \"1\",\n" + " key: \"scale\"\n" + " }\n" + " ]\n" + " }];\n" + " ConnectDefault6 STRUCT = 2;\n" + " repeated ConnectDefault7 ARRAY_STRUCT = 3;\n" + " repeated ConnectDefault8Entry ARRAY_MAP = 4;\n" + " repeated ConnectDefault9Entry MAP_ARRAY = 5;\n" + " repeated ConnectDefault10Entry MAP_MAP = 6;\n" + " repeated ConnectDefault12Entry MAP_STRUCT = 7;\n" + " \n" + " message ConnectDefault6 {\n" + " string F1 = 1;\n" + " int32 F2 = 2;\n" + " }\n" + " message ConnectDefault7 {\n" + " string F1 = 1;\n" + " }\n" + " message ConnectDefault8Entry {\n" + " string key = 1;\n" + " int32 value = 2;\n" + " }\n" + " message ConnectDefault9Entry {\n" + " string key = 1;\n" + " repeated string value = 2;\n" + " }\n" + " message ConnectDefault10Entry {\n" + " string key = 1;\n" + " repeated ConnectDefault11Entry value = 2;\n" + " \n" + " message ConnectDefault11Entry {\n" + " string key = 1;\n" + " int32 value = 2;\n" + " }\n" + " }\n" + " message ConnectDefault12Entry {\n" + " string key = 1;\n" + " 
ConnectDefault13 value = 2;\n" + " \n" + " message ConnectDefault13 {\n" + " string F1 = 1;\n" + " }\n" + " }\n" + " }\n" + "}\n"; // When: final String protoSchema = JsonStreamedRowResponseWriter.logicalToProtoSchema(schema); // Then: assertThat(protoSchema, is(expectedProtoSchemaString)); }
/**
 * Persists a brand-new whitelisted site.
 *
 * @param whitelistedSite the site to store; must not carry an id yet
 * @return the saved entity
 * @throws IllegalArgumentException if the site already has an id
 */
@Override
public WhitelistedSite saveNew(WhitelistedSite whitelistedSite) {
    if (whitelistedSite.getId() == null) {
        return repository.save(whitelistedSite);
    }
    throw new IllegalArgumentException("A new whitelisted site cannot be created with an id value already set: " + whitelistedSite.getId());
}
/**
 * saveNew must reject an entity that already carries an id.
 */
@Test(expected = IllegalArgumentException.class)
public void saveNew_notNullId() {
    WhitelistedSite site = Mockito.mock(WhitelistedSite.class);
    Mockito.when(site.getId()).thenReturn(12345L); // arbitrary long value
    service.saveNew(site);
}
/**
 * Parses a semicolon-separated list of unique-constraint descriptions into
 * {@link UniqueConstraint} objects. Blank segments are skipped; column names
 * may be table-qualified, with the supplied defaults filling missing parts.
 *
 * @return the parsed constraints, or {@code null} when the input is null/empty
 *         (NOTE(review): callers appear to rely on the null return — kept as-is)
 */
public static List<UniqueConstraint> parse(String defaultCatalogName, String defaultDbName,
                                           String defaultTableName, String constraintDescs) {
    if (Strings.isNullOrEmpty(constraintDescs)) {
        return null;
    }
    List<UniqueConstraint> parsed = Lists.newArrayList();
    for (String desc : constraintDescs.split(";")) {
        if (Strings.isNullOrEmpty(desc)) {
            continue;
        }
        Pair<TableName, List<String>> descResult = parseUniqueConstraintDesc(
                defaultCatalogName, defaultDbName, defaultTableName, desc);
        TableName tableName = descResult.first;
        List<ColumnId> columns = descResult.second.stream()
                .map(ColumnId::create)
                .collect(Collectors.toList());
        parsed.add(new UniqueConstraint(tableName.getCatalog(), tableName.getDb(),
                tableName.getTbl(), columns));
    }
    return parsed;
}
/**
 * Covers UniqueConstraint.parse: single and multiple constraints, trailing
 * blank segments, and fully/partially qualified column names.
 *
 * <p>Fixes a copy-paste bug in the previous version: the fully-qualified
 * scenario asserted on {@code results.get(0)} (the first parse result)
 * instead of {@code results3.get(0)}.
 */
@Test
public void testParse() throws AnalysisException {
    String constraintDescs = "col1, col2 , col3 ";
    List<UniqueConstraint> results = UniqueConstraint.parse(null, null, null, constraintDescs);
    Assert.assertEquals(1, results.size());
    Assert.assertEquals(Lists.newArrayList(ColumnId.create("col1"), ColumnId.create("col2"),
            ColumnId.create("col3")), results.get(0).getUniqueColumns());

    String constraintDescs2 = "col1, col2 , col3 ; col4, col5, col6, col7 ; col8,;";
    List<UniqueConstraint> results2 = UniqueConstraint.parse(null, null, null, constraintDescs2);
    Assert.assertEquals(3, results2.size());
    Assert.assertEquals(Lists.newArrayList(ColumnId.create("col1"), ColumnId.create("col2"),
            ColumnId.create("col3")), results2.get(0).getUniqueColumns());
    Assert.assertEquals(Lists.newArrayList(ColumnId.create("col4"), ColumnId.create("col5"),
            ColumnId.create("col6"), ColumnId.create("col7")), results2.get(1).getUniqueColumns());
    Assert.assertEquals(Lists.newArrayList(ColumnId.create("col8")), results2.get(2).getUniqueColumns());

    String constraintDescs3 = "hive_catalog.db1.table1.col1, hive_catalog.db1.table1.col2, hive_catalog.db1.table1.col3;";
    List<UniqueConstraint> results3 = UniqueConstraint.parse(null, null, null, constraintDescs3);
    Assert.assertEquals(1, results3.size());
    // Was results.get(0) — must assert on the constraint parsed from constraintDescs3.
    Assert.assertEquals(Lists.newArrayList(ColumnId.create("col1"), ColumnId.create("col2"),
            ColumnId.create("col3")), results3.get(0).getUniqueColumns());
    Assert.assertEquals("hive_catalog", results3.get(0).getCatalogName());
    Assert.assertEquals("db1", results3.get(0).getDbName());
    Assert.assertEquals("table1", results3.get(0).getTableName());

    String constraintDescs4 = "hive_catalog.db1.table1.col1, col2, col3;";
    List<UniqueConstraint> results4 = UniqueConstraint.parse(null, null, null, constraintDescs4);
    Assert.assertEquals(1, results4.size());
    Assert.assertEquals(Lists.newArrayList(ColumnId.create("col1"), ColumnId.create("col2"),
            ColumnId.create("col3")), results4.get(0).getUniqueColumns());
    Assert.assertEquals("hive_catalog", results4.get(0).getCatalogName());
    Assert.assertEquals("db1", results4.get(0).getDbName());
    Assert.assertEquals("table1", results4.get(0).getTableName());

    String constraintDescs5 = "hive_catalog.db1.table1.col1, col2, col3; hive_catalog.db1.table2.col1, col2, col3;";
    List<UniqueConstraint> results5 = UniqueConstraint.parse(null, null, null, constraintDescs5);
    Assert.assertEquals(2, results5.size());
    Assert.assertEquals(Lists.newArrayList(ColumnId.create("col1"), ColumnId.create("col2"),
            ColumnId.create("col3")), results5.get(0).getUniqueColumns());
    Assert.assertEquals("hive_catalog", results5.get(0).getCatalogName());
    Assert.assertEquals("db1", results5.get(0).getDbName());
    Assert.assertEquals("table1", results5.get(0).getTableName());
    Assert.assertEquals(Lists.newArrayList(ColumnId.create("col1"), ColumnId.create("col2"),
            ColumnId.create("col3")), results5.get(1).getUniqueColumns());
    Assert.assertEquals("hive_catalog", results5.get(1).getCatalogName());
    Assert.assertEquals("db1", results5.get(1).getDbName());
    Assert.assertEquals("table2", results5.get(1).getTableName());
}
/**
 * Repeatedly applies the given rewrite rules to the scalar operator tree
 * until a full pass makes no further changes (a fixed point).
 *
 * @param root the operator tree to rewrite
 * @param ruleList rules applied in order on every pass
 * @return the rewritten operator
 * @throws StarRocksPlannerException if the accumulated change count exceeds
 *         Config.max_planner_scalar_rewrite_num (guards against rule sets
 *         that never converge)
 */
public ScalarOperator rewrite(ScalarOperator root, List<ScalarOperatorRewriteRule> ruleList) {
    ScalarOperator result = root;
    context.reset();
    int changeNums;
    do {
        // Snapshot the change counter; loop ends when a pass adds no changes.
        changeNums = context.changeNum();
        for (ScalarOperatorRewriteRule rule : ruleList) {
            result = rewriteByRule(result, rule);
        }
        if (changeNums > Config.max_planner_scalar_rewrite_num) {
            throw new StarRocksPlannerException("Planner rewrite scalar operator over limit",
                    ErrorType.INTERNAL_ERROR);
        }
    } while (changeNums != context.changeNum());
    return result;
}
/**
 * "test0 IS NOT NULL" should fold to constant TRUE under both the MV and the
 * default scan-predicate rule sets. NOTE(review): the final constructor arg
 * {@code false} presumably marks the column non-nullable, which is what makes
 * the fold valid — confirm against ColumnRefOperator.
 */
@Test
public void testNormalizeIsNull() {
    ColumnRefOperator column1 = new ColumnRefOperator(0, Type.INT, "test0", false);
    IsNullPredicateOperator isnotNull = new IsNullPredicateOperator(true, column1);
    ScalarOperator rewritten = new ScalarOperatorRewriter()
        .rewrite(isnotNull, ScalarOperatorRewriter.MV_SCALAR_REWRITE_RULES);
    Assert.assertEquals(ConstantOperator.TRUE, rewritten);
    ScalarOperator rewritten2 = new ScalarOperatorRewriter()
        .rewrite(isnotNull, ScalarOperatorRewriter.DEFAULT_REWRITE_SCAN_PREDICATE_RULES);
    Assert.assertEquals(ConstantOperator.TRUE, rewritten2);
}
/**
 * Builds a bytecode expression computing {@code left == right}.
 *
 * <p>Opcode selection by operand type:
 * <ul>
 *   <li>int: single IF_ICMPNE jump, no separate comparison instruction;</li>
 *   <li>long/float/double: LCMP/FCMPL/DCMPL pushes an int, then IFNE jumps when
 *       it is non-zero. FCMPL/DCMPL yield -1 for NaN operands, so NaN == x is
 *       false — matching Java {@code ==} semantics;</li>
 *   <li>references ({@code getPrimitiveType()} returns null): IF_ACMPNE, i.e.
 *       identity comparison, not {@code equals()};</li>
 *   <li>any other primitive (boolean/byte/short/char) is rejected.</li>
 * </ul>
 *
 * @param left left operand; must have the same type as {@code right}
 * @param right right operand
 * @return the comparison expression
 * @throws IllegalArgumentException for unsupported primitive operand types
 */
static BytecodeExpression equal(BytecodeExpression left, BytecodeExpression right) {
    requireNonNull(left, "left is null");
    requireNonNull(right, "right is null");
    checkArgument(left.getType().equals(right.getType()), "left and right must be the same type");
    OpCode comparisonInstruction;
    OpCode noMatchJumpInstruction;
    Class<?> type = left.getType().getPrimitiveType();
    if (type == int.class) {
        comparisonInstruction = null;
        noMatchJumpInstruction = IF_ICMPNE;
    } else if (type == long.class) {
        comparisonInstruction = LCMP;
        noMatchJumpInstruction = IFNE;
    } else if (type == float.class) {
        comparisonInstruction = FCMPL;
        noMatchJumpInstruction = IFNE;
    } else if (type == double.class) {
        comparisonInstruction = DCMPL;
        noMatchJumpInstruction = IFNE;
    } else if (type == null) {
        // Reference type: identity comparison.
        comparisonInstruction = null;
        noMatchJumpInstruction = IF_ACMPNE;
    } else {
        throw new IllegalArgumentException("Equal does not support " + type);
    }
    return new ComparisonBytecodeExpression("==", comparisonInstruction, noMatchJumpInstruction, left, right);
}
@SuppressWarnings({"FloatingPointEquality", "ComparisonToNaN", "EqualsNaN", "EqualsWithItself"})
@Test
public void testEqual()
        throws Exception {
    // Covers int, long, float, double (including NaN, which is never == itself)
    // and reference (String) comparisons of the generated bytecode.
    assertBytecodeExpression(equal(constantInt(7), constantInt(3)), 7 == 3, "(7 == 3)");
    assertBytecodeExpression(equal(constantInt(7), constantInt(7)), 7 == 7, "(7 == 7)");

    assertBytecodeExpression(equal(constantLong(7L), constantLong(3L)), 7L == 3L, "(7L == 3L)");
    assertBytecodeExpression(equal(constantLong(7L), constantLong(7L)), 7L == 7L, "(7L == 7L)");

    assertBytecodeExpression(equal(constantFloat(7.7f), constantFloat(3.3f)), 7.7f == 3.3f, "(7.7f == 3.3f)");
    assertBytecodeExpression(equal(constantFloat(7.7f), constantFloat(7.7f)), 7.7f == 7.7f, "(7.7f == 7.7f)");
    assertBytecodeExpression(equal(constantFloat(Float.NaN), constantFloat(7.7f)), Float.NaN == 7.7f, "(NaNf == 7.7f)");
    assertBytecodeExpression(equal(constantFloat(Float.NaN), constantFloat(Float.NaN)), Float.NaN == Float.NaN, "(NaNf == NaNf)");

    assertBytecodeExpression(equal(constantDouble(7.7), constantDouble(3.3)), 7.7 == 3.3, "(7.7 == 3.3)");
    assertBytecodeExpression(equal(constantDouble(7.7), constantDouble(7.7)), 7.7 == 7.7, "(7.7 == 7.7)");
    assertBytecodeExpression(equal(constantDouble(Double.NaN), constantDouble(7.7)), Double.NaN == 7.7, "(NaN == 7.7)");
    assertBytecodeExpression(equal(constantDouble(7.7), constantDouble(Double.NaN)), 7.7 == Double.NaN, "(7.7 == NaN)");
    assertBytecodeExpression(equal(constantDouble(Double.NaN), constantDouble(Double.NaN)), Double.NaN == Double.NaN, "(NaN == NaN)");

    // the byte code is verifying with == but that breaks check style so we use
    // String.equals(...) to express the expected value here instead
    assertBytecodeExpression(equal(constantString("foo"), constantString("bar")), "foo".equals("bar"), "(\"foo\" == \"bar\")");
    assertBytecodeExpression(equal(constantString("foo"), constantString("foo")), "foo".equals("foo"), "(\"foo\" == \"foo\")");
}
/**
 * Appends the given character sequence, per the {@link Writer} contract:
 * a null sequence is appended as the literal string "null".
 *
 * @return this writer, to allow chaining
 */
@Override
public Writer append(CharSequence csq) {
    write(csq == null ? "null" : csq.toString());
    return this;
}
@Test
void testAppend() {
    // Mix the char and bounded-CharSequence overloads: append 'a', the
    // subsequence "b" of "abc" (indices 1..2), and 'c', giving "abc".
    UnsafeStringWriter writer = new UnsafeStringWriter();
    writer.append('a');
    writer.append("abc", 1, 2);
    writer.append('c');
    writer.flush();
    writer.close();
    assertThat(writer.toString(), is("abc"));
}
/**
 * Serializes the full telemetry payload for this server instance as one JSON object.
 * Writes core identity fields, the database and plugin sections, optional sections
 * that are only emitted when present, then delegates each statistics section to a
 * dedicated writer before letting registered extensions contribute.
 */
public void writeTelemetryData(JsonWriter json, TelemetryData telemetryData) {
    json.beginObject();
    json.prop("id", telemetryData.getServerId());
    json.prop(VERSION, telemetryData.getVersion());
    json.prop("messageSequenceNumber", telemetryData.getMessageSequenceNumber());
    // Timestamp of serialization (not of data collection), normalized to UTC.
    json.prop("localTimestamp", toUtc(system2.now()));
    json.prop(NCD_ID, telemetryData.getNcdId());
    telemetryData.getEdition().ifPresent(e -> json.prop("edition", e.name().toLowerCase(Locale.ENGLISH)));
    json.prop("defaultQualityGate", telemetryData.getDefaultQualityGate());
    json.prop("sonarway_quality_gate_uuid", telemetryData.getSonarWayQualityGate());
    json.name("database");
    json.beginObject();
    json.prop("name", telemetryData.getDatabase().name());
    json.prop(VERSION, telemetryData.getDatabase().version());
    json.endObject();
    // Installed plugins as an array of {name, version} objects.
    json.name("plugins");
    json.beginArray();
    telemetryData.getPlugins().forEach((plugin, version) -> {
        json.beginObject();
        json.prop("name", plugin);
        json.prop(VERSION, version);
        json.endObject();
    });
    json.endArray();
    // Optional sections below are omitted entirely when there is nothing to report.
    if (!telemetryData.getCustomSecurityConfigs().isEmpty()) {
        json.name("customSecurityConfig");
        json.beginArray();
        json.values(telemetryData.getCustomSecurityConfigs());
        json.endArray();
    }
    telemetryData.hasUnanalyzedC().ifPresent(hasUnanalyzedC -> json.prop("hasUnanalyzedC", hasUnanalyzedC));
    telemetryData.hasUnanalyzedCpp().ifPresent(hasUnanalyzedCpp -> json.prop("hasUnanalyzedCpp", hasUnanalyzedCpp));
    if (telemetryData.getInstallationDate() != null) {
        json.prop("installationDate", toUtc(telemetryData.getInstallationDate()));
    }
    if (telemetryData.getInstallationVersion() != null) {
        json.prop("installationVersion", telemetryData.getInstallationVersion());
    }
    json.prop("container", telemetryData.isInContainer());
    // Each section writer appends its own top-level properties to the still-open object.
    writeUserData(json, telemetryData);
    writeProjectData(json, telemetryData);
    writeProjectStatsData(json, telemetryData);
    writeBranches(json, telemetryData);
    writeNewCodeDefinitions(json, telemetryData);
    writeQualityGates(json, telemetryData);
    writeQualityProfiles(json, telemetryData);
    writeManagedInstanceInformation(json, telemetryData.getManagedInstanceInformation());
    writeCloudUsage(json, telemetryData.getCloudUsage());
    // Registered telemetry extensions may contribute additional properties.
    extensions.forEach(e -> e.write(json));
    json.endObject();
}
@Test
void does_not_write_installation_date_if_null() {
    // A null installation date must cause the property to be omitted entirely,
    // not serialized as null.
    TelemetryData data = telemetryBuilder()
            .setInstallationDate(null)
            .build();
    String json = writeTelemetryData(data);
    assertThat(json).doesNotContain("installationDate");
}
@Override public List<Namespace> listNamespaces(Namespace namespace) throws NoSuchNamespaceException { if (!namespace.isEmpty()) { // if it is not a list all op, just check if the namespace exists and return empty. if (namespaceExists(namespace)) { return Lists.newArrayList(); } throw new NoSuchNamespaceException( "Glue does not support nested namespace, cannot list namespaces under %s", namespace); } // should be safe to list all before returning the list, instead of dynamically load the list. String nextToken = null; List<Namespace> results = Lists.newArrayList(); do { GetDatabasesResponse response = glue.getDatabases( GetDatabasesRequest.builder() .catalogId(awsProperties.glueCatalogId()) .nextToken(nextToken) .build()); nextToken = response.nextToken(); if (response.hasDatabaseList()) { results.addAll( response.databaseList().stream() .map(GlueToIcebergConverter::toNamespace) .collect(Collectors.toList())); } } while (nextToken != null); LOG.debug("Listing namespace {} returned namespaces: {}", namespace, results); return results; }
@Test
public void testListNamespacesBadName() {
    // Hyphens are not legal in Glue database names, so namespace validation
    // must reject "db-1" before any Glue call is attempted.
    assertThatThrownBy(() -> glueCatalog.listNamespaces(Namespace.of("db-1")))
        .isInstanceOf(ValidationException.class)
        .hasMessage(
            "Cannot convert namespace db-1 to Glue database name, "
                + "because it must be 1-252 chars of lowercase letters, numbers, underscore");
}
/**
 * Evaluates the configured formulas over the whole report component tree by
 * crawling it with a formula-executing visitor.
 */
public void execute() {
    FormulaExecutorComponentVisitor formulaVisitor =
        FormulaExecutorComponentVisitor.newBuilder(metricRepository, measureRepository).buildFor(formulas);
    new PathAwareCrawler<>(formulaVisitor).visit(treeRootHolder.getReportTreeRoot());
}
@Test
public void dont_compute_duplicated_lines_density_when_lines_is_zero() {
    // With zero lines the density ratio is undefined (division by zero),
    // so no measure must be produced at all.
    when(FILE_1_ATTRS.getLines()).thenReturn(0);
    when(FILE_2_ATTRS.getLines()).thenReturn(0);
    underTest.execute();
    assertNoRawMeasures(DUPLICATED_LINES_DENSITY_KEY);
}
/**
 * Describes the log directories of each requested broker. One request/future pair
 * is created per broker, since each broker only reports its own log directories.
 * Empty responses are mapped to an exception (see inline comment on the v3 quirk).
 */
@Override
public DescribeLogDirsResult describeLogDirs(Collection<Integer> brokers, DescribeLogDirsOptions options) {
    final Map<Integer, KafkaFutureImpl<Map<String, LogDirDescription>>> futures = new HashMap<>(brokers.size());
    final long now = time.milliseconds();
    for (final Integer brokerId : brokers) {
        KafkaFutureImpl<Map<String, LogDirDescription>> future = new KafkaFutureImpl<>();
        futures.put(brokerId, future);

        runnable.call(new Call("describeLogDirs", calcDeadlineMs(now, options.timeoutMs()),
            new ConstantNodeIdProvider(brokerId)) {

            @Override
            public DescribeLogDirsRequest.Builder createRequest(int timeoutMs) {
                // Query selected partitions in all log directories
                return new DescribeLogDirsRequest.Builder(new DescribeLogDirsRequestData().setTopics(null));
            }

            @Override
            public void handleResponse(AbstractResponse abstractResponse) {
                DescribeLogDirsResponse response = (DescribeLogDirsResponse) abstractResponse;
                Map<String, LogDirDescription> descriptions = logDirDescriptions(response);
                if (!descriptions.isEmpty()) {
                    future.complete(descriptions);
                } else {
                    // Up to v3 DescribeLogDirsResponse did not have an error code field, hence it defaults to None
                    Errors error = response.data().errorCode() == Errors.NONE.code()
                        ? Errors.CLUSTER_AUTHORIZATION_FAILED
                        : Errors.forCode(response.data().errorCode());
                    future.completeExceptionally(error.exception());
                }
            }

            @Override
            void handleFailure(Throwable throwable) {
                future.completeExceptionally(throwable);
            }
        }, now);
    }

    // Defensive copy so later mutation of the local map cannot affect the returned result.
    return new DescribeLogDirsResult(new HashMap<>(futures));
}
@SuppressWarnings("deprecation")
@Test
public void testDescribeLogDirsOfflineDirDeprecated() throws ExecutionException, InterruptedException {
    // Verifies the deprecated values()/all() accessors still surface a broker's
    // offline log dir (KAFKA_STORAGE_ERROR) with an empty replica map.
    Set<Integer> brokers = singleton(0);
    String logDir = "/var/data/kafka";
    Errors error = Errors.KAFKA_STORAGE_ERROR;

    try (AdminClientUnitTestEnv env = mockClientEnv()) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        env.kafkaClient().prepareResponseFrom(
            prepareDescribeLogDirsResponse(error, logDir, emptyList()),
            env.cluster().nodeById(0));

        DescribeLogDirsResult result = env.adminClient().describeLogDirs(brokers);

        // Deprecated per-broker view.
        Map<Integer, KafkaFuture<Map<String, DescribeLogDirsResponse.LogDirInfo>>> deprecatedValues = result.values();
        assertEquals(brokers, deprecatedValues.keySet());
        assertNotNull(deprecatedValues.get(0));
        Map<String, DescribeLogDirsResponse.LogDirInfo> valuesMap = deprecatedValues.get(0).get();
        assertEquals(singleton(logDir), valuesMap.keySet());
        assertEquals(error, valuesMap.get(logDir).error);
        assertEquals(emptySet(), valuesMap.get(logDir).replicaInfos.keySet());

        // Deprecated aggregated view.
        Map<Integer, Map<String, DescribeLogDirsResponse.LogDirInfo>> deprecatedAll = result.all().get();
        assertEquals(brokers, deprecatedAll.keySet());
        Map<String, DescribeLogDirsResponse.LogDirInfo> allMap = deprecatedAll.get(0);
        assertNotNull(allMap);
        assertEquals(singleton(logDir), allMap.keySet());
        assertEquals(error, allMap.get(logDir).error);
        assertEquals(emptySet(), allMap.get(logDir).replicaInfos.keySet());
    }
}
public static String humanReadableByteCount(final long bytes) { final String[] units = {"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"}; final int base = 1024; // When using the smallest unit no decimal point is needed, because it's the exact number. if (bytes < base) { return bytes + " " + units[0]; } final int exponent = (int) (Math.log(bytes) / Math.log(base)); final String unit = units[exponent]; return StringUtils.f("%.1f %s", bytes / Math.pow(base, exponent), unit); }
@Test
public void testHumanReadable() {
    // Representative sizes across units, including the exact 1023 B / 1.0 KiB boundary.
    assertThat(StringUtils.humanReadableByteCount(1024L * 1024L * 1024L * 5L + 1024L * 1024L * 512L)).isEqualTo("5.5 GiB");
    assertThat(StringUtils.humanReadableByteCount(1024L * 1024L * 1024L * 5L)).isEqualTo("5.0 GiB");
    assertThat(StringUtils.humanReadableByteCount(1024L * 1024L * 4L + 1024L * 900L)).isEqualTo("4.9 MiB");
    assertThat(StringUtils.humanReadableByteCount(1023)).isEqualTo("1023 B");
    assertThat(StringUtils.humanReadableByteCount(1024)).isEqualTo("1.0 KiB");
    assertThat(StringUtils.humanReadableByteCount(1024L * 1024L * 1024L * 1024L * 5L + 1024L * 1024L * 512L)).isEqualTo("5.0 TiB");
    assertThat(StringUtils.humanReadableByteCount(1024L * 5L + 512L)).isEqualTo("5.5 KiB");
}
/**
 * Builds the PostgreSQL CREATE TABLE statement for the given table path.
 * Unique constraints are emitted inline as table constraints; index constraints are
 * collected into {@code createIndexSqls} as separate statements (side effect on this
 * builder, as is setting {@code isHaveConstraintKey}). Column comments are appended
 * after the CREATE TABLE statement.
 */
public String build(TablePath tablePath) {
    StringBuilder createTableSql = new StringBuilder();
    createTableSql
            .append(CatalogUtils.quoteIdentifier("CREATE TABLE ", fieldIde))
            .append(tablePath.getSchemaAndTableName("\""))
            .append(" (\n");

    List<String> columnSqls =
            columns.stream()
                    .map(
                            column ->
                                    CatalogUtils.quoteIdentifier(
                                            buildColumnSql(column), fieldIde))
                    .collect(Collectors.toList());

    // Constraints are only emitted when index creation is requested and constraints exist.
    if (createIndex && CollectionUtils.isNotEmpty(constraintKeys)) {
        for (ConstraintKey constraintKey : constraintKeys) {
            // Skip unnamed constraints and the one that duplicates the primary key.
            if (StringUtils.isBlank(constraintKey.getConstraintName())
                    || (primaryKey != null
                            && StringUtils.equals(
                                    primaryKey.getPrimaryKey(),
                                    constraintKey.getConstraintName()))) {
                continue;
            }
            // Side effect: the builder remembers that at least one constraint was emitted.
            isHaveConstraintKey = true;
            switch (constraintKey.getConstraintType()) {
                case UNIQUE_KEY:
                    String uniqueKeySql = buildUniqueKeySql(constraintKey);
                    columnSqls.add("\t" + uniqueKeySql);
                    break;
                case INDEX_KEY:
                    // Indexes cannot be declared inline; collect them as separate statements.
                    String indexKeySql = buildIndexKeySql(tablePath, constraintKey);
                    createIndexSqls.add(indexKeySql);
                    break;
                case FOREIGN_KEY:
                    // todo: add foreign key
                    break;
            }
        }
    }

    createTableSql.append(String.join(",\n", columnSqls));
    createTableSql.append("\n);");

    // COMMENT statements for every column that has a comment, appended after the table DDL.
    List<String> commentSqls =
            columns.stream()
                    .filter(column -> StringUtils.isNotBlank(column.getComment()))
                    .map(
                            columns ->
                                    buildColumnCommentSql(
                                            columns, tablePath.getSchemaAndTableName("\"")))
                    .collect(Collectors.toList());

    if (!commentSqls.isEmpty()) {
        createTableSql.append("\n");
        createTableSql.append(String.join(";\n", commentSqls)).append(";");
    }
    return createTableSql.toString();
}
@Test
void build() {
    // Run the builder both with and without index creation, for both "other DB" variants.
    Arrays.asList(true, false)
            .forEach(
                    otherDB -> {
                        CatalogTable catalogTable = catalogTable(otherDB);
                        // With createIndex=true: unique key is inline, index goes to getCreateIndexSqls().
                        PostgresCreateTableSqlBuilder postgresCreateTableSqlBuilder =
                                new PostgresCreateTableSqlBuilder(catalogTable, true);
                        String createTableSql =
                                postgresCreateTableSqlBuilder.build(
                                        catalogTable.getTableId().toTablePath());
                        Assertions.assertEquals(
                                "CREATE TABLE \"test\" (\n"
                                        + "\"id\" int4 NOT NULL PRIMARY KEY,\n"
                                        + "\"name\" text NOT NULL,\n"
                                        + "\"age\" int4 NOT NULL,\n"
                                        + "\tCONSTRAINT unique_name UNIQUE (\"name\")\n"
                                        + ");",
                                createTableSql);
                        Assertions.assertEquals(
                                Lists.newArrayList(
                                        "CREATE INDEX test_index_age ON \"test\"(\"age\");"),
                                postgresCreateTableSqlBuilder.getCreateIndexSqls());

                        // With createIndex=false: no constraints and no separate index statements.
                        PostgresCreateTableSqlBuilder postgresCreateTableSqlBuilderSkipIndex =
                                new PostgresCreateTableSqlBuilder(catalogTable, false);
                        String createTableSqlSkipIndex =
                                postgresCreateTableSqlBuilderSkipIndex.build(
                                        catalogTable.getTableId().toTablePath());
                        Assertions.assertEquals(
                                "CREATE TABLE \"test\" (\n"
                                        + "\"id\" int4 NOT NULL,\n"
                                        + "\"name\" text NOT NULL,\n"
                                        + "\"age\" int4 NOT NULL\n"
                                        + ");",
                                createTableSqlSkipIndex);
                        Assertions.assertEquals(
                                Lists.newArrayList(),
                                postgresCreateTableSqlBuilderSkipIndex.getCreateIndexSqls());
                    });
}
/**
 * Demonstrates automatic resource management: each try-with-resources block
 * closes its resource when the block exits, even if an exception is thrown.
 */
public static void main(String[] args) {
    try (var ignored = new SlidingDoor()) {
        LOGGER.info("Walking in.");
    }
    try (var ignored = new TreasureChest()) {
        LOGGER.info("Looting contents.");
    }
}
@Test
void shouldExecuteWithoutException() {
    // Smoke test: the demo's main method must run to completion without throwing.
    assertDoesNotThrow(() -> App.main(new String[]{}));
}
/**
 * Unregisters the storage units named in the statement. With "IF EXISTS" the
 * existence pre-check is skipped; in-use units are always rejected. Persistence
 * failures are wrapped in a {@link StorageUnitsOperateException}.
 */
@Override
public void executeUpdate(final UnregisterStorageUnitStatement sqlStatement, final ContextManager contextManager) {
    var storageUnitNames = sqlStatement.getStorageUnitNames();
    // "IF EXISTS" semantics: missing units are silently ignored.
    if (!sqlStatement.isIfExists()) {
        checkExisted(storageUnitNames);
    }
    checkInUsed(sqlStatement);
    try {
        contextManager.getPersistServiceFacade().getMetaDataManagerPersistService()
                .unregisterStorageUnits(database.getName(), storageUnitNames);
    } catch (final SQLException | ShardingSphereServerException ex) {
        throw new StorageUnitsOperateException("unregister", storageUnitNames, ex);
    }
}
@Test
void assertExecuteUpdateWithIfExists() throws SQLException {
    // With ifExists=true the existence check is bypassed and the units are
    // unregistered via the persist service directly.
    UnregisterStorageUnitStatement sqlStatement =
            new UnregisterStorageUnitStatement(true, Collections.singleton("foo_ds"), true, false);
    executor.executeUpdate(sqlStatement, contextManager);
    verify(metaDataManagerPersistService).unregisterStorageUnits("foo_db", sqlStatement.getStorageUnitNames());
}
/**
 * Assembles the BATCH_GET response envelope from the resource method's result map.
 * Successful entities are projected per the request context; per-key service errors
 * (from a BatchResult and from the context's batch key errors) are merged into the
 * same keyed map. Null keys anywhere are rejected as a server error.
 */
@Override
public RestLiResponseData<BatchGetResponseEnvelope> buildRestLiResponseData(Request request,
    RoutingResult routingResult, Object result, Map<String, String> headers, List<HttpCookie> cookies) {
  @SuppressWarnings({ "unchecked" })
  /* constrained by signature of {@link com.linkedin.restli.server.resources.CollectionResource#batchGet(java.util.Set)} */
  final Map<Object, RecordTemplate> entities = (Map<Object, RecordTemplate>) result;
  Map<Object, HttpStatus> statuses = Collections.emptyMap();
  Map<Object, RestLiServiceException> serviceErrors = Collections.emptyMap();

  // A BatchResult additionally carries per-key statuses and per-key errors.
  if (result instanceof BatchResult) {
    @SuppressWarnings({ "unchecked" })
    /* constrained by signature of {@link com.linkedin.restli.server.resources.CollectionResource#batchGet(java.util.Set)} */
    final BatchResult<Object, RecordTemplate> batchResult = (BatchResult<Object, RecordTemplate>) result;
    statuses = batchResult.getStatuses();
    serviceErrors = batchResult.getErrors();
  }

  try {
    if (statuses.containsKey(null)) {
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Unexpected null encountered. Null key inside of a Map returned by the resource method: " + routingResult
              .getResourceMethod());
    }
  } catch (NullPointerException e) {
    // Some map implementations will throw an NPE if they do not support null keys.
    // In this case it is OK to swallow this exception and proceed.
  }

  TimingContextUtil.beginTiming(routingResult.getContext().getRawRequestContext(),
      FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key());
  Map<Object, BatchResponseEntry> batchResult = new HashMap<>(entities.size() + serviceErrors.size());
  for (Map.Entry<Object, RecordTemplate> entity : entities.entrySet()) {
    if (entity.getKey() == null) {
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Unexpected null encountered. Null key inside of a Map returned by the resource method: " + routingResult
              .getResourceMethod());
    }
    // Keys may need translation when the request used an alternative key format.
    Object finalKey = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(entity.getKey(), routingResult);
    DataMap rawData = entity.getValue().data();
    if (routingResult.getContext().isFillInDefaultsRequested()) {
      rawData = (DataMap) ResponseUtils.fillInDataDefault(entity.getValue().schema(), rawData);
    }
    // Apply the caller-requested field projection before building the entry.
    final DataMap projectedData = RestUtils.projectFields(rawData, routingResult.getContext());
    AnyRecord anyRecord = new AnyRecord(projectedData);
    batchResult.put(finalKey, new BatchResponseEntry(statuses.get(entity.getKey()), anyRecord));
  }
  TimingContextUtil.endTiming(routingResult.getContext().getRawRequestContext(),
      FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key());

  // Merge per-key errors reported by the BatchResult itself.
  for (Map.Entry<Object, RestLiServiceException> entity : serviceErrors.entrySet()) {
    if (entity.getKey() == null || entity.getValue() == null) {
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Unexpected null encountered. Null key inside of a Map returned by the resource method: " + routingResult
              .getResourceMethod());
    }
    Object finalKey = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(entity.getKey(), routingResult);
    batchResult.put(finalKey, new BatchResponseEntry(statuses.get(entity.getKey()), entity.getValue()));
  }

  // Merge per-key errors accumulated on the request context.
  final Map<Object, RestLiServiceException> contextErrors = routingResult.getContext().getBatchKeyErrors();
  for (Map.Entry<Object, RestLiServiceException> entry : contextErrors.entrySet()) {
    Object finalKey = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(entry.getKey(), routingResult);
    batchResult.put(finalKey, new BatchResponseEntry(statuses.get(entry.getKey()), entry.getValue()));
  }

  return new RestLiResponseDataImpl<>(new BatchGetResponseEnvelope(HttpStatus.S_200_OK, batchResult), headers, cookies);
}
@Test
public void testContextErrors() {
    // An error registered on the context's batch-key-error map must surface in the
    // response envelope even when the resource returned an empty BatchResult.
    BatchGetResponseBuilder builder = new BatchGetResponseBuilder(new ErrorResponseBuilder());
    ServerResourceContext context = EasyMock.createMock(ServerResourceContext.class);
    Map<Object, RestLiServiceException> errors = new HashMap<>();
    RestLiServiceException exception = new RestLiServiceException(HttpStatus.S_402_PAYMENT_REQUIRED);
    errors.put("foo", exception);
    EasyMock.expect(context.hasParameter("altkey")).andReturn(false).anyTimes();
    EasyMock.expect(context.getBatchKeyErrors()).andReturn(errors).anyTimes();
    EasyMock.expect(context.getRawRequestContext()).andReturn(new RequestContext()).anyTimes();
    EasyMock.replay(context);
    RoutingResult routingResult = new RoutingResult(context, null);
    RestLiResponseData<BatchGetResponseEnvelope> responseData =
        builder.buildRestLiResponseData(null, routingResult,
            new BatchResult<>(Collections.emptyMap(), Collections.emptyMap()),
            Collections.emptyMap(), Collections.emptyList());
    Assert.assertEquals(responseData.getResponseEnvelope().getBatchResponseMap().get("foo").getException(), exception);
    Assert.assertEquals(responseData.getResponseEnvelope().getBatchResponseMap().size(), 1);
}
/**
 * Computes the CRC-16/ARC checksum of the given byte range.
 * Parameters of the ARC variant: polynomial 0x8005, zero initial value,
 * input and output both reflected, no final XOR.
 */
public static int ARC(@NonNull final byte[] data, final int offset, final int length) {
    final int polynomial = 0x8005;
    final int initialValue = 0x0000;
    final int finalXorValue = 0x0000;
    return CRC(polynomial, initialValue, data, offset, length, true, true, finalXorValue);
}
@Test
public void ARC_A() {
    // Known-answer test: CRC-16/ARC of the single byte 'A' is 0x30C0.
    final byte[] data = new byte[] { 'A' };
    assertEquals(0x30C0, CRC16.ARC(data, 0, 1));
}
/**
 * Returns the analyzers for every analysis phase (convenience overload that
 * forwards all {@code AnalysisPhase} values to the phase-filtered variant).
 */
public List<Analyzer> getAnalyzers() {
    return getAnalyzers(AnalysisPhase.values());
}
@Test
public void testGetAnalyzers() {
    // The service-loader based discovery must find the Jar Analyzer among the
    // analyzers available on the context class loader.
    AnalyzerService instance = new AnalyzerService(Thread.currentThread().getContextClassLoader(), getSettings());
    List<Analyzer> result = instance.getAnalyzers();
    boolean found = false;
    for (Analyzer a : result) {
        if ("Jar Analyzer".equals(a.getName())) {
            found = true;
            break;
        }
    }
    assertTrue("JarAnalyzer loaded", found);
}
/**
 * Returns whether this version lies within the inclusive range [from, to],
 * comparing the packed integer form of each version.
 */
public boolean isBetween(Version from, Version to) {
    int packed = this.pack();
    return from.pack() <= packed && packed <= to.pack();
}
@Test
public void isBetween() throws Exception {
    // Both bounds are inclusive; ranges entirely below or above 3.0 must fail.
    assertFalse(V3_0.isBetween(of(0, 0), of(1, 0)));
    assertFalse(V3_0.isBetween(of(4, 0), of(5, 0)));
    assertTrue(V3_0.isBetween(of(3, 0), of(5, 0)));
    assertTrue(V3_0.isBetween(of(2, 0), of(3, 0)));
    assertTrue(V3_0.isBetween(of(1, 0), of(5, 0)));
}
/**
 * Creates a GET rest-setting builder for the resource with the given id.
 * The id is validated by {@code checkId} first and then matched exactly
 * via the {@code eq} matcher. NOTE(review): ids containing '/' appear to be
 * rejected by checkId — confirm against its implementation.
 */
public static RestSettingBuilder get(final String id) {
    return get(eq(checkId(id)));
}
@Test
public void should_throw_exception_for_resource_name_with_slash() {
    // Resource names must be plain identifiers; a '/' is rejected at registration time.
    assertThrows(IllegalArgumentException.class, () ->
            server.resource("hello/world", get().response("hello")));
}
/**
 * Applies the configured HTTP client settings to the given AWS sync client builder.
 * Falls back to the default client type when none was configured (note: this
 * mutates {@code httpClientType} as a side effect, matching existing behavior).
 *
 * @throws IllegalArgumentException if the configured client type is unknown
 */
public <T extends AwsSyncClientBuilder> void applyHttpClientConfigurations(T builder) {
    if (Strings.isNullOrEmpty(httpClientType)) {
        httpClientType = CLIENT_TYPE_DEFAULT;
    }

    if (CLIENT_TYPE_URLCONNECTION.equals(httpClientType)) {
        UrlConnectionHttpClientConfigurations urlConnectionHttpClientConfigurations =
            loadHttpClientConfigurations(UrlConnectionHttpClientConfigurations.class.getName());
        urlConnectionHttpClientConfigurations.configureHttpClientBuilder(builder);
    } else if (CLIENT_TYPE_APACHE.equals(httpClientType)) {
        ApacheHttpClientConfigurations apacheHttpClientConfigurations =
            loadHttpClientConfigurations(ApacheHttpClientConfigurations.class.getName());
        apacheHttpClientConfigurations.configureHttpClientBuilder(builder);
    } else {
        throw new IllegalArgumentException("Unrecognized HTTP client type " + httpClientType);
    }
}
@Test
public void testUrlHttpClientConfiguration() {
    // Selecting "urlconnection" must configure the S3 builder with a
    // UrlConnectionHttpClient builder, captured off the mocked S3ClientBuilder.
    Map<String, String> properties = Maps.newHashMap();
    properties.put(HttpClientProperties.CLIENT_TYPE, "urlconnection");
    HttpClientProperties httpClientProperties = new HttpClientProperties(properties);
    S3ClientBuilder mockS3ClientBuilder = Mockito.mock(S3ClientBuilder.class);
    ArgumentCaptor<SdkHttpClient.Builder> httpClientBuilderCaptor =
        ArgumentCaptor.forClass(SdkHttpClient.Builder.class);

    httpClientProperties.applyHttpClientConfigurations(mockS3ClientBuilder);
    Mockito.verify(mockS3ClientBuilder).httpClientBuilder(httpClientBuilderCaptor.capture());
    SdkHttpClient.Builder capturedHttpClientBuilder = httpClientBuilderCaptor.getValue();

    assertThat(capturedHttpClientBuilder)
        .as("Should use url connection http client")
        .isInstanceOf(UrlConnectionHttpClient.Builder.class);
}
/**
 * REST endpoint returning a single timeline entity identified by type and id.
 * Access failures map to 403, bad arguments to 400, unexpected failures to 500,
 * and a missing entity to 404.
 *
 * @param fields optional comma-separated list restricting which entity fields are returned
 */
@GET
@Path("/{entityType}/{entityId}")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8 /* , MediaType.APPLICATION_XML */})
public TimelineEntity getEntity(
    @Context HttpServletRequest req,
    @Context HttpServletResponse res,
    @PathParam("entityType") String entityType,
    @PathParam("entityId") String entityId,
    @QueryParam("fields") String fields) {
  init(res);
  TimelineEntity entity = null;
  try {
    entity = timelineDataManager.getEntity(
        parseStr(entityType),
        parseStr(entityId),
        parseFieldsStr(fields, ","),
        getUser(req));
  } catch (YarnException e) {
    // The user doesn't have the access to override the existing domain.
    LOG.info(e.getMessage(), e);
    throw new ForbiddenException(e);
  } catch (IllegalArgumentException e) {
    // Malformed type/id/fields input from the client.
    throw new BadRequestException(e);
  } catch (Exception e) {
    LOG.error("Error getting entity", e);
    throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
  }
  if (entity == null) {
    throw new NotFoundException("Timeline entity "
        + new EntityIdentifier(parseStr(entityId), parseStr(entityType))
        + " is not found");
  }
  return entity;
}
@Test
void testSecondaryFilters() {
    // Query entities of type_1 with two secondary filters and verify the JSON
    // response type and the returned entity set.
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("timeline")
        .path("type_1")
        .queryParam("secondaryFilter",
            "user:username,appname:" + Integer.toString(Integer.MAX_VALUE))
        .accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    verifyEntities(response.getEntity(TimelineEntities.class));
}
/**
 * Returns the mail configuration, lazily creating a default one on first access.
 * NOTE(review): the lazy initialization is not synchronized — concurrent first
 * callers could each observe null and create separate instances; confirm this
 * is only reached from a single thread.
 */
public MailConfiguration getConfiguration() {
    if (configuration == null) {
        configuration = new MailConfiguration(getCamelContext());
    }
    return configuration;
}
@Test
public void testDebugMode() {
    // The debugMode=true URI option must be propagated into the endpoint's configuration.
    MailEndpoint endpoint = checkEndpoint("smtp://james@myhost?password=secret&debugMode=true");
    MailConfiguration config = endpoint.getConfiguration();
    assertEquals(true, config.isDebugMode());
}
/**
 * Resolves the application main class, in priority order: the explicitly configured
 * class (validated), then the jar plugin's configured main class (validated), then
 * inference over all class files. Each fallback is logged.
 *
 * @param configuredMainClass main class configured in the plugin, or null if unset
 * @return the fully qualified main class name
 * @throws MainClassInferenceException if a configured class is invalid, no main class
 *     is found, or multiple candidates are found
 */
public static String resolveMainClass(
    @Nullable String configuredMainClass, ProjectProperties projectProperties)
    throws MainClassInferenceException, IOException {
  // 1. An explicitly configured main class wins, but must be a valid Java class name.
  if (configuredMainClass != null) {
    if (isValidJavaClass(configuredMainClass)) {
      return configuredMainClass;
    }
    throw new MainClassInferenceException(
        HelpfulSuggestions.forMainClassNotFound(
            "'mainClass' configured in "
                + projectProperties.getPluginName()
                + " is not a valid Java class: "
                + configuredMainClass,
            projectProperties.getPluginName()));
  }

  projectProperties.log(
      LogEvent.info(
          "Searching for main class... Add a 'mainClass' configuration to '"
              + projectProperties.getPluginName()
              + "' to improve build speed."));

  // 2. Fall back to the main class configured in the jar plugin, if valid.
  String mainClassFromJarPlugin = projectProperties.getMainClassFromJarPlugin();
  if (mainClassFromJarPlugin != null && isValidJavaClass(mainClassFromJarPlugin)) {
    return mainClassFromJarPlugin;
  }

  if (mainClassFromJarPlugin != null) {
    projectProperties.log(
        LogEvent.warn(
            "'mainClass' configured in "
                + projectProperties.getJarPluginName()
                + " is not a valid Java class: "
                + mainClassFromJarPlugin));
  }

  projectProperties.log(
      LogEvent.info(
          "Could not find a valid main class from "
              + projectProperties.getJarPluginName()
              + "; looking into all class files to infer main class."));

  // 3. Last resort: scan every class file and infer a class with a main method.
  MainClassFinder.Result mainClassFinderResult =
      MainClassFinder.find(projectProperties.getClassFiles(), projectProperties::log);
  switch (mainClassFinderResult.getType()) {
    case MAIN_CLASS_FOUND:
      return mainClassFinderResult.getFoundMainClass();

    case MAIN_CLASS_NOT_FOUND:
      throw new MainClassInferenceException(
          HelpfulSuggestions.forMainClassNotFound(
              "Main class was not found", projectProperties.getPluginName()));

    case MULTIPLE_MAIN_CLASSES:
      throw new MainClassInferenceException(
          HelpfulSuggestions.forMainClassNotFound(
              "Multiple valid main classes were found: "
                  + String.join(", ", mainClassFinderResult.getFoundMainClasses()),
              projectProperties.getPluginName()));

    default:
      // All Result types are handled above; this is unreachable by construction.
      throw new IllegalStateException("Cannot reach here");
  }
}
@Test
public void testResolveMainClass_invalidMainClassConfigured() throws IOException {
    // An invalid configured class must fail fast without logging the search message.
    try {
        MainClassResolver.resolveMainClass("In Val id", mockProjectProperties);
        Assert.fail();
    } catch (MainClassInferenceException ex) {
        MatcherAssert.assertThat(
            ex.getMessage(),
            CoreMatchers.containsString(
                "'mainClass' configured in jib-plugin is not a valid Java class: In Val id"));
        Mockito.verify(mockProjectProperties, Mockito.never()).log(Mockito.any());
    }
}
/**
 * Checks whether the caller may perform the given ACL operation on the topic,
 * delegating to the shared cache-aware access check keyed on
 * (security context, TOPIC, topic name, operation).
 */
@Override
public void checkTopicAccess(
    final KsqlSecurityContext securityContext,
    final String topicName,
    final AclOperation operation
) {
    final CacheKey key = new CacheKey(securityContext, AuthObjectType.TOPIC, topicName, operation);
    checkAccess(key);
}
@Test
public void shouldCheckBackendValidatorOnFirstTopicAccessRequest() {
    // When: first access for this (context, topic, operation) triple — a cache miss.
    cache.checkTopicAccess(securityContext, TOPIC_1, AclOperation.READ);

    // Then: the backend validator is consulted exactly once.
    verify(backendValidator, times(1))
        .checkTopicAccess(securityContext, TOPIC_1, AclOperation.READ);
    verifyNoMoreInteractions(backendValidator);
}
/**
 * Converts the given string property to the requested target type using the
 * registered {@link AbstractPropertyConverter} for that type.
 *
 * @param property    the raw string value to convert; may be {@code null} — the
 *                    registered converter decides the result in that case
 * @param targetClass the type to convert to
 * @return the converted value
 * @throws MissingFormatArgumentException if no converter is registered for {@code targetClass}
 */
@SuppressWarnings("unchecked") // safe: registry maps targetClass to a converter producing that type
public <T> T convert(String property, Class<T> targetClass) {
    final AbstractPropertyConverter<?> converter = converterRegistry.get(targetClass);
    if (converter == null) {
        throw new MissingFormatArgumentException("converter not found, can't convert from String to "
                + targetClass.getCanonicalName());
    }
    return (T) converter.convert(property);
}
@Test
void testConvertLongForEmptyProperty() {
    // A null input property converts to null rather than throwing.
    assertNull(compositeConverter.convert(null, Long.class));
}
/**
 * Resolves the BigQuery schema for the Hudi table, excluding any fields that are
 * partition columns (those are synced separately).
 *
 * @throws HoodieBigQuerySyncException if the Avro schema cannot be resolved or converted
 */
Schema getTableSchema(HoodieTableMetaClient metaClient, List<String> partitionFields) {
    try {
        Schema schema = convertSchema(tableSchemaResolverSupplier.apply(metaClient).getTableAvroSchema());
        if (!partitionFields.isEmpty()) {
            // Strip partition columns from the data schema before returning it.
            schema = Schema.of(
                schema.getFields().stream()
                    .filter(field -> !partitionFields.contains(field.getName()))
                    .collect(Collectors.toList()));
        }
        return schema;
    } catch (Exception e) {
        throw new HoodieBigQuerySyncException("Failed to get table schema", e);
    }
}
@Test
void getTableSchema_withPartitionFields() throws Exception {
    // Resolve the primitive-types Avro schema and verify the partition column
    // is excluded from the resulting BigQuery schema.
    HoodieTableMetaClient mockMetaClient = mock(HoodieTableMetaClient.class);
    TableSchemaResolver mockTableSchemaResolver = mock(TableSchemaResolver.class);
    when(mockTableSchemaResolver.getTableAvroSchema()).thenReturn(PRIMITIVE_TYPES);
    BigQuerySchemaResolver resolver = new BigQuerySchemaResolver(metaClient -> mockTableSchemaResolver);
    com.google.cloud.bigquery.Schema expected = com.google.cloud.bigquery.Schema.of(
        Field.newBuilder("requiredBoolean", StandardSQLTypeName.BOOL).setMode(Field.Mode.REQUIRED).build(),
        Field.newBuilder("optionalBoolean", StandardSQLTypeName.BOOL).setMode(Field.Mode.NULLABLE).build(),
        Field.newBuilder("requiredInt", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build(),
        Field.newBuilder("optionalInt", StandardSQLTypeName.INT64).setMode(Field.Mode.NULLABLE).build(),
        Field.newBuilder("requiredLong", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build(),
        Field.newBuilder("optionalLong", StandardSQLTypeName.INT64).setMode(Field.Mode.NULLABLE).build(),
        Field.newBuilder("requiredDouble", StandardSQLTypeName.FLOAT64).setMode(Field.Mode.REQUIRED).build(),
        Field.newBuilder("optionalDouble", StandardSQLTypeName.FLOAT64).setMode(Field.Mode.NULLABLE).build(),
        Field.newBuilder("requiredFloat", StandardSQLTypeName.FLOAT64).setMode(Field.Mode.REQUIRED).build(),
        Field.newBuilder("optionalFloat", StandardSQLTypeName.FLOAT64).setMode(Field.Mode.NULLABLE).build(),
        Field.newBuilder("optionalString", StandardSQLTypeName.STRING).setMode(Field.Mode.NULLABLE).build(),
        Field.newBuilder("requiredBytes", StandardSQLTypeName.BYTES).setMode(Field.Mode.REQUIRED).build(),
        Field.newBuilder("optionalBytes", StandardSQLTypeName.BYTES).setMode(Field.Mode.NULLABLE).build(),
        Field.newBuilder("requiredEnum", StandardSQLTypeName.STRING).setMode(Field.Mode.REQUIRED).build(),
        Field.newBuilder("optionalEnum", StandardSQLTypeName.STRING).setMode(Field.Mode.NULLABLE).build());
    // expect 'requiredString' field to be removed
    Assertions.assertEquals(expected,
        resolver.getTableSchema(mockMetaClient, Collections.singletonList("requiredString")));
}
/**
 * Sets a 2-byte char value at the given absolute index. A char is an unsigned
 * 16-bit value, so it is stored exactly like a short; the high 16 bits of the
 * int argument are discarded by {@code setShort}.
 *
 * @return this buffer, to allow chaining
 */
@Override
public ByteBuf setChar(int index, int value) {
    setShort(index, value);
    return this;
}
@Test
public void testSetCharAfterRelease() {
    // Writing to a buffer whose reference count has dropped to zero must fail.
    assertThrows(IllegalReferenceCountException.class, new Executable() {
        @Override
        public void execute() {
            releasedBuffer().setChar(0, 1);
        }
    });
}
/**
 * Returns a {@link FieldScope} that ignores the given top-level field numbers.
 * At least one field number is required; additional numbers are varargs.
 */
public static FieldScope ignoringFields(int firstFieldNumber, int... rest) {
    return FieldScopeImpl.createIgnoringFields(asList(firstFieldNumber, rest));
}
@Test
public void testEmptySubMessage() {
    Message message = parse("o_int: 1 o_sub_test_message: { }");
    Message eqMessage = parse("o_int: 2 o_sub_test_message: { }");
    Message diffMessage = parse("o_int: 3");

    // Different logic gets exercised when we add an 'ignore' clause.
    // Let's ensure o_sub_test_message is compared properly in all cases.
    int fieldNumber = getFieldNumber("o_int");

    // eqMessage differs only in the ignored field, so it compares equal once
    // o_int is ignored; diffMessage also lacks the empty submessage, so it only
    // compares equal when field absence is ignored as well.
    expectThat(eqMessage).isNotEqualTo(message);
    expectThat(eqMessage).ignoringFieldAbsence().isNotEqualTo(message);
    expectThat(eqMessage).ignoringFields(fieldNumber).isEqualTo(message);
    expectThat(eqMessage).ignoringFields(fieldNumber).ignoringFieldAbsence().isEqualTo(message);
    expectThat(diffMessage).isNotEqualTo(message);
    expectThat(diffMessage).ignoringFieldAbsence().isNotEqualTo(message);
    expectThat(diffMessage).ignoringFields(fieldNumber).isNotEqualTo(message);
    expectThat(diffMessage).ignoringFields(fieldNumber).ignoringFieldAbsence().isEqualTo(message);
}
public static <InputT, OutputT> DoFnInvoker<InputT, OutputT> invokerFor( DoFn<InputT, OutputT> fn) { return ByteBuddyDoFnInvokerFactory.only().newByteBuddyInvoker(fn); }
@Test public void testStartBundleException() throws Exception { DoFnInvoker.ArgumentProvider<Integer, Integer> mockArguments = mock(DoFnInvoker.ArgumentProvider.class); when(mockArguments.startBundleContext(any(DoFn.class))).thenReturn(null); DoFnInvoker<Integer, Integer> invoker = DoFnInvokers.invokerFor( new DoFn<Integer, Integer>() { @StartBundle public void startBundle(@SuppressWarnings("unused") StartBundleContext c) { throw new IllegalArgumentException("bogus"); } @ProcessElement public void processElement(@SuppressWarnings("unused") ProcessContext c) {} }); thrown.expect(UserCodeException.class); thrown.expectMessage("bogus"); invoker.invokeStartBundle(mockArguments); }
public AlterSourceCommand create(final AlterSource statement) { final DataSource dataSource = metaStore.getSource(statement.getName()); final String dataSourceType = statement.getDataSourceType().getKsqlType(); if (dataSource != null && dataSource.isSource()) { throw new KsqlException( String.format("Cannot alter %s '%s': ALTER operations are not supported on source %s.", dataSourceType.toLowerCase(), statement.getName().text(), dataSourceType.toLowerCase() + "s")); } final List<Column> newColumns = statement .getAlterOptions() .stream() .map( alterOption -> Column.of( ColumnName.of(alterOption.getColumnName()), alterOption.getType().getSqlType(), Namespace.VALUE, 0)) .collect(Collectors.toList()); return new AlterSourceCommand( statement.getName(), dataSourceType, newColumns ); }
@Test public void shouldCreateCommandForAlterStream() { // Given: final AlterSource alterSource = new AlterSource(STREAM_NAME, DataSourceType.KSTREAM, NEW_COLUMNS); // When: final AlterSourceCommand result = alterSourceFactory.create(alterSource); // Then: assertEquals(result.getKsqlType(), DataSourceType.KSTREAM.getKsqlType()); assertEquals(result.getSourceName(), STREAM_NAME); assertEquals(result.getNewColumns().size(), 1); }
/**
 * Decides whether a method's parameters/return value need the Triple "wrapper" codec
 * (true) or can be (de)serialized directly as protobuf messages (false).
 *
 * <p>Also validates the method shape and throws {@link IllegalStateException} for
 * unsupported parameter combinations (multiple stream params, multiple protobuf
 * params, mixed normal/protobuf params on streaming methods, etc.).
 *
 * @param methodDescriptor descriptor of the candidate method
 * @param parameterClasses declared parameter types of the method
 * @param returnClass      declared return type of the method
 * @return true if the call must be wrapped, false if raw protobuf can be used
 * @throws IllegalStateException when the signature is not a supported shape
 */
static boolean needWrap(MethodDescriptor methodDescriptor, Class<?>[] parameterClasses, Class<?> returnClass) {
    String methodName = methodDescriptor.getMethodName();
    // generic call must be wrapped
    if (CommonConstants.$INVOKE.equals(methodName) || CommonConstants.$INVOKE_ASYNC.equals(methodName)) {
        return true;
    }
    // echo must be wrapped
    if ($ECHO.equals(methodName)) {
        return true;
    }
    boolean returnClassProtobuf = isProtobufClass(returnClass);
    // Zero-arg method `Response foo()`: wrap unless the return type is already protobuf.
    if (parameterClasses.length == 0) {
        return !returnClassProtobuf;
    }
    int protobufParameterCount = 0;
    int javaParameterCount = 0;
    int streamParameterCount = 0;
    boolean secondParameterStream = false;
    // count normal and protobuf param
    for (int i = 0; i < parameterClasses.length; i++) {
        Class<?> parameterClass = parameterClasses[i];
        if (isProtobufClass(parameterClass)) {
            protobufParameterCount++;
        } else {
            if (isStreamType(parameterClass)) {
                if (i == 1) {
                    // remembered so the server-stream shape check below can require
                    // the StreamObserver to be exactly the second parameter
                    secondParameterStream = true;
                }
                streamParameterCount++;
            } else {
                javaParameterCount++;
            }
        }
    }
    // more than one stream param
    if (streamParameterCount > 1) {
        throw new IllegalStateException("method params error: more than one Stream params. method=" + methodName);
    }
    // protobuf only support one param
    if (protobufParameterCount >= 2) {
        throw new IllegalStateException("method params error: more than one protobuf params. method=" + methodName);
    }
    // server stream support one normal param and one stream param
    if (streamParameterCount == 1) {
        if (javaParameterCount + protobufParameterCount > 1) {
            throw new IllegalStateException(
                "method params error: server stream does not support more than one normal param."
                    + " method=" + methodName);
        }
        // server stream: void foo(Request, StreamObserver<Response>)
        if (!secondParameterStream) {
            throw new IllegalStateException(
                "method params error: server stream's second param must be StreamObserver."
                    + " method=" + methodName);
        }
    }
    if (methodDescriptor.getRpcType() != MethodDescriptor.RpcType.UNARY) {
        // NOTE(review): this re-checks the server-stream shape already validated above
        // (streamParameterCount == 1 branch); presumably kept for descriptors whose
        // RpcType is SERVER_STREAM without a detected stream param — confirm.
        if (MethodDescriptor.RpcType.SERVER_STREAM == methodDescriptor.getRpcType()) {
            if (!secondParameterStream) {
                throw new IllegalStateException(
                    "method params error:server stream's second param must be StreamObserver."
                        + " method=" + methodName);
            }
        }
        // param type must be consistent
        if (returnClassProtobuf) {
            if (javaParameterCount > 0) {
                throw new IllegalStateException(
                    "method params error: both normal and protobuf param found. method=" + methodName);
            }
        } else {
            if (protobufParameterCount > 0) {
                throw new IllegalStateException("method params error method=" + methodName);
            }
        }
    } else {
        // UNARY methods must not take a StreamObserver at all.
        if (streamParameterCount > 0) {
            throw new IllegalStateException(
                "method params error: unary method should not contain any StreamObserver."
                    + " method=" + methodName);
        }
        // Pure protobuf in/out: no wrapping needed.
        if (protobufParameterCount > 0 && returnClassProtobuf) {
            return false;
        }
        // handler reactor or rxjava only consider gen by proto
        if (isMono(returnClass) || isRx(returnClass)) {
            return false;
        }
        // Pure "normal Java" in/out: always wrap.
        if (protobufParameterCount <= 0 && !returnClassProtobuf) {
            return true;
        }
        // handle grpc stub only consider gen by proto
        if (GRPC_ASYNC_RETURN_CLASS.equalsIgnoreCase(returnClass.getName()) && protobufParameterCount == 1) {
            return false;
        }
        // handle dubbo generated method
        if (TRI_ASYNC_RETURN_CLASS.equalsIgnoreCase(returnClass.getName())) {
            // Look through the async wrapper (e.g. CompletableFuture<T>) at its
            // declared type argument to decide based on the actual payload type.
            Class<?> actualReturnClass =
                (Class<?>) ((ParameterizedType) methodDescriptor.getMethod().getGenericReturnType())
                    .getActualTypeArguments()[0];
            boolean actualReturnClassProtobuf = isProtobufClass(actualReturnClass);
            if (actualReturnClassProtobuf && protobufParameterCount == 1) {
                return false;
            }
            if (!actualReturnClassProtobuf && protobufParameterCount == 0) {
                return true;
            }
        }
        // todo remove this in future
        boolean ignore = checkNeedIgnore(returnClass);
        if (ignore) {
            return protobufParameterCount != 1;
        }
        throw new IllegalStateException("method params error method=" + methodName);
    }
    // java param should be wrapped
    return javaParameterCount > 0;
}
@Test void testErrorBiStream() throws Exception { Method method = DescriptorService.class.getMethod("testErrorBiStream", HelloReply.class, StreamObserver.class); assertThrows(IllegalStateException.class, () -> { MethodDescriptor descriptor = new ReflectionMethodDescriptor(method); needWrap(descriptor); }); Method method2 = DescriptorService.class.getMethod("testErrorBiStream2", String.class, StreamObserver.class); assertThrows(IllegalStateException.class, () -> { MethodDescriptor descriptor = new ReflectionMethodDescriptor(method2); needWrap(descriptor); }); Method method3 = DescriptorService.class.getMethod("testErrorBiStream3", StreamObserver.class); assertThrows(IllegalStateException.class, () -> { MethodDescriptor descriptor = new ReflectionMethodDescriptor(method3); needWrap(descriptor); }); Method method4 = DescriptorService.class.getMethod("testErrorBiStream4", StreamObserver.class, String.class); assertThrows(IllegalStateException.class, () -> { MethodDescriptor descriptor = new ReflectionMethodDescriptor(method4); needWrap(descriptor); }); }
public synchronized boolean setExecutionThread(Thread t) { // The setting only fails if a thread tries to pick up a canceled operation. if (isCancelled() && t != null) { return false; } else { _executionThread = t; return true; } }
@Test public void testSetExecutionThread() { OperationFuture future = new OperationFuture("testSetExecutionThread"); assertTrue(future.setExecutionThread(new Thread())); future.cancel(true); assertTrue("Should be able to set the execution thread of canceled future to null", future.setExecutionThread(null)); assertFalse("Should failed to set execution thread for the canceled future.", future.setExecutionThread(new Thread())); }
public byte[] requestPrincipal() { return data.requestPrincipal(); }
@Test public void testGetPrincipal() { KafkaPrincipal kafkaPrincipal = new KafkaPrincipal(KafkaPrincipal.USER_TYPE, "principal", true); DefaultKafkaPrincipalBuilder kafkaPrincipalBuilder = new DefaultKafkaPrincipalBuilder(null, null); EnvelopeRequest.Builder requestBuilder = new EnvelopeRequest.Builder(ByteBuffer.allocate(0), kafkaPrincipalBuilder.serialize(kafkaPrincipal), "client-address".getBytes()); EnvelopeRequest request = requestBuilder.build(EnvelopeRequestData.HIGHEST_SUPPORTED_VERSION); assertEquals(kafkaPrincipal, kafkaPrincipalBuilder.deserialize(request.requestPrincipal())); }
public static Map<AbilityKey, Boolean> getStaticAbilities() { return INSTANCE.getSupportedAbilities(); }
@Test void testGetStaticAbilities() { // TODO add the sdk client abilities. assertTrue(SdkClientAbilities.getStaticAbilities().isEmpty()); }
public void validate(AlmSettingDto almSettingDto) { String bitbucketUrl = almSettingDto.getUrl(); String bitbucketToken = almSettingDto.getDecryptedPersonalAccessToken(encryption); if (bitbucketUrl == null || bitbucketToken == null) { throw new IllegalArgumentException("Your global Bitbucket Server configuration is incomplete."); } bitbucketServerRestClient.validateUrl(bitbucketUrl); bitbucketServerRestClient.validateToken(bitbucketUrl, bitbucketToken); bitbucketServerRestClient.validateReadPermission(bitbucketUrl, bitbucketToken); }
@Test public void validate_success_with_encrypted_token() { String encryptedToken = "abc"; String decryptedToken = "decrypted-token"; AlmSettingDto almSettingDto = createNewBitbucketDto("http://abc.com", encryptedToken); when(encryption.isEncrypted(encryptedToken)).thenReturn(true); when(encryption.decrypt(encryptedToken)).thenReturn(decryptedToken); underTest.validate(almSettingDto); verify(bitbucketServerRestClient, times(1)).validateUrl("http://abc.com"); verify(bitbucketServerRestClient, times(1)).validateToken("http://abc.com", decryptedToken); verify(bitbucketServerRestClient, times(1)).validateReadPermission("http://abc.com", decryptedToken); }
@Override protected int compareFirst(final Path p1, final Path p2) { if(StringUtils.isBlank(p1.attributes().getRegion()) && StringUtils.isBlank(p2.attributes().getRegion())) { return 0; } if(StringUtils.isBlank(p1.attributes().getRegion())) { return -1; } if(StringUtils.isBlank(p2.attributes().getRegion())) { return 1; } if(ascending) { return p1.attributes().getRegion().compareToIgnoreCase(p2.attributes().getRegion()); } return -p1.attributes().getRegion().compareToIgnoreCase(p2.attributes().getRegion()); }
@Test public void testCompareFirst() { assertEquals(0, new RegionComparator(true).compareFirst(new Path("/a", EnumSet.of(Path.Type.file)), new Path("/b", EnumSet.of(Path.Type.file)))); }
@SuppressWarnings("unchecked") public static <T> T coerceOutput(Object object, Class<T> targetClass) throws TemplateOutputCastException { Class<?> objectClass = object.getClass(); if (objectClass == targetClass) { return (T) object; } if (targetClass.isEnum()) { if (objectClass == String.class) { return (T) stringToEnum(targetClass, (String) object); } throw new TemplateOutputCastException("Output " + object + " has type " + object.getClass().getName() + ", and cannot be coerced to enum type " + targetClass.getName()); } return coerceCustomOutput(object, targetClass); }
@Test public static void testCoerceOutputNonNumericNumberCases() { // Convert Specific Floating point string - NaN to Double Object object1 = DataTemplateUtil.coerceOutput(NAN, Double.class); assertEquals(object1, Double.NaN); // Convert Specific Floating point string - POSITIVE_INFINITY to Double Object object2 = DataTemplateUtil.coerceOutput(POSITIVE_INFINITY, Double.class); assertEquals(object2, Double.POSITIVE_INFINITY); // Convert Specific Floating point string - NEGATIVE_INFINITY to Double Object object3 = DataTemplateUtil.coerceOutput(NEGATIVE_INFINITY, Double.class); assertEquals(object3, Double.NEGATIVE_INFINITY); // Convert Specific Floating point string - NaN to Float Object object4 = DataTemplateUtil.coerceOutput(NAN, Float.class); assertEquals(object4, Float.NaN); // Convert Specific Floating point string - POSITIVE_INFINITY to Float Object object5 = DataTemplateUtil.coerceOutput(POSITIVE_INFINITY, Float.class); assertEquals(object5, Float.POSITIVE_INFINITY); // Convert Specific Floating point string - NEGATIVE_INFINITY to Float Object object6 = DataTemplateUtil.coerceOutput(NEGATIVE_INFINITY, Float.class); assertEquals(object6, Float.NEGATIVE_INFINITY); }
@Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (!(request instanceof HttpServletRequest) || !(response instanceof HttpServletResponse) || monitoringDisabled || !instanceEnabled) { // si ce n'est pas une requête http ou si le monitoring est désactivé, on fait suivre chain.doFilter(request, response); return; } final HttpServletRequest httpRequest = (HttpServletRequest) request; final HttpServletResponse httpResponse = (HttpServletResponse) response; if (httpRequest.getRequestURI().equals(getMonitoringUrl(httpRequest))) { doMonitoring(httpRequest, httpResponse); return; } if (!httpCounter.isDisplayed() || isRequestExcluded((HttpServletRequest) request)) { // si cette url est exclue ou si le counter http est désactivé, on ne monitore pas cette requête http chain.doFilter(request, response); return; } doFilter(chain, httpRequest, httpResponse); }
@Test public void testDoFilterWithSessionTer() throws ServletException, IOException { final HttpServletRequest request = createNiceMock(HttpServletRequest.class); final HttpSession session = createNiceMock(HttpSession.class); expect(request.getSession(false)).andReturn(session); expect(session.getAttribute(SessionListener.SESSION_COUNTRY_KEY)) .andReturn(Locale.FRANCE.getCountry()).anyTimes(); expect(session.getAttribute(SessionListener.SESSION_REMOTE_ADDR)).andReturn("somewhere") .anyTimes(); expect(session.getAttribute(SessionListener.SESSION_REMOTE_USER)).andReturn("me") .anyTimes(); replay(session); doFilter(request); verify(session); }
@Override public File getFile(JobID jobId, PermanentBlobKey key) throws IOException { checkNotNull(jobId); return getFileInternal(jobId, key); }
@Test void permanentBlobCacheCanServeFilesFromPrepopulatedStorageDirectory( @TempDir Path storageDirectory) throws IOException { final JobID jobId = new JobID(); final byte[] fileContent = "foobar".getBytes(StandardCharsets.UTF_8); final PermanentBlobKey blobKey = TestingBlobUtils.writePermanentBlob(storageDirectory, jobId, fileContent); try (PermanentBlobCache permanentBlobCache = new PermanentBlobCache( new Configuration(), storageDirectory.toFile(), new VoidBlobStore(), null)) { final File blob = permanentBlobCache.getFile(jobId, blobKey); assertThat(Files.readAllBytes(blob.toPath())).isEqualTo(fileContent); } }
public static MemorySegment wrapCopy(byte[] bytes, int start, int end) throws IllegalArgumentException { checkArgument(end >= start); checkArgument(end <= bytes.length); MemorySegment copy = allocateUnpooledSegment(end - start); copy.put(0, bytes, start, copy.size()); return copy; }
@Test void testWrapCopyChangingData() { byte[] data = {1, 2, 3, 4, 5}; byte[] changingData = new byte[data.length]; arraycopy(data, 0, changingData, 0, data.length); MemorySegment segment = MemorySegmentFactory.wrapCopy(changingData, 0, changingData.length); changingData[0]++; assertThat(segment.getHeapMemory()).containsExactly(data); }
public static NeverTrigger ever() { // NeverTrigger ignores all inputs and is Window-type independent. return new NeverTrigger(); }
@Test public void testContinuation() throws Exception { assertEquals(Never.ever(), Never.ever().getContinuationTrigger()); }
public boolean isDeleted() { return bcId == INVALID_INODE_ID; }
@Test public void testIsDeleted() { BlockInfo blockInfo = new BlockInfoContiguous((short) 3); BlockCollection bc = Mockito.mock(BlockCollection.class); blockInfo.setBlockCollectionId(1000); Assert.assertFalse(blockInfo.isDeleted()); blockInfo.setBlockCollectionId(INVALID_INODE_ID); Assert.assertTrue(blockInfo.isDeleted()); }
@Override public void destroy() { for (MappedFile mf : this.mappedFiles) { mf.destroy(1000 * 3); } this.mappedFiles.clear(); this.setFlushedWhere(0); Set<String> storePathSet = getPaths(); storePathSet.addAll(getReadonlyPaths()); for (String path : storePathSet) { File file = new File(path); if (file.isDirectory()) { file.delete(); } } }
@Test public void testUpdatePathsOnline() { final byte[] fixedMsg = new byte[1024]; MessageStoreConfig config = new MessageStoreConfig(); config.setStorePathCommitLog("target/unit_test_store/a/" + MixAll.MULTI_PATH_SPLITTER + "target/unit_test_store/b/" + MixAll.MULTI_PATH_SPLITTER + "target/unit_test_store/c/"); MappedFileQueue mappedFileQueue = new MultiPathMappedFileQueue(config, 1024, null, null); String[] storePaths = config.getStorePathCommitLog().trim().split(MixAll.MULTI_PATH_SPLITTER); for (int i = 0; i < 1024; i++) { MappedFile mappedFile = mappedFileQueue.getLastMappedFile(fixedMsg.length * i); assertThat(mappedFile).isNotNull(); assertThat(mappedFile.appendMessage(fixedMsg)).isTrue(); int idx = i % storePaths.length; assertThat(mappedFile.getFileName().startsWith(storePaths[idx])).isTrue(); if (i == 500) { config.setStorePathCommitLog("target/unit_test_store/a/" + MixAll.MULTI_PATH_SPLITTER + "target/unit_test_store/b/"); storePaths = config.getStorePathCommitLog().trim().split(MixAll.MULTI_PATH_SPLITTER); } } mappedFileQueue.shutdown(1000); mappedFileQueue.destroy(); }
public <R> Choice<R> thenOption(Function<? super T, Optional<R>> function) { checkNotNull(function); Choice<T> thisChoice = this; return new Choice<R>() { @Override protected Iterator<R> iterator() { return Optional.presentInstances(Iterables.transform(thisChoice.asIterable(), function)) .iterator(); } }; }
@Test public void thenOption() { assertThat( Choice.from(ImmutableList.of(1, 2, 3)) .thenOption( Functions.forMap( ImmutableMap.of(2, Optional.of("foo")), Optional.<String>absent())) .asIterable()) .containsExactly("foo"); }
/**
 * Prepares FPGA resources for a starting container: creates its devices cgroup,
 * allocates devices (denying all others), optionally reprograms devices with the
 * requested IP file, and returns the privileged operation that places the container
 * into the cgroup.
 *
 * <p>On any failure, the allocation and cgroup are rolled back before rethrowing.
 *
 * @throws ResourceHandlerException on hashing, allocation, or privileged-op failure
 */
@Override
public List<PrivilegedOperation> preStart(Container container)
        throws ResourceHandlerException {
    // 1. Get requested FPGA type and count, choose corresponding FPGA plugin(s)
    // 2. Use allocator.assignFpga(type, count) to get FPGAAllocation
    // 3. If required, download to ensure IP file exists and configure IP file for all devices
    List<PrivilegedOperation> ret = new ArrayList<>();
    String containerIdStr = container.getContainerId().toString();
    Resource requestedResource = container.getResource();
    // Create device cgroups for the container
    cGroupsHandler.createCGroup(CGroupsHandler.CGroupController.DEVICES, containerIdStr);
    long deviceCount = requestedResource.getResourceValue(FPGA_URI);
    LOG.info(containerIdStr + " requested " + deviceCount + " Intel FPGA(s)");
    String ipFilePath = null;
    try {
        // allocate even request 0 FPGA because we need to deny all device numbers for this container
        final String requestedIPID = getRequestedIPID(container);
        String localizedIPIDHash = null;
        ipFilePath = vendorPlugin.retrieveIPfilePath(
            requestedIPID, container.getWorkDir(),
            container.getResourceSet().getLocalizedResources());
        if (ipFilePath != null) {
            // hash the localized IP file so the allocator can skip devices that
            // already carry this exact bitstream
            try (FileInputStream fis = new FileInputStream(ipFilePath)) {
                localizedIPIDHash = DigestUtils.sha256Hex(fis);
            } catch (IOException e) {
                throw new ResourceHandlerException("Could not calculate SHA-256", e);
            }
        }
        FpgaResourceAllocator.FpgaAllocation allocation = allocator.assignFpga(
            vendorPlugin.getFpgaType(), deviceCount, container, localizedIPIDHash);
        LOG.info("FpgaAllocation:" + allocation);
        PrivilegedOperation privilegedOperation =
            new PrivilegedOperation(PrivilegedOperation.OperationType.FPGA,
                Arrays.asList(CONTAINER_ID_CLI_OPTION, containerIdStr));
        if (!allocation.getDenied().isEmpty()) {
            // tell the container-executor which device minor numbers to block
            List<Integer> denied = new ArrayList<>();
            allocation.getDenied().forEach(device -> denied.add(device.getMinor()));
            privilegedOperation.appendArgs(Arrays.asList(EXCLUDED_FPGAS_CLI_OPTION,
                StringUtils.join(",", denied)));
        }
        privilegedOperationExecutor.executePrivilegedOperation(privilegedOperation, true);
        if (deviceCount > 0) {
            /*
             * We only support flashing one IP for all devices now. If user don't set this
             * environment variable, we assume that user's application can find the IP file by
             * itself.
             * Note that the IP downloading and reprogramming in advance in YARN is not necessary
             * because the OpenCL application may find the IP file and reprogram device on the fly.
             * But YARN do this for the containers will achieve the quickest reprogram path.
             *
             * For instance, REQUESTED_FPGA_IP_ID = "matrix_mul" will make all devices
             * programmed with matrix multiplication IP.
             *
             * In the future, we may support "matrix_mul:1,gzip:2" format to support different IP
             * for different devices.
             */
            ipFilePath = vendorPlugin.retrieveIPfilePath(
                getRequestedIPID(container), container.getWorkDir(),
                container.getResourceSet().getLocalizedResources());
            if (ipFilePath == null) {
                LOG.warn("FPGA plugin failed to downloaded IP, please check the"
                    + " value of environment viable: " + REQUEST_FPGA_IP_ID_KEY
                    + " if you want YARN to program the device");
            } else {
                LOG.info("IP file path:" + ipFilePath);
                List<FpgaDevice> allowed = allocation.getAllowed();
                String majorMinorNumber;
                for (int i = 0; i < allowed.size(); i++) {
                    FpgaDevice device = allowed.get(i);
                    majorMinorNumber = device.getMajor() + ":" + device.getMinor();
                    String currentHash = allowed.get(i).getAocxHash();
                    // skip reprogramming when the device already holds this bitstream
                    if (currentHash != null && currentHash.equalsIgnoreCase(localizedIPIDHash)) {
                        LOG.info("IP already in device \""
                            + allowed.get(i).getAliasDevName() + "," + majorMinorNumber
                            + "\", skip reprogramming");
                        continue;
                    }
                    if (vendorPlugin.configureIP(ipFilePath, device)) {
                        // update the allocator that we update an IP of a device
                        allocator.updateFpga(containerIdStr, allowed.get(i),
                            requestedIPID, localizedIPIDHash);
                        //TODO: update the node constraint label
                    }
                }
            }
        }
    } catch (ResourceHandlerException re) {
        // roll back allocation and cgroup before rethrowing
        allocator.cleanupAssignFpgas(containerIdStr);
        cGroupsHandler.deleteCGroup(CGroupsHandler.CGroupController.DEVICES, containerIdStr);
        throw re;
    } catch (PrivilegedOperationException e) {
        allocator.cleanupAssignFpgas(containerIdStr);
        cGroupsHandler.deleteCGroup(CGroupsHandler.CGroupController.DEVICES, containerIdStr);
        LOG.warn("Could not update cgroup for container", e);
        throw new ResourceHandlerException(e);
    }
    //isolation operation
    ret.add(new PrivilegedOperation(
        PrivilegedOperation.OperationType.ADD_PID_TO_CGROUP,
        PrivilegedOperation.CGROUP_ARG_PREFIX
            + cGroupsHandler.getPathForCGroupTasks(
                CGroupsHandler.CGroupController.DEVICES, containerIdStr)));
    return ret;
}
@Test public void testSha256CalculationFails() throws ResourceHandlerException { expected.expect(ResourceHandlerException.class); expected.expectMessage("Could not calculate SHA-256"); dummyAocx.delete(); fpgaResourceHandler.preStart(mockContainer(0, 1, "GEMM")); }
public boolean wake() { synchronized (lock) { if (!isWaiting) { return false; } else { woken = true; lock.notify(); return true; } } }
@Test public void should_wait_until_woken() throws ExecutionException, InterruptedException { Waiter waiter = new Waiter(Duration.ofMillis(1000)); Future<Long> waitTime = executor.submit(new WaitForWaiter(waiter)); sleep(20); // give executor time to get to wait(..) waiter.wake(); assertTrue(waitTime.get() < 100L, "Waited: " + waitTime.get()); Future<Long> waitTime2 = executor.submit(new WaitForWaiter(waiter)); sleep(20); // give executor time to get to wait(..) waiter.wake(); assertTrue(waitTime2.get() < 100L, "Waited: " + waitTime2.get()); }
public static Bson idEq(@Nonnull String id) { return idEq(new ObjectId(id)); }
@Test void testIdEq() { final var a = new DTO("6627add0ee216425dd6df37c", "a"); final var b = new DTO("6627add0ee216425dd6df37d", "b"); collection.insertMany(List.of(a, b)); assertThat(collection.find(idEq(a.id())).first()).isEqualTo(a); assertThat(collection.find(idEq(new ObjectId(a.id()))).first()).isEqualTo(a); assertThat(collection.find(idEq(b.id())).first()).isEqualTo(b); assertThat(collection.find(idEq(new ObjectId(b.id()))).first()).isEqualTo(b); }
public MappedFile getLastMappedFile(final long startOffset, boolean needCreate) { long createOffset = -1; MappedFile mappedFileLast = getLastMappedFile(); if (mappedFileLast == null) { createOffset = startOffset - (startOffset % this.mappedFileSize); } if (mappedFileLast != null && mappedFileLast.isFull()) { createOffset = mappedFileLast.getFileFromOffset() + this.mappedFileSize; } if (createOffset != -1 && needCreate) { return tryCreateMappedFile(createOffset); } return mappedFileLast; }
@Test public void testGetLastMappedFile() { final String fixedMsg = "0123456789abcdef"; MappedFileQueue mappedFileQueue = new MappedFileQueue(storePath + File.separator + "a/", 1024, null); for (int i = 0; i < 1024; i++) { MappedFile mappedFile = mappedFileQueue.getLastMappedFile(0); assertThat(mappedFile).isNotNull(); assertThat(mappedFile.appendMessage(fixedMsg.getBytes())).isTrue(); } mappedFileQueue.shutdown(1000); mappedFileQueue.destroy(); }
@Nullable Throwable maybeError() { Object maybeError = delegate.getAttribute("error"); if (maybeError instanceof Throwable) return (Throwable) maybeError; maybeError = delegate.getAttribute(RequestDispatcher.ERROR_EXCEPTION); if (maybeError instanceof Throwable) return (Throwable) maybeError; return null; }
@Test void maybeError_fromRequestAttribute() { Exception requestError = new Exception(); when(request.getAttribute("error")).thenReturn(requestError); assertThat(wrapper.maybeError()).isSameAs(requestError); }
@Override public String toString() { final StringBuilder builder = StrUtil.builder(); for (int i = 0; i < size; i++) { builder.append(StrUtil.format("[{}] [{}] [{}]\n", ids.get(i), patterns.get(i), tasks.get(i))); } return builder.toString(); }
@Test @Disabled public void toStringTest(){ final TaskTable taskTable = new TaskTable(); taskTable.add(IdUtil.fastUUID(), new CronPattern("*/10 * * * * *"), ()-> Console.log("Task 1")); taskTable.add(IdUtil.fastUUID(), new CronPattern("*/20 * * * * *"), ()-> Console.log("Task 2")); taskTable.add(IdUtil.fastUUID(), new CronPattern("*/30 * * * * *"), ()-> Console.log("Task 3")); Console.log(taskTable); }
@Override public Table buildTable(final PropertiesList entity) { return new Builder() .withColumnHeaders(HEADERS) .withRows(defRowValues(propertiesListWithOverrides(entity))) .build(); }
@Test public void shouldHandleDefaultProperties() { // Given: final PropertiesList propList = new PropertiesList("list properties;", ImmutableList.of(new Property(SOME_KEY, "KSQL", "earliest")), Collections.emptyList(), ImmutableList.of(SOME_KEY) ); // When: final Table table = builder.buildTable(propList); // Then: assertThat(getRows(table), contains(row(SOME_KEY, "KSQL", "", "earliest"))); }
public static void handleException(final Exception cause) { if (null == cause) { log.info("cause is null"); return; } if (isIgnoredException(cause) || null != cause.getCause() && isIgnoredException(cause.getCause())) { log.debug("Ignored exception for: {}", cause.getMessage()); } else if (cause instanceof InterruptedException) { log.info("InterruptedException caught"); Thread.currentThread().interrupt(); } else { throw new ClusterRepositoryPersistException(cause); } }
@Test void assertHandleException() { ZookeeperExceptionHandler.handleException(null); ZookeeperExceptionHandler.handleException(new NoNodeException("")); ZookeeperExceptionHandler.handleException(new Exception(new NoNodeException(""))); try { ZookeeperExceptionHandler.handleException(new Exception("")); fail("must be failed handle new Exception()."); // CHECKSTYLE:OFF } catch (final Exception ex) { // CHECKSTYLE:ON assertThat(ex, instanceOf(ClusterRepositoryPersistException.class)); } }
/**
 * Merges another topic's stats into this aggregate (used for partitioned-topic rollups).
 *
 * <p>Rates, throughputs, counters and sizes are summed; {@code averageMsgSize} is kept
 * as a running mean over {@code count}; backlog age keeps the oldest entry; per-producer,
 * per-subscription and per-replicator stats are merged by key (or positionally for
 * producers without partial-producer support).
 *
 * <p>NOTE(review): the txn counters and backlog-quota fields are assigned from the
 * incoming stats rather than accumulated — presumably intentional (last-writer-wins),
 * confirm against callers.
 *
 * @param ts stats to merge in; must be a {@code TopicStatsImpl}
 * @return this instance, for chaining
 */
public TopicStatsImpl add(TopicStats ts) {
    TopicStatsImpl stats = (TopicStatsImpl) ts;
    this.count++;
    this.msgRateIn += stats.msgRateIn;
    this.msgThroughputIn += stats.msgThroughputIn;
    this.msgRateOut += stats.msgRateOut;
    this.msgThroughputOut += stats.msgThroughputOut;
    this.bytesInCounter += stats.bytesInCounter;
    this.msgInCounter += stats.msgInCounter;
    this.bytesOutCounter += stats.bytesOutCounter;
    this.msgOutCounter += stats.msgOutCounter;
    this.waitingPublishers += stats.waitingPublishers;
    // incremental running mean over the (count) merged stats
    double newAverageMsgSize = (this.averageMsgSize * (this.count - 1) + stats.averageMsgSize) / this.count;
    this.averageMsgSize = newAverageMsgSize;
    this.storageSize += stats.storageSize;
    this.backlogSize += stats.backlogSize;
    this.publishRateLimitedTimes += stats.publishRateLimitedTimes;
    this.offloadedStorageSize += stats.offloadedStorageSize;
    this.nonContiguousDeletedMessagesRanges += stats.nonContiguousDeletedMessagesRanges;
    this.nonContiguousDeletedMessagesRangesSerializedSize += stats.nonContiguousDeletedMessagesRangesSerializedSize;
    this.delayedMessageIndexSizeInBytes += stats.delayedMessageIndexSizeInBytes;
    this.ongoingTxnCount = stats.ongoingTxnCount;
    this.abortedTxnCount = stats.abortedTxnCount;
    this.committedTxnCount = stats.committedTxnCount;
    this.backlogQuotaLimitTime = stats.backlogQuotaLimitTime;
    this.backlogQuotaLimitSize = stats.backlogQuotaLimitSize;
    // keep the oldest backlog entry (largest age) and its owning subscription
    if (stats.oldestBacklogMessageAgeSeconds > this.oldestBacklogMessageAgeSeconds) {
        this.oldestBacklogMessageAgeSeconds = stats.oldestBacklogMessageAgeSeconds;
        this.oldestBacklogMessageSubscriptionName = stats.oldestBacklogMessageSubscriptionName;
    }
    // merge bucketed delayed-index metrics by key, summing values
    stats.bucketDelayedIndexStats.forEach((k, v) -> {
        TopicMetricBean topicMetricBean =
            this.bucketDelayedIndexStats.computeIfAbsent(k, __ -> new TopicMetricBean());
        topicMetricBean.name = v.name;
        topicMetricBean.labelsAndValues = v.labelsAndValues;
        topicMetricBean.value += v.value;
    });
    List<? extends PublisherStats> publisherStats = stats.getPublishers();
    for (int index = 0; index < publisherStats.size(); index++) {
        PublisherStats s = publisherStats.get(index);
        if (s.isSupportsPartialProducer() && s.getProducerName() != null) {
            // partial producers merge by producer name
            this.publishersMap.computeIfAbsent(s.getProducerName(), key -> {
                final PublisherStatsImpl newStats = new PublisherStatsImpl();
                newStats.setSupportsPartialProducer(true);
                newStats.setProducerName(s.getProducerName());
                return newStats;
            }).add((PublisherStatsImpl) s);
        } else {
            // Add a publisher stat entry to this.publishers
            // if this.publishers.size() is smaller than
            // the input stats.publishers.size().
            // Here, index == this.publishers.size() means
            // this.publishers.size() is smaller than the input stats.publishers.size()
            if (index == this.publishers.size()) {
                PublisherStatsImpl newStats = new PublisherStatsImpl();
                newStats.setSupportsPartialProducer(false);
                this.publishers.add(newStats);
            }
            this.publishers.get(index)
                .add((PublisherStatsImpl) s);
        }
    }
    // merge subscriptions by name
    for (Map.Entry<String, SubscriptionStatsImpl> entry : stats.subscriptions.entrySet()) {
        SubscriptionStatsImpl subscriptionStats =
            this.subscriptions.computeIfAbsent(entry.getKey(), k -> new SubscriptionStatsImpl());
        subscriptionStats.add(entry.getValue());
    }
    // merge replicators by cluster name; new entries start as connected
    for (Map.Entry<String, ReplicatorStatsImpl> entry : stats.replication.entrySet()) {
        ReplicatorStatsImpl replStats = this.replication.computeIfAbsent(entry.getKey(), k -> {
            ReplicatorStatsImpl r = new ReplicatorStatsImpl();
            r.setConnected(true);
            return r;
        });
        replStats.add(entry.getValue());
    }
    // 0 means "no backlog": take the min when both sides have a value,
    // otherwise take whichever side is non-zero
    if (earliestMsgPublishTimeInBacklogs != 0 && ((TopicStatsImpl) ts).earliestMsgPublishTimeInBacklogs != 0) {
        earliestMsgPublishTimeInBacklogs = Math.min(
            earliestMsgPublishTimeInBacklogs,
            ((TopicStatsImpl) ts).earliestMsgPublishTimeInBacklogs
        );
    } else {
        earliestMsgPublishTimeInBacklogs = Math.max(
            earliestMsgPublishTimeInBacklogs,
            ((TopicStatsImpl) ts).earliestMsgPublishTimeInBacklogs
        );
    }
    return this;
}
@Test public void testAdd_EarliestMsgPublishTimeInBacklogs_First0() { TopicStatsImpl stats1 = new TopicStatsImpl(); stats1.earliestMsgPublishTimeInBacklogs = 0L; TopicStatsImpl stats2 = new TopicStatsImpl(); stats2.earliestMsgPublishTimeInBacklogs = 20L; TopicStatsImpl aggregate = stats1.add(stats2); assertEquals(aggregate.earliestMsgPublishTimeInBacklogs, 20L); }
/**
 * Parses a CVSSv3 vector string (e.g. {@code CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:U/C:N/I:N/A:H})
 * together with a pre-computed base score into a {@link CvssV3} object.
 *
 * @param vectorString the CVSSv3 vector string; must start with {@code CVSS:3} and
 *                     contain every required Base Metric
 * @param baseScore    the pre-computed base score used to derive the qualitative severity
 * @return the populated {@link CvssV3} instance
 * @throws IllegalArgumentException if the vector string is malformed or missing one or
 *                                  more required Base Metrics
 */
public static CvssV3 vectorToCvssV3(String vectorString, Double baseScore) {
    if (!vectorString.startsWith("CVSS:3")) {
        throw new IllegalArgumentException("Not a valid CVSSv3 vector string: " + vectorString);
    }
    // "CVSS:3.1/..." -> the version is everything between the first ':' and the first '/'.
    final String versionString = vectorString.substring(5, vectorString.indexOf('/'));
    final String[] metricStrings = vectorString.substring(vectorString.indexOf('/') + 1).split("/");
    final HashMap<String, String> metrics = new HashMap<>();
    for (final String metricString : metricStrings) {
        // Every metric component must have the form KEY:VALUE (e.g. "AV:L").
        final String[] metricKeyVal = metricString.split(":");
        if (metricKeyVal.length != 2) {
            throw new IllegalArgumentException(
                    String.format("Not a valid CVSSv3 vector string '%s', invalid metric component '%s'",
                            vectorString, metricString));
        }
        metrics.put(metricKeyVal[0], metricKeyVal[1]);
    }
    if (!metrics.keySet().containsAll(BASE_METRICS_V3)) {
        throw new IllegalArgumentException(
                String.format("Not a valid CVSSv3 vector string '%s'; missing one or more required Base Metrics;",
                        vectorString));
    }
    final CvssV3Data.Version version = CvssV3Data.Version.fromValue(versionString);
    // All Base Metrics were validated present above, so each lookup below yields a value.
    //"CVSS:3.1\/AV:L\/AC:L\/PR:L\/UI:N\/S:U\/C:N\/I:N\/A:H"
    final CvssV3Data.AttackVectorType attackVector = CvssV3Data.AttackVectorType.fromValue(metrics.get("AV"));
    final CvssV3Data.AttackComplexityType attackComplexity = CvssV3Data.AttackComplexityType.fromValue(metrics.get("AC"));
    final CvssV3Data.PrivilegesRequiredType privilegesRequired = CvssV3Data.PrivilegesRequiredType.fromValue(metrics.get("PR"));
    final CvssV3Data.UserInteractionType userInteraction = CvssV3Data.UserInteractionType.fromValue(metrics.get("UI"));
    final CvssV3Data.ScopeType scope = CvssV3Data.ScopeType.fromValue(metrics.get("S"));
    final CvssV3Data.CiaType confidentialityImpact = CvssV3Data.CiaType.fromValue(metrics.get("C"));
    final CvssV3Data.CiaType integrityImpact = CvssV3Data.CiaType.fromValue(metrics.get("I"));
    final CvssV3Data.CiaType availabilityImpact = CvssV3Data.CiaType.fromValue(metrics.get("A"));
    // Derive the qualitative severity (e.g. CRITICAL) from the numeric base score.
    final CvssV3Data.SeverityType baseSeverity =
            CvssV3Data.SeverityType.fromValue(Cvss3Severity.of(baseScore.floatValue()).name());
    final CvssV3Data data = new CvssV3Data(version, vectorString, attackVector, attackComplexity,
            privilegesRequired, userInteraction, scope, confidentialityImpact, integrityImpact,
            availabilityImpact, baseScore, baseSeverity, null, null, null, null, null, null, null,
            null, null, null, null, null, null, null, null, null, null, null);
    return new CvssV3(null, null, data, null, null);
}
@Test
public void testVectorToCvssV3() {
    final String vector = "CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:U/C:N/I:N/A:H";
    final Double score = 10.0;

    final CvssV3 cvss = CvssUtil.vectorToCvssV3(vector, score);

    // Each Base Metric encoded in the vector must appear in the parsed data.
    assertEquals(CvssV3Data.Version._3_1, cvss.getCvssData().getVersion());
    assertEquals(CvssV3Data.AttackVectorType.LOCAL, cvss.getCvssData().getAttackVector());
    assertEquals(CvssV3Data.AttackComplexityType.LOW, cvss.getCvssData().getAttackComplexity());
    assertEquals(CvssV3Data.PrivilegesRequiredType.LOW, cvss.getCvssData().getPrivilegesRequired());
    assertEquals(CvssV3Data.UserInteractionType.NONE, cvss.getCvssData().getUserInteraction());
    assertEquals(CvssV3Data.ScopeType.UNCHANGED, cvss.getCvssData().getScope());
    assertEquals(CvssV3Data.CiaType.NONE, cvss.getCvssData().getConfidentialityImpact());
    assertEquals(CvssV3Data.CiaType.NONE, cvss.getCvssData().getIntegrityImpact());
    assertEquals(CvssV3Data.CiaType.HIGH, cvss.getCvssData().getAvailabilityImpact());
    // A base score of 10.0 maps to the CRITICAL qualitative severity.
    assertEquals(CvssV3Data.SeverityType.CRITICAL, cvss.getCvssData().getBaseSeverity());
    assertEquals(10.0, cvss.getCvssData().getBaseScore(), 0);
}
@VisibleForTesting static SwitchGenerationCase checkSwitchGenerationCase(Type type, List<RowExpression> values) { if (values.size() > 32) { // 32 is chosen because // * SET_CONTAINS performs worst when smaller than but close to power of 2 // * Benchmark shows performance of SET_CONTAINS is better at 50, but similar at 25. return SwitchGenerationCase.SET_CONTAINS; } if (!(type instanceof IntegerType || type instanceof BigintType || type instanceof DateType)) { return SwitchGenerationCase.HASH_SWITCH; } for (RowExpression expression : values) { // For non-constant expressions, they will be added to the default case in the generated switch code. They do not affect any of // the cases other than the default one. Therefore, it's okay to skip them when choosing between DIRECT_SWITCH and HASH_SWITCH. // Same argument applies for nulls. if (!(expression instanceof ConstantExpression)) { continue; } Object constant = ((ConstantExpression) expression).getValue(); if (constant == null) { continue; } long longConstant = ((Number) constant).longValue(); if (longConstant < Integer.MIN_VALUE || longConstant > Integer.MAX_VALUE) { return SwitchGenerationCase.HASH_SWITCH; } } return SwitchGenerationCase.DIRECT_SWITCH; }
@Test
public void testInteger()
{
    FunctionAndTypeManager typeManager = createTestMetadataManager().getFunctionAndTypeManager();
    List<RowExpression> candidates = new ArrayList<>();

    // Constants spanning the full int range still permit a direct switch.
    candidates.add(constant((long) Integer.MIN_VALUE, INTEGER));
    candidates.add(constant((long) Integer.MAX_VALUE, INTEGER));
    candidates.add(constant(3L, INTEGER));
    assertEquals(checkSwitchGenerationCase(INTEGER, candidates), DIRECT_SWITCH);

    // A null constant is ignored by the strategy decision.
    candidates.add(constant(null, INTEGER));
    assertEquals(checkSwitchGenerationCase(INTEGER, candidates), DIRECT_SWITCH);

    // A non-constant expression (a CAST call) is likewise ignored.
    candidates.add(new CallExpression(
            CAST.name(),
            typeManager.lookupCast(CAST, DOUBLE, INTEGER),
            INTEGER,
            Collections.singletonList(constant(12345678901234.0, DOUBLE))));
    assertEquals(checkSwitchGenerationCase(INTEGER, candidates), DIRECT_SWITCH);

    // Pad the list up to exactly 32 entries: still within the direct-switch limit.
    for (int value = 6; value <= 32; ++value) {
        candidates.add(constant((long) value, INTEGER));
    }
    assertEquals(checkSwitchGenerationCase(INTEGER, candidates), DIRECT_SWITCH);

    // The 33rd entry crosses the threshold and switches to set containment.
    candidates.add(constant(33L, INTEGER));
    assertEquals(checkSwitchGenerationCase(INTEGER, candidates), SET_CONTAINS);
}
/**
 * Returns the externally configured URL exactly as supplied by the properties,
 * or {@code null} when no external URL has been configured.
 */
@Nullable
@Override
public URL getRaw() {
    final URL configuredExternalUrl = haloProperties.getExternalUrl();
    return configuredExternalUrl;
}
@Test
void getRaw() throws MalformedURLException {
    final URI configuredUri = URI.create("http://localhost/fake");

    // When a URL is configured, getRaw must return it unchanged.
    when(haloProperties.getExternalUrl()).thenReturn(configuredUri.toURL());
    assertEquals(configuredUri.toURL(), externalUrl.getRaw());

    // When nothing is configured, getRaw must propagate the null.
    when(haloProperties.getExternalUrl()).thenReturn(null);
    assertNull(externalUrl.getRaw());
}