focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Builds a {@link ServiceModel} containing one {@link ApplicationInstance} per
 * {@link ApplicationInfo}, keyed by the instance's reference. Duplicate references
 * cause the map collector to fail, same as before.
 */
public ServiceModel toServiceModel(List<ApplicationInfo> allApplicationInfos, ServiceStatusProvider serviceStatusProvider) {
    Map<ApplicationInstanceReference, ApplicationInstance> instancesByReference =
            allApplicationInfos.stream()
                    .map(applicationInfo -> new ApplicationInstanceGenerator(applicationInfo, zone))
                    .map(generator -> generator.makeApplicationInstance(serviceStatusProvider))
                    .collect(Collectors.toMap(ApplicationInstance::reference, instance -> instance));
    return new ServiceModel(instancesByReference);
}
// Verifies that ModelGenerator.toServiceModel() turns the example ApplicationInfos into a
// ServiceModel with exactly two application instances (the config server application and
// one other), with every service reported UP by the mocked slobrok monitor.
@Test
public void toApplicationModel() throws Exception {
    Zone zone = new Zone(Environment.from(ENVIRONMENT), RegionName.from(REGION));
    ModelGenerator modelGenerator = new ModelGenerator(zone);
    // Mocked status provider: every service is reported as UP.
    SlobrokMonitorManagerImpl slobrokMonitorManager = mock(SlobrokMonitorManagerImpl.class);
    when(slobrokMonitorManager.getStatus(any(), any(), any(), any()))
            .thenReturn(new ServiceStatusInfo(ServiceStatus.UP));
    ServiceModel serviceModel =
            modelGenerator.toServiceModel(
                    getExampleApplicationInfos(),
                    slobrokMonitorManager);
    Map<ApplicationInstanceReference, ApplicationInstance> applicationInstances =
            serviceModel.getAllApplicationInstances();
    assertEquals(2, applicationInstances.size());
    Iterator<Map.Entry<ApplicationInstanceReference, ApplicationInstance>> iterator =
            applicationInstances.entrySet().iterator();
    ApplicationInstance applicationInstance1 = iterator.next().getValue();
    ApplicationInstance applicationInstance2 = iterator.next().getValue();
    // Map iteration order is unspecified, so determine which entry is which before verifying.
    if (applicationInstance1.applicationInstanceId().equals(configServerApplication.getApplicationInstanceId())) {
        verifyConfigServerApplication(applicationInstance1);
        verifyOtherApplication(applicationInstance2);
    } else {
        verifyConfigServerApplication(applicationInstance2);
        verifyOtherApplication(applicationInstance1);
    }
}
/**
 * Proceeds with the intercepted join point and decorates its reactive return value
 * with a time-limiter transformer.
 */
@Override
public Object handle(ProceedingJoinPoint proceedingJoinPoint, TimeLimiter timeLimiter, String methodName) throws Throwable {
    // Execute the target method first, then wrap whatever reactive type it returned.
    Object reactiveResult = proceedingJoinPoint.proceed();
    TimeLimiterTransformer<?> transformer = TimeLimiterTransformer.of(timeLimiter);
    return executeRxJava3Aspect(transformer, reactiveResult, methodName);
}
// The aspect must handle every supported RxJava 3 return type and never return null.
@Test
public void testRxJava3Types() throws Throwable {
    TimeLimiter timeLimiter = TimeLimiter.ofDefaults("test");
    Object[] reactiveReturnValues = {
            Single.just("Test"),
            Flowable.just("Test"),
            Observable.just("Test"),
            Completable.complete(),
            Maybe.just("Test")
    };
    for (Object reactiveReturnValue : reactiveReturnValues) {
        when(proceedingJoinPoint.proceed()).thenReturn(reactiveReturnValue);
        assertThat(rxJava3TimeLimiterAspectExt.handle(proceedingJoinPoint, timeLimiter, "testMethod")).isNotNull();
    }
}
/**
 * Two ArchivedJson instances are equal iff both their path and json fields are equal.
 * Adds the conventional reference-equality fast path (Effective Java item 10) so a
 * self-comparison short-circuits, and uses a guard clause instead of if/else nesting.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof ArchivedJson)) {
        return false;
    }
    ArchivedJson other = (ArchivedJson) obj;
    return this.path.equals(other.path) && this.json.equals(other.json);
}
// Equality is value-based on (path, json): reflexive, equal for identical field
// values, and broken by a difference in either field.
@Test
void testEquals() {
    ArchivedJson base = new ArchivedJson("path", "json");
    ArchivedJson sameFields = new ArchivedJson("path", "json");
    ArchivedJson differentJson = new ArchivedJson("path", "hello");
    ArchivedJson differentPath = new ArchivedJson("hello", "json");
    assertThat(base).isEqualTo(base);
    assertThat(sameFields).isEqualTo(base);
    assertThat(differentJson).isNotEqualTo(base);
    assertThat(differentPath).isNotEqualTo(base);
}
// Registers a flush listener on the wrapped store if (and only if) it is a caching
// store. The raw byte[] key/value records emitted by the cache are deserialized back
// to Windowed<K>/V before being handed to the caller-supplied listener.
// Returns false when the wrapped store is not cached (nothing to listen to).
@SuppressWarnings("unchecked")
@Override
public boolean setFlushListener(final CacheFlushListener<Windowed<K>, V> listener,
                                final boolean sendOldValues) {
    final SessionStore<Bytes, byte[]> wrapped = wrapped();
    if (wrapped instanceof CachedStateStore) {
        return ((CachedStateStore<byte[], byte[]>) wrapped).setFlushListener(
            record -> listener.apply(
                record.withKey(SessionKeySchema.from(record.key(), serdes.keyDeserializer(), serdes.topic()))
                    .withValue(new Change<>(
                        // null markers must stay null rather than be deserialized
                        record.value().newValue != null ? serdes.valueFrom(record.value().newValue) : null,
                        record.value().oldValue != null ? serdes.valueFrom(record.value().oldValue) : null,
                        record.value().isLatest))
            ),
            sendOldValues);
    }
    return false;
}
// setFlushListener() must be forwarded to the wrapped caching store, and the wrapped
// store's return value (stubbed to true here) propagated back to the caller.
@SuppressWarnings("unchecked")
@Test
public void shouldSetFlushListenerOnWrappedCachingStore() {
    setUpWithoutContext();
    final CachedSessionStore cachedSessionStore = mock(CachedSessionStore.class);
    when(cachedSessionStore.setFlushListener(any(CacheFlushListener.class), eq(false))).thenReturn(true);
    store = new MeteredSessionStore<>(
        cachedSessionStore,
        STORE_TYPE,
        Serdes.String(),
        Serdes.String(),
        new MockTime());
    assertTrue(store.setFlushListener(null, false));
}
/**
 * Lists tables and views in the given database (schema pattern) using JDBC metadata.
 */
@Override
public ResultSet getTables(Connection connection, String dbName) throws SQLException {
    String catalog = connection.getCatalog();
    String[] tableTypes = {"TABLE", "VIEW"};
    return connection.getMetaData().getTables(catalog, dbName, null, tableTypes);
}
// Lists table names via JDBC metadata (mocked with JMockit Expectations).
// Fix: the original wrapped the call in `catch (Exception e) { Assert.fail(); }`,
// which discarded the actual failure; unexpected exceptions now propagate with
// their full stack trace.
@Test
public void testListTableNames() throws Exception {
    new Expectations() {
        {
            dataSource.getConnection();
            result = connection;
            minTimes = 0;
            connection.getCatalog();
            result = "t1";
            minTimes = 0;
            connection.getMetaData().getTables("t1", "test", null, new String[] {"TABLE", "VIEW"});
            result = tableResult;
            minTimes = 0;
        }
    };
    JDBCMetadata jdbcMetadata = new JDBCMetadata(properties, "catalog", dataSource);
    List<String> result = jdbcMetadata.listTableNames("test");
    List<String> expectResult = Lists.newArrayList("tbl1", "tbl2", "tbl3");
    Assert.assertEquals(expectResult, result);
}
/**
 * Program entry point: demonstrates the Strategy pattern by having the thief
 * steal with one method, then switch strategies and steal again.
 */
public static void main(String[] args) {
    var strategicThief = new HalflingThief(new HitAndRunMethod());
    strategicThief.steal();
    // Swap in a different stealing strategy at runtime.
    strategicThief.changeMethod(new SubtleMethod());
    strategicThief.steal();
}
// Smoke test: the demo's main() must run to completion without throwing.
@Test
void shouldExecuteWithoutException() {
    String[] noArgs = {};
    assertDoesNotThrow(() -> App.main(noArgs));
}
/**
 * Creates an {@link OrderedEventProcessor} backed by the given handler.
 *
 * @param handler strategy object defining how events are keyed, processed and emitted
 * @return a new processor instance (AutoValue-generated implementation)
 */
public static <
        EventTypeT,
        EventKeyTypeT,
        ResultTypeT,
        StateTypeT extends MutableState<EventTypeT, ResultTypeT>>
    OrderedEventProcessor<EventTypeT, EventKeyTypeT, ResultTypeT, StateTypeT> create(
        OrderedProcessingHandler<EventTypeT, EventKeyTypeT, StateTypeT, ResultTypeT> handler) {
  return new AutoValue_OrderedEventProcessor<>(handler);
}
// Events arrive out of sequence for two keys; the processor must buffer them and
// emit results in sequence order once the gaps are filled.
@Test
public void testOutOfSequenceProcessing() throws CannotProvideCoderException {
    Event[] events = {
        Event.create(2, "id-1", "c"),
        Event.create(1, "id-1", "b"),
        Event.create(0, "id-1", "a"),
        Event.create(3, "id-1", "d"),
        Event.create(1, "id-2", "b"),
        Event.create(2, "id-2", "c"),
        Event.create(4, "id-2", "e"),
        Event.create(0, "id-2", "a"),
        Event.create(3, "id-2", "d")
    };
    // Expected terminal status per key: last processed sequence number, nothing left
    // buffered or duplicated, and the total number of events received for that key.
    Collection<KV<String, OrderedProcessingStatus>> expectedStatuses = new ArrayList<>();
    expectedStatuses.add(
        KV.of(
            "id-1",
            OrderedProcessingStatus.create(
                3L,
                0,
                null,
                null,
                4,
                Arrays.stream(events).filter(e -> e.getKey().equals("id-1")).count(),
                0,
                false)));
    expectedStatuses.add(
        KV.of(
            "id-2",
            OrderedProcessingStatus.create(
                4L,
                0,
                null,
                null,
                5,
                Arrays.stream(events).filter(e -> e.getKey().equals("id-2")).count(),
                0,
                false)));
    // Per-key results accumulate in sequence order: "a", "ab", "abc", ...
    Collection<KV<String, String>> expectedOutput = new ArrayList<>();
    expectedOutput.add(KV.of("id-1", "a"));
    expectedOutput.add(KV.of("id-1", "ab"));
    expectedOutput.add(KV.of("id-1", "abc"));
    expectedOutput.add(KV.of("id-1", "abcd"));
    expectedOutput.add(KV.of("id-2", "a"));
    expectedOutput.add(KV.of("id-2", "ab"));
    expectedOutput.add(KV.of("id-2", "abc"));
    expectedOutput.add(KV.of("id-2", "abcd"));
    expectedOutput.add(KV.of("id-2", "abcde"));
    testProcessing(
        events,
        expectedStatuses,
        expectedOutput,
        EMISSION_FREQUENCY_ON_EVERY_ELEMENT,
        INITIAL_SEQUENCE_OF_0,
        LARGE_MAX_RESULTS_PER_OUTPUT,
        DONT_PRODUCE_STATUS_ON_EVERY_EVENT);
}
/**
 * Parses a dynamic voter string of the form {@code nodeId@host:port:directoryId},
 * where the host may be a bracketed literal such as {@code [::1]} (IPv6-style).
 * The method consumes {@code input} left to right, trimming each recognized section.
 *
 * @param input the voter string; surrounding whitespace is ignored
 * @return the parsed voter
 * @throws IllegalArgumentException if any section is missing or malformed
 */
public static DynamicVoter parse(String input) {
    input = input.trim();
    int atIndex = input.indexOf("@");
    if (atIndex < 0) {
        throw new IllegalArgumentException("No @ found in dynamic voter string.");
    }
    if (atIndex == 0) {
        throw new IllegalArgumentException("Invalid @ at beginning of dynamic voter string.");
    }
    // Node id: everything before '@'; must be a non-negative integer.
    String idString = input.substring(0, atIndex);
    int nodeId;
    try {
        nodeId = Integer.parseInt(idString);
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Failed to parse node id in dynamic voter string.", e);
    }
    if (nodeId < 0) {
        throw new IllegalArgumentException("Invalid negative node id " + nodeId +
            " in dynamic voter string.");
    }
    input = input.substring(atIndex + 1);
    if (input.isEmpty()) {
        throw new IllegalArgumentException("No hostname found after node id.");
    }
    String host;
    if (input.startsWith("[")) {
        // Bracketed hostname (e.g. an IPv6 literal): take everything inside [ ].
        int endBracketIndex = input.indexOf("]");
        if (endBracketIndex < 0) {
            throw new IllegalArgumentException("Hostname began with left bracket, but no right " +
                "bracket was found.");
        }
        host = input.substring(1, endBracketIndex);
        input = input.substring(endBracketIndex + 1);
    } else {
        // Unbracketed hostname: everything up to the first colon. The colon itself is
        // deliberately left in `input` so the check below mirrors the bracketed branch.
        int endColonIndex = input.indexOf(":");
        if (endColonIndex < 0) {
            throw new IllegalArgumentException("No colon following hostname could be found.");
        }
        host = input.substring(0, endColonIndex);
        input = input.substring(endColonIndex);
    }
    if (!input.startsWith(":")) {
        throw new IllegalArgumentException("Port section must start with a colon.");
    }
    input = input.substring(1);
    // Port: digits up to the next colon, constrained to the valid TCP/UDP range.
    int endColonIndex = input.indexOf(":");
    if (endColonIndex < 0) {
        throw new IllegalArgumentException("No colon following port could be found.");
    }
    String portString = input.substring(0, endColonIndex);
    int port;
    try {
        port = Integer.parseInt(portString);
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Failed to parse port in dynamic voter string.", e);
    }
    if (port < 0 || port > 65535) {
        throw new IllegalArgumentException("Invalid port " + port + " in dynamic voter string.");
    }
    // Directory id: the remainder after the port's trailing colon.
    String directoryIdString = input.substring(endColonIndex + 1);
    Uuid directoryId;
    try {
        directoryId = Uuid.fromString(directoryIdString);
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Failed to parse directory ID in dynamic voter string.", e);
    }
    return new DynamicVoter(directoryId, nodeId, host, port);
}
// A well-formed "nodeId@host:port:directoryId" string parses into the expected voter.
@Test
public void testParseDynamicVoter() {
    DynamicVoter expected = new DynamicVoter(Uuid.fromString("K90IZ-0DRNazJ49kCZ1EMQ"),
        2,
        "localhost",
        (short) 8020);
    assertEquals(expected, DynamicVoter.parse("2@localhost:8020:K90IZ-0DRNazJ49kCZ1EMQ"));
}
/**
 * Loads the analysis date and the optional, length-validated project version and
 * build string from settings at startup.
 */
@Override
public void start() {
    this.analysisDate = loadAnalysisDate();
    this.projectVersion = readValidatedSetting(PROJECT_VERSION_PROPERTY, "project version");
    this.buildString = readValidatedSetting(BUILD_STRING_PROPERTY, "buildString");
}

/**
 * Reads a setting, trims it to null, and drops values exceeding the length limit.
 * Returns null when the setting is absent, blank, or too long.
 */
private String readValidatedSetting(String property, String label) {
    return settings.get(property)
        .map(StringUtils::trimToNull)
        .filter(validateLengthLimit(label))
        .orElse(null);
}
// An explicitly empty analysis date must be rejected at startup.
@Test
void emptyDate() {
    settings.setProperty(CoreProperties.PROJECT_DATE_PROPERTY, "");
    settings.setProperty(CoreProperties.PROJECT_VERSION_PROPERTY, "version");
    assertThatThrownBy(underTest::start)
        .isInstanceOf(RuntimeException.class);
}
/**
 * Validates that the schema is compatible with the transformer config: field names
 * must not clash with the config, and both extras fields (when configured) must have
 * a valid type.
 *
 * @throws IllegalArgumentException if the schema is invalid (propagated from helpers)
 */
public static void validateSchema(@Nonnull Schema schema,
    @Nonnull SchemaConformingTransformerConfig transformerConfig) {
  validateSchemaFieldNames(schema.getPhysicalColumnNames(), transformerConfig);

  String indexableExtrasFieldName = transformerConfig.getIndexableExtrasField();
  getAndValidateExtrasFieldType(schema, indexableExtrasFieldName);

  String unindexableExtrasFieldName = transformerConfig.getUnindexableExtrasField();
  if (null != unindexableExtrasFieldName) {
    // Bug fix: this previously re-validated indexableExtrasFieldName, leaving the
    // unindexable extras field completely unchecked.
    getAndValidateExtrasFieldType(schema, unindexableExtrasFieldName);
  }

  validateSchemaAndCreateTree(schema);
}
// validateSchema() must reject schemas whose field names conflict with the
// transformer config (reserved suffix, or paths configured to be dropped).
@Test
public void testInvalidFieldNamesInSchema() {
    // Ensure schema fields which end with unindexableFieldSuffix are caught as invalid
    Assert.assertThrows(() -> {
        Schema schema = createDefaultSchemaBuilder().addSingleValueDimension("a" + UNINDEXABLE_FIELD_SUFFIX, DataType.STRING)
            .addSingleValueDimension("a.b" + UNINDEXABLE_FIELD_SUFFIX, DataType.INT).build();
        SchemaConformingTransformer.validateSchema(schema,
            new SchemaConformingTransformerConfig(INDEXABLE_EXTRAS_FIELD_NAME, null, UNINDEXABLE_FIELD_SUFFIX, null));
    });
    // Ensure schema fields which are in fieldPathsToDrop are caught as invalid
    Assert.assertThrows(() -> {
        Schema schema = createDefaultSchemaBuilder().addSingleValueDimension("a", DataType.STRING)
            .addSingleValueDimension("b.c", DataType.INT).build();
        Set<String> fieldPathsToDrop = new HashSet<>(Arrays.asList("a", "b.c"));
        SchemaConformingTransformer.validateSchema(schema,
            new SchemaConformingTransformerConfig(INDEXABLE_EXTRAS_FIELD_NAME, null, null, fieldPathsToDrop));
    });
}
/**
 * Convenience overload of {@code reflector(Class, Object)} that creates an accessor
 * with no bound target instance (null target).
 */
public static <T> T reflector(Class<T> iClass) {
    return reflector(iClass, null);
}
// Micro-benchmark comparing field access via ReflectionHelpers against generated
// reflector accessors. @Ignore'd: timing-only, not a correctness test; run manually.
@Ignore
@Test
public void fieldPerf() {
    SomeClass i = new SomeClass("c");
    // Print both access paths once so their results can be eyeballed for agreement.
    System.out.println("reflection = " + Collections.singletonList(fieldByReflectionHelpers(i)));
    System.out.println("accessor = " + Collections.singletonList(fieldByReflector(i)));
    _SomeClass_ accessor = reflector(_SomeClass_.class, i);
    // Two rounds of each measurement to account for JIT warm-up effects.
    time("ReflectionHelpers", 10_000_000, () -> fieldByReflectionHelpers(i));
    time("accessor", 10_000_000, () -> fieldByReflector(i));
    time("saved accessor", 10_000_000, () -> fieldBySavedReflector(accessor));
    time("ReflectionHelpers", 10_000_000, () -> fieldByReflectionHelpers(i));
    time("accessor", 10_000_000, () -> fieldByReflector(i));
    time("saved accessor", 10_000_000, () -> fieldBySavedReflector(accessor));
}
// Translates a raw HTTP error response into a BackgroundException by wrapping it in
// an S3ServiceException (status, body, and MinIO-specific error headers) and
// delegating to map(S3ServiceException).
public BackgroundException map(HttpResponse response) throws IOException {
    final S3ServiceException failure;
    if(null == response.getEntity()) {
        // No body: the reason phrase is all we have.
        failure = new S3ServiceException(response.getStatusLine().getReasonPhrase());
    }
    else {
        // Buffer the entity first so the (single-shot) stream can be consumed here
        // and potentially re-read later.
        EntityUtils.updateEntity(response, new BufferedHttpEntity(response.getEntity()));
        failure = new S3ServiceException(response.getStatusLine().getReasonPhrase(),
            EntityUtils.toString(response.getEntity()));
    }
    failure.setResponseCode(response.getStatusLine().getStatusCode());
    // MinIO reports error details in custom headers; forward them when present.
    if(response.containsHeader(MINIO_ERROR_CODE)) {
        failure.setErrorCode(response.getFirstHeader(MINIO_ERROR_CODE).getValue());
    }
    if(response.containsHeader(MINIO_ERROR_DESCRIPTION)) {
        failure.setErrorMessage(response.getFirstHeader(MINIO_ERROR_DESCRIPTION).getValue());
    }
    return this.map(failure);
}
// An IAM-policy failure passed as the message context surfaces as the mapped
// exception's message, while the generic advice text becomes the detail.
@Test
public void testIAMFailure() {
    assertEquals("The IAM policy must allow the action s3:GetBucketLocation on the resource arn:aws:s3:::endpoint-9a527d70-d432-4601-b24b-735e721b82c9.",
        new S3ExceptionMappingService().map("The IAM policy must allow the action s3:GetBucketLocation on the resource arn:aws:s3:::endpoint-9a527d70-d432-4601-b24b-735e721b82c9",
            new ServiceException("message")).getMessage());
    assertEquals("Message. Please contact your web hosting service provider for assistance.",
        new S3ExceptionMappingService().map("The IAM policy must allow the action s3:GetBucketLocation on the resource arn:aws:s3:::endpoint-9a527d70-d432-4601-b24b-735e721b82c9",
            new ServiceException("message")).getDetail());
}
/**
 * Converts a delegation token to its JSON string form via the generic
 * {@code toJsonString(Class, Map)} helper, keyed by the Token class.
 */
public static String toJsonString(final Token<? extends TokenIdentifier> token
    ) throws IOException {
  return toJsonString(Token.class, toJsonMap(token));
}
// Round-trips a SnapshotDiffReportListing (created and deleted entries, no modified
// ones) through its JSON representation and back, asserting equality.
@Test
public void testSnapshotDiffReportListing() throws IOException {
    List<DiffReportListingEntry> mlist = new ChunkedArrayList<>();
    List<DiffReportListingEntry> clist = new ChunkedArrayList<>();
    List<DiffReportListingEntry> dlist = new ChunkedArrayList<>();
    // Created entries.
    clist.add(new DiffReportListingEntry(
        1L, 2L, DFSUtilClient.string2Bytes("dir1/file2"), false, null));
    clist.add(new DiffReportListingEntry(
        1L, 3L, DFSUtilClient.string2Bytes("dir1/file3"), false, null));
    // Deleted entries; the last one carries a second path and a true flag —
    // presumably a reference/rename entry, confirm against DiffReportListingEntry.
    dlist.add(new DiffReportListingEntry(
        1L, 4L, DFSUtilClient.string2Bytes("dir1/file4"), false, null));
    dlist.add(new DiffReportListingEntry(
        1L, 5L, DFSUtilClient.string2Bytes("dir1/file5"), true,
        DFSUtilClient.string2Bytes("dir1/file6")));
    SnapshotDiffReportListing report =
        new SnapshotDiffReportListing(
            DFSUtilClient.string2Bytes("dir1/file2"), mlist, clist, dlist, 3, true);
    String jsonString = JsonUtil.toJsonString(report);
    Map<?, ?> json = READER.readValue(jsonString);
    SnapshotDiffReportListing parsed = JsonUtilClient.toSnapshotDiffReportListing(json);
    assertEquals(report, parsed);
}
@Nonnull public static <T> T checkNonNullAndSerializable(@Nonnull T object, @Nonnull String objectName) { //noinspection ConstantConditions if (object == null) { throw new IllegalArgumentException('"' + objectName + "\" must not be null"); } checkSerializable(object, objectName); return object; }
// A non-null but non-serializable argument must be rejected with the
// "must implement Serializable" message.
@Test
public void whenNonSerializableToCheckNonNullAndSerializable_thenThrowException() {
    Object nonSerializable = new HashMap<>().entrySet();
    assertThatThrownBy(() -> Util.checkNonNullAndSerializable(nonSerializable, "object"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("\"object\" must implement Serializable");
}
/**
 * Configures the docker client parameters by executing the given action against
 * them (Gradle-style configuration-closure delegation).
 */
public void dockerClient(Action<? super DockerClientParameters> action) {
    action.execute(dockerClient);
}
// Values set through the dockerClient { } configuration action must be readable
// back: the executable as a Path, and the environment map with its order preserved.
@Test
public void testDockerClient() {
    testJibExtension.dockerClient(
        dockerClient -> {
            dockerClient.setExecutable("test-executable");
            dockerClient.setEnvironment(ImmutableMap.of("key1", "val1", "key2", "val2"));
        });
    assertThat(testJibExtension.getDockerClient().getExecutablePath())
        .isEqualTo(Paths.get("test-executable"));
    assertThat(testJibExtension.getDockerClient().getEnvironment())
        .containsExactly("key1", "val1", "key2", "val2")
        .inOrder();
}
/**
 * KSQL UDF: extracts the host component of a URL via {@code URI::getHost}.
 * Returns null when UrlParser cannot produce a host (e.g. invalid or host-less URL).
 */
@Udf
public String extractHost(
    @UdfParameter(description = "a valid URL") final String input) {
  return UrlParser.extract(input, URI::getHost);
}
// A URL whose authority is empty ("https:///...") has no host, so extraction
// must yield null.
@Test
public void shouldReturnNullIfNoHost() {
    final String urlWithEmptyHost = "https:///current/ksql/docs/syntax-reference.html#scalar-functions";
    assertThat(extractUdf.extractHost(urlWithEmptyHost), nullValue());
}
/**
 * Creates a new app from the request body. Rejects the request with a bad-request
 * error when an app with the same appId already exists.
 */
@PostMapping("/apps")
public AppDTO create(@Valid @RequestBody AppDTO dto) {
    App entity = BeanUtils.transform(App.class, dto);
    // NOTE(review): check-then-create is not atomic; concurrent requests could both
    // pass this existence check — presumably a DB unique constraint backs it up; verify.
    App managedEntity = appService.findOne(entity.getAppId());
    if (managedEntity != null) {
        throw BadRequestException.appAlreadyExists(entity.getAppId());
    }
    entity = adminService.createNewApp(entity);
    return BeanUtils.transform(AppDTO.class, entity);
}
// POSTing a new app must persist it and echo it back; cleanup.sql removes the
// row after the test method.
@Test
@Sql(scripts = "/controller/cleanup.sql", executionPhase = ExecutionPhase.AFTER_TEST_METHOD)
public void testCreate() {
    AppDTO dto = generateSampleDTOData();
    ResponseEntity<AppDTO> response = restTemplate.postForEntity(getBaseAppUrl(), dto, AppDTO.class);
    AppDTO result = response.getBody();
    Assert.assertEquals(HttpStatus.OK, response.getStatusCode());
    Assert.assertEquals(dto.getAppId(), result.getAppId());
    Assert.assertTrue(result.getId() > 0);
    // Verify the entity actually reached the database, not just the response body.
    App savedApp = appRepository.findById(result.getId()).orElse(null);
    Assert.assertEquals(dto.getAppId(), savedApp.getAppId());
    Assert.assertNotNull(savedApp.getDataChangeCreatedTime());
}
/**
 * Translates each local property's symbols with the given translator, keeping the
 * longest usable prefix: translation stops at the first non-constant property that
 * fails to translate (later properties only hold relative to earlier ones), while
 * untranslatable constants are simply skipped without ending the prefix.
 */
public static <X, Y> List<LocalProperty<Y>> translate(List<? extends LocalProperty<X>> properties,
        Function<X, Optional<Y>> translator) {
    properties = normalizeAndPrune(properties);
    ImmutableList.Builder<LocalProperty<Y>> builder = ImmutableList.builder();
    for (LocalProperty<X> property : properties) {
        Optional<LocalProperty<Y>> translated = property.translate(translator);
        if (translated.isPresent()) {
            builder.add(translated.get());
        }
        else if (!(property instanceof ConstantProperty)) {
            break; // Only break if we fail to translate non-constants
        }
    }
    return builder.build();
}
// Table of cases for LocalProperties.translate(): each triple sets the translation
// map, the input properties, and the expected longest translatable prefix.
@Test
public void testTranslate() {
    // Empty input translates to empty output.
    Map<String, String> map = ImmutableMap.of();
    List<LocalProperty<String>> input = ImmutableList.of();
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of());
    // A grouped property with no mapping is dropped.
    map = ImmutableMap.of();
    input = ImmutableList.of(grouped("a"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of());
    // A grouped property translates when its symbol maps.
    map = ImmutableMap.of("a", "a1");
    input = ImmutableList.of(grouped("a"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of(grouped("a1")));
    // Constants without a mapping are dropped silently.
    map = ImmutableMap.of();
    input = ImmutableList.of(constant("a"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of());
    map = ImmutableMap.of();
    input = ImmutableList.of(constant("a"), grouped("b"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of());
    // A failed constant does not block translation of what follows.
    map = ImmutableMap.of("b", "b1");
    input = ImmutableList.of(constant("a"), grouped("b"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of(grouped("b1")));
    // Both a constant and a grouped property translate when both map.
    map = ImmutableMap.of("a", "a1", "b", "b1");
    input = ImmutableList.of(constant("a"), grouped("b"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of(constant("a1"), grouped("b1")));
    // Multi-symbol grouping translates symbol-by-symbol.
    map = ImmutableMap.of("a", "a1", "b", "b1");
    input = ImmutableList.of(grouped("a", "b"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of(grouped("a1", "b1")));
    // A failed non-constant stops the prefix; later properties are discarded.
    map = ImmutableMap.of("a", "a1", "c", "c1");
    input = ImmutableList.of(constant("a"), grouped("b"), grouped("c"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of(constant("a1")));
    map = ImmutableMap.of("a", "a1", "c", "c1");
    input = ImmutableList.of(grouped("a", "b"), grouped("c"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of());
    map = ImmutableMap.of("a", "a1", "c", "c1");
    input = ImmutableList.of(grouped("a"), grouped("b"), grouped("c"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of(grouped("a1")));
    map = ImmutableMap.of("a", "a1", "c", "c1");
    input = ImmutableList.of(constant("b"), grouped("a", "b"), grouped("c"));
    // Because b is constant, we can rewrite (a, b)
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of(grouped("a1"), grouped("c1")));
    map = ImmutableMap.of("a", "a1", "c", "c1");
    input = ImmutableList.of(grouped("a"), constant("b"), grouped("c"));
    // Don't fail c translation due to a failed constant translation
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of(grouped("a1"), grouped("c1")));
    map = ImmutableMap.of("a", "a1", "b", "b1", "c", "c1");
    input = ImmutableList.of(grouped("a"), constant("b"), grouped("c"));
    assertEquals(LocalProperties.translate(input, translateWithMap(map)), ImmutableList.of(grouped("a1"), constant("b1"), grouped("c1")));
}
/**
 * Spherical (great-circle) distance between this coordinate and {@code other},
 * delegating to {@code LatLongUtils.sphericalDistance}.
 */
public double sphericalDistance(LatLong other) {
    return LatLongUtils.sphericalDistance(this, other);
}
@Test public void sphericalDistance_originToNorthPole_returnQuarterOfEarthEquatorCircumference() { // This is the origin of the WGS-84 reference system LatLong zeroZero = new LatLong(0d, 0d); // Calculating the distance between the north pole and the equator LatLong northPole = new LatLong(90d, 0d); double spherical = LatLongUtils.sphericalDistance(zeroZero, northPole); assertEquals(EARTH_EQUATOR_CIRCUMFERENCE / 4, spherical, 0d); }
// Emits the components config for this cluster: the apply-on-restart flag, all
// generated components, and the RegistriesHack component required by
// HandlersConfigurerDi.
@Override
public void getConfig(ComponentsConfig.Builder builder) {
    builder.setApplyOnRestart(getDeferChangesUntilRestart()); // Sufficient to set on one config
    builder.components.addAll(ComponentsConfigGenerator.generate(getAllComponents()));
    builder.components(new ComponentsConfig.Components.Builder().id("com.yahoo.container.core.config.HandlersConfigurerDi$RegistriesHack"));
}
// Cluster controller clusters must not pull in any of the search/docproc
// platform bundles.
@Test
void search_and_docproc_bundles_are_not_installed_for_cluster_controllers() {
    MockRoot root = createRoot(false);
    ClusterControllerContainerCluster cluster = createClusterControllerCluster(root);
    var bundleBuilder = new PlatformBundlesConfig.Builder();
    cluster.getConfig(bundleBuilder);
    List<Path> installedBundles = bundleBuilder.build().bundlePaths().stream().map(Paths::get).toList();
    installedBundles.forEach(bundle -> assertFalse(PlatformBundles.SEARCH_AND_DOCPROC_BUNDLES.contains(bundle)));
}
/**
 * Records that the given slot was allocated on this task manager and updates the
 * pending/unused resource bookkeeping accordingly.
 *
 * @param allocationId id of the allocation; must not be null
 * @param taskManagerSlot the slot being allocated; its state must be PENDING or ALLOCATED
 * @throws IllegalStateException if the slot state is neither PENDING nor ALLOCATED,
 *     or if a pending reservation would exceed the total resource
 */
public void notifyAllocation(
        AllocationID allocationId, FineGrainedTaskManagerSlot taskManagerSlot) {
    Preconditions.checkNotNull(allocationId);
    Preconditions.checkNotNull(taskManagerSlot);
    switch (taskManagerSlot.getState()) {
        case PENDING:
            // Reserve the slot's resources as pending; they must still fit in the total.
            ResourceProfile newPendingResource =
                    pendingResource.merge(taskManagerSlot.getResourceProfile());
            Preconditions.checkState(totalResource.allFieldsNoLessThan(newPendingResource));
            pendingResource = newPendingResource;
            break;
        case ALLOCATED:
            unusedResource = unusedResource.subtract(taskManagerSlot.getResourceProfile());
            break;
        default:
            // Fixed typo in the error message: "slot stat" -> "slot state".
            throw new IllegalStateException(
                    "The slot state should not be FREE under fine-grained resource management.");
    }
    slots.put(allocationId, taskManagerSlot);
    // Any allocation means the task manager is no longer idle.
    idleSince = Long.MAX_VALUE;
}
// Allocating a (2, 100) slot out of a (10, 1000) total must reduce the available
// resource to (8, 900), mark the task manager as non-idle, and register the slot.
@Test
void testNotifyAllocation() {
    final ResourceProfile totalResource = ResourceProfile.fromResources(10, 1000);
    final FineGrainedTaskManagerRegistration taskManager =
        new FineGrainedTaskManagerRegistration(
            TASK_EXECUTOR_CONNECTION, totalResource, totalResource);
    final AllocationID allocationId = new AllocationID();
    final JobID jobId = new JobID();
    final FineGrainedTaskManagerSlot slot =
        new FineGrainedTaskManagerSlot(
            allocationId,
            jobId,
            ResourceProfile.fromResources(2, 100),
            TASK_EXECUTOR_CONNECTION,
            SlotState.ALLOCATED);
    taskManager.notifyAllocation(allocationId, slot);
    assertThat(taskManager.getAvailableResource())
        .isEqualTo(ResourceProfile.fromResources(8, 900));
    // Long.MAX_VALUE is the sentinel for "not idle".
    assertThat(taskManager.getIdleSince()).isEqualTo(Long.MAX_VALUE);
    assertThat(taskManager.getAllocatedSlots()).containsKey(allocationId);
}
/**
 * Parses the build file at {@code buildFilePath} (with the command's template
 * parameters applied) and configures a {@link JibContainerBuilder} with every
 * container setting declared in the spec.
 *
 * @throws InvalidImageReferenceException if the base image reference is invalid
 * @throws IOException if the build file cannot be read
 */
public static JibContainerBuilder toJibContainerBuilder(
        Path projectRoot,
        Path buildFilePath,
        Build buildCommandOptions,
        CommonCliOptions commonCliOptions,
        ConsoleLogger logger)
        throws InvalidImageReferenceException, IOException {
    BuildFileSpec buildFile =
        toBuildFileSpec(buildFilePath, buildCommandOptions.getTemplateParameters());
    // No "from" in the build file means we build from scratch.
    Optional<BaseImageSpec> baseImageSpec = buildFile.getFrom();
    JibContainerBuilder containerBuilder =
        baseImageSpec.isPresent()
            ? createJibContainerBuilder(baseImageSpec.get(), commonCliOptions, logger)
            : Jib.fromScratch();
    // Optional single-valued settings are only applied when present in the spec.
    buildFile.getCreationTime().ifPresent(containerBuilder::setCreationTime);
    buildFile.getFormat().ifPresent(containerBuilder::setFormat);
    containerBuilder.setEnvironment(buildFile.getEnvironment());
    containerBuilder.setLabels(buildFile.getLabels());
    containerBuilder.setVolumes(buildFile.getVolumes());
    containerBuilder.setExposedPorts(buildFile.getExposedPorts());
    buildFile.getUser().ifPresent(containerBuilder::setUser);
    buildFile.getWorkingDirectory().ifPresent(containerBuilder::setWorkingDirectory);
    buildFile.getEntrypoint().ifPresent(containerBuilder::setEntrypoint);
    buildFile.getCmd().ifPresent(containerBuilder::setProgramArguments);
    Optional<LayersSpec> layersSpec = buildFile.getLayers();
    if (layersSpec.isPresent()) {
        // Layer entries — presumably resolved relative to projectRoot by Layers.toLayers.
        containerBuilder.setFileEntriesLayers(Layers.toLayers(projectRoot, layersSpec.get()));
    }
    return containerBuilder;
}
// Template parameter keys may span multiple lines; such a key must still be
// substituted into the build file before it is parsed.
@Test
public void testToBuildFileSpec_templateMultiLineBehavior()
    throws URISyntaxException, InvalidImageReferenceException, IOException {
    Path buildfile =
        Paths.get(Resources.getResource("buildfiles/projects/templating/multiLine.yaml").toURI());
    Mockito.when(buildCli.getTemplateParameters())
        .thenReturn(ImmutableMap.of("replace" + "\n" + "this", "creationTime: 1234"));
    JibContainerBuilder jibContainerBuilder =
        BuildFiles.toJibContainerBuilder(
            buildfile.getParent(), buildfile, buildCli, commonCliOptions, consoleLogger);
    ContainerBuildPlan resolved = jibContainerBuilder.toContainerBuildPlan();
    // The substituted creationTime must survive into the resolved build plan.
    Assert.assertEquals(Instant.ofEpochMilli(1234), resolved.getCreationTime());
}
/**
 * Looks up the application report for the requested application id in the history
 * store. IOExceptions are logged before being rethrown to the caller.
 */
@Override
public GetApplicationReportResponse getApplicationReport(
    GetApplicationReportRequest request) throws YarnException, IOException {
  ApplicationId applicationId = request.getApplicationId();
  try {
    return GetApplicationReportResponse.newInstance(
        history.getApplication(applicationId));
  } catch (IOException e) {
    LOG.error(e.getMessage(), e);
    throw e;
  }
}
@Test void testApplicationNotFound() throws IOException, YarnException { ApplicationId appId = null; appId = ApplicationId.newInstance(0, MAX_APPS + 1); GetApplicationReportRequest request = GetApplicationReportRequest.newInstance(appId); try { @SuppressWarnings("unused") GetApplicationReportResponse response = clientService.getApplicationReport(request); fail("Exception should have been thrown before we reach here."); } catch (ApplicationNotFoundException e) { //This exception is expected. assertTrue(e.getMessage().contains( "doesn't exist in the timeline store")); } catch (Exception e) { fail("Undesired exception caught"); } }
// Attempts to hand the element directly to a waiting consumer without blocking.
@Override
public boolean tryTransfer(V v) {
    // Invoke the remote transfer; getAddFuture() resolves once the element is queued.
    RemotePromise<Void> future = (RemotePromise<Void>) service.invoke(v).toCompletableFuture();
    boolean added = commandExecutor.get(future.getAddFuture());
    // If the element was added but cancellation fails, the transfer is already being
    // consumed — presumably a waiting consumer picked it up; wait for completion.
    if (added && !future.cancel(false)) {
        commandExecutor.get(future);
        return true;
    }
    // Either nothing was added, or we cancelled before any consumer took the element.
    return false;
}
// tryTransfer() succeeds only while a consumer (take()) is already waiting, and
// returns false immediately when nobody is.
@Test
public void testTryTransfer() throws InterruptedException, ExecutionException {
    RTransferQueue<Integer> queue1 = redisson.getTransferQueue("queue");
    AtomicBoolean takeExecuted = new AtomicBoolean();
    // Producer fires after ~4s, while the main thread is already blocked in take().
    ScheduledFuture<?> f = Executors.newSingleThreadScheduledExecutor().schedule(() -> {
        RTransferQueue<Integer> queue = redisson.getTransferQueue("queue");
        boolean res = queue.tryTransfer(3);
        assertThat(takeExecuted.get()).isTrue();
        assertThat(res).isTrue();
        // No consumer is waiting any more, so this transfer must be rejected.
        boolean res2 = queue.tryTransfer(4);
        assertThat(res2).isFalse();
    }, 4, TimeUnit.SECONDS);
    // The take() must have blocked for at least ~3.9s, i.e. until the producer ran.
    Awaitility.await().atLeast(Duration.ofMillis(3900)).untilAsserted(() -> {
        int l = queue1.take();
        takeExecuted.set(true);
        assertThat(l).isEqualTo(3);
    });
    f.get();
    // The rejected element (4) must not have been left behind in the queue.
    assertThat(queue1.size()).isZero();
    assertThat(queue1.peek()).isNull();
}
/**
 * Builds a {@code GoPluginBundleDescriptor} from a plugin or bundle jar. Bundle
 * descriptors (gocd-bundle.xml) take precedence over plugin descriptors (plugin.xml);
 * a jar containing neither, or one whose descriptor fails to parse, yields a
 * fallback descriptor marked invalid (id defaults to the jar file name).
 */
public GoPluginBundleDescriptor build(BundleOrPluginFileDetails bundleOrPluginJarFile) {
    if (!bundleOrPluginJarFile.exists()) {
        throw new RuntimeException(format("Plugin or bundle jar does not exist: %s", bundleOrPluginJarFile.file()));
    }
    // Fallback descriptor used when the jar has no parseable descriptor XML.
    String defaultId = bundleOrPluginJarFile.file().getName();
    GoPluginBundleDescriptor goPluginBundleDescriptor = new GoPluginBundleDescriptor(GoPluginDescriptor.builder()
        .version("1")
        .id(defaultId)
        .bundleLocation(bundleOrPluginJarFile.extractionLocation())
        .pluginJarFileLocation(bundleOrPluginJarFile.file().getAbsolutePath())
        .isBundledPlugin(bundleOrPluginJarFile.isBundledPlugin())
        .build());
    try {
        // gocd-bundle.xml is checked first so multi-plugin bundles win over plugin.xml.
        if (bundleOrPluginJarFile.isBundleJar()) {
            return GoPluginBundleDescriptorParser.parseXML(bundleOrPluginJarFile.getBundleXml(), bundleOrPluginJarFile);
        }
        if (bundleOrPluginJarFile.isPluginJar()) {
            return GoPluginDescriptorParser.parseXML(bundleOrPluginJarFile.getPluginXml(), bundleOrPluginJarFile);
        }
        goPluginBundleDescriptor.markAsInvalid(List.of(format("Plugin with ID (%s) is not valid. The plugin does not seem to contain plugin.xml or gocd-bundle.xml", defaultId)), new RuntimeException("The plugin does not seem to contain plugin.xml or gocd-bundle.xml"));
    } catch (Exception e) {
        // Parsing failed: keep the fallback descriptor but record why it is invalid.
        log.warn("Unable to load the jar file {}", bundleOrPluginJarFile.file(), e);
        final String message = requireNonNullElse(e.getMessage(), e.getClass().getCanonicalName());
        String cause = e.getCause() != null ? format("%s. Cause: %s", message, e.getCause().getMessage()) : message;
        goPluginBundleDescriptor.markAsInvalid(List.of(format("Plugin with ID (%s) is not valid: %s", defaultId, cause)), e);
    }
    return goPluginBundleDescriptor;
}
// A jar containing BOTH gocd-bundle.xml and plugin.xml must be parsed as a bundle:
// the bundle descriptor takes precedence over the plugin descriptor.
@Test
void shouldCheckForBundleXMLFirst() throws Exception {
    String pluginJarName = "test-plugin-with-both-bundle-and-plugin-xmls.jar";
    copyPluginToThePluginDirectory(pluginDirectory, pluginJarName);
    File pluginJarFile = new File(pluginDirectory, pluginJarName);
    BundleOrPluginFileDetails bundleOrPluginFileDetails = new BundleOrPluginFileDetails(pluginJarFile, true, pluginDirectory);
    final GoPluginBundleDescriptor bundleDescriptor = goPluginBundleDescriptorBuilder.build(bundleOrPluginFileDetails);
    GoPluginBundleDescriptor expectedDescriptor = buildExpectedMultiPluginBundleDescriptor(pluginJarName, pluginJarFile.getAbsolutePath());
    assertThat(bundleDescriptor).isEqualTo(expectedDescriptor);
    assertThat(bundleDescriptor.isInvalid()).isFalse();
    assertThat(bundleDescriptor.isBundledPlugin()).isTrue();
}
// Decodes and validates a SAML LogoutRequest from the HTTP request, resolves the matching stored SAML session
// (by NameID, falling back to the first session index), verifies issuer and signature, performs the logout,
// and returns the populated model. Decoder failures are rethrown as SamlParseException.
public LogoutRequestModel parseLogoutRequest(HttpServletRequest request) throws SamlValidationException, SamlParseException, SamlSessionException, DienstencatalogusException { final LogoutRequestModel logoutRequestModel = new LogoutRequestModel(); try { final BaseHttpServletRequestXMLMessageDecoder decoder = decodeRequest(request); var logoutRequest = (LogoutRequest) decoder.getMessageContext().getMessage(); final SAMLBindingContext bindingContext = decoder.getMessageContext().getSubcontext(SAMLBindingContext.class); logoutRequestModel.setLogoutRequest(logoutRequest); logoutRequestModel.setRequest(request); validateRequest(logoutRequestModel); var id = logoutRequest.getNameID() != null ? logoutRequest.getNameID().getValue() : logoutRequest.getSessionIndexes().get(0).getValue(); var samlSession = samlSessionRepository.findById(id) .orElseThrow(() -> new SamlSessionException("LogoutRequest no saml session found for nameID: " + id)); logoutRequestModel.setConnectionEntityId(samlSession.getConnectionEntityId()); logoutRequestModel.setServiceEntityId(samlSession.getServiceEntityId()); logoutRequestModel.setServiceUuid(samlSession.getServiceUuid()); logoutRequestModel.setRelayState(bindingContext.getRelayState()); logoutRequestModel.setEntranceSession(samlSession.getProtocolType().equals(ProtocolType.SAML_COMBICONNECT)); dcMetadataService.resolveDcMetadata(logoutRequestModel); if (!logoutRequestModel.getConnectionEntityId().equals(logoutRequestModel.getLogoutRequest().getIssuer().getValue())) { throw new SamlValidationException("Issuer not equal to connectorEntityId"); } verifySignature(logoutRequestModel, logoutRequestModel.getLogoutRequest().getSignature()); logout(samlSession); if (logger.isDebugEnabled()) OpenSAMLUtils.logSAMLObject((LogoutRequest) decoder.getMessageContext().getMessage()); } catch (MessageDecodingException e) { throw new SamlParseException("Authentication deflate decode exception", e); } catch (ComponentInitializationException e) { throw new 
SamlParseException("Authentication deflate initialization exception", e); } return logoutRequestModel; }
// Happy path: when a SAML session exists for the request's id, parseLogoutRequest completes without throwing.
@Test public void logoutRequestSuccessful() { when(samlSessionRepositoryMock.findById(anyString())).thenReturn(Optional.of(createSamlSession("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS"))); assertDoesNotThrow(() -> logoutService.parseLogoutRequest(httpRequestMock)); }
/**
 * Persists this product: inserts a new row when no product with this name
 * exists yet, otherwise updates the existing one. SQLExceptions are logged
 * and swallowed (best-effort persistence).
 */
public void save() {
    try {
        final Optional<Product> existing = productDao.findByName(name);
        if (!existing.isPresent()) {
            productDao.save(this);
        } else {
            productDao.update(this);
        }
    } catch (SQLException ex) {
        LOGGER.error(ex.getMessage());
    }
}
// save() must call DAO.save when the product is absent and DAO.update when it is present.
@Test void shouldSaveProduct() throws SQLException { when(productDao.findByName("product")).thenReturn(Optional.empty()); product.save(); verify(productDao, times(1)).save(product); when(productDao.findByName("product")).thenReturn(Optional.of(product)); product.save(); verify(productDao, times(1)).update(product); }
// Plain setter for the wantClientAuth flag; a null Boolean is stored as-is.
public void setWantClientAuth(Boolean wantClientAuth) { this.wantClientAuth = wantClientAuth; }
// The wantClientAuth flag set on the configuration must be propagated to the configurable during configure().
@Test public void testSetWantClientAuth() throws Exception { configuration.setWantClientAuth(true); configuration.configure(configurable); assertTrue(configurable.isWantClientAuth()); }
// Reads the whole input, formats it via formatSource(String), and writes the result to the sink.
public void formatSource(CharSource input, CharSink output) throws FormatterException, IOException { // TODO(cushon): proper support for streaming input/output. Input may // not be feasible (parsing) but output should be easier. output.write(formatSource(input.read())); }
// A long "// MOE:" line comment must pass through the formatter unwrapped (input equals output).
@Test public void dontWrapMoeLineComments() throws Exception { assertThat( new Formatter() .formatSource( "class T {\n" + " // MOE: one long incredibly" + " unbroken sentence moving from topic to topic so that no-one had a" + " chance to interrupt;\n" + "}\n")) .isEqualTo( "class T {\n" + " // MOE: one long incredibly" + " unbroken sentence moving from topic to topic so that no-one had a" + " chance to interrupt;\n" + "}\n"); }
/**
 * Extracts the Raft group id from the given request; implemented per concrete request type.
 */
protected abstract String getGroupId(final T request);
// A ping request handled against a registered node must yield an ErrorResponse with error code 0 (success).
@Test public void testOK() { Node node = Mockito.mock(Node.class, withSettings().extraInterfaces(RaftServerService.class)); Mockito.when(node.getGroupId()).thenReturn("test"); PeerId peerId = new PeerId("localhost", 8081); Mockito.when(node.getNodeId()).thenReturn(new NodeId("test", peerId)); NodeManager.getInstance().addAddress(peerId.getEndpoint()); NodeManager.getInstance().add(node); this.processor.handleRequest(asyncContext, TestUtils.createPingRequest()); ErrorResponse resp = (ErrorResponse) asyncContext.getResponseObject(); assertNotNull(resp); assertEquals(0, resp.getErrorCode()); }
// Joins two KTables per the requested join type. RIGHT joins are expressed as a LEFT join with the operands
// swapped, which is why the left/right schemas are also swapped up front for that case.
public static <K> KTableHolder<K> build( final KTableHolder<K> left, final KTableHolder<K> right, final TableTableJoin<K> join ) { final LogicalSchema leftSchema; final LogicalSchema rightSchema; if (join.getJoinType().equals(RIGHT)) { leftSchema = right.getSchema(); rightSchema = left.getSchema(); } else { leftSchema = left.getSchema(); rightSchema = right.getSchema(); } final JoinParams joinParams = JoinParamsFactory .create(join.getKeyColName(), leftSchema, rightSchema); final KTable<K, GenericRow> result; switch (join.getJoinType()) { case INNER: result = left.getTable().join(right.getTable(), joinParams.getJoiner()); break; case LEFT: result = left.getTable().leftJoin(right.getTable(), joinParams.getJoiner()); break; case RIGHT: result = right.getTable().leftJoin(left.getTable(), joinParams.getJoiner()); break; case OUTER: result = left.getTable().outerJoin(right.getTable(), joinParams.getJoiner()); break; default: throw new IllegalStateException("invalid join type: " + join.getJoinType()); } return KTableHolder.unmaterialized( result, joinParams.getSchema(), left.getExecutionKeyFactory()); }
// An inner join keyed on a synthetic key must produce a schema with that key column plus both value schemas
// and the synthetic key repeated as a value column.
@Test public void shouldReturnCorrectSchemaWithSyntheticKey() { // Given: givenInnerJoin(SYNTH_KEY); // When: final KTableHolder<Struct> result = join.build(planBuilder, planInfo); // Then: assertThat( result.getSchema(), is(LogicalSchema.builder() .keyColumn(SYNTH_KEY, SqlTypes.STRING) .valueColumns(LEFT_SCHEMA.value()) .valueColumns(RIGHT_SCHEMA.value()) .valueColumn(SYNTH_KEY, SqlTypes.STRING) .build()) ); }
// Issues CLUSTER NODES (read, string codec, no specific node) and returns the parsed node list.
@Override public Iterable<RedisClusterNode> clusterGetNodes() { return read(null, StringCodec.INSTANCE, CLUSTER_NODES); }
// Expects the 6-node test cluster: masters carry slot ranges, replicas carry a master id; common fields are non-null.
@Test public void testClusterGetNodes() { Iterable<RedisClusterNode> nodes = connection.clusterGetNodes(); assertThat(nodes).hasSize(6); for (RedisClusterNode redisClusterNode : nodes) { assertThat(redisClusterNode.getLinkState()).isNotNull(); assertThat(redisClusterNode.getFlags()).isNotEmpty(); assertThat(redisClusterNode.getHost()).isNotNull(); assertThat(redisClusterNode.getPort()).isNotNull(); assertThat(redisClusterNode.getId()).isNotNull(); assertThat(redisClusterNode.getType()).isNotNull(); if (redisClusterNode.getType() == NodeType.MASTER) { assertThat(redisClusterNode.getSlotRange().getSlots()).isNotEmpty(); } else { assertThat(redisClusterNode.getMasterId()).isNotNull(); } } }
public double calculateAveragePercentageUsedBy(NormalizedResources used, double totalMemoryMb, double usedMemoryMb) { int skippedResourceTypes = 0; double total = 0.0; if (usedMemoryMb > totalMemoryMb) { throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb); } if (totalMemoryMb != 0.0) { total += usedMemoryMb / totalMemoryMb; } else { skippedResourceTypes++; } double totalCpu = getTotalCpu(); if (used.getTotalCpu() > getTotalCpu()) { throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb); } if (totalCpu != 0.0) { total += used.getTotalCpu() / getTotalCpu(); } else { skippedResourceTypes++; } if (used.otherResources.length > otherResources.length) { throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb); } for (int i = 0; i < otherResources.length; i++) { double totalValue = otherResources[i]; double usedValue; if (i >= used.otherResources.length) { //Resources missing from used are using none of that resource usedValue = 0.0; } else { usedValue = used.otherResources[i]; } if (usedValue > totalValue) { throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb); } if (totalValue == 0.0) { //Skip any resources where the total is 0, the percent used for this resource isn't meaningful. //We fall back to prioritizing by cpu, memory and any other resources by ignoring this value skippedResourceTypes++; continue; } total += usedValue / totalValue; } //Adjust the divisor for the average to account for any skipped resources (those where the total was 0) int divisor = 2 + otherResources.length - skippedResourceTypes; if (divisor == 0) { /* * This is an arbitrary choice to make the result consistent with calculateMin. Any value would be valid here, becase there are * no (non-zero) resources in the total set of resources, so we're trying to average 0 values. */ return 100.0; } else { return (total * 100.0) / divisor; } }
// used CPU (5) exceeding total CPU (2) must make the average calculation throw IllegalArgumentException.
@Test public void testCalculateAvgThrowsIfTotalIsMissingCpu() { NormalizedResources resources = new NormalizedResources(normalize(Collections.singletonMap(Constants.COMMON_CPU_RESOURCE_NAME, 2))); NormalizedResources usedResources = new NormalizedResources(normalize(Collections.singletonMap(Constants.COMMON_CPU_RESOURCE_NAME, 5))); assertThrows(IllegalArgumentException.class, () -> resources.calculateAveragePercentageUsedBy(usedResources, 0, 0)); }
// Generates a uniquely-named KiePMMLClassificationTable source class from the template, wires its static getter
// from the given regression tables, and returns (fully-qualified class name -> generated source).
public static Map.Entry<String, String> getClassificationTableBuilder(final RegressionCompilationDTO compilationDTO, final LinkedHashMap<String, KiePMMLTableSourceCategory> regressionTablesMap) { logger.trace("getRegressionTableBuilder {}", regressionTablesMap); String className = "KiePMMLClassificationTable" + classArity.addAndGet(1); CompilationUnit cloneCU = JavaParserUtils.getKiePMMLModelCompilationUnit(className, compilationDTO.getPackageName(), KIE_PMML_CLASSIFICATION_TABLE_TEMPLATE_JAVA, KIE_PMML_CLASSIFICATION_TABLE_TEMPLATE); ClassOrInterfaceDeclaration tableTemplate = cloneCU.getClassByName(className) .orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + className)); final MethodDeclaration staticGetterMethod = tableTemplate.getMethodsByName(GETKIEPMML_TABLE).get(0); setStaticGetter(compilationDTO, regressionTablesMap, staticGetterMethod, className.toLowerCase()); return new AbstractMap.SimpleEntry<>(getFullClassName(cloneCU), cloneCU.toString()); }
// Builds a full PMML RegressionModel fixture (two regression tables, probability/predicted outputs,
// categorical target) and checks that the classification-table builder returns a non-null source entry.
@Test void getClassificationTableBuilder() { RegressionTable regressionTableProf = getRegressionTable(3.5, "professional"); RegressionTable regressionTableCler = getRegressionTable(27.4, "clerical"); OutputField outputFieldCat = getOutputField("CAT-1", ResultFeature.PROBABILITY, "CatPred-1"); OutputField outputFieldNum = getOutputField("NUM-1", ResultFeature.PROBABILITY, "NumPred-0"); OutputField outputFieldPrev = getOutputField("PREV", ResultFeature.PREDICTED_VALUE, null); String targetField = "targetField"; DataField dataField = new DataField(); dataField.setName(targetField); dataField.setOpType(OpType.CATEGORICAL); DataDictionary dataDictionary = new DataDictionary(); dataDictionary.addDataFields(dataField); RegressionModel regressionModel = new RegressionModel(); regressionModel.setNormalizationMethod(RegressionModel.NormalizationMethod.CAUCHIT); regressionModel.addRegressionTables(regressionTableProf, regressionTableCler); regressionModel.setModelName(getGeneratedClassName("RegressionModel")); Output output = new Output(); output.addOutputFields(outputFieldCat, outputFieldNum, outputFieldPrev); regressionModel.setOutput(output); MiningField miningField = new MiningField(); miningField.setUsageType(MiningField.UsageType.TARGET); miningField.setName(dataField.getName()); MiningSchema miningSchema = new MiningSchema(); miningSchema.addMiningFields(miningField); regressionModel.setMiningSchema(miningSchema); PMML pmml = new PMML(); pmml.setDataDictionary(dataDictionary); pmml.addModels(regressionModel); final CommonCompilationDTO<RegressionModel> source = CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, regressionModel, new PMMLCompilationContextMock(), "FILENAME"); final RegressionCompilationDTO compilationDTO = RegressionCompilationDTO.fromCompilationDTORegressionTablesAndNormalizationMethod(source, regressionModel.getRegressionTables(), regressionModel.getNormalizationMethod()); final LinkedHashMap<String, KiePMMLTableSourceCategory> 
regressionTablesMap = new LinkedHashMap<>(); regressionModel.getRegressionTables().forEach(regressionTable -> { String key = compilationDTO.getPackageName() + "." + regressionTable.getTargetCategory().toString().toUpperCase(); KiePMMLTableSourceCategory value = new KiePMMLTableSourceCategory("", regressionTable.getTargetCategory().toString()); regressionTablesMap.put(key, value); }); Map.Entry<String, String> retrieved = KiePMMLClassificationTableFactory.getClassificationTableBuilder(compilationDTO, regressionTablesMap); assertThat(retrieved).isNotNull(); }
// Returns the previous (isPrevious=true) or next adjacent value for orderable discrete/floating types;
// empty Optional for types without an adjacent-value implementation here. Throws for non-orderable types.
public static Optional<Object> getAdjacentValue(Type type, Object value, boolean isPrevious) { if (!type.isOrderable()) { throw new IllegalStateException("Type is not orderable: " + type); } requireNonNull(value, "value is null"); if (type.equals(BIGINT) || type instanceof TimestampType) { return getBigintAdjacentValue(value, isPrevious); } if (type.equals(INTEGER) || type.equals(DATE)) { return getIntegerAdjacentValue(value, isPrevious); } if (type.equals(SMALLINT)) { return getSmallIntAdjacentValue(value, isPrevious); } if (type.equals(TINYINT)) { return getTinyIntAdjacentValue(value, isPrevious); } if (type.equals(DOUBLE)) { return getDoubleAdjacentValue(value, isPrevious); } if (type.equals(REAL)) { return getRealAdjacentValue(value, isPrevious); } return Optional.empty(); }
// REAL adjacency edge cases: infinities have no neighbor past them, prev/next are inverses around them,
// and +0.0/-0.0 share the same previous value (prev of either zero's successor is positive zero).
@Test public void testPreviousAndNextValueForReal() { assertThat(getAdjacentValue(REAL, REAL_NEGATIVE_INFINITE, true)) .isEqualTo(Optional.empty()); assertThat(intBitsToFloat((int) getAdjacentValue(REAL, getAdjacentValue(REAL, REAL_NEGATIVE_INFINITE, false).get(), true).get())) .isEqualTo(Float.NEGATIVE_INFINITY); assertThat(getAdjacentValue(REAL, REAL_POSITIVE_ZERO, true)) .isEqualTo(getAdjacentValue(REAL, REAL_NEGATIVE_ZERO, true)); assertThat(intBitsToFloat((int) getAdjacentValue(REAL, getAdjacentValue(REAL, REAL_NEGATIVE_ZERO, false).get(), true).get())) .isEqualTo(0.0f); assertThat(getAdjacentValue(REAL, getAdjacentValue(REAL, REAL_NEGATIVE_ZERO, false).get(), true).get()) .isEqualTo(REAL_POSITIVE_ZERO); assertThat(intBitsToFloat((int) getAdjacentValue(REAL, getAdjacentValue(REAL, REAL_POSITIVE_ZERO, true).get(), false).get())) .isEqualTo(0.0f); assertThat(getAdjacentValue(REAL, getAdjacentValue(REAL, REAL_POSITIVE_ZERO, true).get(), false).get()) .isEqualTo(REAL_POSITIVE_ZERO); assertThat(getAdjacentValue(REAL, REAL_POSITIVE_INFINITE, false)) .isEqualTo(Optional.empty()); assertThat(intBitsToFloat((int) getAdjacentValue(REAL, getAdjacentValue(REAL, REAL_POSITIVE_INFINITE, true).get(), false).get())) .isEqualTo(Float.POSITIVE_INFINITY); }
// Validates factory options, resolves the Hive version (explicit option or auto-detected via HiveShimLoader),
// and constructs the HiveModule with the context's configuration and class loader.
@Override public Module createModule(Context context) { final FactoryUtil.ModuleFactoryHelper factoryHelper = FactoryUtil.createModuleFactoryHelper(this, context); factoryHelper.validate(); final String hiveVersion = factoryHelper .getOptions() .getOptional(HIVE_VERSION) .orElseGet(HiveShimLoader::getHiveVersion); return new HiveModule(hiveVersion, context.getConfiguration(), context.getClassLoader()); }
// Creating the module via FactoryUtil with no options must yield a module equal to a default HiveModule.
@Test public void test() { final HiveModule expected = new HiveModule(); final Module actualModule = FactoryUtil.createModule( HiveModuleFactory.IDENTIFIER, Collections.emptyMap(), new Configuration(), Thread.currentThread().getContextClassLoader()); checkEquals(expected, (HiveModule) actualModule); }
// Filters the supported locales against an Accept-Language header value (null/blank falls back to the
// default locale's language tag); an unparsable header is surfaced as a ValidationException.
static List<Locale> negotiatePreferredLocales(String headerValue) { if (headerValue == null || headerValue.isBlank()) { headerValue = DEFAULT_LOCALE.toLanguageTag(); } try { var languageRanges = Locale.LanguageRange.parse(headerValue); return Locale.filter(languageRanges, supportedLocales); } catch (IllegalArgumentException e) { throw new ValidationException(new Message("error.unparsableHeader")); } }
// NOTE(review): asserting wall-clock execution time <= 1 ms is inherently flaky (JIT warm-up, GC,
// CI load); consider dropping the timing assertions or using a much larger bound — TODO confirm intent.
@Test void test_mockMultipleRequestWithZeroLoadTimeForSupportedLocale() { var startTime = System.currentTimeMillis(); LocaleUtils.negotiatePreferredLocales("de-DE,de,en,it;q=0.5"); var endTime = System.currentTimeMillis(); var executionTime = endTime - startTime; assertThat(executionTime, lessThanOrEqualTo(1L)); // Mock second request startTime = System.currentTimeMillis(); LocaleUtils.negotiatePreferredLocales("de-DE;q=0.8,en-US;q=0.9,de,en,it;q=0.5"); endTime = System.currentTimeMillis(); executionTime = endTime - startTime; assertThat(executionTime, lessThanOrEqualTo(1L)); }
// Returns a TCC interceptor proxy for the target, or null when it exposes no two-phase (TCC) methods;
// registers the target as a resource before wrapping it.
@Override public ProxyInvocationHandler parserInterfaceToProxy(Object target, String objectName) { // eliminate the bean without two phase annotation. Set<String> methodsToProxy = this.tccProxyTargetMethod(target); if (methodsToProxy.isEmpty()) { return null; } // register resource and enhance with interceptor DefaultResourceRegisterParser.get().registerResource(target, objectName); return new TccActionInterceptorHandler(target, methodsToProxy); }
// A bean with TCC-annotated methods (NormalTccActionImpl) must produce a non-null proxy handler.
@Test void parserInterfaceToProxy() { //given TccActionInterceptorParser tccActionInterceptorParser = new TccActionInterceptorParser(); NormalTccActionImpl tccAction = new NormalTccActionImpl(); //when ProxyInvocationHandler proxyInvocationHandler = tccActionInterceptorParser.parserInterfaceToProxy(tccAction, tccAction.getClass().getName()); //then Assertions.assertNotNull(proxyInvocationHandler); }
// Assembles a ComputeNodeInstance from persisted state: labels, instance state (if recognized) and
// worker id (if present) are all loaded by the instance's id.
public ComputeNodeInstance loadComputeNodeInstance(final InstanceMetaData instanceMetaData) { ComputeNodeInstance result = new ComputeNodeInstance(instanceMetaData); result.getLabels().addAll(loadInstanceLabels(instanceMetaData.getId())); InstanceState.get(loadComputeNodeState(instanceMetaData.getId())).ifPresent(result::switchState); loadInstanceWorkerId(instanceMetaData.getId()).ifPresent(result::setWorkerId); return result; }
// The loaded instance must retain the exact InstanceMetaData it was built from.
@Test void assertLoadComputeNodeInstance() { InstanceMetaData instanceMetaData = new ProxyInstanceMetaData("foo_instance_id", 3307); ComputeNodeInstance actual = new ComputeNodePersistService(repository).loadComputeNodeInstance(instanceMetaData); assertThat(actual.getMetaData(), is(instanceMetaData)); }
// Advances the watermark and fires every due event-time timer (timestamp <= time) in queue order,
// stopping early if the task is cancelled; each firing increments the fired-timers metric.
@Override public void advanceWatermark(long time) throws Exception { currentWatermark = time; InternalTimer<K, N> timer; while ((timer = eventTimeTimersQueue.peek()) != null && timer.getTimestamp() <= time && !cancellationContext.isCancelled()) { eventTimeTimersQueue.poll(); final InternalTimer<K, N> timerToTrigger = timer; maintainContextAndProcess( timerToTrigger, () -> triggerTarget.onEventTime(timerToTrigger)); taskIOMetricGroup.getNumFiredTimers().inc(); } }
// Timer firing must respect per-key in-flight async state requests: a pending value_get on key-2 blocks
// its timer at watermark 2, and releasing the record context lets the timer fire on the next advance.
@Test void testEventTimerFireOrder() throws Exception { keyContext.setCurrentKey("key-1"); service.registerEventTimeTimer("event-timer-1", 1L); TestTriggerable testTriggerable = new TestTriggerable(); service.startTimerService( IntSerializer.INSTANCE, StringSerializer.INSTANCE, testTriggerable); assertThat(testTriggerable.eventTriggerCount).isEqualTo(0); // the event timer should be triggered at watermark 1 service.advanceWatermark(1L); assertThat(testTriggerable.eventTriggerCount).isEqualTo(1); keyContext.setCurrentKey("key-2"); service.registerEventTimeTimer("event-timer-2", 2L); assertThat(testTriggerable.eventTriggerCount).isEqualTo(1); RecordContext<String> recordContext = asyncExecutionController.buildContext("record2", "key-2"); asyncExecutionController.setCurrentContext(recordContext); asyncExecutionController.handleRequest(null, StateRequestType.VALUE_GET, null); service.advanceWatermark(2L); // timer fire is blocked by record2's value_get assertThat(testTriggerable.eventTriggerCount).isEqualTo(1); // record2 finished, key-2 is released, timer fire can be triggered recordContext.release(); service.advanceWatermark(3L); assertThat(testTriggerable.eventTriggerCount).isEqualTo(2); }
// Seeks within the stream: rejects negative / past-EOF positions, no-ops when already at pos, adjusts
// position and the remaining bytes of the current part for a forward seek inside it, and otherwise
// reopens the stream at pos.
@Override public void seek(long pos) throws IOException { if (pos < 0) { throw new EOFException(FSExceptionMessages.NEGATIVE_SEEK); } if (pos > this.fileSize) { throw new EOFException(FSExceptionMessages.CANNOT_SEEK_PAST_EOF); } if (this.position == pos) { return; } if (pos > position && pos < this.position + partRemaining) { long len = pos - this.position; this.position = pos; this.partRemaining -= len; } else { this.reopen(pos); } }
// Seeks a 5MB generated file both at deterministic positions and at random positions, asserting
// getPos() reflects each requested seek target.
@Test public void testSeek() throws Exception { Path seekTestFilePath = new Path(this.testRootDir + "/" + "seekTestFile"); long fileSize = 5 * Unit.MB; ContractTestUtils.generateTestFile( this.fs, seekTestFilePath, fileSize, 256, 255); LOG.info("5MB file for seek test has created."); FSDataInputStream inputStream = this.fs.open(seekTestFilePath); int seekTimes = 5; for (int i = 0; i != seekTimes; i++) { long pos = fileSize / (seekTimes - i) - 1; inputStream.seek(pos); assertTrue("expected position at: " + pos + ", but got: " + inputStream.getPos(), inputStream.getPos() == pos); LOG.info("completed seeking at pos: " + inputStream.getPos()); } LOG.info("begin to random position seeking test..."); Random random = new Random(); for (int i = 0; i < seekTimes; i++) { long pos = Math.abs(random.nextLong()) % fileSize; LOG.info("seeking for pos: " + pos); inputStream.seek(pos); assertTrue("expected position at: " + pos + ", but got: " + inputStream.getPos(), inputStream.getPos() == pos); LOG.info("completed seeking at pos: " + inputStream.getPos()); } }
// Returns client details as JSON: common fields plus connection metadata for 2.x (ConnectionBasedClient,
// port parsed from the clientId suffix) or ip/port parsed from responsibleId for 1.x (IpPortBasedClient).
@GetMapping() @Secured(action = ActionTypes.READ, resource = "nacos/admin") public Result<ObjectNode> getClientDetail(@RequestParam("clientId") String clientId) throws NacosApiException { checkClientId(clientId); Client client = clientManager.getClient(clientId); ObjectNode result = JacksonUtils.createEmptyJsonNode(); result.put("clientId", client.getClientId()); result.put("ephemeral", client.isEphemeral()); result.put("lastUpdatedTime", client.getLastUpdatedTime()); if (client instanceof ConnectionBasedClient) { // 2.x client result.put("clientType", "connection"); Connection connection = connectionManager.getConnection(clientId); ConnectionMeta connectionMetaInfo = connection.getMetaInfo(); result.put("connectType", connectionMetaInfo.getConnectType()); result.put("appName", connectionMetaInfo.getAppName()); result.put("version", connectionMetaInfo.getVersion()); result.put("clientIp", connectionMetaInfo.getClientIp()); result.put("clientPort", clientId.substring(clientId.lastIndexOf('_') + 1)); } else if (client instanceof IpPortBasedClient) { // 1.x client result.put("clientType", "ipPort"); IpPortBasedClient ipPortBasedClient = (IpPortBasedClient) client; String responsibleId = ipPortBasedClient.getResponsibleId(); int idx = responsibleId.lastIndexOf(':'); result.put("clientIp", responsibleId.substring(0, idx)); result.put("clientPort", responsibleId.substring(idx + 1)); } return Result.success(result); }
// GET with a known clientId (mocked as an IpPortBasedClient) must respond HTTP 200.
@Test void testGetClientDetail() throws Exception { when(clientManager.getClient("test1")).thenReturn(ipPortBasedClient); MockHttpServletRequestBuilder mockHttpServletRequestBuilder = MockMvcRequestBuilders.get(URL).param("clientId", "test1"); MockHttpServletResponse response = mockmvc.perform(mockHttpServletRequestBuilder).andReturn().getResponse(); assertEquals(200, response.getStatus()); }
// Builds a sum-of-doubles aggregate operation over items mapped by getDoubleValueFn; supports
// combine and deduct (for sliding windows) via DoubleAccumulator.
@Nonnull public static <T> AggregateOperation1<T, DoubleAccumulator, Double> summingDouble( @Nonnull ToDoubleFunctionEx<? super T> getDoubleValueFn ) { checkSerializable(getDoubleValueFn, "getDoubleValueFn"); return AggregateOperation .withCreate(DoubleAccumulator::new) .andAccumulate((DoubleAccumulator a, T item) -> a.accumulate(getDoubleValueFn.applyAsDouble(item))) .andCombine(DoubleAccumulator::combine) .andDeduct(DoubleAccumulator::deduct) .andExportFinish(DoubleAccumulator::export); }
// Validates the summingDouble operation via the shared validateOp harness with sample accumulations.
@Test public void when_summingDouble() { validateOp(summingDouble(Double::doubleValue), DoubleAccumulator::export, 0.5, 1.5, 0.5, 2.0, 2.0); }
/**
 * Reports whether a changeset exists for the given 1-based line number.
 * Out-of-range line numbers (non-positive or beyond the known lines) yield false.
 */
@Override
public boolean hasChangesetForLine(int lineNumber) {
    if (lineNumber <= 0) {
        return false;
    }
    final int index = lineNumber - 1;
    return index < lineChangesets.length && lineChangesets[index] != null;
}
// With changesets on lines within the first four, line 1 reports a changeset and line 5 does not.
@Test public void exists_for_given_line() { ScmInfo scmInfo = createScmInfoWithTwoChangestOnFourLines(); assertThat(scmInfo.hasChangesetForLine(1)).isTrue(); assertThat(scmInfo.hasChangesetForLine(5)).isFalse(); }
/** Returns {@code true} when the underlying stream currently holds no elements. */
public boolean isEmpty() {
    final int elementCount = stream.size();
    return elementCount == 0;
}
// A freshly created writer reports itself empty.
@Test public void testIsEmpty() { Assertions.assertTrue(writer.isEmpty()); }
/**
 * Returns a new Page with the given block appended as the last column.
 *
 * @param block the column to append; must be non-null and have the same
 *              position count as this page
 * @throws IllegalArgumentException if the position counts differ; the message
 *         now includes both counts for easier diagnosis (the original message
 *         carried no values)
 */
public Page appendColumn(Block block)
{
    requireNonNull(block, "block is null");
    if (positionCount != block.getPositionCount()) {
        throw new IllegalArgumentException(
                "Block does not have same position count: expected " + positionCount
                        + ", but got " + block.getPositionCount());
    }
    // Copy the block array once, grow by one slot, and wrap without re-copying.
    Block[] newBlocks = Arrays.copyOf(blocks, blocks.length + 1);
    newBlocks[blocks.length] = block;
    return wrapBlocksWithoutCopy(positionCount, newBlocks);
}
// Appending a block with fewer positions than the page must raise IllegalArgumentException.
@Test(expectedExceptions = IllegalArgumentException.class) public void testAppendColumnsWrongNumberOfRows() { int entries = 10; BlockBuilder blockBuilder = BIGINT.createBlockBuilder(null, entries); for (int i = 0; i < entries; i++) { BIGINT.writeLong(blockBuilder, i); } Block block = blockBuilder.build(); Page page = new Page(block, block); BlockBuilder newBlockBuilder = BIGINT.createBlockBuilder(null, entries - 5); for (int i = 0; i < entries - 5; i++) { BIGINT.writeLong(newBlockBuilder, -i); } Block newBlock = newBlockBuilder.build(); page.appendColumn(newBlock); }
// Recursively deletes the directory, routing through the thread-safety guard before delegating
// to the internal implementation.
public static void deleteDirectory(File directory) throws IOException { checkNotNull(directory, "directory"); guardIfNotThreadSafe(FileUtils::deleteDirectoryInternal, directory); }
// Deleting a write-protected directory must fail with AccessDeniedException; permissions are
// restored in finally so the temp dir can be cleaned up. (Skipped for root, who ignores the protection.)
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser") @Test void testDeleteProtectedDirectory() throws Exception { // deleting a write protected file should throw an error File cannotDeleteParent = TempDirUtils.newFolder(temporaryFolder); File cannotDeleteChild = new File(cannotDeleteParent, "child"); try { assumeThat(cannotDeleteChild.createNewFile()).isTrue(); assumeThat(cannotDeleteParent.setWritable(false)).isTrue(); assumeThat(cannotDeleteChild.setWritable(false)).isTrue(); assertThatThrownBy(() -> FileUtils.deleteDirectory(cannotDeleteParent)) .isInstanceOf(AccessDeniedException.class); } finally { //noinspection ResultOfMethodCallIgnored cannotDeleteParent.setWritable(true); //noinspection ResultOfMethodCallIgnored cannotDeleteChild.setWritable(true); } }
// Factory for a Redis Write transform with defaults: standard connection configuration and APPEND method.
public static Write write() { return new AutoValue_RedisIO_Write.Builder() .setConnectionConfiguration(RedisConnectionConfiguration.create()) .setMethod(Write.Method.APPEND) .build(); }
// Writing with Method.SET must overwrite the existing value and leave the key without expiration.
@Test public void testWriteWithMethodSet() { String key = "testWriteWithMethodSet"; client.set(key, "value"); String newValue = "newValue"; PCollection<KV<String, String>> write = p.apply(Create.of(KV.of(key, newValue))); write.apply(RedisIO.write().withEndpoint(REDIS_HOST, port).withMethod(Method.SET)); p.run(); assertEquals(newValue, client.get(key)); assertEquals(NO_EXPIRATION, Long.valueOf(client.ttl(key))); }
// Asynchronously sends the request to addr: resolves/creates the channel, and on an active channel
// invokes the request (recording the channel's last-response time on success) and completes the
// returned future with the response command; connection failures complete it exceptionally with
// RemotingConnectException, and an inactive channel is additionally closed.
@Override public CompletableFuture<RemotingCommand> invoke(String addr, RemotingCommand request, long timeoutMillis) { CompletableFuture<RemotingCommand> future = new CompletableFuture<>(); try { final ChannelFuture channelFuture = this.getAndCreateChannelAsync(addr); if (channelFuture == null) { future.completeExceptionally(new RemotingConnectException(addr)); return future; } channelFuture.addListener(f -> { if (f.isSuccess()) { Channel channel = channelFuture.channel(); if (channel != null && channel.isActive()) { invokeImpl(channel, request, timeoutMillis).whenComplete((v, t) -> { if (t == null) { updateChannelLastResponseTime(addr); } }).thenApply(ResponseFuture::getResponseCommand).whenComplete((v, t) -> { if (t != null) { future.completeExceptionally(t); } else { future.complete(v); } }); } else { this.closeChannel(addr, channel); future.completeExceptionally(new RemotingConnectException(addr)); } } else { future.completeExceptionally(new RemotingConnectException(addr)); } }); } catch (Throwable t) { future.completeExceptionally(t); } return future; }
// invoke() must complete its future with the response command produced by the (stubbed) call chain.
@Test public void testInvokeResponse() throws Exception { RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.PULL_MESSAGE, null); RemotingCommand response = RemotingCommand.createResponseCommand(null); response.setCode(ResponseCode.SUCCESS); ResponseFuture responseFuture = new ResponseFuture(null, request.getOpaque(), 3 * 1000, null, null); responseFuture.setResponseCommand(response); CompletableFuture<RemotingCommand> future0 = new CompletableFuture<>(); future0.complete(responseFuture.getResponseCommand()); doReturn(future0).when(remotingClient).invoke(anyString(), any(RemotingCommand.class), anyLong()); CompletableFuture<RemotingCommand> future = remotingClient.invoke("0.0.0.0", request, 1000); RemotingCommand actual = future.get(); assertThat(actual).isEqualTo(response); }
/**
 * Renders the given AST node as SQL text, stripping any trailing newlines
 * left by the formatter.
 */
public static String formatSql(final AstNode root) {
    final StringBuilder sql = new StringBuilder();
    new Formatter(sql).process(root, 0);
    return StringUtils.stripEnd(sql.toString(), "\n");
}
// A CREATE STREAM without an explicit key column must format to plain column definitions plus WITH properties.
@Test public void shouldFormatCreateStreamStatementWithImplicitKey() { // Given: final CreateStream createStream = new CreateStream( TEST, ELEMENTS_WITHOUT_KEY, false, false, SOME_WITH_PROPS, false); // When: final String sql = SqlFormatter.formatSql(createStream); // Then: assertThat(sql, is("CREATE STREAM TEST (`Foo` STRING, `Bar` STRING) " + "WITH (KAFKA_TOPIC='topic_test', VALUE_FORMAT='JSON');")); }
/**
 * Ensures the supplied argument condition holds.
 *
 * @param b condition expected to be {@code true}
 * @throws IllegalArgumentException if {@code b} is {@code false}
 */
public static void checkArgument(boolean b) {
    if (b) {
        return;
    }
    throw new IllegalArgumentException();
}
// Exercises the message-formatting overloads with mismatched %s placeholders and argument counts:
// surplus args are appended as "[a,b]", surplus %s are left verbatim, and the true-condition calls must not throw.
@Test public void testPreconditionsMalformed(){ //No %s: Preconditions.checkArgument(true, "This is malformed", "A", "B", "C"); try{ Preconditions.checkArgument(false, "This is malformed", "A", "B", "C"); } catch (IllegalArgumentException e){ assertEquals("This is malformed [A,B,C]", e.getMessage()); } //More args than %s: Preconditions.checkArgument(true, "This is %s malformed", "A", "B", "C"); try{ Preconditions.checkArgument(false, "This is %s malformed", "A", "B", "C"); } catch (IllegalArgumentException e){ assertEquals("This is A malformed [B,C]", e.getMessage()); } //No args Preconditions.checkArgument(true, "This is %s %s malformed"); try{ Preconditions.checkArgument(false, "This is %s %s malformed"); } catch (IllegalArgumentException e){ assertEquals("This is %s %s malformed", e.getMessage()); } //More %s than args Preconditions.checkArgument(true, "This is %s %s malformed", "A"); try{ Preconditions.checkArgument(false, "This is %s %s malformed", "A"); } catch (IllegalArgumentException e){ assertEquals("This is A %s malformed", e.getMessage()); } }
// Convenience overload: reads the class against the resolved application path with empty/default
// values for all remaining scanner parameters.
public OpenAPI read(Class<?> cls) { return read(cls, resolveApplicationPath(), null, false, null, null, new LinkedHashSet<String>(), new ArrayList<Parameter>(), new HashSet<Class<?>>()); }
// Ticket 2806: an annotated array property must keep its constraints (min/max/unique), description,
// list-level example, and item-level description/example in the generated schema.
@Test(description = "array schema example") public void testTicket2806() { Reader reader = new Reader(new OpenAPI()); OpenAPI openAPI = reader.read(Ticket2806Resource.class); String yaml = "openapi: 3.0.1\n" + "paths:\n" + " /test:\n" + " get:\n" + " operationId: getTest\n" + " responses:\n" + " default:\n" + " description: default response\n" + " content:\n" + " '*/*':\n" + " schema:\n" + " $ref: '#/components/schemas/Test'\n" + "components:\n" + " schemas:\n" + " Test:\n" + " type: object\n" + " properties:\n" + " stringArray:\n" + " maxItems: 4\n" + " minItems: 2\n" + " uniqueItems: true\n" + " type: array\n" + " description: Array desc\n" + " example:\n" + " - aaa\n" + " - bbb\n" + " items:\n" + " type: string\n" + " description: Hello, World!\n" + " example: Lorem ipsum dolor set\n"; SerializationMatchers.assertEqualsToYaml(openAPI, yaml); }
// Varargs convenience: accumulates the expected elements and delegates to containsAtLeastElementsIn.
@CanIgnoreReturnValue public final Ordered containsAtLeast( @Nullable Object firstExpected, @Nullable Object secondExpected, @Nullable Object @Nullable ... restOfExpected) { return containsAtLeastElementsIn(accumulate(firstExpected, secondExpected, restOfExpected)); }
// inOrder() must accept expected elements appearing in order with unrelated elements interleaved,
// including duplicates of expected elements before, between, and after the matched positions.
@Test public void iterableContainsAtLeastInOrderWithGaps() { assertThat(asList(3, 2, 5)).containsAtLeast(3, 5).inOrder(); assertThat(asList(3, 2, 2, 4, 5)).containsAtLeast(3, 2, 2, 5).inOrder(); assertThat(asList(3, 1, 4, 1, 5)).containsAtLeast(3, 1, 5).inOrder(); assertThat(asList("x", "y", "y", "z")).containsAtLeast("x", "y", "z").inOrder(); assertThat(asList("x", "x", "y", "z")).containsAtLeast("x", "y", "z").inOrder(); assertThat(asList("z", "x", "y", "z")).containsAtLeast("x", "y", "z").inOrder(); assertThat(asList("x", "x", "y", "z", "x")).containsAtLeast("x", "y", "z", "x").inOrder(); }
/**
 * Formats the time elapsed between {@code date} and the current time (taken
 * from the injected {@code system2} clock) as a human-readable age for the
 * given locale.
 */
@Override
public String ageFromNow(Locale locale, Date date) {
    long elapsedMillis = system2.now() - date.getTime();
    return age(locale, elapsedMillis);
}
// ageFromNow() must measure the age relative to the injected clock (system2),
// not the real wall clock.
@Test
public void get_age_from_now() {
    system2.setNow(DateUtils.parseDate("2014-01-02").getTime());
    assertThat(underTest.ageFromNow(Locale.ENGLISH, DateUtils.parseDate("2014-01-01"))).isEqualTo("a day");
}
/**
 * Ensures the index template for the given index set exists in the search
 * backend, creating or updating it as needed. When template creation is
 * explicitly skipped via {@link IgnoreIndexTemplate}, this only fails if the
 * template is mandatory and actually missing from the backend.
 *
 * @param indexSet the index set whose template should be ensured
 * @throws IndexTemplateNotFoundException if a required template is absent
 */
public void ensureIndexTemplate(IndexSet indexSet) {
    final IndexSetConfig indexSetConfig = indexSet.getConfig();
    final String templateName = indexSetConfig.indexTemplateName();
    try {
        var template = buildTemplate(indexSet, indexSetConfig);
        if (indicesAdapter.ensureIndexTemplate(templateName, template)) {
            LOG.info("Successfully ensured index template {}", templateName);
        } else {
            LOG.warn("Failed to create index template {}", templateName);
        }
    } catch (IgnoreIndexTemplate e) {
        LOG.warn(e.getMessage());
        // Only a hard failure when the template is mandatory and truly absent.
        if (e.isFailOnMissingTemplate() && !indicesAdapter.indexTemplateExists(templateName)) {
            throw new IndexTemplateNotFoundException(f("No index template with name '%s' (type - '%s') found in Elasticsearch",
                    templateName, indexSetConfig.indexTemplateType().orElse(null)));
        }
    }
}
// When mapping creation signals IgnoreIndexTemplate (even with
// failOnMissingTemplate=true) but the template already exists in the backend,
// ensureIndexTemplate() must not throw.
@Test
public void ensureIndexTemplate_IfIndexTemplateExistsOnIgnoreIndexTemplate_thenNoExceptionThrown() {
    when(indexMappingFactory.createIndexMapping(any()))
        .thenThrow(new IgnoreIndexTemplate(true, "Reasom", "test", "test-template", null));
    when(indicesAdapter.indexTemplateExists("test-template")).thenReturn(true);
    assertThatCode(() -> underTest.ensureIndexTemplate(
        indexSetConfig("test", "test-template", "custom")))
        .doesNotThrowAnyException();
}
/**
 * @return always {@code false}: this metadata implementation reports that
 *     NULL values are not sorted to the start regardless of sort order
 */
@Override
public boolean nullsAreSortedAtStart() {
    return false;
}
// The metadata must report that NULL values are not sorted at the start.
@Test
void assertNullsAreSortedAtStart() {
    assertFalse(metaData.nullsAreSortedAtStart());
}
/**
 * Returns the configured number of entries written to the map store in one
 * batch.
 */
public int getWriteBatchSize() {
    return this.writeBatchSize;
}
// A freshly constructed MapStoreConfig must expose the default write batch size.
@Test
public void getWriteBatchSize() {
    assertEquals(DEFAULT_WRITE_BATCH_SIZE, new MapStoreConfig().getWriteBatchSize());
}
/**
 * Creates a Flink {@link StreamExecutionEnvironment} from the pipeline
 * options, staging the configured files (or an empty list when none are set)
 * and using the configured Flink configuration directory.
 */
@VisibleForTesting
static StreamExecutionEnvironment createStreamExecutionEnvironment(FlinkPipelineOptions options) {
    return createStreamExecutionEnvironment(
        options,
        MoreObjects.firstNonNull(options.getFilesToStage(), Collections.emptyList()),
        options.getFlinkConfDir());
}
// A Flink master address with a non-numeric port ("host:p0rt") must be
// rejected with a descriptive IllegalArgumentException.
@Test
public void shouldDetectMalformedPortStreaming() {
    FlinkPipelineOptions options = getDefaultPipelineOptions();
    options.setRunner(FlinkRunner.class);
    options.setFlinkMaster("host:p0rt");
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("Unparseable port number");
    FlinkExecutionEnvironments.createStreamExecutionEnvironment(options);
}
/**
 * Renders a parsed configuration value back to its string form for the given
 * type. A {@code null} value yields {@code null}; a {@code null} type falls
 * back to {@code toString()}. Lists become comma-separated element strings and
 * classes are rendered by their fully-qualified name.
 *
 * @throws IllegalStateException if the type is not a known constant
 */
public static String convertToString(Object parsedValue, Type type) {
    if (parsedValue == null) {
        return null;
    }
    if (type == null) {
        return parsedValue.toString();
    }
    if (type == Type.LIST) {
        // Join the elements' string forms with commas (no trailing separator).
        List<?> valueList = (List<?>) parsedValue;
        StringBuilder joined = new StringBuilder();
        for (Object element : valueList) {
            if (joined.length() > 0) {
                joined.append(",");
            }
            joined.append(element.toString());
        }
        return joined.toString();
    }
    if (type == Type.CLASS) {
        return ((Class<?>) parsedValue).getName();
    }
    switch (type) {
        case BOOLEAN:
        case SHORT:
        case INT:
        case LONG:
        case DOUBLE:
        case STRING:
        case PASSWORD:
            return parsedValue.toString();
        default:
            throw new IllegalStateException("Unknown type.");
    }
}
// LONG values must round-trip via toString(); a null value converts to null.
@Test
public void testConvertValueToStringLong() {
    assertEquals("9223372036854775807", ConfigDef.convertToString(Long.MAX_VALUE, Type.LONG));
    assertNull(ConfigDef.convertToString(null, Type.LONG));
}
/**
 * Registers the storage units described by the statement: validates the data
 * sources, optionally skips names that already exist (IF NOT EXISTS), checks
 * the expected privileges, and persists the remaining units.
 *
 * @throws StorageUnitsOperateException if persisting the registration fails
 */
@Override
public void executeUpdate(final RegisterStorageUnitStatement sqlStatement, final ContextManager contextManager) {
    checkDataSource(sqlStatement, contextManager);
    Map<String, DataSourcePoolProperties> propsMap = DataSourceSegmentsConverter.convert(database.getProtocolType(), sqlStatement.getStorageUnits());
    if (sqlStatement.isIfNotExists()) {
        // IF NOT EXISTS: silently drop units whose names are already taken,
        // either by registered storage units or by logical data sources.
        Collection<String> currentStorageUnits = getCurrentStorageUnitNames(contextManager);
        Collection<String> logicalDataSourceNames = getLogicalDataSourceNames();
        propsMap.keySet().removeIf(currentStorageUnits::contains);
        propsMap.keySet().removeIf(logicalDataSourceNames::contains);
    }
    if (propsMap.isEmpty()) {
        return;
    }
    validateHandler.validate(propsMap, getExpectedPrivileges(sqlStatement));
    try {
        contextManager.getPersistServiceFacade().getMetaDataManagerPersistService().registerStorageUnits(database.getName(), propsMap);
    } catch (final SQLException | ShardingSphereExternalException ex) {
        throw new StorageUnitsOperateException("register", propsMap.keySet(), ex);
    }
}
// Registering a storage unit whose name already exists in the database's
// resource metadata must raise DuplicateStorageUnitException.
@Test
void assertExecuteUpdateWithDuplicateStorageUnitNamesWithResourceMetaData() {
    ContextManager contextManager = mock(ContextManager.class, RETURNS_DEEP_STUBS);
    when(contextManager.getStorageUnits("foo_db").keySet()).thenReturn(Collections.singleton("ds_0"));
    assertThrows(DuplicateStorageUnitException.class, () -> executor.executeUpdate(createRegisterStorageUnitStatement(), contextManager));
}
/**
 * Sends the given consumed offsets to the current transaction and commits it.
 * Requires exactly-once semantics to be enabled. Fencing-style producer errors
 * are mapped to {@link TaskMigratedException}; timeouts are re-thrown so the
 * {@code task.timeout.ms} handling can kick in; any other Kafka error becomes
 * a {@link StreamsException}.
 */
protected void commitTransaction(final Map<TopicPartition, OffsetAndMetadata> offsets,
                                 final ConsumerGroupMetadata consumerGroupMetadata) {
    if (!eosEnabled()) {
        throw new IllegalStateException(formatException("Exactly-once is not enabled"));
    }
    maybeBeginTransaction();
    try {
        // EOS-v2 assumes brokers are on version 2.5+ and thus can understand the full set of consumer group metadata
        // Thus if we are using EOS-v1 and can't make this assumption, we must downgrade the request to include only the group id metadata
        final ConsumerGroupMetadata maybeDowngradedGroupMetadata =
            processingMode == EXACTLY_ONCE_V2 ? consumerGroupMetadata : new ConsumerGroupMetadata(consumerGroupMetadata.groupId());
        producer.sendOffsetsToTransaction(offsets, maybeDowngradedGroupMetadata);
        producer.commitTransaction();
        transactionInFlight = false;
    } catch (final ProducerFencedException | InvalidProducerEpochException | CommitFailedException | InvalidPidMappingException error) {
        throw new TaskMigratedException(
            formatException("Producer got fenced trying to commit a transaction"),
            error
        );
    } catch (final TimeoutException timeoutException) {
        // re-throw to trigger `task.timeout.ms`
        throw timeoutException;
    } catch (final KafkaException error) {
        throw new StreamsException(
            formatException("Error encountered trying to commit a transaction"),
            error
        );
    }
}
// An EOS commit must forward the consumed offsets to the transaction before
// committing it.
@Test
public void shouldSendOffsetToTxOnEosCommit() {
    eosAlphaStreamsProducer.commitTransaction(offsetsAndMetadata, new ConsumerGroupMetadata("appId"));
    assertThat(eosAlphaMockProducer.sentOffsets(), is(true));
}
/**
 * Opens an output stream that uploads the file to S3 as a single PUT request.
 * Object metadata is derived from the transfer status; the resulting
 * {@link StorageObject} (ETag etc.) is available once the stream completes.
 *
 * @throws BackgroundException if the upload fails
 */
@Override
public HttpResponseOutputStream<StorageObject> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    final S3Object object = this.getDetails(file, status);
    final DelayedHttpEntityCallable<StorageObject> command = new DelayedHttpEntityCallable<StorageObject>(file) {
        @Override
        public StorageObject call(final HttpEntity entity) throws BackgroundException {
            try {
                final RequestEntityRestStorageService client = session.getClient();
                final Path bucket = containerService.getContainer(file);
                // A root container translates to an empty bucket name on the wire.
                client.putObjectWithRequestEntityImpl(
                    bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), object, entity, status.getParameters());
                if(log.isDebugEnabled()) {
                    log.debug(String.format("Saved object %s with checksum %s", file, object.getETag()));
                }
            }
            catch(ServiceException e) {
                throw new S3ExceptionMappingService().map("Upload {0} failed", e, file);
            }
            return object;
        }

        @Override
        public long getContentLength() {
            return status.getLength();
        }
    };
    return this.write(file, status, command);
}
// Writing to a versioned bucket must yield a version id in the write response,
// and the attributes fetched afterwards (version id, checksum, ETag, size)
// must match that response.
@Test
public void testWriteVersionedBucket() throws Exception {
    final S3WriteFeature feature = new S3WriteFeature(session, new S3AccessControlListFeature(session));
    final Path container = new Path("versioning-test-eu-central-1-cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory));
    final Path file = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final byte[] content = RandomUtils.nextBytes(5 * 1024 * 1024);
    final TransferStatus status = new TransferStatus();
    status.setLength(content.length);
    status.setChecksum(new SHA256ChecksumCompute().compute(new ByteArrayInputStream(content), status));
    final HttpResponseOutputStream<StorageObject> out = feature.write(file, status, new DisabledConnectionCallback());
    new StreamCopier(status, status).transfer(new ByteArrayInputStream(content), out);
    out.close();
    assertNotNull(status.getResponse().getVersionId());
    final PathAttributes attr = new S3AttributesFinderFeature(session, new S3AccessControlListFeature(session)).find(file);
    assertEquals(status.getResponse().getVersionId(), attr.getVersionId());
    assertEquals(status.getResponse().getChecksum(), attr.getChecksum());
    assertEquals(status.getResponse().getETag(), attr.getETag());
    assertEquals(content.length, attr.getSize());
    // Clean up the remote test file.
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Returns the mail configuration, lazily creating one bound to the current
 * Camel context on first access.
 */
public MailConfiguration getConfiguration() {
    if (configuration != null) {
        return configuration;
    }
    configuration = new MailConfiguration(getCamelContext());
    return configuration;
}
// An smtp:// endpoint URI must be parsed into protocol, host, port, username,
// TO-recipient and the default INBOX folder, with debug mode off by default.
@Test
public void testMailEndpointsAreConfiguredProperlyWhenUsingSmtp() {
    MailEndpoint endpoint = checkEndpoint("smtp://james@myhost:25/subject");
    MailConfiguration config = endpoint.getConfiguration();
    assertEquals("smtp", config.getProtocol(), "getProtocol()");
    assertEquals("myhost", config.getHost(), "getHost()");
    assertEquals(25, config.getPort(), "getPort()");
    assertEquals("james", config.getUsername(), "getUsername()");
    assertEquals("james@myhost", config.getRecipients().get(Message.RecipientType.TO), "getRecipients().get(Message.RecipientType.TO)");
    assertEquals("INBOX", config.getFolderName(), "folder");
    assertFalse(config.isDebugMode());
}
/**
 * @return a new builder for {@link DLPDeidentifyText}, backed by the
 *     AutoValue-generated implementation
 */
public static DLPDeidentifyText.Builder newBuilder() {
    return new AutoValue_DLPDeidentifyText.Builder();
}
// Building the transform without either a deidentify config or a template
// name must fail fast with IllegalArgumentException.
@Test
public void throwsExceptionWhenDeidentifyConfigAndTemplatesAreEmpty() {
    assertThrows(
        "Either deidentifyConfig or deidentifyTemplateName need to be set!",
        IllegalArgumentException.class,
        () -> DLPDeidentifyText.newBuilder()
            .setProjectId(PROJECT_ID)
            .setBatchSizeBytes(BATCH_SIZE_SMALL)
            .setColumnDelimiter(DELIMITER)
            .build());
}
/**
 * Binds the server transport and blocks until it is started, using the
 * default 45-second timeout.
 *
 * @return the started {@link DisposableServer}
 */
public final DisposableServer bindNow() {
    return bindNow(Duration.ofSeconds(45));
}
// Disposing with a timeout too short for the shutdown to complete must raise
// IllegalStateException.
@Test
void testDisposeTimeout() {
    assertThatExceptionOfType(IllegalStateException.class)
        .isThrownBy(() -> new TestServerTransport(Mono.just(EmbeddedChannel::new)).bindNow().disposeNow(Duration.ofMillis(1)));
}
/**
 * Removes the singleton with the given name from the Spring bean factory.
 *
 * @param beanName name of the singleton bean to destroy
 * @throws UtilException if the factory is not a DefaultSingletonBeanRegistry
 *     and therefore cannot unregister singletons
 */
public static void unregisterBean(String beanName) {
    final ConfigurableListableBeanFactory factory = getConfigurableBeanFactory();
    if (!(factory instanceof DefaultSingletonBeanRegistry)) {
        throw new UtilException("Can not unregister bean, the factory is not a DefaultSingletonBeanRegistry!");
    }
    ((DefaultSingletonBeanRegistry) factory).destroySingleton(beanName);
}
/**
 * Registers a bean, unregisters it again, and verifies that a subsequent
 * lookup fails with {@link NoSuchBeanDefinitionException}.
 *
 * <p>Fixes two defects in the original test: it unregistered
 * {@code "testAutoWired1"} (a name that was never registered), leaving the
 * real bean in place, and the try/catch passed vacuously when no exception
 * was thrown. The AssertionError guard makes a non-throwing lookup fail.
 */
@Test
public void unregisterBeanTest() {
    registerTestAutoWired();
    assertNotNull(SpringUtil.getBean("testAutoWired"));
    // Unregister the bean that was actually registered.
    SpringUtil.unregisterBean("testAutoWired");
    try {
        SpringUtil.getBean("testAutoWired");
        throw new AssertionError("Expected NoSuchBeanDefinitionException");
    } catch (NoSuchBeanDefinitionException e) {
        assertEquals(e.getClass(), NoSuchBeanDefinitionException.class);
    }
}
/** Returns the underlying Curator client instance. */
public CuratorFramework getClient() {
    return this.client;
}
// The wrapper must expose a non-null underlying CuratorFramework instance.
@Test
void getClient() {
    CuratorFramework curatorFramework = client.getClient();
    assertNotNull(curatorFramework);
}
/**
 * Copies all readable bean properties and accessible public fields of
 * {@code bean} into {@code map}, keyed as {@code prefix + name}. Only
 * properties that also have a matching setter are copied; {@code null} values
 * and static/transient fields are skipped, and a field never overwrites an
 * already-copied property of the same name.
 *
 * @param bean   source object to read values from
 * @param prefix key prefix prepended to each property/field name
 * @param map    destination map receiving the non-null values
 * @throws SofaRpcRuntimeException if a getter cannot be invoked
 */
public static void copyPropertiesToMap(Object bean, String prefix, Map<String, Object> map) {
    Class clazz = bean.getClass();
    Method[] methods = bean.getClass().getMethods();
    for (Method method : methods) {
        // Copy bean properties (getter/setter pairs).
        Class returnc = method.getReturnType();
        if (ReflectUtils.isBeanPropertyReadMethod(method)) {
            String propertyName = ReflectUtils.getPropertyNameFromBeanReadMethod(method);
            try {
                if (ReflectUtils.getPropertySetterMethod(clazz, propertyName, returnc) == null) {
                    continue; // a matching setter is also required
                }
            } catch (Exception e) {
                continue;
            }
            Object val;
            try {
                val = method.invoke(bean);
            } catch (InvocationTargetException e) {
                throw new SofaRpcRuntimeException("Can't access copy " + propertyName, e.getCause());
            } catch (IllegalAccessException e) {
                throw new SofaRpcRuntimeException("Can't access copy " + propertyName, e);
            }
            if (val != null) {
                // Non-null value: store it in the map.
                map.put(prefix + propertyName, val);
            }
        }
    }
    Field[] fields = bean.getClass().getFields();
    for (Field field : fields) {
        String fieldName = field.getName();
        if (map.containsKey(prefix + fieldName)) {
            continue;
        }
        int m = field.getModifiers();
        if (!Modifier.isStatic(m) && !Modifier.isTransient(m)) {
            Object val = null;
            try {
                if (field.isAccessible()) {
                    val = field.get(bean);
                } else {
                    // Temporarily open up access, restoring the flag afterwards.
                    try {
                        field.setAccessible(true);
                        val = field.get(bean);
                    } finally {
                        field.setAccessible(false);
                    }
                }
            } catch (IllegalAccessException e) {
                // LOGGER.warn("Can't access field" + fieldName + "when copy value to context", e);
            }
            if (val != null) {
                map.put(prefix + fieldName, val);
            }
        }
    }
}
// Readable bean properties must be copied into the map under their (prefixed)
// property names.
@Test
public void testCopyProptertiesToMap() throws Exception {
    TestBean config = new TestBean();
    config.setAlias("1111aaaa");
    Map<String, Object> map = new HashMap<String, Object>();
    BeanUtils.copyPropertiesToMap(config, "", map);
    Assert.assertTrue(map.size() > 0);
    Assert.assertEquals(map.get("alias"), "1111aaaa");
}
/** @return the total memory size managed by the memory budget */
public long getMemorySize() {
    return memoryBudget.getTotalMemorySize();
}
// Reserving one page more than the manager's total memory must be rejected.
@Test
void testMemoryTooBigReservation() {
    long size = memoryManager.getMemorySize() + PAGE_SIZE;
    testCannotReserveAnymore(size);
}
/** Returns the configured connection timeout in milliseconds. */
public int getConTimeOutMillis() {
    return this.conTimeOutMillis;
}
// The builder must propagate the configured connection timeout to the config.
@Test
void testGetConTimeOutMillis() {
    HttpClientConfig config = HttpClientConfig.builder().setConTimeOutMillis(1000).build();
    assertEquals(1000, config.getConTimeOutMillis());
}
/**
 * Builds the data container for the cache, choosing between unbounded and
 * bounded variants based on the eviction strategy, off-heap vs. on-heap
 * storage, segmentation (clustered caches needing state transfer) and L1
 * configuration. Bounded containers additionally register attribute listeners
 * so runtime changes to max-size/max-count resize the container.
 */
@Override
public Object construct(String componentName) {
    ClusteringConfiguration clusteringConfiguration = configuration.clustering();
    boolean shouldSegment = clusteringConfiguration.cacheMode().needsStateTransfer();
    int level = configuration.locking().concurrencyLevel();
    MemoryConfiguration memoryConfiguration = configuration.memory();
    boolean offHeap = memoryConfiguration.isOffHeap();
    EvictionStrategy strategy = memoryConfiguration.whenFull();
    //handle case when < 0 value signifies unbounded container or when we are not removal based
    if (strategy.isExceptionBased() || !strategy.isEnabled()) {
        if (offHeap) {
            if (shouldSegment) {
                int segments = clusteringConfiguration.hash().numSegments();
                Supplier<PeekableTouchableMap<WrappedBytes, WrappedBytes>> mapSupplier = this::createAndStartOffHeapConcurrentMap;
                if (clusteringConfiguration.l1().enabled()) {
                    return new L1SegmentedDataContainer<>(mapSupplier, segments);
                }
                return new DefaultSegmentedDataContainer<>(mapSupplier, segments);
            } else {
                return new OffHeapDataContainer();
            }
        } else if (shouldSegment) {
            Supplier<PeekableTouchableMap<Object, Object>> mapSupplier = PeekableTouchableContainerMap::new;
            int segments = clusteringConfiguration.hash().numSegments();
            if (clusteringConfiguration.l1().enabled()) {
                return new L1SegmentedDataContainer<>(mapSupplier, segments);
            }
            return new DefaultSegmentedDataContainer<>(mapSupplier, segments);
        } else {
            return DefaultDataContainer.unBoundedDataContainer(level);
        }
    }
    // Bounded container: the threshold is either a byte size or an entry count.
    boolean sizeInBytes = memoryConfiguration.maxSize() != null;
    long thresholdSize = sizeInBytes ? memoryConfiguration.maxSizeBytes() : memoryConfiguration.maxCount();
    DataContainer<?, ?> dataContainer;
    if (offHeap) {
        if (shouldSegment) {
            int segments = clusteringConfiguration.hash().numSegments();
            dataContainer = new SegmentedBoundedOffHeapDataContainer(segments, thresholdSize, memoryConfiguration.evictionType());
        } else {
            dataContainer = new BoundedOffHeapDataContainer(thresholdSize, memoryConfiguration.evictionType());
        }
    } else if (shouldSegment) {
        int segments = clusteringConfiguration.hash().numSegments();
        dataContainer = new BoundedSegmentedDataContainer<>(segments, thresholdSize, memoryConfiguration.evictionType());
    } else {
        dataContainer = DefaultDataContainer.boundedDataContainer(level, thresholdSize, memoryConfiguration.evictionType());
    }
    // Keep the container's capacity in sync with runtime configuration changes.
    if (sizeInBytes) {
        memoryConfiguration.attributes().attribute(MemoryConfiguration.MAX_SIZE)
            .addListener((newSize, old) -> dataContainer.resize(memoryConfiguration.maxSizeBytes()));
    } else {
        memoryConfiguration.attributes().attribute(MemoryConfiguration.MAX_COUNT)
            .addListener((newSize, old) -> dataContainer.resize(newSize.get()));
    }
    return dataContainer;
}
// A distributed cache with L1 enabled must be given an L1-aware segmented
// data container.
@Test
public void testSegmentedL1() {
    dataContainerFactory.configuration = new ConfigurationBuilder().clustering()
        .cacheMode(CacheMode.DIST_ASYNC)
        .l1().enable().build();
    Object component = dataContainerFactory.construct(COMPONENT_NAME);
    assertEquals(L1SegmentedDataContainer.class, component.getClass());
}
/**
 * Loads all server plugins: bundled (built-in) plugins, externally installed
 * plugins, and freshly downloaded plugins, which are moved into the external
 * plugin directory (replacing any previously installed version). Duplicate
 * keys or conflicts with built-in plugins abort loading with a
 * MessageException; finally, plugins with unsatisfied requirements are
 * unloaded.
 *
 * @return the loaded plugin metadata
 */
public Collection<ServerPluginInfo> loadPlugins() {
    Map<String, ServerPluginInfo> bundledPluginsByKey = new LinkedHashMap<>();
    for (ServerPluginInfo bundled : getBundledPluginsMetadata()) {
        // Two bundled jars sharing a key is a packaging error.
        failIfContains(bundledPluginsByKey, bundled,
            plugin -> MessageException.of(format("Found two versions of the plugin %s [%s] in the directory %s. Please remove one of %s or %s.",
                bundled.getName(), bundled.getKey(), getRelativeDir(fs.getInstalledBundledPluginsDir()), bundled.getNonNullJarFile().getName(), plugin.getNonNullJarFile().getName())));
        bundledPluginsByKey.put(bundled.getKey(), bundled);
    }
    Map<String, ServerPluginInfo> externalPluginsByKey = new LinkedHashMap<>();
    for (ServerPluginInfo external : getExternalPluginsMetadata()) {
        // An external plugin must not clash with a built-in feature...
        failIfContains(bundledPluginsByKey, external,
            plugin -> MessageException.of(format("Found a plugin '%s' in the directory '%s' with the same key [%s] as a built-in feature '%s'. Please remove '%s'.",
                external.getName(), getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getKey(), plugin.getName(),
                new File(getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getNonNullJarFile().getName()))));
        // ...nor with another external plugin.
        failIfContains(externalPluginsByKey, external,
            plugin -> MessageException.of(format("Found two versions of the plugin '%s' [%s] in the directory '%s'. Please remove %s or %s.",
                external.getName(), external.getKey(), getRelativeDir(fs.getInstalledExternalPluginsDir()), external.getNonNullJarFile().getName(), plugin.getNonNullJarFile().getName())));
        externalPluginsByKey.put(external.getKey(), external);
    }
    for (PluginInfo downloaded : getDownloadedPluginsMetadata()) {
        failIfContains(bundledPluginsByKey, downloaded,
            plugin -> MessageException.of(format("Fail to update plugin: %s. Built-in feature with same key already exists: %s. Move or delete plugin from %s directory",
                plugin.getName(), plugin.getKey(), getRelativeDir(fs.getDownloadedPluginsDir()))));
        ServerPluginInfo installedPlugin;
        if (externalPluginsByKey.containsKey(downloaded.getKey())) {
            // Upgrade: drop the old jar before moving the new one into place.
            deleteQuietly(externalPluginsByKey.get(downloaded.getKey()).getNonNullJarFile());
            installedPlugin = moveDownloadedPluginToExtensions(downloaded);
            LOG.info("Plugin {} [{}] updated to version {}", installedPlugin.getName(), installedPlugin.getKey(), installedPlugin.getVersion());
        } else {
            installedPlugin = moveDownloadedPluginToExtensions(downloaded);
            LOG.info("Plugin {} [{}] installed", installedPlugin.getName(), installedPlugin.getKey());
        }
        externalPluginsByKey.put(downloaded.getKey(), installedPlugin);
    }
    Map<String, ServerPluginInfo> plugins = new HashMap<>(externalPluginsByKey.size() + bundledPluginsByKey.size());
    plugins.putAll(externalPluginsByKey);
    plugins.putAll(bundledPluginsByKey);
    PluginRequirementsValidator.unloadIncompatiblePlugins(plugins);
    return plugins.values();
}
// Two external plugin jars sharing the same key must abort loading with a
// MessageException that names both jar files.
@Test
public void fail_if_external_plugins_have_same_key() throws IOException {
    File jar1 = createJar(fs.getInstalledExternalPluginsDir(), "plugin1", "main", null);
    File jar2 = createJar(fs.getInstalledExternalPluginsDir(), "plugin1", "main", null);
    String dir = getDirName(fs.getInstalledExternalPluginsDir());
    assertThatThrownBy(() -> underTest.loadPlugins())
        .isInstanceOf(MessageException.class)
        .hasMessageContaining("Found two versions of the plugin 'plugin1' [plugin1] in the directory '" + dir + "'. Please remove ")
        .hasMessageContaining(jar2.getName())
        .hasMessageContaining(jar1.getName());
}
/**
 * Resolves the external URL: returns the configured external URL when set,
 * otherwise derives it from the request URI resolved against the base path.
 *
 * @throws RuntimeException if the resolved URI cannot be converted to a URL
 */
@Override
public URL getURL(HttpRequest request) {
    var configuredUrl = haloProperties.getExternalUrl();
    if (configuredUrl != null) {
        return configuredUrl;
    }
    try {
        return request.getURI().resolve(getBasePath()).toURL();
    } catch (MalformedURLException e) {
        throw new RuntimeException("Cannot parse request URI to URL.", e);
    }
}
// Without a configured external URL, the URL must be derived from the request
// URI resolved against the base path.
@Test
void getURLWhenExternalURLAbsent() throws MalformedURLException {
    var fakeUri = URI.create("https://localhost/fake");
    when(haloProperties.getExternalUrl()).thenReturn(null);
    var mockRequest = mock(HttpRequest.class);
    when(mockRequest.getURI()).thenReturn(fakeUri);
    var url = externalUrl.getURL(mockRequest);
    assertEquals(new URL("https://localhost/"), url);
}
/**
 * Replays the blocks in the given range as a Flowable, in ascending order,
 * optionally including full transaction objects.
 */
public Flowable<EthBlock> replayBlocksFlowable(
        DefaultBlockParameter startBlock, DefaultBlockParameter endBlock, boolean fullTransactionObjects) {
    return replayBlocksFlowable(startBlock, endBlock, fullTransactionObjects, true);
}
// An IOException while resolving a block must surface as a Flowable error
// (recoverable via onErrorReturnItem) rather than being thrown synchronously.
@Test
void testReplayBlocksFlowableWhenIOExceptionOnBlockResolving() throws IOException {
    Web3j web3j = mock(Web3j.class, RETURNS_DEEP_STUBS);
    when(web3j.ethGetBlockByNumber(any(), anyBoolean()).send())
        .thenThrow(new IOException("fail"));
    JsonRpc2_0Rx rpc = new JsonRpc2_0Rx(web3j, Executors.newSingleThreadScheduledExecutor());
    Flowable<EthBlock> flowable = rpc.replayBlocksFlowable(
        mock(DefaultBlockParameter.class), mock(DefaultBlockParameter.class), false, false);
    EthBlock expected = mock(EthBlock.class);
    EthBlock actual = flowable.onErrorReturnItem(expected).blockingFirst();
    assertSame(expected, actual, "unexpected returned block");
}
/** @return a {@link RowCoder} for rows with the given schema */
public static RowCoder of(Schema schema) {
    return new RowCoder(schema);
}
// Rows containing arrays with null elements must round-trip through RowCoder
// and compare equal after decode/encode.
@Test
public void testConsistentWithEqualsArrayWithNull() throws Exception {
    Schema schema =
        Schema.builder()
            .addField("a", Schema.FieldType.array(Schema.FieldType.INT32.withNullable(true)))
            .build();
    Row row = Row.withSchema(schema).addValue(Arrays.asList(1, null)).build();
    CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row);
}
/**
 * Scans {@code pattern} into a token list, driven by the current parser state
 * (literal text, format modifier, option, keyword, or right parenthesis).
 *
 * @return the tokens recognized in the pattern
 * @throws ScanException if the pattern ends inside a format modifier or option
 */
List<Token> tokenize() throws ScanException {
    List<Token> tokenList = new ArrayList<Token>();
    StringBuffer buf = new StringBuffer();
    while (pointer < patternLength) {
        char c = pattern.charAt(pointer);
        pointer++;
        // Dispatch the character to the handler for the current state.
        switch (state) {
            case LITERAL_STATE:
                handleLiteralState(c, tokenList, buf);
                break;
            case FORMAT_MODIFIER_STATE:
                handleFormatModifierState(c, tokenList, buf);
                break;
            case OPTION_STATE:
                processOption(c, tokenList, buf);
                break;
            case KEYWORD_STATE:
                handleKeywordState(c, tokenList, buf);
                break;
            case RIGHT_PARENTHESIS_STATE:
                handleRightParenthesisState(c, tokenList, buf);
                break;
            default:
        }
    }
    // EOS
    // Flush whatever the final state left in the buffer; modifier/option
    // states cannot legally end the pattern.
    switch (state) {
        case LITERAL_STATE:
            addValuedToken(Token.LITERAL, buf, tokenList);
            break;
        case KEYWORD_STATE:
            tokenList.add(new Token(Token.SIMPLE_KEYWORD, buf.toString()));
            break;
        case RIGHT_PARENTHESIS_STATE:
            tokenList.add(Token.RIGHT_PARENTHESIS_TOKEN);
            break;
        case FORMAT_MODIFIER_STATE:
        case OPTION_STATE:
            throw new ScanException("Unexpected end of pattern string");
    }
    return tokenList;
}
// Nested composite keywords ("%-1(%d %45(%class %file))") must tokenize into
// matching composite markers with balanced right-parenthesis tokens.
@Test
public void testMultipleRecursion() throws ScanException {
    List<Token> tl = new TokenStream("%-1(%d %45(%class %file))").tokenize();
    List<Token> witness = new ArrayList<Token>();
    witness.add(Token.PERCENT_TOKEN);
    witness.add(new Token(Token.FORMAT_MODIFIER, "-1"));
    witness.add(Token.BARE_COMPOSITE_KEYWORD_TOKEN);
    witness.add(Token.PERCENT_TOKEN);
    witness.add(new Token(Token.SIMPLE_KEYWORD, "d"));
    witness.add(new Token(Token.LITERAL, " "));
    witness.add(Token.PERCENT_TOKEN);
    witness.add(new Token(Token.FORMAT_MODIFIER, "45"));
    witness.add(Token.BARE_COMPOSITE_KEYWORD_TOKEN);
    witness.add(Token.PERCENT_TOKEN);
    witness.add(new Token(Token.SIMPLE_KEYWORD, "class"));
    witness.add(new Token(Token.LITERAL, " "));
    witness.add(Token.PERCENT_TOKEN);
    witness.add(new Token(Token.SIMPLE_KEYWORD, "file"));
    witness.add(Token.RIGHT_PARENTHESIS_TOKEN);
    witness.add(Token.RIGHT_PARENTHESIS_TOKEN);
    assertEquals(witness, tl);
}
/**
 * Fetches the service id for {@code SERVICE_NAME} from the StarMgr client and
 * caches it in {@code serviceId}; on failure the current value is left
 * unchanged and only a warning is logged.
 */
public void getServiceId() {
    try {
        ServiceInfo serviceInfo = client.getServiceInfoByName(SERVICE_NAME);
        serviceId = serviceInfo.getServiceId();
    } catch (StarClientException e) {
        LOG.warn("Failed to get serviceId from starMgr. Error:", e);
        return;
    }
    LOG.info("get serviceId {} from starMgr", serviceId);
}
// getServiceId() must cache the id returned by the StarMgr client in the
// agent's private serviceId field.
@Test
public void testGetServiceId() throws Exception {
    new Expectations() {
        {
            client.getServiceInfoByName(SERVICE_NAME).getServiceId();
            minTimes = 0;
            result = "2";
        }
    };
    starosAgent.getServiceId();
    Assert.assertEquals("2", Deencapsulation.getField(starosAgent, "serviceId"));
}
/**
 * Resumes provisioning of hosts in state {@code provisioned}: asks the host
 * provisioner for each host's IP configuration and writes it back to the node
 * repository. Transient errors are retried on the next run, throttling aborts
 * the whole run, and fatal provisioning errors (after a grace period) park
 * the host recursively.
 *
 * @return the success factor deviation based on hosts attempted vs. failures
 */
@Override
protected double maintain() {
    NodeList allNodes;
    // Host and child nodes are written in separate transactions, but both are written while holding the
    // unallocated lock. Hold the unallocated lock while reading nodes to ensure we get all the children
    // of newly provisioned hosts.
    try (Mutex ignored = nodeRepository().nodes().lockUnallocated()) {
        allNodes = nodeRepository().nodes().list();
    }
    NodeList hosts = allNodes.state(Node.State.provisioned).nodeType(NodeType.host, NodeType.confighost, NodeType.controllerhost);
    int failures = 0;
    for (Node host : hosts) {
        try {
            HostIpConfig hostIpConfig = hostProvisioner.provision(host);
            setIpConfig(host, hostIpConfig);
        } catch (IllegalArgumentException | IllegalStateException e) {
            // Transient: retried on the next maintainer run.
            log.log(Level.INFO, "Could not provision " + host.hostname() + ", will retry in " + interval() + ": " + Exceptions.toMessageString(e));
        } catch (ThrottleProvisioningException e) {
            // Provisioner asked us to back off: stop the whole run.
            log.log(Level.INFO, "Failed to provision " + host.hostname() + ", will retry in " + interval() + ": " + e.getMessage());
            break;
        } catch (FatalProvisioningException e) {
            // FatalProvisioningException is thrown if node is not found in the cloud, allow for
            // some time for the state to propagate
            if (host.history().age(clock().instant()).getSeconds() < 30) continue;
            failures++;
            log.log(Level.SEVERE, "Failed to provision " + host.hostname() + ", failing out the host recursively", e);
            nodeRepository().nodes().parkRecursively(
                host.hostname(), Agent.HostResumeProvisioner, true, "Failed by HostResumeProvisioner due to provisioning failure");
        } catch (RuntimeException e) {
            // DNS hiccups (NamingException cause) are transient; anything else counts as a failure.
            if (e.getCause() instanceof NamingException)
                log.log(Level.INFO, "Could not provision " + host.hostname() + ", will retry in " + interval() + ": " + Exceptions.toMessageString(e));
            else {
                failures++;
                log.log(Level.WARNING, "Failed to provision " + host.hostname() + ", will retry in " + interval(), e);
            }
        }
    }
    return asSuccessFactorDeviation(hosts.size(), failures);
}
// The maintainer must fetch IP configuration from the host provisioner and
// persist it on both the host (primary + pool) and its child node.
@Test
public void delegates_to_host_provisioner_and_writes_back_result() {
    deployApplication();
    Node host = tester.nodeRepository().nodes().node("host100").orElseThrow();
    Node node = tester.nodeRepository().nodes().node("host100-1").orElseThrow();
    assertTrue("No IP addresses assigned",
        Stream.of(host, node).map(n -> n.ipConfig().primary()).allMatch(List::isEmpty));
    Node hostNew = host.with(host.ipConfig().withPrimary(List.of("::100:0")).withPool(host.ipConfig().pool().withIpAddresses(List.of("::100:1", "::100:2"))));
    Node nodeNew = node.with(IP.Config.ofEmptyPool("::100:1"));
    hostResumeProvisioner.maintain();
    assertEquals(hostNew.ipConfig(), tester.nodeRepository().nodes().node("host100").get().ipConfig());
    assertEquals(nodeNew.ipConfig(), tester.nodeRepository().nodes().node("host100-1").get().ipConfig());
}
/**
 * Builds the timer key used for a classic group member's heartbeat:
 * {@code "heartbeat-<groupId>-<memberId>"}.
 */
static String classicGroupHeartbeatKey(String groupId, String memberId) {
    return String.format("heartbeat-%s-%s", groupId, memberId);
}
// A new member whose session timeout exceeds the new-member join timeout must
// survive the join-timeout expiry after completing join+sync; it is only
// removed later, when its own heartbeat/session expires, leaving the group
// empty with a bumped generation.
@Test
public void testNewMemberTimeoutCompletion() throws Exception {
    GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder()
        .build();
    ClassicGroup group = context.createClassicGroup("group-id");
    GroupMetadataManagerTestContext.JoinResult joinResult = context.sendClassicGroupJoin(
        new GroupMetadataManagerTestContext.JoinGroupRequestBuilder()
            .withGroupId("group-id")
            .withMemberId(UNKNOWN_MEMBER_ID)
            .withDefaultProtocolTypeAndProtocols()
            .withSessionTimeoutMs(context.classicGroupNewMemberJoinTimeoutMs + 5000)
            .build()
    );
    assertTrue(joinResult.records.isEmpty());
    assertFalse(joinResult.joinFuture.isDone());
    // Advance clock by initial rebalance delay to complete join phase.
    GroupMetadataManagerTestContext.assertNoOrEmptyResult(context.sleep(context.classicGroupInitialRebalanceDelayMs));
    assertTrue(joinResult.joinFuture.isDone());
    assertEquals(Errors.NONE.code(), joinResult.joinFuture.get().errorCode());
    assertEquals(1, group.generationId());
    assertTrue(group.isInState(COMPLETING_REBALANCE));
    assertEquals(0, group.allMembers().stream().filter(ClassicGroupMember::isNew).count());
    GroupMetadataManagerTestContext.SyncResult syncResult = context.sendClassicGroupSync(
        new GroupMetadataManagerTestContext.SyncGroupRequestBuilder()
            .withGroupId("group-id")
            .withMemberId(joinResult.joinFuture.get().memberId())
            .withGenerationId(joinResult.joinFuture.get().generationId())
            .build()
    );
    // Simulate a successful write to the log.
    syncResult.appendFuture.complete(null);
    assertTrue(syncResult.syncFuture.isDone());
    assertEquals(Errors.NONE.code(), syncResult.syncFuture.get().errorCode());
    assertEquals(1, group.numMembers());
    // Make sure the NewMemberTimeout is not still in effect, and the member is not kicked
    GroupMetadataManagerTestContext.assertNoOrEmptyResult(context.sleep(context.classicGroupNewMemberJoinTimeoutMs));
    assertEquals(1, group.numMembers());
    // Member should be removed as heartbeat expires. The group is now empty.
    List<ExpiredTimeout<Void, CoordinatorRecord>> timeouts = context.sleep(5000);
    List<CoordinatorRecord> expectedRecords = Collections.singletonList(GroupMetadataManagerTestContext.newGroupMetadataRecord(
        group.groupId(),
        new GroupMetadataValue()
            .setMembers(Collections.emptyList())
            .setGeneration(2)
            .setLeader(null)
            .setProtocolType("consumer")
            .setProtocol(null)
            .setCurrentStateTimestamp(context.time.milliseconds()),
        MetadataVersion.latestTesting())
    );
    assertEquals(1, timeouts.size());
    String memberId = joinResult.joinFuture.get().memberId();
    timeouts.forEach(timeout -> {
        assertEquals(classicGroupHeartbeatKey("group-id", memberId), timeout.key);
        assertEquals(expectedRecords, timeout.result.records());
    });
    assertEquals(0, group.numMembers());
    assertTrue(group.isInState(EMPTY));
}
/**
 * Maps a primitive boolean onto the shared {@code COSBoolean} singletons.
 *
 * @param value the boolean to convert
 * @return {@code TRUE} or {@code FALSE}, never a new instance
 */
public static COSBoolean getBoolean(boolean value) {
    if (value) {
        return TRUE;
    }
    return FALSE;
}
// getBoolean must map the two Java boolean values onto the shared singletons.
@Test
void testGetBoolean() {
    assertEquals(cosBooleanTrue, COSBoolean.getBoolean(Boolean.TRUE));
    assertEquals(cosBooleanFalse, COSBoolean.getBoolean(Boolean.FALSE));
}
/**
 * Returns the committable managers for all checkpoints up to and including
 * {@code checkpointId}, pruning managers whose checkpoints are already fully
 * committed as a side effect.
 */
public Collection<? extends CheckpointCommittableManager<CommT>> getCheckpointCommittablesUpTo(long checkpointId) {
    // clean up fully committed previous checkpoints
    // this wouldn't work with concurrent unaligned checkpoints
    Collection<CheckpointCommittableManagerImpl<CommT>> checkpoints =
        checkpointCommittables.headMap(checkpointId, true).values();
    checkpoints.removeIf(CheckpointCommittableManagerImpl::isFinished);
    return checkpoints;
}
// Only committables for checkpoints <= the requested id must be returned,
// and no end-of-input committable should be present.
@Test
void testGetCheckpointCommittablesUpTo() {
    final CommittableCollector<Integer> committableCollector =
        new CommittableCollector<>(1, 1, METRIC_GROUP);
    CommittableSummary<Integer> first = new CommittableSummary<>(1, 1, 1L, 1, 0, 0);
    committableCollector.addMessage(first);
    CommittableSummary<Integer> second = new CommittableSummary<>(1, 1, 2L, 1, 0, 0);
    committableCollector.addMessage(second);
    committableCollector.addMessage(new CommittableSummary<>(1, 1, 3L, 1, 0, 0));
    assertThat(committableCollector.getCheckpointCommittablesUpTo(2)).hasSize(2);
    assertThat(committableCollector.getEndOfInputCommittable()).isNull();
}
/**
 * Looks up the index of the data node identified by the given data source and
 * actual table name.
 *
 * @return the node's index, or {@code -1} when no such data node exists
 */
public int findActualTableIndex(final String dataSourceName, final String actualTableName) {
    Integer index = dataNodeIndexMap.get(new DataNode(dataSourceName, actualTableName));
    return null == index ? -1 : index;
}
// table_1 on ds1 is the fifth data node (index 4) generated by the
// inline expression ds${0..1}.table_${0..2}.
@Test
void assertFindActualTableIndex() {
    ShardingTable actual = new ShardingTable(new ShardingTableRuleConfiguration("LOGIC_TABLE", "ds${0..1}.table_${0..2}"), Arrays.asList("ds0", "ds1"), null);
    assertThat(actual.findActualTableIndex("ds1", "table_1"), is(4));
}
/**
 * Defines the {@code api/push} web service controller (server-sent events
 * endpoints) and registers every configured action on it.
 */
@Override
public void define(Context context) {
    NewController controller = context
        .createController("api/push")
        .setSince("9.4")
        .setDescription("Endpoints supporting server side events.");
    actions.forEach(action -> action.define(controller));
    controller.done();
}
// Smoke test: after define(), the "api/push" controller exists with the
// expected path, since-version, a non-empty description, and at least one action.
@Test
public void define_ws() {
    WebService.Context context = new WebService.Context();
    underTest.define(context);
    WebService.Controller controller = context.controller("api/push");
    assertThat(controller).isNotNull();
    assertThat(controller.path()).isEqualTo("api/push");
    assertThat(controller.since()).isEqualTo("9.4");
    assertThat(controller.description()).isNotEmpty();
    assertThat(controller.actions()).isNotEmpty();
}
/**
 * Decodes an ABI-encoded function result into typed values.
 * Pure delegation to the shared decoder instance.
 *
 * @param rawInput hex-encoded function return data
 * @param outputParameters expected output types, in declaration order
 * @return decoded values matching {@code outputParameters}
 */
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) {
    return decoder.decodeFunctionResult(rawInput, outputParameters);
}
// Decodes a hand-built ABI v2 payload for a dynamic struct with two string
// fields ("id", "name") and checks the resulting Foo instance.
@Test
public void testDecodeDynamicStruct() {
    String rawInput =
            "0x0000000000000000000000000000000000000000000000000000000000000020"
                    + "0000000000000000000000000000000000000000000000000000000000000040"
                    + "0000000000000000000000000000000000000000000000000000000000000080"
                    + "0000000000000000000000000000000000000000000000000000000000000002"
                    + "6964000000000000000000000000000000000000000000000000000000000000"
                    + "0000000000000000000000000000000000000000000000000000000000000004"
                    + "6e616d6500000000000000000000000000000000000000000000000000000000";
    assertEquals(
            FunctionReturnDecoder.decode(
                    rawInput, AbiV2TestFixture.getFooFunction.getOutputParameters()),
            Collections.singletonList(new AbiV2TestFixture.Foo("id", "name")));
}
/**
 * Returns a future that, when {@code completableFuture} fails with an
 * exception assignable to {@code exceptionClass}, completes with the handler's
 * fallback value; otherwise it forwards the original result or failure.
 *
 * <p>NOTE(review): the throwable is matched exactly as delivered by
 * whenComplete; no CompletionException unwrapping is done here — confirm
 * callers rely on that.
 *
 * @param completableFuture future whose failure may be handled (must not be null)
 * @param exceptionClass exception type to intercept
 * @param exceptionHandler maps a matching exception to a fallback value
 * @return future carrying the handled (or original) outcome
 */
public static <T, E extends Throwable> CompletableFuture<T> handleException(
        CompletableFuture<? extends T> completableFuture,
        Class<E> exceptionClass,
        Function<? super E, ? extends T> exceptionHandler) {
    final CompletableFuture<T> handledFuture = new CompletableFuture<>();
    checkNotNull(completableFuture)
            .whenComplete(
                    (result, throwable) -> {
                        if (throwable == null) {
                            // Normal completion: forward the value untouched.
                            handledFuture.complete(result);
                        } else if (exceptionClass.isAssignableFrom(throwable.getClass())) {
                            final E exception = exceptionClass.cast(throwable);
                            try {
                                handledFuture.complete(exceptionHandler.apply(exception));
                            } catch (Throwable t) {
                                // The handler itself failed: propagate its error instead.
                                handledFuture.completeExceptionally(t);
                            }
                        } else {
                            // Non-matching failure: forward unchanged.
                            handledFuture.completeExceptionally(throwable);
                        }
                    });
    return handledFuture;
}
// A failure whose type does NOT match the handled class must pass through
// unhandled: the resulting future fails with the original cause.
@Test
void testHandleExceptionWithNotMatchingExceptionallyCompletedFuture() {
    final CompletableFuture<String> future = new CompletableFuture<>();
    final CompletableFuture<String> handled =
            FutureUtils.handleException(
                    future, UnsupportedOperationException.class, exception -> "handled");
    final IllegalArgumentException futureException = new IllegalArgumentException("foobar");
    future.completeExceptionally(futureException);
    assertThatFuture(handled)
            .eventuallyFailsWith(ExecutionException.class)
            .withCause(futureException);
}
/**
 * Computes the MySQL server status flag bitmask for a connection session.
 * Sets SERVER_STATUS_AUTOCOMMIT when auto-commit is on and
 * SERVER_STATUS_IN_TRANS when a transaction is active.
 *
 * @param connectionSession session whose state is inspected
 * @return combined status flag bits
 */
public static int calculateFor(final ConnectionSession connectionSession) {
    int statusFlags = 0;
    if (connectionSession.isAutoCommit()) {
        statusFlags |= MySQLStatusFlag.SERVER_STATUS_AUTOCOMMIT.getValue();
    }
    if (connectionSession.getTransactionStatus().isInTransaction()) {
        statusFlags |= MySQLStatusFlag.SERVER_STATUS_IN_TRANS.getValue();
    }
    return statusFlags;
}
// With auto-commit off and no active transaction, no status bits are set.
@Test
void assertNotAutoCommitNotInTransaction() {
    assertThat(ServerStatusFlagCalculator.calculateFor(connectionSession), is(0));
}
/**
 * Fetches the latest registered schema (and its id) for a topic's key or
 * value subject from the Schema Registry.
 *
 * @param srClient schema registry client
 * @param topic Kafka topic name
 * @param isKey true for the key subject, false for the value subject
 * @return schema and id when the subject has a latest schema id, else empty
 * @throws KsqlException if fetching the schema by id fails (auth errors are
 *         rethrown by throwOnAuthError with subject context first)
 */
public static Optional<SchemaAndId> getLatestSchemaAndId(
    final SchemaRegistryClient srClient,
    final String topic,
    final boolean isKey
) {
  final String subject = KsqlConstants.getSRSubject(topic, isKey);
  return getLatestSchemaId(srClient, topic, isKey)
      .map(id -> {
        try {
          return new SchemaAndId(srClient.getSchemaById(id), id);
        } catch (final Exception e) {
          // Auth failures get a dedicated error; anything else is wrapped
          // with subject/id context for diagnosis.
          throwOnAuthError(e, subject);
          throw new KsqlException(
              "Could not get schema for subject " + subject + " and id " + id, e);
        }
      });
}
// Mocks the registry so subject "bar-key" resolves to schema id 123, then
// checks that the key-subject lookup returns the mocked Avro schema.
@Test
public void shouldReturnParsedSchemaFromSubjectKey() throws Exception {
  // Given:
  when(schemaMetadata.getId()).thenReturn(123);
  when(schemaRegistryClient.getLatestSchemaMetadata("bar-key"))
      .thenReturn(schemaMetadata);
  when(schemaRegistryClient.getSchemaById(123))
      .thenReturn(AVRO_SCHEMA);
  // When:
  final Optional<SchemaAndId> schemaAndId =
      SchemaRegistryUtil.getLatestSchemaAndId(schemaRegistryClient, "bar", true);
  // Then:
  assertThat(schemaAndId.get().getSchema(), equalTo(AVRO_SCHEMA));
}
/**
 * Serialization is intentionally unsupported for this type.
 *
 * @param out unused
 * @throws UnsupportedOperationException always
 */
@Override
public void writeData(ObjectDataOutput out) throws IOException {
    throw new UnsupportedOperationException();
}
// writeData must always reject serialization, regardless of the output arg.
@Test(expected = UnsupportedOperationException.class)
public void testWriteData() throws Exception {
    dataEvent.writeData(null);
}
/**
 * Creates an iterator over the document ids contained in the given bitmap.
 *
 * @param docIds bitmap of matching document ids
 * @param numDocs total number of documents (presumably the exclusive upper
 *        bound for valid doc ids — confirm against callers)
 */
public BitmapDocIdIterator(ImmutableRoaringBitmap docIds, int numDocs) {
  _docIds = docIds;
  _docIdIterator = docIds.getIntIterator();
  _numDocs = numDocs;
}
// Exercises interleaved advance()/next() calls over a fixed bitmap and checks
// each returned doc id, ending with EOF once the bitmap is exhausted.
@Test
public void testBitmapDocIdIterator() {
  int[] docIds = new int[]{1, 2, 4, 5, 6, 8, 12, 15, 16, 18, 20, 21};
  MutableRoaringBitmap bitmap = new MutableRoaringBitmap();
  bitmap.add(docIds);
  int numDocs = 25;
  BitmapDocIdIterator docIdIterator = new BitmapDocIdIterator(bitmap, numDocs);
  assertEquals(docIdIterator.advance(2), 2);
  assertEquals(docIdIterator.advance(3), 4);
  assertEquals(docIdIterator.next(), 5);
  assertEquals(docIdIterator.advance(6), 6);
  assertEquals(docIdIterator.next(), 8);
  assertEquals(docIdIterator.advance(13), 15);
  assertEquals(docIdIterator.advance(19), 20);
  assertEquals(docIdIterator.next(), 21);
  assertEquals(docIdIterator.next(), Constants.EOF);
}
/**
 * Returns a future producing pages for the given output buffer, starting at
 * {@code startingSequenceId} and limited to roughly {@code maxSize}.
 *
 * <p>Must not be called while holding this object's lock — getPages may
 * block or call back into shared state.
 *
 * @param bufferId target output buffer (non-null)
 * @param startingSequenceId sequence id of the first page to return
 * @param maxSize soft cap on returned data; must be at least 1 byte
 * @return future completing with the requested page batch
 */
@Override
public ListenableFuture<BufferResult> get(OutputBufferId bufferId, long startingSequenceId, DataSize maxSize)
{
    checkState(!Thread.holdsLock(this), "Can not get pages while holding a lock on this");
    requireNonNull(bufferId, "bufferId is null");
    checkArgument(maxSize.toBytes() > 0, "maxSize must be at least 1 byte");
    return getBuffer(bufferId).getPages(startingSequenceId, maxSize, Optional.of(masterBuffer));
}
// Repeated get() calls with the same starting sequence id must return the
// same pages until they are acknowledged; after acknowledgement the pages
// are gone and a re-read yields empty results.
@Test
public void testDuplicateRequests()
{
    ArbitraryOutputBuffer buffer = createArbitraryBuffer(
            createInitialEmptyOutputBuffers(ARBITRARY)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    // add three items
    for (int i = 0; i < 3; i++) {
        addPage(buffer, createPage(i));
    }
    // add a queue
    assertQueueState(buffer, 3, FIRST, 0, 0);
    // get the three elements
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
    // pages not acknowledged yet so state is the same
    assertQueueState(buffer, 0, FIRST, 3, 0);
    // get the three elements again
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
    // pages not acknowledged yet so state is the same
    assertQueueState(buffer, 0, FIRST, 3, 0);
    // acknowledge the pages
    buffer.get(FIRST, 3, sizeOfPages(10)).cancel(true);
    // attempt to get the three elements again
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, false));
    // pages are acknowledged
    assertQueueState(buffer, 0, FIRST, 0, 3);
}
/**
 * Applies the aggregate-metrics recommendation, but only for table types
 * where it is meaningful (REALTIME or HYBRID).
 *
 * @throws InvalidInputException if the input is invalid downstream
 */
@Override
public void run() throws InvalidInputException {
  String tableType = _input.getTableType();
  boolean appliesToTableType =
      tableType.equalsIgnoreCase(REALTIME) || tableType.equalsIgnoreCase(HYBRID);
  if (appliesToTableType) {
    _output.setAggregateMetrics(shouldAggregate(_input));
  }
}
// A query mixing sum() with a non-sum aggregation (max) must disable the
// aggregate-metrics recommendation.
@Test
public void testRunNonAggregateWithNonSumFunction() throws Exception {
  Set<String> metrics = ImmutableSet.of("a", "b", "c");
  InputManager input = createInput(metrics, "select sum(a), sum(b), max(c) from tableT");
  ConfigManager output = new ConfigManager();
  AggregateMetricsRule rule = new AggregateMetricsRule(input, output);
  rule.run();
  assertFalse(output.isAggregateMetrics());
}
/**
 * Parses a raw filter string into its list of criteria by splitting on the
 * criteria separator and parsing each piece.
 *
 * @param filter raw filter expression
 * @return unmodifiable list of parsed criteria (Stream.toList)
 */
public static List<Criterion> parse(String filter) {
  return StreamSupport.stream(CRITERIA_SPLITTER.split(filter).spliterator(), false)
    .map(FilterParser::parseCriterion)
    .toList();
}
// A bare key with no operator/value parses into a criterion whose operator
// and value are both null.
@Test
public void parse_filter_having_only_key() {
  List<Criterion> criterion = FilterParser.parse("isFavorite");
  assertThat(criterion)
    .extracting(Criterion::getKey, Criterion::getOperator, Criterion::getValue)
    .containsOnly(
      tuple("isFavorite", null, null));
}
/**
 * Loads the full configuration record (all columns) for the given
 * dataId/group/tenant, attaching its tags as a comma-separated string.
 *
 * @param dataId config data id
 * @param group config group
 * @param tenant tenant id; blank is normalized to the empty-string tenant
 * @return the full config info, or null when no matching row exists
 */
@Override
public ConfigAllInfo findConfigAllInfo(final String dataId, final String group, final String tenant) {
    // Blank tenant is stored as the empty string in the table.
    final String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
    ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
            TableConstant.CONFIG_INFO);
    final String sql = configInfoMapper.select(
            Arrays.asList("id", "data_id", "group_id", "tenant_id", "app_name", "content", "md5", "gmt_create",
                    "gmt_modified", "src_user", "src_ip", "c_desc", "c_use", "effect", "type", "c_schema",
                    "encrypted_data_key"),
            Arrays.asList("data_id", "group_id", "tenant_id"));
    List<String> configTagList = selectTagByConfig(dataId, group, tenant);
    ConfigAllInfo configAdvance = databaseOperate.queryOne(sql, new Object[] {dataId, group, tenantTmp},
            CONFIG_ALL_INFO_ROW_MAPPER);
    // Guard against NPE: queryOne returns null when the config row is absent,
    // while stale tags could still exist for the same coordinates.
    if (configAdvance != null && configTagList != null && !configTagList.isEmpty()) {
        // String.join replaces the previous manual StringBuilder loop; the
        // produced comma-separated value is identical.
        configAdvance.setConfigTags(String.join(",", configTagList));
    }
    return configAdvance;
}
// Mocks both the tag query and the config-row query, then verifies the
// returned object carries the mocked schema and a comma-joined tag string.
@Test
void testFindConfigAllInfo() {
    String dataId = "dataId1324";
    String group = "group23546";
    String tenant = "tenant13245";
    //mock select tags
    List<String> mockTags = Arrays.asList("tag1", "tag2", "tag3");
    when(databaseOperate.queryMany(anyString(), eq(new Object[] {dataId, group, tenant}), eq(String.class))).thenReturn(mockTags);
    String schema = "schema12345654";
    //mock select config advance
    ConfigAllInfo mockedConfig = new ConfigAllInfo();
    mockedConfig.setSchema(schema);
    when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant}), eq(CONFIG_ALL_INFO_ROW_MAPPER))).thenReturn(
            mockedConfig);
    //execute return mock obj
    ConfigAllInfo configAllInfo = embeddedConfigInfoPersistService.findConfigAllInfo(dataId, group, tenant);
    //expect check schema & tags.
    assertEquals(mockedConfig.getSchema(), configAllInfo.getSchema());
    assertEquals(String.join(",", mockTags), configAllInfo.getConfigTags());
}
/**
 * Validates each data source's pool properties and connectivity, collecting
 * validation failures per data source instead of failing fast.
 *
 * @param propsMap data source name to pool properties
 * @param expectedPrivileges privileges to verify during the connection check
 * @return map (insertion-ordered) from data source name to its validation
 *         failure; empty when every data source is valid
 */
public static Map<String, Exception> validate(final Map<String, DataSourcePoolProperties> propsMap,
                                              final Collection<PrivilegeCheckType> expectedPrivileges) {
    Map<String, Exception> invalidDataSources = new LinkedHashMap<>(propsMap.size(), 1F);
    for (Entry<String, DataSourcePoolProperties> entry : propsMap.entrySet()) {
        String dataSourceName = entry.getKey();
        DataSourcePoolProperties props = entry.getValue();
        try {
            validateProperties(dataSourceName, props);
            validateConnection(dataSourceName, props, expectedPrivileges);
        } catch (final InvalidDataSourcePoolPropertiesException ex) {
            invalidDataSources.put(dataSourceName, ex);
        }
    }
    return invalidDataSources;
}
// A well-formed Hikari properties map with no expected privileges must
// produce no validation failures.
@Test
void assertValidate() {
    assertTrue(DataSourcePoolPropertiesValidator.validate(
            Collections.singletonMap("name", new DataSourcePoolProperties(HikariDataSource.class.getName(), Collections.singletonMap("jdbcUrl", "jdbc:mock"))),
            Collections.emptySet()).isEmpty());
}
/**
 * Converts a data table to the requested type without transposing
 * (delegates with transposed = false).
 *
 * @param dataTable source table
 * @param type target type
 * @return converted value
 */
@Override
public <T> T convert(DataTable dataTable, Type type) {
    return convert(dataTable, type, false);
}
// Converting cells to an unregistered type (Date) must fail with a message
// listing both remediation options (cell transformer or default transformer).
@Test
void convert_to_lists_of_unknown_type__throws_exception__register_transformer() {
    DataTable table = parse("",
        " | 1911-03-20 |",
        " | 1916-09-13 |",
        " | 1907-11-14 |");
    CucumberDataTableException exception = assertThrows(
        CucumberDataTableException.class,
        () -> converter.convert(table, LIST_OF_LIST_OF_DATE));
    assertThat(exception.getMessage(), is("" +
        "Can't convert DataTable to List<List<java.util.Date>>.\n" +
        "Please review these problems:\n" +
        "\n" +
        " - There was no table cell transformer registered for java.util.Date.\n" +
        "   Please consider registering a table cell transformer.\n" +
        "\n" +
        " - There was no default table cell transformer registered to transform java.util.Date.\n" +
        "   Please consider registering a default table cell transformer.\n" +
        "\n" +
        "Note: Usually solving one is enough"));
}
/**
 * Converts an encoded base message id string back into the AMQP id object it
 * represents. A recognized type prefix selects the target type (UUID,
 * UnsignedLong, String, or hex-encoded Binary); a string without any known
 * prefix is returned as-is.
 *
 * <p>NOTE(review): the original IllegalArgumentException cause is dropped
 * when rethrowing — consider chaining it for better diagnostics.
 *
 * @param baseId encoded id, may be null
 * @return decoded id object, or null when {@code baseId} is null
 * @throws AmqpProtocolException if the value after a prefix cannot be parsed
 */
public Object toIdObject(String baseId) throws AmqpProtocolException {
    if (baseId == null) {
        return null;
    }
    try {
        if (hasAmqpUuidPrefix(baseId)) {
            String uuidString = strip(baseId, AMQP_UUID_PREFIX_LENGTH);
            return UUID.fromString(uuidString);
        } else if (hasAmqpUlongPrefix(baseId)) {
            String longString = strip(baseId, AMQP_ULONG_PREFIX_LENGTH);
            return UnsignedLong.valueOf(longString);
        } else if (hasAmqpStringPrefix(baseId)) {
            return strip(baseId, AMQP_STRING_PREFIX_LENGTH);
        } else if (hasAmqpBinaryPrefix(baseId)) {
            String hexString = strip(baseId, AMQP_BINARY_PREFIX_LENGTH);
            byte[] bytes = convertHexStringToBinary(hexString);
            return new Binary(bytes);
        } else {
            // We have a string without any type prefix, transmit it as-is.
            return baseId;
        }
    } catch (IllegalArgumentException e) {
        throw new AmqpProtocolException("Unable to convert ID value");
    }
}
// A binary-prefixed id whose hex payload has odd length cannot be decoded
// and must raise AmqpProtocolException.
@Test
public void testToIdObjectWithStringContainingBinaryHexThrowsWithUnevenLengthString() {
    String unevenHead = AMQPMessageIdHelper.AMQP_BINARY_PREFIX + "123";
    try {
        messageIdHelper.toIdObject(unevenHead);
        fail("expected exception was not thrown");
    } catch (AmqpProtocolException ex) {
        // expected
    }
}
/**
 * Static factory: builds an executor over the given operations that records
 * metrics into the caller-supplied (shared) counter set.
 *
 * @param operations operations to execute, in pipeline order
 * @param counters shared counter set for metric accumulation
 * @param executionStateTracker tracker for per-step execution state
 * @return a new IntrinsicMapTaskExecutor
 */
public static IntrinsicMapTaskExecutor withSharedCounterSet(
    List<Operation> operations,
    CounterSet counters,
    ExecutionStateTracker executionStateTracker) {
  return new IntrinsicMapTaskExecutor(operations, counters, executionStateTracker);
}
// An executor built from a single read operation must report that operation
// as its read operation; try-with-resources ensures cleanup.
@Test
public void testValidOperations() throws Exception {
  TestOutputReceiver receiver =
      new TestOutputReceiver(counterSet, NameContextsForTests.nameContextForTest());
  List<Operation> operations =
      Arrays.<Operation>asList(new TestReadOperation(receiver, createContext("ReadOperation")));
  ExecutionStateTracker stateTracker = ExecutionStateTracker.newForTest();
  try (IntrinsicMapTaskExecutor executor =
      IntrinsicMapTaskExecutor.withSharedCounterSet(operations, counterSet, stateTracker)) {
    Assert.assertEquals(operations.get(0), executor.getReadOperation());
  }
}
/**
 * Creates the admin executor matching the statement type, if any.
 * Supported statements: SHOW FUNCTION STATUS, SHOW PROCEDURE STATUS and
 * SHOW TABLES; anything else yields an empty Optional.
 *
 * @param sqlStatementContext context wrapping the parsed statement
 * @return matching executor, or empty when the statement is not handled here
 */
@Override
public Optional<DatabaseAdminExecutor> create(final SQLStatementContext sqlStatementContext) {
    SQLStatement sqlStatement = sqlStatementContext.getSqlStatement();
    DatabaseAdminExecutor executor = null;
    if (sqlStatement instanceof ShowFunctionStatusStatement) {
        executor = new ShowFunctionStatusExecutor((ShowFunctionStatusStatement) sqlStatement);
    } else if (sqlStatement instanceof ShowProcedureStatusStatement) {
        executor = new ShowProcedureStatusExecutor((ShowProcedureStatusStatement) sqlStatement);
    } else if (sqlStatement instanceof ShowTablesStatement) {
        executor = new ShowTablesExecutor((ShowTablesStatement) sqlStatement, sqlStatementContext.getDatabaseType());
    }
    return Optional.ofNullable(executor);
}
// A DML statement (DELETE) is not an admin statement, so no executor is created.
@Test
void assertCreateWithDMLStatement() {
    when(sqlStatementContext.getSqlStatement()).thenReturn(new MySQLDeleteStatement());
    Optional<DatabaseAdminExecutor> actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "delete from t", "", Collections.emptyList());
    assertThat(actual, is(Optional.empty()));
}
/**
 * Pushes a configuration update to each name server, either the explicit
 * list supplied or the client's known name server addresses. All servers are
 * attempted; if any responded with a non-success code, the last such failure
 * is raised after the loop completes.
 *
 * <p>NOTE(review): {@code assert response != null} is a no-op unless JVM
 * assertions are enabled — confirm invokeSync's null contract.
 *
 * @param properties config entries to apply (no-op when they serialize empty)
 * @param nameServers explicit targets, or null/empty to use the client's list
 * @param timeoutMillis per-server request timeout
 * @throws MQClientException when any server returns a non-success response
 */
public void updateNameServerConfig(final Properties properties, final List<String> nameServers, long timeoutMillis)
    throws UnsupportedEncodingException, InterruptedException, RemotingTimeoutException,
    RemotingSendRequestException, RemotingConnectException, MQClientException {
    String str = MixAll.properties2String(properties);
    if (str == null || str.length() < 1) {
        // Nothing to send.
        return;
    }
    List<String> invokeNameServers = (nameServers == null || nameServers.isEmpty()) ?
        this.remotingClient.getNameServerAddressList() : nameServers;
    if (invokeNameServers == null || invokeNameServers.isEmpty()) {
        // No known name servers: silently skip (best-effort contract).
        return;
    }
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.UPDATE_NAMESRV_CONFIG, null);
    request.setBody(str.getBytes(MixAll.DEFAULT_CHARSET));
    RemotingCommand errResponse = null;
    for (String nameServer : invokeNameServers) {
        RemotingCommand response = this.remotingClient.invokeSync(nameServer, request, timeoutMillis);
        assert response != null;
        switch (response.getCode()) {
            case ResponseCode.SUCCESS: {
                break;
            }
            default:
                // Remember the failure but keep updating the remaining servers.
                errResponse = response;
        }
    }
    if (errResponse != null) {
        throw new MQClientException(errResponse.getCode(), errResponse.getRemark());
    }
}
// Smoke test: with invokeSync mocked to succeed, updating a single name
// server's config completes without throwing.
@Test
public void testUpdateNameServerConfig() throws RemotingException, InterruptedException, MQClientException, UnsupportedEncodingException {
    mockInvokeSync();
    mqClientAPI.updateNameServerConfig(createProperties(), Collections.singletonList(defaultNsAddr), defaultTimeout);
}
/**
 * Returns (creating and caching on first use) the proxy implementing
 * {@code iface} backed by this handler's option state.
 *
 * <p>Uses double-checked locking: the unsynchronized first lookup against the
 * {@code computedProperties} snapshot is the fast path; the synchronized
 * block re-checks before validating the interface, building the proxy, and
 * publishing an updated snapshot.
 *
 * @param iface the PipelineOptions sub-interface to expose; must be an interface
 * @return cached or freshly built proxy for {@code iface}
 * @throws IllegalArgumentException if {@code iface} is not an interface
 */
<T extends PipelineOptions> T as(Class<T> iface) {
  checkNotNull(iface);
  checkArgument(iface.isInterface(), "Not an interface: %s", iface);
  T existingOption = computedProperties.interfaceToProxyCache.getInstance(iface);
  if (existingOption == null) {
    synchronized (this) {
      // double check
      existingOption = computedProperties.interfaceToProxyCache.getInstance(iface);
      if (existingOption == null) {
        Registration<T> registration =
            PipelineOptionsFactory.CACHE
                .get()
                .validateWellFormed(iface, computedProperties.knownInterfaces);
        List<PropertyDescriptor> propertyDescriptors = registration.getPropertyDescriptors();
        Class<T> proxyClass = registration.getProxyClass();
        existingOption =
            InstanceBuilder.ofType(proxyClass)
                .fromClass(proxyClass)
                .withArg(InvocationHandler.class, this)
                .build();
        computedProperties =
            computedProperties.updated(iface, existingOption, propertyDescriptors);
      }
    }
  }
  return existingOption;
}
// Reading a defaulted option must not make it appear in display data —
// only explicitly set values are displayed.
@Test
public void testDisplayDataExcludesValuesAccessedButNeverSet() {
  HasDefaults options = PipelineOptionsFactory.as(HasDefaults.class);
  assertEquals("bar", options.getFoo());
  DisplayData data = DisplayData.from(options);
  assertThat(data, not(hasDisplayItem("foo")));
}