focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Parses {@code value} for the given property and stores the parsed result.
 *
 * @return the previously stored value for the property, or {@code null} if none.
 * @throws NullPointerException if {@code value} is null
 */
public Object set(final String property, final Object value) {
  Objects.requireNonNull(value, "value");
  final Object parsed = parser.parse(property, value);
  return props.put(property, parsed);
}
// Setting an unknown property under the KSQL config prefix must be rejected.
@Test(expected = IllegalArgumentException.class)
public void shouldNotAllowUnknownKsqlConfigToBeSet() {
  realProps.set(KsqlConfig.KSQL_CONFIG_PROPERTY_PREFIX + "some.unknown.prop", "some.value");
}
@Override public KsqlSecurityContext provide(final ApiSecurityContext apiSecurityContext) { final Optional<KsqlPrincipal> principal = apiSecurityContext.getPrincipal(); final Optional<String> authHeader = apiSecurityContext.getAuthHeader(); final List<Entry<String, String>> requestHeaders = apiSecurityContext.getRequestHeaders(); // A user context is not necessary if a user context provider is not present or the user // principal is missing. If a failed authentication attempt results in a missing principle, // then the authentication plugin will have already failed the connection before calling // this method. Therefore, if we've reached this method with a missing principle, then this // must be a valid connection that does not require authentication. // For these cases, we create a default service context that the missing user can use. final boolean requiresUserContext = securityExtension != null && securityExtension.getUserContextProvider().isPresent() && principal.isPresent(); if (!requiresUserContext) { return new KsqlSecurityContext( principal, defaultServiceContextFactory.create( ksqlConfig, authHeader, schemaRegistryClientFactory, connectClientFactory, sharedClient, requestHeaders, principal) ); } return securityExtension.getUserContextProvider() .map(provider -> new KsqlSecurityContext( principal, userServiceContextFactory.create( ksqlConfig, authHeader, provider.getKafkaClientSupplier(principal.get()), provider.getSchemaRegistryClientFactory(principal.get()), connectClientFactory, sharedClient, requestHeaders, principal))) .get(); }
@Test public void shouldPassUserPrincipalToUserFactory() { // Given: when(securityExtension.getUserContextProvider()).thenReturn(Optional.of(userContextProvider)); // When: ksqlSecurityContextProvider.provide(apiSecurityContext); // Then: verify(userServiceContextFactory) .create(any(), any(), any(), any(), any(), any(), any(), eq(Optional.of(user1))); }
/**
 * Returns the adjacent (previous or next) value of {@code value} for a
 * fixed-width orderable type, or {@link Optional#empty()} when the type has
 * no defined adjacent value.
 *
 * @throws IllegalStateException if the type is not orderable
 * @throws NullPointerException  if {@code value} is null
 */
public static Optional<Object> getAdjacentValue(Type type, Object value, boolean isPrevious) {
  if (!type.isOrderable()) {
    throw new IllegalStateException("Type is not orderable: " + type);
  }
  requireNonNull(value, "value is null");

  if (type.equals(BIGINT) || type instanceof TimestampType) {
    return getBigintAdjacentValue(value, isPrevious);
  }
  if (type.equals(INTEGER) || type.equals(DATE)) {
    return getIntegerAdjacentValue(value, isPrevious);
  }
  if (type.equals(SMALLINT)) {
    return getSmallIntAdjacentValue(value, isPrevious);
  }
  if (type.equals(TINYINT)) {
    return getTinyIntAdjacentValue(value, isPrevious);
  }
  if (type.equals(DOUBLE)) {
    return getDoubleAdjacentValue(value, isPrevious);
  }
  if (type.equals(REAL)) {
    return getRealAdjacentValue(value, isPrevious);
  }
  return Optional.empty();
}
@Test
public void testPreviousValueForOtherType() {
  // Types without a defined adjacent value all yield an empty Optional.
  assertThat(getAdjacentValue(VARCHAR, "anystr", true)).isEmpty();
  assertThat(getAdjacentValue(BOOLEAN, true, true)).isEmpty();
  assertThat(getAdjacentValue(TIME, 123L, true)).isEmpty();
  assertThat(getAdjacentValue(DecimalType.createDecimalType(8, 2), 12345L, true)).isEmpty();
  assertThat(getAdjacentValue(
      DecimalType.createDecimalType(20, 2),
      encodeScaledValue(new BigDecimal(111111111111111123.45)),
      true))
      .isEmpty();
}
/**
 * Trims the input line and delegates parsing, adjusting the cursor so that it
 * points at the same logical position inside the trimmed text.
 */
@Override
public ParsedLine parse(final String line, final int cursor, final ParseContext context) {
  final String trimmed = line.trim();
  final int adjustedCursor = adjustCursor(line, trimmed, cursor);
  return delegate.parse(trimmed, adjustedCursor, context);
}
@Test
public void shouldAdjustCursorWhenInTrimmedResult() {
  // The delegate must receive the trimmed line with the cursor shifted left
  // by the amount of leading whitespace removed.
  expect(delegate.parse(anyString(), eq(2), anyObject())).andReturn(parsedLine);
  replay(delegate);

  parser.parse(" line ", 4, UNSPECIFIED);
}
@Override protected MemberData memberData(Subscription subscription) { // In ConsumerProtocolSubscription v2 or higher, we can take member data from fields directly if (subscription.generationId().isPresent()) { return new MemberData(subscription.ownedPartitions(), subscription.generationId()); } ByteBuffer buffer = subscription.userData(); Optional<Integer> encodedGeneration; if (buffer == null) { encodedGeneration = Optional.empty(); } else { try { Struct struct = COOPERATIVE_STICKY_ASSIGNOR_USER_DATA_V0.read(buffer); encodedGeneration = Optional.of(struct.getInt(GENERATION_KEY_NAME)); } catch (Exception e) { encodedGeneration = Optional.of(DEFAULT_GENERATION); } } return new MemberData(subscription.ownedPartitions(), encodedGeneration, subscription.rackId()); }
@Test
public void testDecodeGeneration() {
  // No user data and no generation id in the subscription -> empty generation.
  Subscription subscription = new Subscription(topics(topic));
  assertFalse(
      ((CooperativeStickyAssignor) assignor).memberData(subscription).generation.isPresent());
}
/** Returns the name of the underlying blob. */
@Override
public String name() {
  return blob.getName();
}
@Test
public void testName() {
  String expectedName = "test-name";
  when(blob.getName()).thenReturn(expectedName);

  assertThat(artifact.name()).isEqualTo(expectedName);
}
/**
 * Smooths both tracks of the pair; a cleaned pair is produced only when both
 * tracks survive smoothing, otherwise the result is empty.
 */
@Override
public Optional<TrackPair> clean(TrackPair trackPair) {
  requireNonNull(trackPair, "The input track pair is null");
  requireNonNull(trackPair.track1(), "trackPair.first() is null");
  requireNonNull(trackPair.track2(), "trackPair.second() is null");

  Optional<Track> cleanedFirst = trackSmoother.clean(trackPair.track1());
  Optional<Track> cleanedSecond = trackSmoother.clean(trackPair.track2());

  if (cleanedFirst.isPresent() && cleanedSecond.isPresent()) {
    return Optional.of(TrackPair.of(cleanedFirst.get(), cleanedSecond.get()));
  }
  return Optional.empty();
}
@Test
public void rejectingOneTrackProducesEmptyOptional() {
  // Only one of the two tracks survives the size-based cleaning,
  // so the pair as a whole must be rejected.
  Track<String> track1 = testTrack(15);
  Track<String> track2 = testTrack(5);
  TrackPairCleaner instance = new TrackPairCleaner(new SizeBasedCleaner(10));

  Optional<TrackPair> result = instance.clean(TrackPair.of(track1, track2));

  assertFalse(result.isPresent());
}
// Maps a Presto runtime stat key onto Spark's internal-accumulator naming scheme.
// Keys that already contain the internal prefix are truncated at the operator-stats
// separator; otherwise the key is rebuilt from its first and last dot-separated
// parts (keys with fewer than two parts are unsupported and map to "").
// NOTE(review): when the rebuilt key contains '_', toCamelCase is applied to the
// ORIGINAL prestoKey rather than prestoNewKey — looks suspicious; confirm intended.
static String getSparkInternalAccumulatorKey(final String prestoKey) { if (prestoKey.contains(SPARK_INTERNAL_ACCUMULATOR_PREFIX)) { int index = prestoKey.indexOf(PRESTO_NATIVE_OPERATOR_STATS_SEP); return prestoKey.substring(index); } String[] prestoKeyParts = prestoKey.split("\\."); int prestoKeyPartsLength = prestoKeyParts.length; if (prestoKeyPartsLength < 2) { log.debug("Fail to build spark internal key for %s format not supported", prestoKey); return ""; } String prestoNewKey = String.format("%1$s%2$s", prestoKeyParts[0], prestoKeyParts[prestoKeyPartsLength - 1]); if (prestoNewKey.contains("_")) { prestoNewKey = CaseUtils.toCamelCase(prestoKey, false, '_'); } return String.format("%1$s%2$s%3$s", SPARK_INTERNAL_ACCUMULATOR_PREFIX, PRESTO_NATIVE_OPERATOR_STATS_PREFIX, prestoNewKey); }
@Test
public void getSparkInternalAccumulatorKeyUnsupportedTest() {
  // A key without a '.' separator is unsupported and maps to the empty string.
  String prestoKey = "UnknownFormat";

  String actual = getSparkInternalAccumulatorKey(prestoKey);

  assertEquals(actual, "");
}
@Override public Integer doCall() throws Exception { int code = execute(); if (code == 0) { // In case of successful execution, we create the working directory if it does not exist to help the tooling // know that it is a Camel JBang project createWorkingDirectoryIfAbsent(); } return code; }
@Test
void initBasicYaml() throws Exception {
  Init initCommand = new Init(new CamelJBangMain());
  CommandLine.populateCommand(initCommand, "my.camel.yaml");

  File f = new File("my.camel.yaml");
  try {
    int exit = initCommand.doCall();
    assertEquals(0, exit);
    assertTrue(f.exists(), "Yaml file not created: " + f);
  } finally {
    // Clean up even when an assertion fails, so reruns start from a clean slate
    // (the original deleted the file only on the success path).
    f.delete();
  }
}
/** Adds two BIGINT constants; throws {@link ArithmeticException} on long overflow. */
@ConstantFunction(name = "add", argTypes = {BIGINT, BIGINT}, returnType = BIGINT, isMonotonic = true)
public static ConstantOperator addBigInt(ConstantOperator first, ConstantOperator second) {
  return ConstantOperator.createBigint(Math.addExact(first.getBigint(), second.getBigint()));
}
@Test
public void addBigInt() {
  // Adding the 100 constant to itself yields 200.
  assertEquals(200, ScalarOperatorFunctions.addBigInt(O_BI_100, O_BI_100).getBigint());
}
/**
 * Creates a change-stream source from the given connector properties, using the
 * default reconnect behavior.
 */
@Nonnull
public static <T> StreamSource<T> connect(
    @Nonnull Properties properties,
    @Nonnull FunctionEx<SourceRecord, T> projectionFn) {
  return connect(properties, projectionFn, DEFAULT_RECONNECT_BEHAVIOR);
}
@Test
public void should_create_source_with_minimal_properties() {
  Properties minimalProperties = new Properties();
  minimalProperties.setProperty("name", "some-name");
  minimalProperties.setProperty("connector.class", "some-name");

  StreamSource<SourceRecord> source = connect(minimalProperties, rec -> rec);

  assertThat(source).isNotNull();
}
/**
 * Removes every element of {@code c} from the backing map.
 *
 * @return {@code true} if at least one element was removed
 */
@Override
public boolean removeAll(Collection<?> c) {
  // A plain loop replaces the side-effecting stream().map(...) pipeline:
  // mutating state inside an intermediate stream operation is an anti-pattern,
  // and the loop makes it explicit that every element is attempted.
  boolean removed = false;
  for (Object e : c) {
    removed |= map.remove(e) != null;
  }
  return removed;
}
@Test
public void testRemoveAll() {
  ExtendedSet<TestValue> set = new ExtendedSet<>(Maps.newConcurrentMap());
  TestValue first = new TestValue("foo", 1);
  TestValue second = new TestValue("goo", 2);
  TestValue third = new TestValue("shoo", 3);
  assertTrue(set.add(first));
  assertTrue(set.add(second));
  assertTrue(set.add(third));

  // The removal collection holds two present values and one absent value.
  ArrayList<TestValue> toRemove = new ArrayList<TestValue>();
  toRemove.add(second);
  toRemove.add(third);
  toRemove.add(new TestValue("who", 4));

  assertTrue(set.removeAll(toRemove));
  assertFalse(set.contains(second));
  assertFalse(set.contains(third));
}
/** Routes asynchronous errors into the fatal-error handling path. */
@Override
public void onError(Throwable exception) {
  onFatalError(exception);
}
@Test
void testOnError() throws Exception {
  new Context() {
    {
      final Throwable fatalError = new Throwable("Testing fatal error");
      runTest(
          () -> {
            // Raise the error on the resource manager's main thread ...
            runInMainThread(() -> getResourceManager().onError(fatalError));

            // ... and expect the fatal-error handler to receive that exact instance.
            final Throwable reportedError =
                getFatalErrorHandler().getErrorFuture().get(TIMEOUT_SEC, TimeUnit.SECONDS);
            assertThat(reportedError).isSameAs(fatalError);
          });
    }
  };
}
/**
 * Builds the common grouping-key struct type over the fields that are active
 * in all of the given partition specs.
 */
public static StructType groupingKeyType(Schema schema, Collection<PartitionSpec> specs) {
  return buildPartitionProjectionType("grouping key", specs, commonActiveFieldIds(schema, specs));
}
@Test
public void testGroupingKeyTypeWithDroppedPartitionFieldInV2Tables() {
  TestTables.TestTable table = TestTables.create(
      tableDir, "test", SCHEMA, BY_DATA_CATEGORY_BUCKET_SPEC, V2_FORMAT_VERSION);

  // Drop the bucket partition field, leaving two specs on the table.
  table.updateSpec().removeField(Expressions.bucket("category", 8)).commit();
  assertThat(table.specs()).hasSize(2);

  // The dropped field must not appear in the computed grouping key type.
  StructType expectedType =
      StructType.of(NestedField.optional(1000, "data", Types.StringType.get()));
  StructType actualType = Partitioning.groupingKeyType(table.schema(), table.specs().values());
  assertThat(actualType).isEqualTo(expectedType);
}
// Lazily resolves and caches a per-method sampler. Methods with no sampler are
// recorded with a NULL_SENTINEL entry so the (possibly expensive) samplerOfMethod
// lookup runs at most once per method. The putIfAbsent call resolves concurrent
// creation races: the losing thread adopts the winner's sampler so all threads
// count against the same instance. Statement order is significant here — do not
// reorder the cache reads/writes.
@Override public @Nullable Boolean trySample(@Nullable M method) { if (method == null) return null; Sampler sampler = methodToSamplers.get(method); if (sampler == NULL_SENTINEL) return null; if (sampler != null) return sampler.isSampled(0L); // counting sampler ignores the input sampler = samplerOfMethod(method); if (sampler == null) { methodToSamplers.put(method, NULL_SENTINEL); return null; } Sampler previousSampler = methodToSamplers.putIfAbsent(method, sampler); if (previousSampler != null) sampler = previousSampler; // lost race, use the existing counter return sampler.isSampled(0L); // counting sampler ignores the input }
@Test
void samplerLoadsLazy() {
  // The cache starts empty and grows by one entry per distinct traced method.
  assertThat(declarativeSampler.methodToSamplers).isEmpty();

  declarativeSampler.trySample(traced(1.0f, 0, true));
  assertThat(declarativeSampler.methodToSamplers).hasSize(1);

  declarativeSampler.trySample(traced(0.0f, 0, true));
  assertThat(declarativeSampler.methodToSamplers).hasSize(2);
}
/**
 * Parses the rule's JSON handle into a {@code KeyAuthRuleHandle} and caches it
 * under the rule's cache key. Rules without a handle are ignored.
 */
@Override
public void handlerRule(final RuleData ruleData) {
  Optional.ofNullable(ruleData.getHandle()).ifPresent(handleJson -> {
    final KeyAuthRuleHandle handle =
        GsonUtils.getInstance().fromJson(handleJson, KeyAuthRuleHandle.class);
    CACHED_HANDLE.get().cachedHandle(CacheKeyUtils.INST.getKey(ruleData), handle);
  });
}
@Test
public void testHandlerRule() {
  RuleData ruleData = new RuleData();
  ruleData.setId("keyAuthRule");
  ruleData.setSelectorId("keyAuth");
  ruleData.setHandle(handleString);

  keyAuthPluginDataHandler.handlerRule(ruleData);

  // The parsed handle must be retrievable from the cache under the rule's key.
  KeyAuthRuleHandle cachedHandle = KeyAuthPluginDataHandler.CACHED_HANDLE.get()
      .obtainHandle(CacheKeyUtils.INST.getKey(ruleData));
  assertTrue(StringUtils.equals(cachedHandle.getKeyName(), "apiKey"));
  assertTrue(StringUtils.equals(cachedHandle.getKey(), "key"));
}
/** Returns the data table type backing this definition. */
@Override
public DataTableType dataTableType() {
  return dataTableType;
}
@Test
void can_define_table_cell_transformer_with_empty_pattern() throws NoSuchMethodException {
  Method method = JavaDataTableTypeDefinitionTest.class.getMethod(
      "converts_table_cell_to_string", String.class);
  JavaDataTableTypeDefinition definition =
      new JavaDataTableTypeDefinition(method, lookup, new String[0]);

  // Empty cells are passed through the transformer as "[empty]".
  assertThat(definition.dataTableType().transform(emptyTable.cells()), is(asList(
      asList("converts_table_cell_to_string=a", "converts_table_cell_to_string=[empty]"),
      asList("converts_table_cell_to_string=[empty]", "converts_table_cell_to_string=d"))));
}
@SuppressWarnings({"deprecation", "checkstyle:linelength"}) public void convertSiteProperties(Configuration conf, Configuration yarnSiteConfig, boolean drfUsed, boolean enableAsyncScheduler, boolean userPercentage, FSConfigToCSConfigConverterParams.PreemptionMode preemptionMode) { yarnSiteConfig.set(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class.getCanonicalName()); if (conf.getBoolean( FairSchedulerConfiguration.CONTINUOUS_SCHEDULING_ENABLED, FairSchedulerConfiguration.DEFAULT_CONTINUOUS_SCHEDULING_ENABLED)) { yarnSiteConfig.setBoolean( CapacitySchedulerConfiguration.SCHEDULE_ASYNCHRONOUSLY_ENABLE, true); int interval = conf.getInt( FairSchedulerConfiguration.CONTINUOUS_SCHEDULING_SLEEP_MS, FairSchedulerConfiguration.DEFAULT_CONTINUOUS_SCHEDULING_SLEEP_MS); yarnSiteConfig.setInt(PREFIX + "schedule-asynchronously.scheduling-interval-ms", interval); } // This should be always true to trigger cs auto // refresh queue. yarnSiteConfig.setBoolean( YarnConfiguration.RM_SCHEDULER_ENABLE_MONITORS, true); if (conf.getBoolean(FairSchedulerConfiguration.PREEMPTION, FairSchedulerConfiguration.DEFAULT_PREEMPTION)) { preemptionEnabled = true; String policies = addMonitorPolicy(ProportionalCapacityPreemptionPolicy. 
class.getCanonicalName(), yarnSiteConfig); yarnSiteConfig.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES, policies); int waitTimeBeforeKill = conf.getInt( FairSchedulerConfiguration.WAIT_TIME_BEFORE_KILL, FairSchedulerConfiguration.DEFAULT_WAIT_TIME_BEFORE_KILL); yarnSiteConfig.setInt( CapacitySchedulerConfiguration.PREEMPTION_WAIT_TIME_BEFORE_KILL, waitTimeBeforeKill); long waitBeforeNextStarvationCheck = conf.getLong( FairSchedulerConfiguration.WAIT_TIME_BEFORE_NEXT_STARVATION_CHECK_MS, FairSchedulerConfiguration.DEFAULT_WAIT_TIME_BEFORE_NEXT_STARVATION_CHECK_MS); yarnSiteConfig.setLong( CapacitySchedulerConfiguration.PREEMPTION_MONITORING_INTERVAL, waitBeforeNextStarvationCheck); } else { if (preemptionMode == FSConfigToCSConfigConverterParams.PreemptionMode.NO_POLICY) { yarnSiteConfig.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES, ""); } } // For auto created queue's auto deletion. if (!userPercentage) { String policies = addMonitorPolicy(AutoCreatedQueueDeletionPolicy. class.getCanonicalName(), yarnSiteConfig); yarnSiteConfig.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES, policies); // Set the expired for deletion interval to 10s, consistent with fs. yarnSiteConfig.setInt(CapacitySchedulerConfiguration. AUTO_CREATE_CHILD_QUEUE_EXPIRED_TIME, 10); } if (conf.getBoolean(FairSchedulerConfiguration.ASSIGN_MULTIPLE, FairSchedulerConfiguration.DEFAULT_ASSIGN_MULTIPLE)) { yarnSiteConfig.setBoolean( CapacitySchedulerConfiguration.ASSIGN_MULTIPLE_ENABLED, true); } else { yarnSiteConfig.setBoolean( CapacitySchedulerConfiguration.ASSIGN_MULTIPLE_ENABLED, false); } // Make auto cs conf refresh enabled. 
yarnSiteConfig.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES, addMonitorPolicy(QueueConfigurationAutoRefreshPolicy .class.getCanonicalName(), yarnSiteConfig)); int maxAssign = conf.getInt(FairSchedulerConfiguration.MAX_ASSIGN, FairSchedulerConfiguration.DEFAULT_MAX_ASSIGN); if (maxAssign != FairSchedulerConfiguration.DEFAULT_MAX_ASSIGN) { yarnSiteConfig.setInt( CapacitySchedulerConfiguration.MAX_ASSIGN_PER_HEARTBEAT, maxAssign); } float localityThresholdNode = conf.getFloat( FairSchedulerConfiguration.LOCALITY_THRESHOLD_NODE, FairSchedulerConfiguration.DEFAULT_LOCALITY_THRESHOLD_NODE); if (localityThresholdNode != FairSchedulerConfiguration.DEFAULT_LOCALITY_THRESHOLD_NODE) { yarnSiteConfig.setFloat(CapacitySchedulerConfiguration.NODE_LOCALITY_DELAY, localityThresholdNode); } float localityThresholdRack = conf.getFloat( FairSchedulerConfiguration.LOCALITY_THRESHOLD_RACK, FairSchedulerConfiguration.DEFAULT_LOCALITY_THRESHOLD_RACK); if (localityThresholdRack != FairSchedulerConfiguration.DEFAULT_LOCALITY_THRESHOLD_RACK) { yarnSiteConfig.setFloat( CapacitySchedulerConfiguration.RACK_LOCALITY_ADDITIONAL_DELAY, localityThresholdRack); } if (conf.getBoolean(FairSchedulerConfiguration.SIZE_BASED_WEIGHT, FairSchedulerConfiguration.DEFAULT_SIZE_BASED_WEIGHT)) { sizeBasedWeight = true; } if (drfUsed) { yarnSiteConfig.set( CapacitySchedulerConfiguration.RESOURCE_CALCULATOR_CLASS, DominantResourceCalculator.class.getCanonicalName()); } if (enableAsyncScheduler) { yarnSiteConfig.setBoolean(CapacitySchedulerConfiguration.SCHEDULE_ASYNCHRONOUSLY_ENABLE, true); } }
@Test
public void testSiteDrfDisabledConversion() {
  converter.convertSiteProperties(yarnConfig, yarnConvertedConfig,
      false, false, false, null);

  // Without DRF, the converted config keeps the default resource calculator.
  assertEquals("Resource calculator type", DefaultResourceCalculator.class,
      yarnConvertedConfig.getClass(
          CapacitySchedulerConfiguration.RESOURCE_CALCULATOR_CLASS,
          CapacitySchedulerConfiguration.DEFAULT_RESOURCE_CALCULATOR_CLASS));
}
/**
 * Creates a project bound to a Bitbucket Cloud repository: resolves the caller's
 * personal access token and workspace, fetches the repository, creates the
 * project on the repository's main branch, then persists the ALM binding.
 *
 * @throws IllegalArgumentException if the ALM setting has no workspace configured
 */
@Override
public ComponentCreationData createProjectAndBindToDevOpsPlatform(DbSession dbSession, CreationMethod creationMethod, Boolean monorepo,
  @Nullable String projectKey, @Nullable String projectName) {
  String pat = findPersonalAccessTokenOrThrow(dbSession, almSettingDto);
  String workspace = ofNullable(almSettingDto.getAppId())
    .orElseThrow(() -> new IllegalArgumentException(
      String.format("workspace for alm setting %s is missing", almSettingDto.getKey())));

  Repository repo = bitbucketCloudRestClient.getRepo(pat, workspace, devOpsProjectDescriptor.repositoryIdentifier());

  ComponentCreationData creationData = projectCreator.createProject(
    dbSession,
    getProjectKey(workspace, projectKey, repo),
    getProjectName(projectName, repo),
    repo.getMainBranch().getName(),
    creationMethod);
  ProjectDto projectDto = Optional.ofNullable(creationData.projectDto()).orElseThrow();

  createProjectAlmSettingDto(dbSession, repo.getSlug(), projectDto, almSettingDto, monorepo);
  return creationData;
}
@Test
void createProjectAndBindToDevOpsPlatform_whenRepoFoundOnBitbucket_successfullyCreatesProject() {
  mockPatForUser();
  when(almSettingDto.getAppId()).thenReturn(WORKSPACE);
  mockBitbucketCloudRepository();
  mockProjectCreation("projectKey", "projectName");

  underTest.createProjectAndBindToDevOpsPlatform(
      mock(DbSession.class), CreationMethod.ALM_IMPORT_API, true, "projectKey", "projectName");

  // The persisted ALM binding must reference the setting, repo slug and project.
  ArgumentCaptor<ProjectAlmSettingDto> captor = ArgumentCaptor.forClass(ProjectAlmSettingDto.class);
  verify(dbClient.projectAlmSettingDao()).insertOrUpdate(
      any(), captor.capture(), eq(ALM_SETTING_KEY), eq("projectName"), eq("projectKey"));

  ProjectAlmSettingDto saved = captor.getValue();
  assertThat(saved.getAlmSettingUuid()).isEqualTo(ALM_SETTING_UUID);
  assertThat(saved.getAlmRepo()).isEqualTo(REPOSITORY_SLUG);
  assertThat(saved.getProjectUuid()).isEqualTo(PROJECT_UUID);
  assertThat(saved.getMonorepo()).isTrue();
}
public static FacilitySet from(Properties props) { Set<Facility> set = new HashSet<>(); for (Facility facility : Facility.values()) { String onOrOff = props.getProperty( facility.name(), "OFF" //default value when facility is missing ); if (onOrOff.equalsIgnoreCase("ON")) { set.add(facility); } else if (onOrOff.equalsIgnoreCase("OFF")) { //do nothing } else { throw new IllegalArgumentException( "The property " + onOrOff + " for facility " + facility + " is invalid. The only valid values are \"ON\" or \"OFF\"." ); } } return new FacilitySet(set); }
@Test
public void testInvalidValueInFacilitySetFile() throws Exception {
  // A facility-set properties file containing a bad value must be rejected.
  String resourceName = "badFacilitySet.properties";
  Optional<File> file = FileUtils.getResourceAsFile(FacilitySetTest.class, resourceName);
  assertTrue(file.isPresent());

  assertThrows(
      IllegalArgumentException.class,
      () -> FacilitySet.from(file.get().getAbsolutePath()),
      "Creating a facility set from an improperly formatted file should fail"
  );
}
/**
 * Encodes a Kubernetes network as JSON. Required fields (id, name, cidr) are
 * always emitted; type, segment id and MTU only when present.
 *
 * @throws NullPointerException if {@code network} is null
 */
@Override
public ObjectNode encode(K8sNetwork network, CodecContext context) {
  checkNotNull(network, "Kubernetes network cannot be null");

  ObjectNode result = context.mapper().createObjectNode()
      .put(NETWORK_ID, network.networkId())
      .put(NAME, network.name())
      .put(CIDR, network.cidr());

  if (network.type() != null) {
    result.put(TYPE, network.type().name());
  }
  if (network.segmentId() != null) {
    result.put(SEGMENT_ID, network.segmentId());
  }
  if (network.mtu() != null) {
    result.put(MTU, network.mtu());
  }
  return result;
}
@Test
public void testK8sNetworkEncode() {
  // A fully populated network must round-trip through the codec's matcher.
  K8sNetwork network = DefaultK8sNetwork.builder()
      .networkId("network-1")
      .name("network-1")
      .segmentId("1")
      .type(K8sNetwork.Type.VXLAN)
      .cidr("10.10.0.0/24")
      .mtu(1500)
      .build();

  ObjectNode encoded = k8sNetworkCodec.encode(network, context);
  assertThat(encoded, matchesK8sNetwork(network));
}
/** Parses command-line arguments supplied as a varargs array. */
public RuntimeOptionsBuilder parse(String... args) {
  return parse(Arrays.asList(args));
}
@Test
void assigns_wip_short() {
  // The short flag "-w" toggles work-in-progress mode.
  RuntimeOptions options = parser.parse("-w").build();

  assertThat(options.isWip(), is(true));
}
@Override public Reader<E> createReader( Configuration config, FSDataInputStream stream, long fileLen, long splitEnd) throws IOException { // current version does not support splitting. checkNotSplit(fileLen, splitEnd); return new AvroParquetRecordReader<E>( AvroParquetReader.<E>builder(new ParquetInputFile(stream, fileLen)) .withDataModel(getDataModel()) .withConf(HadoopUtils.getHadoopConfiguration(config)) .build()); }
@Test
void testReflectReadFromGenericRecords() throws IOException {
  StreamFormat.Reader<User> reader = createReader(
      AvroParquetReaders.forReflectRecord(User.class),
      new Configuration(),
      userPath,
      0,
      userPath.getFileSystem().getFileStatus(userPath).getLen());

  // Records must come back in the same order they were written.
  for (GenericRecord expected : userRecords) {
    assertUserEquals(reader.read(), expected);
  }
}
public static String sanitizeString(@NonNull String input) { // replace the guava method // return CharMatcher.JAVA_ISO_CONTROL.and(CharMatcher.anyOf("\r\n\t")).removeFrom(input); if (StringUtils.isEmpty(input)) { return null; } StringCharacterIterator iter = new StringCharacterIterator(input); StringBuilder sb = new StringBuilder(input.length()); for(char c = iter.first(); c != CharacterIterator.DONE; c = iter.next()) { boolean match = Character.isISOControl(c); switch (c) { case '\r': case '\n': case '\t': if (match) continue; default: sb.append(c); } } return sb.toString(); }
@Test
public void sanitize() throws Exception {
  // CR, LF and TAB are stripped; all other characters survive unchanged.
  Assert.assertEquals("hello vivek :)", JSON.sanitizeString("hello\r vivek\n\t :)"));
}
/**
 * Validates an artifact's MD5 against the stored checksum file and reports the
 * outcome to the publisher: checksum file missing, entry missing for the path,
 * match, or mismatch.
 */
public void validate(String effectivePath, String artifactMD5,
                     ChecksumValidationPublisher checksumValidationPublisher) {
  if (artifactMd5Checksums == null) {
    checksumValidationPublisher.md5ChecksumFileNotFound();
    return;
  }

  String expectedMd5 = artifactMd5Checksums.md5For(effectivePath);
  if (StringUtils.isBlank(expectedMd5)) {
    checksumValidationPublisher.md5NotFoundFor(effectivePath);
    return;
  }

  if (expectedMd5.equals(artifactMD5)) {
    checksumValidationPublisher.md5Match(effectivePath);
  } else {
    checksumValidationPublisher.md5Mismatch(effectivePath);
  }
}
@Test
public void shouldCallbackWhenMd5IsNotFound() throws IOException {
  // No stored checksum for the path -> the "not found" callback must fire.
  when(checksums.md5For("path")).thenReturn(null);

  final ByteArrayInputStream stream = new ByteArrayInputStream("foo".getBytes());
  new ChecksumValidator(checksums)
      .validate("path", CachedDigestUtils.md5Hex(stream), checksumValidationPublisher);

  verify(checksumValidationPublisher).md5NotFoundFor("path");
}
static void verifyDeterministic(ProtoCoder<?> coder) throws NonDeterministicException { Class<? extends Message> message = coder.getMessageType(); ExtensionRegistry registry = coder.getExtensionRegistry(); Set<Descriptor> descriptors = getRecursiveDescriptorsForClass(message, registry); for (Descriptor d : descriptors) { for (FieldDescriptor fd : d.getFields()) { // If there is a transitively reachable Protocol Buffers map field, then this object cannot // be encoded deterministically. if (fd.isMapField()) { String reason = String.format( "Protocol Buffers message %s transitively includes Map field %s (from file %s)." + " Maps cannot be deterministically encoded.", message.getName(), fd.getFullName(), fd.getFile().getFullName()); throw new NonDeterministicException(coder, reason); } } } }
@Test public void testDurationIsDeterministic() throws NonDeterministicException { // Duration can be encoded deterministically. verifyDeterministic(ProtoCoder.of(Duration.class)); }
/**
 * Records a user login on the OpenID session: looks up the account's BSN,
 * stores level/state/account/BSN on the session and persists it.
 *
 * @return a status response with status "OK"
 */
public StatusResponse userLogin(OpenIdSession session, Long accountId,
                                String authenticationLevel, String authenticationStatus) {
  var bsnResponse = adClient.getBsn(accountId);

  session.setAuthenticationLevel(authenticationLevel);
  session.setAuthenticationState(authenticationStatus);
  session.setAccountId(accountId);
  session.setBsn(bsnResponse.get("bsn"));
  openIdRepository.save(session);

  return new StatusResponse("OK");
}
@Test
void userLoginTest() {
  OpenIdSession session = new OpenIdSession();
  when(adClient.getBsn(1L)).thenReturn(Map.of("bsn", "1"));

  StatusResponse response = openIdService.userLogin(session, 1L, "10", "authenticated");

  assertEquals("OK", response.getStatus());
}
/**
 * Resolves the (possibly SpEL) fallback method name and invokes either the
 * primary function directly or the fallback-decorated function. An empty or
 * unresolvable fallback name silently falls through to the primary call.
 */
public Object execute(ProceedingJoinPoint proceedingJoinPoint, Method method,
                      String fallbackMethodValue, CheckedSupplier<Object> primaryFunction)
    throws Throwable {
  String fallbackMethodName =
      spelResolver.resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue);

  FallbackMethod fallbackMethod = null;
  if (StringUtils.hasLength(fallbackMethodName)) {
    try {
      fallbackMethod = FallbackMethod.create(
          fallbackMethodName,
          method,
          proceedingJoinPoint.getArgs(),
          proceedingJoinPoint.getTarget(),
          proceedingJoinPoint.getThis());
    } catch (NoSuchMethodException ex) {
      logger.warn("No fallback method match found", ex);
    }
  }

  if (fallbackMethod == null) {
    return primaryFunction.get();
  }
  return fallbackDecorators.decorate(fallbackMethod, primaryFunction).get();
}
@Test
public void testPrimaryMethodExecutionWithoutFallback() throws Throwable {
  Method method = this.getClass().getMethod("getName", String.class);
  final CheckedSupplier<Object> primaryFunction = () -> getName("Name");
  final String fallbackMethodValue = "";
  when(proceedingJoinPoint.getArgs()).thenReturn(new Object[]{});
  when(proceedingJoinPoint.getTarget()).thenReturn(this);
  when(spelResolver.resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue))
      .thenReturn(fallbackMethodValue);
  // Bug fix: removed the dead `when(fallbackDecorators.decorate(...))` stub — with an
  // empty fallback name the decorator is never invoked (verified below), so the stub
  // contradicted the verification and trips Mockito's strict unnecessary-stubbing check.

  final Object result =
      fallbackExecutor.execute(proceedingJoinPoint, method, fallbackMethodValue, primaryFunction);

  assertThat(result).isEqualTo("Name");
  verify(spelResolver, times(1)).resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue);
  verify(fallbackDecorators, never()).decorate(any(), any());
}
/**
 * Returns the value of the given tag key, or {@code null} if absent.
 *
 * @throws NullPointerException     if key is null
 * @throws IllegalArgumentException if key is empty
 */
@Nullable
public String tag(String key) {
  if (key == null) throw new NullPointerException("key == null");
  if (key.isEmpty()) throw new IllegalArgumentException("key is empty");
  // Tags are stored pairwise in a flat array: [key0, value0, key1, value1, ...]
  for (int i = 0, limit = tagCount * 2; i < limit; i += 2) {
    if (key.equals(tags[i])) {
      return (String) tags[i + 1];
    }
  }
  return null;
}
@Test
void accessorScansTags() {
  MutableSpan span = new MutableSpan();
  span.tag("http.method", "GET");
  span.tag("error", "500");
  span.tag("http.path", "/api");

  // Lookup scans the pairwise tag storage; unknown keys yield null.
  assertThat(span.tag("error")).isEqualTo("500");
  assertThat(span.tag("whoops")).isNull();
}
/** Returns a StringSetResult backed by an immutable copy of the given set. */
public static StringSetResult create(Set<String> s) {
  return new AutoValue_StringSetResult(ImmutableSet.copyOf(s));
}
@Test public void create() { // Test that create makes an immutable copy of the given set HashSet<String> modifiableSet = new HashSet<>(Arrays.asList("ab", "cd")); StringSetResult stringSetResult = StringSetResult.create(modifiableSet); // change the initial set. modifiableSet.add("ef"); SetView<String> difference = Sets.difference(modifiableSet, stringSetResult.getStringSet()); assertEquals(1, difference.size()); assertEquals("ef", difference.iterator().next()); assertTrue(Sets.difference(stringSetResult.getStringSet(), modifiableSet).isEmpty()); }
/**
 * Returns the default coder for {@code SdkHttpMetadata}.
 * (The {@code true} flag is forwarded to the coder's constructor; presumably it
 * enables header encoding — confirm against SdkHttpMetadataCoder.)
 */
public static Coder<SdkHttpMetadata> sdkHttpMetadata() {
  return new SdkHttpMetadataCoder(true);
}
@Test
public void testSdkHttpMetadataDecodeEncodeEquals() throws Exception {
  SdkHttpMetadata original = buildSdkHttpMetadata();

  // Round-trip through the coder and compare the observable fields.
  SdkHttpMetadata roundTripped = CoderUtils.clone(AwsCoders.sdkHttpMetadata(), original);

  assertThat(roundTripped.getHttpStatusCode(), equalTo(original.getHttpStatusCode()));
  assertThat(roundTripped.getHttpHeaders(), equalTo(original.getHttpHeaders()));
}
@VisibleForTesting // CHECKSTYLE_RULES.OFF: ParameterNumberCheck static ClientOptions createClientOptions( // CHECKSTYLE_RULES.ON: ParameterNumberCheck final String ksqlServerUrl, final String username, final String password, final String sslTrustStoreLocation, final String sslTrustStorePassword, final String sslKeystoreLocation, final String sslKeystorePassword, final String sslKeyPassword, final String sslKeyAlias, final boolean useAlpn, final boolean verifyHost, final Map<String, String> requestHeaders ) { final URL url; try { url = new URL(ksqlServerUrl); } catch (MalformedURLException e) { throw new MigrationException("Invalid ksql server URL: " + ksqlServerUrl); } final ClientOptions options = ClientOptions .create() .setHost(url.getHost()) .setPort(url.getPort()); if (!(username == null || username.isEmpty()) || !(password == null || password.isEmpty())) { options.setBasicAuthCredentials(username, password); } final boolean useTls = ksqlServerUrl.trim().toLowerCase().startsWith("https://"); options.setUseTls(useTls); if (useTls) { options.setTrustStore(sslTrustStoreLocation); options.setTrustStorePassword(sslTrustStorePassword); options.setKeyStore(sslKeystoreLocation); options.setKeyStorePassword(sslKeystorePassword); options.setKeyPassword(sslKeyPassword); options.setKeyAlias(sslKeyAlias); options.setUseAlpn(useAlpn); options.setVerifyHost(verifyHost); } if (requestHeaders != null) { options.setRequestHeaders(requestHeaders); } return options; }
@Test public void shouldCreateNonTlsClientOptions() { // Given: final ClientOptions clientOptions = createClientOptions(NON_TLS_URL, "user", "pass", null, "", null, null, "", "foo", false, true, null); // Then: assertThat(clientOptions.isUseTls(), is(false)); assertThat(clientOptions.getBasicAuthUsername(), is("user")); assertThat(clientOptions.getBasicAuthPassword(), is("pass")); assertThat(clientOptions.getTrustStore(), is("")); assertThat(clientOptions.getTrustStorePassword(), is("")); assertThat(clientOptions.getKeyStore(), is("")); assertThat(clientOptions.getKeyStorePassword(), is("")); assertThat(clientOptions.getKeyPassword(), is("")); assertThat(clientOptions.getKeyAlias(), is("")); assertThat(clientOptions.isUseAlpn(), is(false)); assertThat(clientOptions.isVerifyHost(), is(true)); assertThat(clientOptions.getRequestHeaders(), is(Collections.emptyMap())); }
/**
 * Forwards a remoting request to the broker named in the request's ext fields
 * (SEND_MESSAGE_V2 uses a compact field name). Requests missing the broker name
 * are rejected with VERSION_NOT_SUPPORTED. Oneway requests are fired and return
 * null; other requests complete asynchronously, writing the response or error
 * back on the channel — so this method itself always returns null on success.
 */
protected RemotingCommand request(ChannelHandlerContext ctx, RemotingCommand request,
                                  ProxyContext context, long timeoutMillis) throws Exception {
  final String brokerFieldKey = request.getCode() == RequestCode.SEND_MESSAGE_V2
      ? BROKER_NAME_FIELD_FOR_SEND_MESSAGE_V2
      : BROKER_NAME_FIELD;
  final String brokerName = request.getExtFields().get(brokerFieldKey);
  if (brokerName == null) {
    return RemotingCommand.buildErrorResponse(ResponseCode.VERSION_NOT_SUPPORTED,
        "Request doesn't have field bname");
  }

  if (request.isOnewayRPC()) {
    messagingProcessor.requestOneway(context, brokerName, request, timeoutMillis);
    return null;
  }

  messagingProcessor.request(context, brokerName, request, timeoutMillis)
      .thenAccept(r -> writeResponse(ctx, context, request, r))
      .exceptionally(t -> {
        writeErrResponse(ctx, context, request, t);
        return null;
      });
  return null;
}
// A request without a broker-name ext field must be rejected synchronously with
// VERSION_NOT_SUPPORTED, and nothing may be written to the channel.
@Test
public void testRequestInvalid() throws Exception {
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.PULL_MESSAGE, null);
    request.addExtField("test", "test");
    RemotingCommand remotingCommand = remotingActivity.request(ctx, request, null, 10000);
    assertThat(remotingCommand.getCode()).isEqualTo(ResponseCode.VERSION_NOT_SUPPORTED);
    verify(ctx, never()).writeAndFlush(any());
}
/**
 * Tests whether a VFS connection is valid, optionally including its root path.
 *
 * <p>Fails fast when the details have no name (needed to form a temporary PVFS URI) or when
 * the provider itself rejects the connection. Root-path checks are skipped when unsupported
 * or explicitly ignored via {@code options}; an unresolvable root path fails the test, a
 * missing one is acceptable only when the root path is not required, and otherwise the
 * resolved root must exist and be a folder.
 */
public <T extends VFSConnectionDetails> boolean test(
  @NonNull ConnectionManager manager,
  @NonNull T details,
  @Nullable VFSConnectionTestOptions options ) throws KettleException {

  if ( options == null ) {
    options = new VFSConnectionTestOptions();
  }

  // The specified connection details may not exist saved in the meta-store,
  // but still needs to have a non-empty name in it, to be able to form a temporary PVFS URI.
  if ( StringUtils.isEmpty( details.getName() ) ) {
    return false;
  }

  VFSConnectionProvider<T> provider = getExistingProvider( manager, details );
  if ( !provider.test( details ) ) {
    return false;
  }

  if ( !details.isRootPathSupported() || options.isRootPathIgnored() ) {
    return true;
  }

  String resolvedRootPath;
  try {
    resolvedRootPath = getResolvedRootPath( details );
  } catch ( KettleException e ) {
    // Invalid root path.
    return false;
  }

  if ( resolvedRootPath == null ) {
    return !details.isRootPathRequired();
  }

  // Ensure that root path exists and is a folder.
  return isFolder( getConnectionRootProviderFileObject( manager, provider, details ) );
}
// When the connection does not support root paths, an invalid root path must not affect the
// outcome — the root-path validation is skipped entirely and the test passes.
@Test
public void testTestReturnsTrueWhenRootPathInvalidAndConnectionDoesNotSupportRootPath() throws KettleException {
  when( vfsConnectionDetails.isRootPathSupported() ).thenReturn( false );
  when( vfsConnectionDetails.getRootPath() ).thenReturn( "../invalid" );

  assertTrue( vfsConnectionManagerHelper.test( connectionManager, vfsConnectionDetails, getTestOptionsCheckRootPath() ) );
}
/**
 * Serializes the given protobuf {@code message} to a JSON string.
 * The underlying writer is closed before the buffered text is returned.
 */
public static String toJson(Message message) {
  final StringWriter output = new StringWriter();
  try (JsonWriter writer = JsonWriter.of(output)) {
    write(message, writer);
  }
  return output.toString();
}
// Repeated protobuf fields (both scalar strings and nested messages) must serialize
// as JSON arrays in declaration order.
@Test
public void write_array() {
  TestArray msg = TestArray.newBuilder()
    .addStrings("one").addStrings("two")
    .addNesteds(NestedMsg.newBuilder().setLabel("nestedOne")).addNesteds(NestedMsg.newBuilder().setLabel("nestedTwo"))
    .build();

  assertThat(toJson(msg))
    .isEqualTo("{\"strings\":[\"one\",\"two\"],\"nesteds\":[{\"label\":\"nestedOne\"},{\"label\":\"nestedTwo\"}]}");
}
/**
 * Indexed-query statistics are not tracked for replicated maps, since queries are not
 * supported on them; this accessor always throws.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public long getIndexedQueryCount() {
    throw new UnsupportedOperationException("Queries on replicated maps are not supported.");
}
// Replicated-map stats do not support query counters; the accessor must throw.
@Test(expected = UnsupportedOperationException.class)
public void testIndexedQueryCount() {
    localReplicatedMapStats.getIndexedQueryCount();
}
/**
 * Convenience overload: parses {@code url} with {@link HttpUrl#parse} and delegates to
 * {@code allSubPaths(HttpUrl)}.
 */
public static ImmutableSet<HttpUrl> allSubPaths(String url) {
  HttpUrl parsedUrl = HttpUrl.parse(url);
  return allSubPaths(parsedUrl);
}
// A single-segment path without a trailing slash expands to the root plus the
// trailing-slash form of that segment.
@Test
public void allSubPaths_whenSingleSubPathsNoTrailingSlash_returnsExpectedUrl() {
  assertThat(allSubPaths("http://localhost/a"))
      .containsExactly(HttpUrl.parse("http://localhost/"), HttpUrl.parse("http://localhost/a/"));
}
/**
 * Updates a registered service instance: validates the form and weight, applies the update
 * via the instance service, then publishes an {@link UpdateInstanceTraceEvent} for tracing.
 * Returns {@code "ok"} on success.
 */
@CanDistro
@PutMapping
@TpsControl(pointName = "NamingInstanceUpdate", name = "HttpNamingInstanceUpdate")
@Secured(action = ActionTypes.WRITE)
public Result<String> update(InstanceForm instanceForm) throws NacosException {
    // check param
    instanceForm.validate();
    checkWeight(instanceForm.getWeight());
    // build instance
    Instance instance = buildInstance(instanceForm);
    instanceServiceV2.updateInstance(instanceForm.getNamespaceId(), buildCompositeServiceName(instanceForm),
            instance);
    // Trace event is published asynchronously via the notify center.
    NotifyCenter.publishEvent(
            new UpdateInstanceTraceEvent(System.currentTimeMillis(), NamingRequestUtil.getSourceIp(),
                    instanceForm.getNamespaceId(), instanceForm.getGroupName(), instanceForm.getServiceName(),
                    instance.getIp(), instance.getPort(), instance.getMetadata()));
    return Result.success("ok");
}
// Updating an instance must delegate to the instance service with the composite service name
// and asynchronously publish an UpdateInstanceTraceEvent (hence the 1s wait before checking
// the received event class).
@Test
void updateInstance() throws Exception {
    InstanceForm instanceForm = new InstanceForm();
    instanceForm.setNamespaceId(TEST_NAMESPACE);
    instanceForm.setGroupName("DEFAULT_GROUP");
    instanceForm.setServiceName("test-service");
    instanceForm.setIp(TEST_IP);
    instanceForm.setClusterName(TEST_CLUSTER_NAME);
    instanceForm.setPort(9999);
    instanceForm.setHealthy(true);
    instanceForm.setWeight(1.0);
    instanceForm.setEnabled(true);
    instanceForm.setMetadata(TEST_METADATA);
    instanceForm.setEphemeral(true);

    Result<String> result = instanceControllerV2.update(instanceForm);
    verify(instanceServiceV2).updateInstance(eq(TEST_NAMESPACE), eq(TEST_SERVICE_NAME), any());

    assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
    assertEquals("ok", result.getData());
    // The trace event is delivered asynchronously; give the notify center time to dispatch.
    TimeUnit.SECONDS.sleep(1);
    assertEquals(UpdateInstanceTraceEvent.class, eventReceivedClass);
}
/**
 * Gate for QoS connections from foreign hosts. The guard clauses admit the connection when
 * any of the following holds, in order: foreign IPs are globally accepted, anonymous access
 * is allowed, the peer is a loopback address, or the peer matches the configured whitelist.
 * Otherwise a rejection message is written and the channel is closed.
 */
@Override
public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
    if (acceptForeignIp) {
        return;
    }
    // the anonymous access is enabled by default, permission level is PUBLIC
    // if allow anonymous access, return
    if (qosConfiguration.isAllowAnonymousAccess()) {
        return;
    }
    final InetAddress inetAddress = ((InetSocketAddress) ctx.channel().remoteAddress()).getAddress();
    // loopback address, return
    if (inetAddress.isLoopbackAddress()) {
        return;
    }
    // the ip is in the whitelist, return
    if (checkForeignIpInWhiteList(inetAddress)) {
        return;
    }
    // Reject: write an explanatory message and close the channel once the write completes.
    ByteBuf cb = Unpooled.wrappedBuffer((QosConstants.BR_STR
            + "Foreign Ip Not Permitted, Consider Config It In Whitelist."
            + QosConstants.BR_STR)
        .getBytes());
    ctx.writeAndFlush(cb).addListener(ChannelFutureListener.CLOSE);
}
// A non-loopback foreign IP that matches neither whitelist entry must receive the
// "not permitted" message and have the connection closed after the flush.
@Test
void shouldShowIpNotPermittedMsg_GivenAcceptForeignIpFalseAndNotMatchWhiteList() throws Exception {
    ChannelHandlerContext context = mock(ChannelHandlerContext.class);
    Channel channel = mock(Channel.class);
    when(context.channel()).thenReturn(channel);
    InetAddress addr = mock(InetAddress.class);
    when(addr.isLoopbackAddress()).thenReturn(false);
    when(addr.getHostAddress()).thenReturn("179.23.44.1");
    InetSocketAddress address = new InetSocketAddress(addr, 12345);
    when(channel.remoteAddress()).thenReturn(address);
    ChannelFuture future = mock(ChannelFuture.class);
    when(context.writeAndFlush(any(ByteBuf.class))).thenReturn(future);

    ForeignHostPermitHandler handler = new ForeignHostPermitHandler(QosConfiguration.builder()
        .acceptForeignIp(false)
        .acceptForeignIpWhitelist("175.23.44.1 , 192.168.1.192/26")
        .anonymousAccessPermissionLevel(PermissionLevel.NONE.name())
        .build());
    handler.handlerAdded(context);

    // Capture the rejection payload and verify both the message and the close listener.
    ArgumentCaptor<ByteBuf> captor = ArgumentCaptor.forClass(ByteBuf.class);
    verify(context).writeAndFlush(captor.capture());
    assertThat(
        new String(captor.getValue().array()),
        containsString("Foreign Ip Not Permitted, Consider Config It In Whitelist"));
    verify(future).addListener(ChannelFutureListener.CLOSE);
}
/**
 * Triggers a Sentinel-initiated failover for the given master by issuing
 * {@code SENTINEL FAILOVER <name>} synchronously.
 */
@Override
public void failover(NamedNode master) {
    connection.sync(RedisCommands.SENTINEL_FAILOVER, master.getName());
}
// After a forced failover, Sentinel should promote a replica, so the reported master port
// must change. NOTE(review): relies on a fixed 10s sleep for the election to complete —
// inherently timing-sensitive and may be flaky on slow environments.
@Test
public void testFailover() throws InterruptedException {
    Collection<RedisServer> masters = connection.masters();
    connection.failover(masters.iterator().next());

    Thread.sleep(10000);

    RedisServer newMaster = connection.masters().iterator().next();
    assertThat(masters.iterator().next().getPort()).isNotEqualTo(newMaster.getPort());
}
/**
 * Creates a Splunk HEC endpoint from the URI. The remaining part must be a bare
 * {@code host:port} — any additional path segment (i.e. a '/' separator) is rejected
 * as an invalid URI before the endpoint is configured.
 */
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
    if (remaining.split("/").length > 1) {
        throw new IllegalArgumentException("Invalid URI: " + URISupport.sanitizeUri(uri));
    }
    SplunkHECEndpoint answer = new SplunkHECEndpoint(uri, this, new SplunkHECConfiguration());
    setProperties(answer, parameters);
    answer.setSplunkURL(remaining);
    return answer;
}
// An IP:port splunk URL with a token query parameter must be accepted and both values
// propagated to the endpoint and its configuration.
@Test
public void testIpAddressValid() throws Exception {
    SplunkHECEndpoint endpoint = (SplunkHECEndpoint) component.createEndpoint(
            "splunk-hec:192.168.0.1:18808?token=11111111-1111-1111-1111-111111111111");
    endpoint.init();
    assertEquals("192.168.0.1:18808", endpoint.getSplunkURL());
    assertEquals("11111111-1111-1111-1111-111111111111", endpoint.getConfiguration().getToken());
}
/**
 * Imports service definitions from either an HTTP(S) URL (downloaded, with response headers
 * captured for relative-reference resolution) or a local file path. Builds a reference
 * resolver rooted at the repository URL and delegates to the file-based overload.
 *
 * @throws MockRepositoryImportException if the remote artifact cannot be downloaded
 */
public List<Service> importServiceDefinition(String repositoryUrl, Secret repositorySecret,
      boolean disableSSLValidation, boolean mainArtifact) throws MockRepositoryImportException {
   log.info("Importing service definitions from {}", repositoryUrl);
   File localFile = null;
   Map<String, List<String>> fileProperties = null;

   if (repositoryUrl.startsWith("http")) {
      try {
         HTTPDownloader.FileAndHeaders fileAndHeaders = HTTPDownloader
               .handleHTTPDownloadToFileAndHeaders(repositoryUrl, repositorySecret, disableSSLValidation);
         localFile = fileAndHeaders.getLocalFile();
         fileProperties = fileAndHeaders.getResponseHeaders();
      } catch (IOException ioe) {
         throw new MockRepositoryImportException(repositoryUrl + " cannot be downloaded", ioe);
      }
   } else {
      // Simply build localFile from repository url.
      localFile = new File(repositoryUrl);
   }

   // fileProperties is null for local files; the builder factory handles that case.
   RelativeReferenceURLBuilder referenceURLBuilder = RelativeReferenceURLBuilderFactory
         .getRelativeReferenceURLBuilder(fileProperties);
   String artifactName = referenceURLBuilder.getFileName(repositoryUrl, fileProperties);

   // Initialize a reference resolver to the folder of this repositoryUrl.
   ReferenceResolver referenceResolver = new ReferenceResolver(repositoryUrl, repositorySecret,
         disableSSLValidation, referenceURLBuilder);
   return importServiceDefinition(localFile, referenceResolver, new ArtifactInfo(artifactName, mainArtifact));
}
@Test void testImportServiceDefinitionMainAndSecondary() { List<Service> services = null; try { File artifactFile = new File( "target/test-classes/io/github/microcks/service/weather-forecast-raw-openapi.yaml"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("weather-forecast-raw-openapi.yaml", true)); } catch (MockRepositoryImportException mrie) { fail("No MockRepositoryImportException should have be thrown"); } assertNotNull(services); assertEquals(1, services.size()); // Inspect Service own attributes. Service importedSvc = services.get(0); assertEquals("WeatherForecast API", importedSvc.getName()); assertEquals("1.1.0", importedSvc.getVersion()); assertEquals("weather-forecast-raw-openapi.yaml", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(1, importedSvc.getOperations().size()); assertNull(importedSvc.getOperations().get(0).getResourcePaths()); // Inspect and check resources. List<Resource> resources = resourceRepository.findByServiceId(importedSvc.getId()); assertEquals(1, resources.size()); Resource resource = resources.get(0); assertEquals("WeatherForecast API-1.1.0.yaml", resource.getName()); assertEquals("weather-forecast-raw-openapi.yaml", resource.getSourceArtifact()); // Inspect and check requests. List<Request> requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(0, requests.size()); // Inspect and check responses. 
List<Response> responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(0, responses.size()); try { File artifactFile = new File("target/test-classes/io/github/microcks/service/weather-forecast-postman.json"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("weather-forecast-postman.json", false)); } catch (MockRepositoryImportException mrie) { fail("No MockRepositoryImportException should have be thrown"); } // Inspect Service own attributes. importedSvc = services.get(0); assertEquals("WeatherForecast API", importedSvc.getName()); assertEquals("1.1.0", importedSvc.getVersion()); assertEquals("weather-forecast-raw-openapi.yaml", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(1, importedSvc.getOperations().size()); assertEquals(DispatchStyles.URI_ELEMENTS, importedSvc.getOperations().get(0).getDispatcher()); assertEquals(5, importedSvc.getOperations().get(0).getResourcePaths().size()); // Inspect and check resources. resources = resourceRepository.findByServiceId(importedSvc.getId()); assertEquals(2, resources.size()); for (Resource resourceItem : resources) { switch (resourceItem.getType()) { case OPEN_API_SPEC: assertEquals("WeatherForecast API-1.1.0.yaml", resourceItem.getName()); assertEquals("weather-forecast-raw-openapi.yaml", resourceItem.getSourceArtifact()); break; case POSTMAN_COLLECTION: assertEquals("WeatherForecast API-1.1.0.json", resourceItem.getName()); assertEquals("weather-forecast-postman.json", resourceItem.getSourceArtifact()); break; default: fail("Unexpected resource type: " + resourceItem.getType()); } } // Inspect and check requests. 
requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(5, requests.size()); for (Request request : requests) { assertEquals("weather-forecast-postman.json", request.getSourceArtifact()); } // Inspect and check responses. responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(5, requests.size()); for (Response response : responses) { assertEquals("weather-forecast-postman.json", response.getSourceArtifact()); } }
/**
 * Parses a date/time string whose format is detected from its content.
 *
 * <p>Supported shapes: "yyyy-MM-dd HH:mm:ss[.fraction]", "yyyy-MM-dd HH:mm[:ss]",
 * "yyyy-MM-dd", and time-only "HH:mm[:ss][.fraction]" (anchored to 1970-01-01).
 * All values are interpreted in the system default time zone.
 *
 * @return the parsed {@link Date}, or {@code null} for empty, unrecognized, or
 *     unparseable input (failures are logged, never thrown)
 */
public static Date parseDate2(String datetimeStr) {
    if (StringUtils.isEmpty(datetimeStr)) {
        return null;
    }
    try {
        datetimeStr = datetimeStr.trim();
        int len = datetimeStr.length();
        if (datetimeStr.contains("-") && datetimeStr.contains(":") && datetimeStr.contains(".")) {
            // Contains date + time + fractional seconds.
            // Build a sub-second pattern with one 'S' per fractional digit.
            int msLen = len - datetimeStr.indexOf(".") - 1;
            StringBuilder ms = new StringBuilder();
            for (int i = 0; i < msLen; i++) {
                ms.append("S");
            }
            String formatter = "yyyy-MM-dd HH:mm:ss." + ms;
            DateTimeFormatter dateTimeFormatter = dateFormatterCache.get(formatter);
            LocalDateTime dateTime = LocalDateTime.parse(datetimeStr, dateTimeFormatter);
            return Date.from(dateTime.atZone(ZoneId.systemDefault()).toInstant());
        } else if (datetimeStr.contains("-") && datetimeStr.contains(":")) {
            // Contains date + time.
            // A second ':' means seconds are present.
            int i = datetimeStr.indexOf(":");
            i = datetimeStr.indexOf(":", i + 1);
            String formatter;
            if (i > -1) {
                formatter = "yyyy-MM-dd HH:mm:ss";
            } else {
                formatter = "yyyy-MM-dd HH:mm";
            }
            DateTimeFormatter dateTimeFormatter = dateFormatterCache.get(formatter);
            LocalDateTime dateTime = LocalDateTime.parse(datetimeStr, dateTimeFormatter);
            return Date.from(dateTime.atZone(ZoneId.systemDefault()).toInstant());
        } else if (datetimeStr.contains("-")) {
            // Date only.
            String formatter = "yyyy-MM-dd";
            DateTimeFormatter dateTimeFormatter = dateFormatterCache.get(formatter);
            LocalDate localDate = LocalDate.parse(datetimeStr, dateTimeFormatter);
            return Date.from(localDate.atStartOfDay().atZone(ZoneId.systemDefault()).toInstant());
        } else if (datetimeStr.contains(":")) {
            // Time only: anchored to the epoch date 1970-01-01.
            String formatter;
            if (datetimeStr.contains(".")) {
                // With fractional seconds: one 'S' per fractional digit.
                int msLen = len - datetimeStr.indexOf(".") - 1;
                StringBuilder ms = new StringBuilder();
                for (int i = 0; i < msLen; i++) {
                    ms.append("S");
                }
                formatter = "HH:mm:ss." + ms;
            } else {
                // A second ':' means seconds are present.
                int i = datetimeStr.indexOf(":");
                i = datetimeStr.indexOf(":", i + 1);
                if (i > -1) {
                    formatter = "HH:mm:ss";
                } else {
                    formatter = "HH:mm";
                }
            }
            DateTimeFormatter dateTimeFormatter = dateFormatterCache.get(formatter);
            LocalTime localTime = LocalTime.parse(datetimeStr, dateTimeFormatter);
            LocalDate localDate = LocalDate.of(1970, Month.JANUARY, 1);
            LocalDateTime localDateTime = LocalDateTime.of(localDate, localTime);
            return Date.from(localDateTime.atZone(ZoneId.systemDefault()).toInstant());
        }
    } catch (Throwable e) {
        // NOTE(review): any failure (including Errors) is logged and swallowed; callers
        // must be prepared for a null return.
        logger.error(e.getMessage(), e);
    }
    return null;
}
// Forces StringUtils.isEmpty(...) to report true via PowerMock, so parseDate2 must take the
// early-return path and yield null even though the input string itself is non-null.
@PrepareForTest(StringUtils.class)
@Test
public void parseDate2InputNotNullOutputNull() throws Exception {
    // Setup mocks
    PowerMockito.mockStatic(StringUtils.class);

    // Arrange
    final String datetimeStr = "1a 2b 3c";
    final Method isEmptyMethod = DTUMemberMatcher.method(StringUtils.class, "isEmpty", String.class);
    PowerMockito.doReturn(true)
        .when(StringUtils.class, isEmptyMethod)
        .withArguments(or(isA(String.class), isNull(String.class)));

    // Act
    final Date actual = Util.parseDate2(datetimeStr);

    // Assert result
    Assert.assertNull(actual);
}
/**
 * Deserializes an Avro-encoded byte array into a {@link RowData}.
 *
 * @param message the serialized record, or {@code null} (yields {@code null})
 * @throws IOException if decoding or conversion fails; the original failure is the cause
 */
@Override
public RowData deserialize(@Nullable byte[] message) throws IOException {
    if (message == null) {
        return null;
    }
    try {
        final GenericRecord record = nestedSchema.deserialize(message);
        return (RowData) runtimeConverter.convert(record);
    } catch (Exception cause) {
        throw new IOException("Failed to deserialize Avro record.", cause);
    }
}
// Round-trips a record with timestamp fields through Avro binary encoding: deserialize the
// specific-record bytes, re-serialize, deserialize again, and verify the row is stable and
// the millisecond/microsecond timestamps map to the expected local date-times.
@Test
void testTimestampTypeNewMapping() throws Exception {
    final Tuple4<Class<? extends SpecificRecord>, SpecificRecord, GenericRecord, Row> testData =
            AvroTestUtils.getTimestampTestData();

    // Encode the specific record to raw Avro binary as the test input.
    SpecificDatumWriter<Timestamps> datumWriter = new SpecificDatumWriter<>(Timestamps.class);
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
    datumWriter.write((Timestamps) testData.f1, encoder);
    encoder.flush();
    byte[] input = byteArrayOutputStream.toByteArray();

    DataType dataType =
            AvroSchemaConverter.convertToDataType(
                    SpecificData.get().getSchema(Timestamps.class).toString(), false);
    AvroRowDataSerializationSchema serializationSchema =
            createSerializationSchema(dataType, AvroEncoding.BINARY, false);
    AvroRowDataDeserializationSchema deserializationSchema =
            createDeserializationSchema(dataType, AvroEncoding.BINARY, false);

    RowData rowData = deserializationSchema.deserialize(input);
    byte[] output = serializationSchema.serialize(rowData);
    RowData rowData2 = deserializationSchema.deserialize(output);
    assertThat(rowData2).isEqualTo(rowData);
    assertThat(rowData.getTimestamp(2, 3).toLocalDateTime().toString())
            .isEqualTo("2014-03-01T12:12:12.321");
    assertThat(rowData.getTimestamp(3, 6).toLocalDateTime().toString())
            .isEqualTo("1970-01-01T00:02:03.456");
}
/**
 * Returns the set of constraint violations carried by this exception.
 * NOTE(review): the internal set is exposed directly (no defensive copy) — confirm callers
 * do not mutate it.
 */
public Set<ConstraintViolation<?>> getConstraintViolations() {
    return constraintViolations;
}
// The exception must retain the violations it was constructed with.
@Test
void retainsTheSetOfExceptions() {
    assertThat(e.getConstraintViolations())
            .isNotEmpty();
}
/**
 * Checks the loose-object count of the config.git repository against the configured
 * threshold. At or above the threshold a server-health warning (with remediation advice)
 * is raised; below it any existing warning for this scope is cleared. All failures are
 * logged and swallowed so a repo inspection error never breaks the caller.
 */
public void checkRepoAndAddWarningIfRequired() {
    try {
        if (configRepository.getLooseObjectCount() >= systemEnvironment.get(SystemEnvironment.GO_CONFIG_REPO_GC_LOOSE_OBJECT_WARNING_THRESHOLD)) {
            String message = "Action required: Run 'git gc' on config.git repo";
            String description = "Number of loose objects in your Configuration repository(config.git) has grown beyond " +
                    "the configured threshold. As the size of config repo increases, the config save operations tend to slow down " +
                    "drastically. It is recommended that you run 'git gc' from " +
                    "'&lt;go server installation directory&gt;/db/config.git/' to address this problem. Go can do this " +
                    "automatically on a periodic basis if you enable automatic GC. <a target='_blank' href='" + docsUrl("/advanced_usage/config_repo.html") + "'>read more...</a>";
            serverHealthService.update(ServerHealthState.warningWithHtml(message, description, HealthStateType.general(HealthStateScope.forConfigRepo(SCOPE))));
            LOGGER.warn("{}:{}", message, description);
        } else {
            // Below threshold: clear any previously-raised warning for this scope.
            serverHealthService.removeByScope(HealthStateScope.forConfigRepo(SCOPE));
        }
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
    }
}
// Seeds an existing GC warning, then drops the loose-object count below the threshold and
// verifies the check clears the warning for that scope.
@Test
public void shouldRemoteExistingWarningAboutGCIfLooseObjectCountGoesBelowTheSetThreshold() throws Exception {
    serverHealthService.update(ServerHealthState.warning("message", "description", HealthStateType.general(HealthStateScope.forConfigRepo("GC"))));
    assertThat(serverHealthService.logsSortedForScope(HealthStateScope.forConfigRepo("GC")).isEmpty(), is(false));
    when(systemEnvironment.get(SystemEnvironment.GO_CONFIG_REPO_GC_LOOSE_OBJECT_WARNING_THRESHOLD)).thenReturn(10L);
    when(configRepository.getLooseObjectCount()).thenReturn(1L);
    service.checkRepoAndAddWarningIfRequired();
    assertThat(serverHealthService.logsSortedForScope(HealthStateScope.forConfigRepo("GC")).isEmpty(), is(true));
}
/**
 * Resolves the mapping fields for a Portable key or value.
 *
 * <p>Looks up the ClassDefinition for the configured Portable ID. If no definition is
 * registered and the user declared no fields, attempts to instantiate the Portable via its
 * default constructor and register its ClassDefinition as a fallback; failure to do so is a
 * hard error asking the user for explicit column definitions. Finally, fields are either
 * derived from the class definition (no user fields) or validated against it.
 */
@Override
public Stream<MappingField> resolveAndValidateFields(
        boolean isKey,
        List<MappingField> userFields,
        Map<String, String> options,
        InternalSerializationService serializationService
) {
    Map<QueryPath, MappingField> fieldsByPath = extractFields(userFields, isKey);
    PortableId portableId = getPortableId(fieldsByPath, options, isKey);
    ClassDefinition classDefinition = serializationService.getPortableContext()
            .lookupClassDefinition(portableId);

    // Fallback option for the case, when the portable objects were not de/serialized yet
    // and user fields were not provided by the user explicitly. In this case we try to
    // manually create a Portable instance and register its ClassDefinition.
    if (userFields.isEmpty() && classDefinition == null) {
        SerializationServiceV1 ss = (SerializationServiceV1) serializationService;
        // Try to create a Portable instance with the default constructor,
        // register its ClassDefinition, and throw object away.
        var tempPortableObj = ss.getPortableSerializer()
                .createNewPortableInstance(portableId.getFactoryId(), portableId.getClassId());

        if (tempPortableObj != null) {
            try {
                ss.getPortableContext().lookupOrRegisterClassDefinition(tempPortableObj);
            } catch (Exception e) {
                // If the default constructor doesn't make Portable fields non-null,we're done:
                // we can't register the class, so we interrupt the execution with the exception.
                throw QueryException.error("Cannot create mapping for Portable type. "
                        + "Please, provide the explicit definition for all columns.");
            }
            classDefinition = serializationService.getPortableContext().lookupClassDefinition(portableId);
        }
    }

    return userFields.isEmpty()
            ? resolveFields(isKey, classDefinition)
            : resolveAndValidateFields(isKey, fieldsByPath, classDefinition);
}
// A user-declared field name (with an explicit external path) must win over the field name
// coming from the registered ClassDefinition, for both key and value mappings.
@Test
@Parameters({
        "true, __key",
        "false, this"
})
public void when_userDeclaresField_then_itsNameHasPrecedenceOverClassDefinitionOne(boolean key, String prefix) {
    InternalSerializationService ss = new DefaultSerializationServiceBuilder().build();
    ClassDefinition classDefinition =
            new ClassDefinitionBuilder(1, 2, 3)
                    .addIntField("field")
                    .build();
    ss.getPortableContext().registerClassDefinition(classDefinition);

    Map<String, String> options = ImmutableMap.of(
            (key ? OPTION_KEY_FACTORY_ID : OPTION_VALUE_FACTORY_ID), String.valueOf(classDefinition.getFactoryId()),
            (key ? OPTION_KEY_CLASS_ID : OPTION_VALUE_CLASS_ID), String.valueOf(classDefinition.getClassId()),
            (key ? OPTION_KEY_CLASS_VERSION : OPTION_VALUE_CLASS_VERSION), String.valueOf(classDefinition.getVersion())
    );

    Stream<MappingField> resolvedFields = INSTANCE.resolveAndValidateFields(
            key,
            singletonList(field("renamed_field", QueryDataType.INT, prefix + ".field")),
            options,
            ss
    );

    assertThat(resolvedFields).containsExactly(
            field("renamed_field", QueryDataType.INT, prefix + ".field")
    );
}
/**
 * Returns the first enum constant of {@code enumClass} (in declaration order) matching
 * {@code predicate}, or {@code null} when either argument is null or nothing matches.
 */
public static <E extends Enum<E>> E getBy(Class<E> enumClass, Predicate<? super E> predicate) {
    if (enumClass == null || predicate == null) {
        return null;
    }
    for (E candidate : enumClass.getEnumConstants()) {
        if (predicate.test(candidate)) {
            return candidate;
        }
    }
    return null;
}
// Maps enum fields to each other: looks up the constant whose ordinal is 1.
// NOTE(review): this exercises the (Function, value) overload of getBy, not the
// (Class, Predicate) overload.
@Test
public void getByTest() {
    // 枚举中字段互相映射使用 -> cross-mapping between enum fields
    TestEnum testEnum = EnumUtil.getBy(TestEnum::ordinal, 1);
    assertEquals("TEST2", testEnum.name());
}
/**
 * Refreshes the top-k bundle load data from the given per-bundle stats.
 *
 * <p>System-namespace bundles are always excluded; bundles with policies attached are
 * excluded unless shedding bundles with policies is enabled. The scratch list {@code arr}
 * is partially sorted so only the top {@code topk} entries are ordered, then copied into
 * the shared load data (ascending, since the top-k occupy the tail after partitionSort).
 * The scratch list is cleared in {@code finally} to avoid retaining entry references.
 */
public void update(Map<String, NamespaceBundleStats> bundleStats, int topk) {
    arr.clear();
    try {
        var isLoadBalancerSheddingBundlesWithPoliciesEnabled =
                pulsar.getConfiguration().isLoadBalancerSheddingBundlesWithPoliciesEnabled();
        for (var etr : bundleStats.entrySet()) {
            String bundle = etr.getKey();
            // TODO: do not filter system topic while shedding
            if (NamespaceService.isSystemServiceNamespace(NamespaceBundle.getBundleNamespace(bundle))) {
                continue;
            }
            if (!isLoadBalancerSheddingBundlesWithPoliciesEnabled && hasPolicies(bundle)) {
                continue;
            }
            arr.add(etr);
        }
        var topKBundlesLoadData = loadData.getTopBundlesLoadData();
        topKBundlesLoadData.clear();
        if (arr.isEmpty()) {
            return;
        }
        topk = Math.min(topk, arr.size());
        partitionSort(arr, topk);

        for (int i = topk - 1; i >= 0; i--) {
            var etr = arr.get(i);
            topKBundlesLoadData.add(
                    new TopBundlesLoadData.BundleLoadData(etr.getKey(), (NamespaceBundleStats) etr.getValue()));
        }
    } finally {
        arr.clear();
    }
}
// With an isolation policy attached to bundle1 (and policy shedding disabled), only the
// policy-free bundle2 may appear in the top-k result, regardless of load.
@Test
public void testIsolationPolicy() throws MetadataStoreException {
    setIsolationPolicy();

    Map<String, NamespaceBundleStats> bundleStats = new HashMap<>();
    var topKBundles = new TopKBundles(pulsar);
    NamespaceBundleStats stats1 = new NamespaceBundleStats();
    stats1.msgRateIn = 500;
    bundleStats.put(bundle1, stats1);
    NamespaceBundleStats stats2 = new NamespaceBundleStats();
    stats2.msgRateIn = 10000;
    bundleStats.put(bundle2, stats2);
    topKBundles.update(bundleStats, 2);

    assertEquals(topKBundles.getLoadData().getTopBundlesLoadData().size(), 1);
    var top0 = topKBundles.getLoadData().getTopBundlesLoadData().get(0);
    assertEquals(top0.bundleName(), bundle2);
}
/**
 * Reads all CSV data from this reader's configured source (non-streaming mode).
 *
 * @throws IORuntimeException on underlying I/O failure
 */
public CsvData read() throws IORuntimeException {
    return read(this.reader, false);
}
// Reading with beginLineNo=2 must skip the first raw line, track original (raw) line numbers
// per row — including rows whose fields contain embedded newlines — and preserve field content.
@Test
public void lineLimitTest() {
    // Start reading from raw line 2 of the file.
    CsvReader reader = new CsvReader(CsvReadConfig.defaultConfig().setBeginLineNo(2));
    CsvData data = reader.read(ResourceUtil.getReader("test_lines.csv", CharsetUtil.CHARSET_UTF_8));

    assertEquals(2, data.getRow(0).getOriginalLineNumber());
    assertEquals("1,2,3,4", CollUtil.join(data.getRow(0), ","));
    assertEquals(4, data.getRow(1).getOriginalLineNumber());
    // A quoted field may span multiple raw lines; the embedded newline is kept.
    assertEquals("q,w,e,r,我是一段\n带换行的内容",
            CollUtil.join(data.getRow(1), ",").replace("\r", ""));

    // The 3rd parsed row corresponds to raw line number 6 (0-based numbering).
    assertEquals(6, data.getRow(2).getOriginalLineNumber());
    assertEquals("a,s,d,f", CollUtil.join(data.getRow(2), ","));
}
/**
 * Wraps the manifest template as HTTP request content, using the template's own media type.
 */
@Override
public BlobHttpContent getContent() {
    // TODO: Consider giving progress on manifest push as well?
    return new BlobHttpContent(
        Blobs.from(manifestTemplate), manifestTemplate.getManifestMediaType());
}
// The pushed content must carry the v2.2 manifest media type and serialize byte-for-byte
// to the reference manifest JSON file.
@Test
public void testGetContent() throws IOException {
    BlobHttpContent body = testManifestPusher.getContent();

    Assert.assertNotNull(body);
    Assert.assertEquals(V22ManifestTemplate.MANIFEST_MEDIA_TYPE, body.getType());

    ByteArrayOutputStream bodyCaptureStream = new ByteArrayOutputStream();
    body.writeTo(bodyCaptureStream);
    String v22manifestJson =
        new String(Files.readAllBytes(v22manifestJsonFile), StandardCharsets.UTF_8);
    Assert.assertEquals(
        v22manifestJson, new String(bodyCaptureStream.toByteArray(), StandardCharsets.UTF_8));
}
/**
 * Not-equal semantics: negates the superclass's equality check for the given map entry.
 */
@SuppressWarnings("unchecked")
@Override
public boolean apply(Map.Entry mapEntry) {
    return !super.apply(mapEntry);
}
// notEqual(name, null) applied to an entry whose attribute IS null must be false
// (null equals null, so the negation yields false).
@Test
public void apply_givenAttributeValueIsNull_whenEntryHasTheAttributeNull_thenReturnFalse() {
    NotEqualPredicate name = new NotEqualPredicate("name", null);
    QueryableEntry mockEntry = newMockEntry(null);
    boolean result = name.apply(mockEntry);
    assertFalse(result);
}
/**
 * Decides whether a module should be loaded given an activation list.
 *
 * <p>The list is comma/semicolon separated; "*" activates everything and an exact name
 * activates that module. Exclusions ("!name" or "-name") only take effect when they appear
 * AFTER an entry that already matched the module (wildcard or exact), in which case the
 * match is revoked and scanning stops. An exclusion listed before any match has no effect.
 */
static boolean needLoad(String moduleLoadList, String moduleName) {
    String[] activatedModules = StringUtils.splitWithCommaOrSemicolon(moduleLoadList);
    boolean match = false;
    for (String activatedModule : activatedModules) {
        if (StringUtils.ALL.equals(activatedModule)) {
            match = true;
        } else if (activatedModule.equals(moduleName)) {
            match = true;
        } else if (match
                && (activatedModule.equals("!" + moduleName) || activatedModule.equals("-" + moduleName))) {
            match = false;
            break;
        }
    }
    return match;
}
// Covers wildcard activation, exact-name activation in any position, non-matches,
// and both exclusion prefixes ("-" and "!") revoking a prior match.
@Test
public void needLoad() throws Exception {
    Assert.assertTrue(ModuleFactory.needLoad("*", "xxx"));
    Assert.assertTrue(ModuleFactory.needLoad("*,xxx", "xxx"));
    Assert.assertTrue(ModuleFactory.needLoad("xxx", "xxx"));
    Assert.assertTrue(ModuleFactory.needLoad("xxx,yyy", "xxx"));
    Assert.assertTrue(ModuleFactory.needLoad("yyy,xxx", "xxx"));
    Assert.assertTrue(ModuleFactory.needLoad("yyy,xxx,zzz", "xxx"));

    Assert.assertFalse(ModuleFactory.needLoad("", "xxx"));
    Assert.assertFalse(ModuleFactory.needLoad("yyy", "xxx"));
    Assert.assertFalse(ModuleFactory.needLoad("xxxx", "xxx"));
    Assert.assertFalse(ModuleFactory.needLoad("xxxx,yyy", "xxx"));
    Assert.assertFalse(ModuleFactory.needLoad("yyy,xxxx", "xxx"));

    Assert.assertFalse(ModuleFactory.needLoad("*,-xxx", "xxx"));
    Assert.assertFalse(ModuleFactory.needLoad("a,b,-xxx", "xxx"));
    Assert.assertFalse(ModuleFactory.needLoad("xxx,-xxx", "xxx"));
    Assert.assertFalse(ModuleFactory.needLoad("*,!xxx", "xxx"));
    Assert.assertFalse(ModuleFactory.needLoad("a,b,!xxx", "xxx"));
    Assert.assertFalse(ModuleFactory.needLoad("xxx,!xxx", "xxx"));
}
/**
 * Deserializes a config-repo task by dispatching on the "type" (or artifact-origin)
 * discriminator field to the concrete CRTask implementation.
 */
@Override
public CRTask deserialize(JsonElement json, Type type, JsonDeserializationContext context) throws JsonParseException {
    return determineJsonElementForDistinguishingImplementers(json, context, TYPE, ARTIFACT_ORIGIN);
}
// A "type":"exec" discriminator must route deserialization to CRExecTask.
@Test
public void shouldInstantiateATaskOfTypeExec() {
    JsonObject jsonObject = new JsonObject();
    jsonObject.addProperty("type", "exec");
    taskTypeAdapter.deserialize(jsonObject, type, jsonDeserializationContext);
    verify(jsonDeserializationContext).deserialize(jsonObject, CRExecTask.class);
}
/**
 * Adds all headers from {@code headers}. When the argument is also a
 * {@code DefaultHttpHeaders}, its backing store is bulk-added directly as a fast path;
 * otherwise the generic per-entry copy from the superclass is used.
 */
@Override
public HttpHeaders add(HttpHeaders headers) {
    if (!(headers instanceof DefaultHttpHeaders)) {
        return super.add(headers);
    }
    this.headers.add(((DefaultHttpHeaders) headers).headers);
    return this;
}
// toString must list headers in insertion order as "Name[k: v, k: v]".
@Test
public void toStringOnMultipleHeaders() {
    assertEquals("DefaultHttpHeaders[foo: bar, baz: qix]", newDefaultDefaultHttpHeaders()
            .add("foo", "bar")
            .add("baz", "qix")
            .toString());
}
/**
 * Creates an encrypted wallet file for the given key pair inside
 * {@code destinationDirectory} and returns the generated file name.
 *
 * @param useFullScrypt {@code true} for standard (full-strength) scrypt parameters,
 *     {@code false} for the faster light parameters
 */
public static String generateWalletFile(
        String password, ECKeyPair ecKeyPair, File destinationDirectory, boolean useFullScrypt)
        throws CipherException, IOException {
    final WalletFile walletFile = useFullScrypt
            ? Wallet.createStandard(password, ecKeyPair)
            : Wallet.createLight(password, ecKeyPair);

    final String fileName = getWalletFileName(walletFile);
    objectMapper.writeValue(new File(destinationDirectory, fileName), walletFile);
    return fileName;
}
// Generating a light-scrypt wallet file must produce a loadable, decryptable wallet
// (shared verification in testGenerateWalletFile).
@Test
public void testGenerateLightWalletFile() throws Exception {
    String fileName = WalletUtils.generateWalletFile(PASSWORD, KEY_PAIR, tempDir, false);
    testGenerateWalletFile(fileName);
}
public Duration computeReadTimeout(HttpRequestMessage request, int attemptNum) { IClientConfig clientConfig = getRequestClientConfig(request); Long originTimeout = getOriginReadTimeout(); Long requestTimeout = getRequestReadTimeout(clientConfig); long computedTimeout; if (originTimeout == null && requestTimeout == null) { computedTimeout = MAX_OUTBOUND_READ_TIMEOUT_MS.get(); } else if (originTimeout == null || requestTimeout == null) { computedTimeout = originTimeout == null ? requestTimeout : originTimeout; } else { // return the stricter (i.e. lower) of the two timeouts computedTimeout = Math.min(originTimeout, requestTimeout); } // enforce max timeout upperbound return Duration.ofMillis(Math.min(computedTimeout, MAX_OUTBOUND_READ_TIMEOUT_MS.get())); }
// When BOTH the request-level and origin-level timeouts exceed the global cap, the cap must
// be enforced. Fix: corrected the typo "bolth" -> "both" in the test method name.
@Test
void computeReadTimeout_both_enforceMax() {
    requestConfig.set(
            CommonClientConfigKey.ReadTimeout,
            (int) OriginTimeoutManager.MAX_OUTBOUND_READ_TIMEOUT_MS.get() + 1000);
    originConfig.set(
            CommonClientConfigKey.ReadTimeout,
            (int) OriginTimeoutManager.MAX_OUTBOUND_READ_TIMEOUT_MS.get() + 10000);

    Duration timeout = originTimeoutManager.computeReadTimeout(request, 1);

    assertEquals(OriginTimeoutManager.MAX_OUTBOUND_READ_TIMEOUT_MS.get(), timeout.toMillis());
}
/**
 * Advances the internal timers to {@code now} and reports whether the heartbeat
 * interval has elapsed (i.e. a heartbeat should be sent).
 */
boolean shouldHeartbeat(long now) {
    update(now);
    return heartbeatTimer.isExpired();
}
// After sending a heartbeat, advancing mock time past 1x the interval (1.1x here)
// must make shouldHeartbeat report true.
@Test
public void testShouldHeartbeat() {
    heartbeat.sentHeartbeat(time.milliseconds());
    time.sleep((long) ((float) heartbeatIntervalMs * 1.1));
    assertTrue(heartbeat.shouldHeartbeat(time.milliseconds()));
}
/**
 * Publishes the staged result of a prior {@code lazyRefresh()}: promotes the lazily-loaded
 * value to current, then clears the staging slot.
 *
 * @throws IllegalStateException if {@code lazyRefresh()} was not called first
 */
public void finishRefresh() {
    if (lazyLoaded.get() == null) {
        throw new IllegalStateException(
            "Cannot finish refresh - call lazyRefresh() first");
    }
    // Promote before clearing so readers never observe an empty current value.
    current.set(lazyLoaded.get());
    lazyLoaded.set(null);
}
// finishRefresh() without a preceding lazyRefresh() must fail with IllegalStateException.
@Test(expected = IllegalStateException.class)
public void testFinishRefreshWithoutLazyRefresh() throws IOException {
    FileWriter efw = new FileWriter(excludesFile);
    FileWriter ifw = new FileWriter(includesFile);
    efw.close();
    ifw.close();
    HostsFileReader hfp = new HostsFileReader(includesFile, excludesFile);
    hfp.finishRefresh();
}
/**
 * Asserts that the subject contains none of the elements in {@code excluded}.
 * Duplicates in {@code excluded} are collapsed before checking; any excluded elements
 * that ARE present are reported together in the failure message.
 */
public final void containsNoneIn(@Nullable Iterable<?> excluded) {
    Collection<?> actual = iterableToCollection(checkNotNull(this.actual));
    checkNotNull(excluded);
    // TODO(cpovirk): Produce a better exception message.
    List<@Nullable Object> present = new ArrayList<>();
    for (Object item : Sets.newLinkedHashSet(excluded)) {
        if (actual.contains(item)) {
            present.add(item);
        }
    }
    if (!present.isEmpty()) {
        failWithoutActual(
            fact("expected not to contain any of", annotateEmptyStrings(excluded)),
            fact("but contained", annotateEmptyStrings(present)),
            fullContents());
    }
}
@Test @SuppressWarnings("ContainsNoneInWithVarArgsToContainsNoneOf") public void iterableContainsNoneInIterable() { assertThat(asList(1, 2, 3)).containsNoneIn(asList(4, 5, 6)); expectFailureWhenTestingThat(asList(1, 2, 3)).containsNoneIn(asList(1, 2, 4)); assertFailureKeys("expected not to contain any of", "but contained", "full contents"); assertFailureValue("expected not to contain any of", "[1, 2, 4]"); assertFailureValue("but contained", "[1, 2]"); assertFailureValue("full contents", "[1, 2, 3]"); }
// Runs the supplied action under Sentinel flow/circuit-breaker protection.
// BlockException (request rejected by Sentinel) routes to the fallback without
// being traced as a business error; other exceptions are traced via Tracer and
// then routed to the fallback. The entry is always exited in the finally block.
@Override public <T> T run(Supplier<T> toRun, Function<Throwable, T> fallback) { Entry entry = null; try { entry = SphU.entry(resourceName, entryType); // If the SphU.entry() does not throw `BlockException`, it means that the // request can pass. return toRun.get(); } catch (BlockException ex) { // SphU.entry() may throw BlockException which indicates that // the request was rejected (flow control or circuit breaking triggered). // So it should not be counted as the business exception. return fallback.apply(ex); } catch (Exception ex) { // For other kinds of exceptions, we'll trace the exception count via // Tracer.trace(ex). Tracer.trace(ex); return fallback.apply(ex); } finally { // Guarantee the invocation has been completed. if (entry != null) { entry.exit(); } } }
// Verifies that a circuit breaker built with a null rule still executes the
// action and registers no degrade-rule configuration.
@Test public void testCreateWithNullRule() { String id = "testCreateCbWithNullRule"; CircuitBreaker cb = new SentinelCircuitBreaker(id, Collections.singletonList(null)); assertThat(cb.run(() -> "Sentinel")).isEqualTo("Sentinel"); assertThat(DegradeRuleManager.hasConfig(id)).isFalse(); }
// Deletes the given consumer groups via the coordinator-aware admin API driver
// and exposes per-group futures keyed by the group id string.
@Override public DeleteConsumerGroupsResult deleteConsumerGroups(Collection<String> groupIds, DeleteConsumerGroupsOptions options) { SimpleAdminApiFuture<CoordinatorKey, Void> future = DeleteConsumerGroupsHandler.newFuture(groupIds); DeleteConsumerGroupsHandler handler = new DeleteConsumerGroupsHandler(logContext); invokeDriver(handler, future, options.timeoutMs); return new DeleteConsumerGroupsResult(future.all().entrySet().stream() .collect(Collectors.toMap(entry -> entry.getKey().idValue, Map.Entry::getValue))); }
// Exercises deleteConsumerGroups against a broker that only supports the old
// FindCoordinator protocol: retriable coordinator errors are retried, a
// non-retriable authorization error surfaces as GroupAuthorizationException,
// and coordinator-moved errors trigger a coordinator re-discovery and retry.
@Test public void testDeleteConsumerGroupsWithOlderBroker() throws Exception { final List<String> groupIds = singletonList("groupId"); ApiVersion findCoordinatorV3 = new ApiVersion() .setApiKey(ApiKeys.FIND_COORDINATOR.id) .setMinVersion((short) 0) .setMaxVersion((short) 3); ApiVersion describeGroups = new ApiVersion() .setApiKey(ApiKeys.DESCRIBE_GROUPS.id) .setMinVersion((short) 0) .setMaxVersion(ApiKeys.DELETE_GROUPS.latestVersion()); try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(1, 0))) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create(asList(findCoordinatorV3, describeGroups))); // Retriable FindCoordinatorResponse errors should be retried env.kafkaClient().prepareResponse(prepareOldFindCoordinatorResponse(Errors.COORDINATOR_NOT_AVAILABLE, Node.noNode())); env.kafkaClient().prepareResponse(prepareOldFindCoordinatorResponse(Errors.COORDINATOR_LOAD_IN_PROGRESS, Node.noNode())); env.kafkaClient().prepareResponse(prepareOldFindCoordinatorResponse(Errors.NONE, env.cluster().controller())); final DeletableGroupResultCollection validResponse = new DeletableGroupResultCollection(); validResponse.add(new DeletableGroupResult() .setGroupId("groupId") .setErrorCode(Errors.NONE.code())); env.kafkaClient().prepareResponse(new DeleteGroupsResponse( new DeleteGroupsResponseData() .setResults(validResponse) )); final DeleteConsumerGroupsResult result = env.adminClient().deleteConsumerGroups(groupIds); final KafkaFuture<Void> results = result.deletedGroups().get("groupId"); assertNull(results.get()); // should throw error for non-retriable errors env.kafkaClient().prepareResponse( prepareOldFindCoordinatorResponse(Errors.GROUP_AUTHORIZATION_FAILED, Node.noNode())); DeleteConsumerGroupsResult errorResult = env.adminClient().deleteConsumerGroups(groupIds); TestUtils.assertFutureError(errorResult.deletedGroups().get("groupId"), GroupAuthorizationException.class); // Retriable errors should be retried env.kafkaClient().prepareResponse( 
prepareOldFindCoordinatorResponse(Errors.NONE, env.cluster().controller())); final DeletableGroupResultCollection errorResponse = new DeletableGroupResultCollection(); errorResponse.add(new DeletableGroupResult() .setGroupId("groupId") .setErrorCode(Errors.COORDINATOR_LOAD_IN_PROGRESS.code()) ); env.kafkaClient().prepareResponse(new DeleteGroupsResponse( new DeleteGroupsResponseData() .setResults(errorResponse))); /* * We need to return two responses here, one for NOT_COORDINATOR call when calling delete a consumer group * api using coordinator that has moved. This will retry whole operation. So we need to again respond with a * FindCoordinatorResponse. * * And the same reason for the following COORDINATOR_NOT_AVAILABLE error response */ DeletableGroupResultCollection coordinatorMoved = new DeletableGroupResultCollection(); coordinatorMoved.add(new DeletableGroupResult() .setGroupId("groupId") .setErrorCode(Errors.NOT_COORDINATOR.code()) ); env.kafkaClient().prepareResponse(new DeleteGroupsResponse( new DeleteGroupsResponseData() .setResults(coordinatorMoved))); env.kafkaClient().prepareResponse(prepareOldFindCoordinatorResponse(Errors.NONE, env.cluster().controller())); coordinatorMoved = new DeletableGroupResultCollection(); coordinatorMoved.add(new DeletableGroupResult() .setGroupId("groupId") .setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code()) ); env.kafkaClient().prepareResponse(new DeleteGroupsResponse( new DeleteGroupsResponseData() .setResults(coordinatorMoved))); env.kafkaClient().prepareResponse(prepareOldFindCoordinatorResponse(Errors.NONE, env.cluster().controller())); env.kafkaClient().prepareResponse(new DeleteGroupsResponse( new DeleteGroupsResponseData() .setResults(validResponse))); errorResult = env.adminClient().deleteConsumerGroups(groupIds); final KafkaFuture<Void> errorResults = errorResult.deletedGroups().get("groupId"); assertNull(errorResults.get()); } }
// Factory method: returns a fresh GithubWebhookUserSession on every call.
@Override public GithubWebhookUserSession createGithubWebhookUserSession() { return new GithubWebhookUserSession(); }
// Verifies that the factory never returns null.
@Test public void createGithubWebhookUserSession_returnsNonNullGithubWebhookUserSession() { GithubWebhookUserSession githubWebhookUserSession = userSessionFactory.createGithubWebhookUserSession(); assertThat(githubWebhookUserSession).isNotNull(); }
// Indexes a list of beans by the value of the named field (located reflectively,
// searching superclasses via deepFindField). Returns an empty map for an empty
// or null list. Reflection failures are wrapped in BeanUtilsException.
// NOTE(review): the field is resolved from the FIRST element's class and a null
// first element would NPE before reaching the wrapper — confirm callers never
// pass lists with null elements or heterogeneous types.
@SuppressWarnings("unchecked") public static <K, V> Map<K, V> mapByKey(String key, List<?> list) { Map<K, V> map = new HashMap<>(); if (CollectionUtils.isEmpty(list)) { return map; } try { Class<?> clazz = list.get(0).getClass(); Field field = deepFindField(clazz, key); if (field == null) { throw new IllegalArgumentException("Could not find the key"); } field.setAccessible(true); for (Object o : list) { map.put((K) field.get(o), (V) o); } } catch (Exception e) { throw new BeanUtilsException(e); } return map; }
// Verifies that a non-empty list produces a non-null map.
@Test public void testMapByKeyNotEmptyList() { someAnotherList.add(new KeyClass()); assertNotNull(BeanUtils.mapByKey("keys", someAnotherList)); }
// Creates a Google Photos album mirroring the input album (title cleaned via
// GooglePhotosImportUtils) and returns the id assigned by the Photos API.
@VisibleForTesting String importSingleAlbum(UUID jobId, TokensAndUrlAuthData authData, PhotoAlbum inputAlbum) throws IOException, InvalidTokenException, PermissionDeniedException, UploadErrorException { // Set up album GoogleAlbum googleAlbum = new GoogleAlbum(); googleAlbum.setTitle(GooglePhotosImportUtils.cleanAlbumTitle(inputAlbum.getName())); GoogleAlbum responseAlbum = getOrCreatePhotosInterface(jobId, authData).createAlbum(googleAlbum); return responseAlbum.getId(); }
// Verifies that importing the same album twice looks up the job (for locale)
// at most once, i.e. the lookup result is cached across imports.
@Test public void retrieveAlbumStringOnlyOnce() throws PermissionDeniedException, InvalidTokenException, IOException, UploadErrorException { String albumId = "Album Id"; String albumName = "Album Name"; String albumDescription = "Album Description"; PhotoAlbum albumModel = new PhotoAlbum(albumId, albumName, albumDescription); PortabilityJob portabilityJob = Mockito.mock(PortabilityJob.class); Mockito.when(portabilityJob.userLocale()).thenReturn("it"); JobStore jobStore = Mockito.mock(JobStore.class); Mockito.when(jobStore.findJob(uuid)).thenReturn(portabilityJob); GoogleAlbum responseAlbum = new GoogleAlbum(); responseAlbum.setId(NEW_ALBUM_ID); Mockito.when(googlePhotosInterface.createAlbum(any(GoogleAlbum.class))) .thenReturn(responseAlbum); GooglePhotosImporter sut = new GooglePhotosImporter( null, jobStore, null, null, googlePhotosInterface, connectionProvider, monitor, 1.0); sut.importSingleAlbum(uuid, null, albumModel); sut.importSingleAlbum(uuid, null, albumModel); Mockito.verify(jobStore, atMostOnce()).findJob(uuid); }
// Creates a lossless PDImageXObject from a BufferedImage. Gray images get a
// dedicated encoder; otherwise a predictor-based encoder is tried first, and
// for small RGB images (<= 50x50, < 16 bpc) the classic RGB encoding is also
// produced and the smaller of the two is kept (the loser's stream is closed).
// Falls back to 8-bit sRGB encoding when the predictor encoder declines.
public static PDImageXObject createFromImage(PDDocument document, BufferedImage image) throws IOException { if (isGrayImage(image)) { return createFromGrayImage(image, document); } // We try to encode the image with predictor if (USE_PREDICTOR_ENCODER) { PDImageXObject pdImageXObject = new PredictorEncoder(document, image).encode(); if (pdImageXObject != null) { if (pdImageXObject.getColorSpace() == PDDeviceRGB.INSTANCE && pdImageXObject.getBitsPerComponent() < 16 && image.getWidth() * image.getHeight() <= 50 * 50) { // also create classic compressed image, compare sizes PDImageXObject pdImageXObjectClassic = createFromRGBImage(image, document); if (pdImageXObjectClassic.getCOSObject().getLength() < pdImageXObject.getCOSObject().getLength()) { pdImageXObject.getCOSObject().close(); return pdImageXObjectClassic; } else { pdImageXObjectClassic.getCOSObject().close(); } } return pdImageXObject; } } // Fallback: We export the image as 8-bit sRGB and might lose color information return createFromRGBImage(image, document); }
// Verifies a transparent GIF round-trip: 8-bit RGB image plus a 1-bit gray
// soft mask with exactly two colors, identical pixels, and a writable PDF.
@Test void testCreateLosslessFromTransparentGIF() throws IOException { PDDocument document = new PDDocument(); BufferedImage image = ImageIO.read(this.getClass().getResourceAsStream("gif.gif")); assertEquals(Transparency.BITMASK, image.getColorModel().getTransparency()); PDImageXObject ximage = LosslessFactory.createFromImage(document, image); int w = image.getWidth(); int h = image.getHeight(); validate(ximage, 8, w, h, "png", PDDeviceRGB.INSTANCE.getName()); checkIdent(image, ximage.getImage()); checkIdentRGB(image, ximage.getOpaqueImage(null, 1)); assertNotNull(ximage.getSoftMask()); validate(ximage.getSoftMask(), 1, w, h, "png", PDDeviceGray.INSTANCE.getName()); assertEquals(2, colorCount(ximage.getSoftMask().getImage())); doWritePDF(document, ximage, TESTRESULTSDIR, "gif.pdf"); }
// Splits paragraph text into an interpreter name, optional local properties
// (a parenthesized list immediately after the interpreter token), and the
// script body. Text without a leading interpreter marker yields an empty
// interpreter name and the whole text (leading whitespace stripped) as script.
public static ParseResult parse(String text) { Map<String, String> localProperties = new HashMap<>(); String intpText = ""; String scriptText = null; Matcher matcher = REPL_PATTERN.matcher(text); if (matcher.find()) { String headingSpace = matcher.group(1); intpText = matcher.group(2); int startPos = headingSpace.length() + intpText.length() + 1; if (startPos < text.length() && text.charAt(startPos) == '(') { startPos = parseLocalProperties(text, startPos, localProperties); } scriptText = text.substring(startPos); } else { intpText = ""; scriptText = text; } return new ParseResult(intpText, removeLeadingWhiteSpaces(scriptText), localProperties); }
// Verifies the no-interpreter case: empty interpreter name, no properties,
// script text passed through unchanged.
@Test void testParagraphNoInterpreter() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse("sc.version"); assertEquals("", parseResult.getIntpText()); assertEquals(0, parseResult.getLocalProperties().size()); assertEquals("sc.version", parseResult.getScriptText()); }
// Convenience overload: substitutes ${...} variables using a single property
// container by delegating to the two-container variant with a null secondary.
public static String substVars(String val, PropertyContainer pc1) { return substVars(val, pc1, null); }
// Verifies that a self-referential property (A -> ${A}) is detected and
// reported as a circular reference instead of looping forever (1s timeout).
@Test(timeout = 1000) public void detectCircularReferences0() { context.putProperty("A", "${A}"); expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage("Circular variable reference detected while parsing input [${A} --> ${A}]"); OptionHelper.substVars("${A}", context); }
/**
 * Creates an uncommitted bundle for the given output collection, wrapping the
 * underlying bundle in an immutability-enforcing decorator when the
 * IMMUTABILITY enforcement applies to that collection in the current graph.
 */
@Override
public <T> UncommittedBundle<T> createBundle(PCollection<T> output) {
    UncommittedBundle<T> bundle = underlying.createBundle(output);
    if (Enforcement.IMMUTABILITY.appliesTo(output, graph)) {
        bundle = new ImmutabilityEnforcingBundle<>(bundle);
    }
    return bundle;
}
// Verifies that a bundle created by the factory accepts an element and commits
// it intact when the element is never mutated.
@Test public void noMutationCreateBundleSucceeds() { UncommittedBundle<byte[]> intermediate = factory.createBundle(transformed); WindowedValue<byte[]> windowedArray = WindowedValue.of( new byte[] {4, 8, 12}, new Instant(891L), new IntervalWindow(new Instant(0), new Instant(1000)), PaneInfo.ON_TIME_AND_ONLY_FIRING); intermediate.add(windowedArray); CommittedBundle<byte[]> committed = intermediate.commit(Instant.now()); assertThat(committed.getElements(), containsInAnyOrder(windowedArray)); }
/**
 * Reports whether the image was (or will be) actually pushed. The push is only
 * considered skipped when the skip-existing-images system property is enabled
 * AND a manifest for the image is already present.
 */
@VisibleForTesting
boolean isImagePushed(Optional<ManifestAndDigest<ManifestTemplate>> manifestResult) {
    boolean skippedBecauseImageExists =
        JibSystemProperties.skipExistingImages() && manifestResult.isPresent();
    return !skippedBecauseImageExists;
}
// Verifies that with skip-existing enabled but no manifest present, the image
// is still reported as pushed.
@Test public void testIsImagePushed_skipExistingImageEnabledAndManifestNotPresent() { Optional<ManifestAndDigest<ManifestTemplate>> manifestResult = Mockito.mock(Optional.class); System.setProperty(JibSystemProperties.SKIP_EXISTING_IMAGES, "true"); when(manifestResult.isPresent()).thenReturn(false); Assert.assertTrue(stepsRunner.isImagePushed(manifestResult)); }
/**
 * Convenience overload: wraps the raw reference string in a FileReference and
 * delegates to hasFile(FileReference).
 */
boolean hasFile(String fileReference) {
    FileReference reference = new FileReference(fileReference);
    return hasFile(reference);
}
// Verifies that a file reference pointing at a directory (containing files) is
// found by the file server.
@Test public void requireThatFileReferenceWithDirectoryCanBeFound() throws IOException { File dir = getFileServerRootDir(); IOUtils.writeFile(dir + "/124/subdir/f1", "test", false); IOUtils.writeFile(dir + "/124/subdir/f2", "test", false); assertTrue(fileServer.hasFile("124/subdir")); }
// Rewrites a TaskSpec's startMs to be no earlier than the current clock time by
// round-tripping the spec through its JSON tree representation.
TaskSpec rebaseTaskSpecTime(TaskSpec spec) throws Exception { ObjectNode node = JsonUtil.JSON_SERDE.valueToTree(spec); node.set("startMs", new LongNode(Math.max(time.milliseconds(), spec.startMs()))); return JsonUtil.JSON_SERDE.treeToValue(node, TaskSpec.class); }
// Verifies agent exec output for a task that expires: the rebased spec is
// echoed and the task fails with the worker-expired error.
@Test public void testAgentExecWithTimeout() throws Exception { Agent agent = createAgent(Scheduler.SYSTEM); NoOpTaskSpec spec = new NoOpTaskSpec(0, 1); TaskSpec rebasedSpec = agent.rebaseTaskSpecTime(spec); testExec(agent, String.format("Waiting for completion of task:%s%n", JsonUtil.toPrettyJsonString(rebasedSpec)) + String.format("Task failed with status null and error worker expired%n"), false, rebasedSpec); }
// Seeds the new foreach run's rollup from the previous run: the iterations
// that will be re-run in the new run have their aggregated rollup subtracted
// out of (combined with) the previous overview's rollup via initiateStepRollup.
@VisibleForTesting void initializeForeachArtifactRollup( ForeachStepOverview foreachOverview, ForeachStepOverview prevForeachOverview, String foreachWorkflowId) { Set<Long> iterationsToRunInNewRun = foreachOverview.getIterationsToRunFromDetails(prevForeachOverview); WorkflowRollupOverview aggregatedRollupsPrevRun = getAggregatedRollupFromIterations(foreachWorkflowId, iterationsToRunInNewRun); foreachOverview.initiateStepRollup(prevForeachOverview.getRollup(), aggregatedRollupsPrevRun); }
// Verifies that with no iterations to re-run, the DAO batch-rollup query is
// never issued and no rollup is set on the overview.
@Test public void testGetAggregatedRollupFromIterationsEmpty() { doReturn(Collections.singletonList(new WorkflowRollupOverview())) .when(workflowInstanceDao) .getBatchForeachLatestRunRollupForIterations(anyString(), any()); ForeachStepOverview stepOverview = mock(ForeachStepOverview.class); ForeachStepOverview prevStepOverview = new ForeachStepOverview(); doReturn(new HashSet<Long>()).when(stepOverview).getIterationsToRunFromDetails(any()); foreachStepRuntime.initializeForeachArtifactRollup( stepOverview, prevStepOverview, "myworkflowid"); assertNull(stepOverview.getRollup()); Mockito.verify(workflowInstanceDao, times(0)) .getBatchForeachLatestRunRollupForIterations(eq("myworkflowid"), any()); }
/**
 * Temporarily applies the past line to the issue, then restores the current
 * line and marks the issue changed when the two differ.
 *
 * @param previousLine the line the issue was on in the past analysis, may be null
 * @return true if the current and previous lines differ
 */
public boolean setPastLine(DefaultIssue issue, @Nullable Integer previousLine) {
    Integer currentLine = issue.line();
    issue.setLine(previousLine);
    boolean lineHasChanged = !Objects.equals(currentLine, previousLine);
    if (lineHasChanged) {
        issue.setLine(currentLine);
        issue.setChanged(true);
    }
    return lineHasChanged;
}
// Verifies that when the past line differs, the issue is marked changed, keeps
// its current line, sends no notifications, and records no field-diff change.
@Test void set_past_line() { issue.setLine(42); boolean updated = underTest.setPastLine(issue, 123); assertThat(updated).isTrue(); assertThat(issue.isChanged()).isTrue(); assertThat(issue.line()).isEqualTo(42); assertThat(issue.mustSendNotifications()).isFalse(); // do not save change assertThat(issue.currentChange()).isNull(); }
// Runs one batch of foreach iterations. For a fresh run it creates brand-new
// instances; otherwise it creates restart instances from the previous run.
// Returns empty on an empty request list or when no instances were produced;
// otherwise delegates the batch launch to the instance DAO. Request and
// instance-id lists must be the same length.
public Optional<Details> runForeachBatch( Workflow workflow, Long internalId, long workflowVersionId, RunProperties runProperties, String foreachStepId, ForeachArtifact artifact, List<RunRequest> requests, List<Long> instanceIds, int batchSize) { if (ObjectHelper.isCollectionEmptyOrNull(requests)) { return Optional.empty(); } Checks.checkTrue( requests.size() == instanceIds.size(), "Run request list size [%s] must match instance id list size [%s]", requests.size(), instanceIds.size()); List<WorkflowInstance> instances; if (artifact.isFreshRun()) { instances = createStartForeachInstances( workflow, internalId, workflowVersionId, artifact.getForeachRunId(), runProperties, requests, instanceIds); } else { instances = createRestartForeachInstances( workflow, internalId, workflowVersionId, runProperties, foreachStepId, artifact, requests, instanceIds); } if (ObjectHelper.isCollectionEmptyOrNull(instances)) { return Optional.empty(); } return instanceDao.runWorkflowInstances(workflow.getId(), instances, batchSize); }
// Verifies the happy path: a single fresh-run request yields no errors and
// exactly one DAO batch launch with the given batch size.
@Test public void testRunForeachBatch() { RunRequest request = RunRequest.builder() .initiator(new ManualInitiator()) .currentPolicy(RunPolicy.START_FRESH_NEW_RUN) .build(); Optional<Details> errors = actionHandler.runForeachBatch( definition.getWorkflow(), 123L, 1L, new RunProperties(), "foreach-step", new ForeachArtifact(), Collections.singletonList(request), Collections.singletonList(1L), 1); assertFalse(errors.isPresent()); verify(instanceDao, times(1)).runWorkflowInstances(any(), any(), eq(1)); }
// Re-broadcasts events annotated with @SharedEvent to every started plugin's
// Spring context, wrapped in a HaloSharedEventDelegator. Contexts that are not
// running (or plugins that are not SpringPlugins) are skipped. The started
// plugin list is copied first to avoid ConcurrentModificationException.
@EventListener(ApplicationEvent.class) void onApplicationEvent(ApplicationEvent event) { if (AnnotationUtils.findAnnotation(event.getClass(), SharedEvent.class) == null) { return; } // we should copy the plugins list to avoid ConcurrentModificationException var startedPlugins = new ArrayList<>(pluginManager.getStartedPlugins()); // broadcast event to all started plugins except the publisher for (var startedPlugin : startedPlugins) { var plugin = startedPlugin.getPlugin(); if (!(plugin instanceof SpringPlugin springPlugin)) { continue; } var context = springPlugin.getApplicationContext(); // make sure the context is running before publishing the event if (context instanceof Lifecycle lifecycle && lifecycle.isRunning()) { context.publishEvent(new HaloSharedEventDelegator(this, event)); } } }
// Verifies that a plugin whose context reports not-running receives no event.
@Test void shouldNotDispatchEventToAllStartedPluginsWhilePluginContextIsNotRunning() { var pw = mock(PluginWrapper.class); var plugin = mock(SpringPlugin.class); var context = mock(ApplicationContext.class, withSettings().extraInterfaces(Lifecycle.class)); when(((Lifecycle) context).isRunning()).thenReturn(false); when(plugin.getApplicationContext()).thenReturn(context); when(pw.getPlugin()).thenReturn(plugin); when(pluginManager.getStartedPlugins()).thenReturn(List.of(pw)); var event = new FakeSharedEvent(this); dispatcher.onApplicationEvent(event); verify(context, never()).publishEvent(event); }
// Returns the Spring wrapper for the named remote cache, creating and caching
// it on first access. When the native cache no longer exists, the stale wrapper
// is evicted and null is returned.
// NOTE(review): the remove-then-return-null path and computeIfAbsent can
// interleave under concurrency; presumably acceptable since the wrapper is
// rebuilt from the live native cache — confirm if strict consistency matters.
@Override public SpringCache getCache(final String name) { final RemoteCache<Object, Object> nativeCache = this.nativeCacheManager.getCache(name); if (nativeCache == null) { springCaches.remove(name); return null; } return springCaches.computeIfAbsent(name, n -> new SpringCache(nativeCache, reactive, readTimeout, writeTimeout)); }
// Verifies that an existing cache is returned non-null with the expected name.
@Test public final void springRemoteCacheManagerShouldProperlyCreateCache() { final Cache defaultCache = objectUnderTest.getCache(TEST_CACHE_NAME); assertNotNull("getCache(" + TEST_CACHE_NAME + ") should have returned a default cache. However, it returned null.", defaultCache); assertEquals("getCache(" + TEST_CACHE_NAME + ") should have returned a cache name \"" + TEST_CACHE_NAME + "\". However, the returned cache has a different name.", TEST_CACHE_NAME, defaultCache.getName()); }
// Parses suppression rules from an XML file via try-with-resources, wrapping
// SAX/IO failures in SuppressionParseException (original cause logged at debug).
@SuppressFBWarnings(justification = "try with resource will clenaup the resources", value = {"OBL_UNSATISFIED_OBLIGATION"}) public List<SuppressionRule> parseSuppressionRules(File file) throws SuppressionParseException { try (FileInputStream fis = new FileInputStream(file)) { return parseSuppressionRules(fis); } catch (SAXException | IOException ex) { LOGGER.debug("", ex); throw new SuppressionParseException(ex); } }
// Verifies that the v1.2 schema fixture yields exactly four rules.
@Test public void testParseSuppressionRulesV1dot2() throws Exception { //File file = new File(this.getClass().getClassLoader().getResource("suppressions.xml").getPath()); File file = BaseTest.getResourceAsFile(this, "suppressions_1_2.xml"); SuppressionParser instance = new SuppressionParser(); List<SuppressionRule> result = instance.parseSuppressionRules(file); Assert.assertEquals(4, result.size()); }
// Returns a fresh sub-iterator positioned on the values for the given tag;
// each call produces an independent Reiterator over that tag's stream.
@Override public Reiterator<Object> get(int tag) { return new SubIterator(tag); }
// Verifies that tags can be iterated repeatedly, in forward and reverse tag
// order, each access yielding the same per-tag values.
@Test public void testSequentialAccess() { TaggedReiteratorList iter = create(3, new String[] {"a", "b", "c"}); for (int i = 0; i < 2; i++) { assertEquals(iter.get(0), "a0", "b0", "c0"); assertEquals(iter.get(1), "a1", "b1", "c1"); assertEquals(iter.get(2), "a2", "b2", "c2"); } for (int i = 0; i < 2; i++) { assertEquals(iter.get(2), "a2", "b2", "c2"); assertEquals(iter.get(1), "a1", "b1", "c1"); assertEquals(iter.get(0), "a0", "b0", "c0"); } }
/**
 * Converts a PipelineProcessConfiguration to its YAML representation,
 * delegating each section (read, write, stream channel) to its dedicated
 * swapper. Returns null when the input is null.
 */
@Override
public YamlPipelineProcessConfiguration swapToYamlConfiguration(final PipelineProcessConfiguration data) {
    if (null == data) {
        return null;
    }
    YamlPipelineProcessConfiguration yamlConfig = new YamlPipelineProcessConfiguration();
    yamlConfig.setRead(readConfigSwapper.swapToYamlConfiguration(data.getRead()));
    yamlConfig.setWrite(writeConfigSwapper.swapToYamlConfiguration(data.getWrite()));
    yamlConfig.setStreamChannel(algorithmSwapper.swapToYamlConfiguration(data.getStreamChannel()));
    return yamlConfig;
}
// Verifies that a null input swaps to a null YAML configuration.
@Test void assertSwapToYamlConfigurationWithNull() { assertNull(new YamlPipelineProcessConfigurationSwapper().swapToYamlConfiguration(null)); }
/**
 * SMALLINT inequality operator: returns true when the two operands differ.
 * The nullable Boolean return follows the engine's scalar-operator contract.
 */
@ScalarOperator(NOT_EQUAL)
@SqlType(StandardTypes.BOOLEAN)
@SqlNullable
public static Boolean notEqual(@SqlType(StandardTypes.SMALLINT) long left, @SqlType(StandardTypes.SMALLINT) long right)
{
    // Equivalent to `left != right`, phrased as negated equality.
    return !(left == right);
}
// Verifies SMALLINT <> over equal and unequal operand pairs in both orders.
@Test public void testNotEqual() { assertFunction("SMALLINT'37' <> SMALLINT'37'", BOOLEAN, false); assertFunction("SMALLINT'37' <> SMALLINT'17'", BOOLEAN, true); assertFunction("SMALLINT'17' <> SMALLINT'37'", BOOLEAN, true); assertFunction("SMALLINT'17' <> SMALLINT'17'", BOOLEAN, false); }
// Variance of the binomial distribution: n * p * (1 - p).
@Override public double variance() { return n * p * (1 - p); }
// Verifies variance of Binomial(100, 0.3) equals 100 * 0.3 * 0.7 = 21.0.
@Test public void testVariance() { System.out.println("variance"); BinomialDistribution instance = new BinomialDistribution(100, 0.3); instance.rand(); assertEquals(21.0, instance.variance(), 1E-7); }
// Serializes this message into the buffer by delegating to the shared WRITER.
@Override public void writeTo(ByteBuf byteBuf) throws LispWriterException { WRITER.writeTo(byteBuf, this); }
// Round-trip test: a serialized ECM deserializes to an object equal to the
// original (checked with EqualsTester).
@Test public void testSerialization() throws LispReaderException, LispWriterException, LispParseError, DeserializationException { ByteBuf byteBuf = Unpooled.buffer(); EcmWriter writer = new EcmWriter(); writer.writeTo(byteBuf, ecm1); EcmReader reader = new EcmReader(); LispEncapsulatedControl deserialized = reader.readFrom(byteBuf); new EqualsTester().addEqualityGroup(ecm1, deserialized).testEquals(); }
// Factory method: instantiates a Coin using the constructor supplier registered
// on the given CoinType.
public static Coin getCoin(CoinType type) { return type.getConstructor().get(); }
// Verifies that the GOLD type yields a GoldCoin instance.
@Test void shouldReturnGoldCoinInstance() { final var goldCoin = CoinFactory.getCoin(CoinType.GOLD); assertTrue(goldCoin instanceof GoldCoin); }
/**
 * Collects and returns all leaf groups reachable from the given root group.
 * The actual traversal is delegated to visitNode, which appends leaves to the
 * supplied list as it encounters them.
 */
public static List<Group> enumerateFrom(Group root) {
    List<Group> collectedLeaves = new ArrayList<>();
    visitNode(root, collectedLeaves);
    return collectedLeaves;
}
// Verifies that only leaf groups (toad, yoshi, luigi) are enumerated from a
// nested hierarchy, checking names after sorting by index.
@Test void multipleLeafGroupsAreEnumerated() throws Exception { Group g = new Group(0, "donkeykong", dummyDistribution()); Group child = new Group(1, "mario", dummyDistribution()); child.addSubGroup(new Group(2, "toad")); child.addSubGroup(new Group(3, "yoshi")); g.addSubGroup(child); g.addSubGroup(new Group(4, "luigi")); List<Group> leaves = LeafGroups.enumerateFrom(g); // Ensure that output order matches insertion order. leaves.sort((a, b) -> Integer.compare(a.getIndex(), b.getIndex())); assertThat(leaves.size(), is(3)); assertThat(leaves.get(0).getName(), is("toad")); assertThat(leaves.get(1).getName(), is("yoshi")); assertThat(leaves.get(2).getName(), is("luigi")); }
// Pages config entries matching LIKE filters (dataId, group, tenant) and exact
// filters from the advance-info map (appName, content, comma-separated types
// and config_tags). When tags are supplied the tags-relation mapper builds the
// SQL; otherwise the plain config-info mapper is used. Returned contents are
// decrypted before the page is handed back.
@Override public Page<ConfigInfo> findConfigInfoLike4Page(final int pageNo, final int pageSize, final String dataId, final String group, final String tenant, final Map<String, Object> configAdvanceInfo) { String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant; final String appName = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("appName"); final String content = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("content"); final String types = Optional.ofNullable(configAdvanceInfo).map(e -> (String) e.get(ParametersField.TYPES)).orElse(null); final String configTags = configAdvanceInfo == null ? null : (String) configAdvanceInfo.get("config_tags"); MapperResult sqlCountRows; MapperResult sqlFetchRows; MapperContext context = new MapperContext((pageNo - 1) * pageSize, pageSize); context.putWhereParameter(FieldConstant.TENANT_ID, generateLikeArgument(tenantTmp)); if (!StringUtils.isBlank(dataId)) { context.putWhereParameter(FieldConstant.DATA_ID, generateLikeArgument(dataId)); } if (!StringUtils.isBlank(group)) { context.putWhereParameter(FieldConstant.GROUP_ID, generateLikeArgument(group)); } if (!StringUtils.isBlank(appName)) { context.putWhereParameter(FieldConstant.APP_NAME, appName); } if (!StringUtils.isBlank(content)) { context.putWhereParameter(FieldConstant.CONTENT, generateLikeArgument(content)); } if (StringUtils.isNotBlank(types)) { String[] typesArr = types.split(Symbols.COMMA); context.putWhereParameter(FieldConstant.TYPE, typesArr); } if (StringUtils.isNotBlank(configTags)) { String[] tagArr = configTags.split(","); context.putWhereParameter(FieldConstant.TAG_ARR, tagArr); ConfigTagsRelationMapper configTagsRelationMapper = mapperManager.findMapper( dataSourceService.getDataSourceType(), TableConstant.CONFIG_TAGS_RELATION); sqlCountRows = configTagsRelationMapper.findConfigInfoLike4PageCountRows(context); sqlFetchRows = configTagsRelationMapper.findConfigInfoLike4PageFetchRows(context); } else { 
ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(), TableConstant.CONFIG_INFO); sqlCountRows = configInfoMapper.findConfigInfoLike4PageCountRows(context); sqlFetchRows = configInfoMapper.findConfigInfoLike4PageFetchRows(context); } PaginationHelper<ConfigInfo> helper = createPaginationHelper(); Page<ConfigInfo> page = helper.fetchPageLimit(sqlCountRows, sqlFetchRows, pageNo, pageSize, CONFIG_INFO_ROW_MAPPER); for (ConfigInfo configInfo : page.getPageItems()) { Pair<String, String> pair = EncryptionHandler.decryptHandler(configInfo.getDataId(), configInfo.getEncryptedDataKey(), configInfo.getContent()); configInfo.setContent(pair.getSecond()); } return page; }
// Verifies the tag-filtered path: mocked count and item queries (with wildcard
// '*' translated to SQL '%') drive the returned page's total and item counts.
@Test void testFindConfigInfoLike4PageWithTags() { String appName = "appName1234"; String content = "content123"; Map<String, Object> configAdvanceInfo = new HashMap<>(); configAdvanceInfo.put("appName", appName); configAdvanceInfo.put("content", content); configAdvanceInfo.put("config_tags", "tags,tag2"); String dataId = "dataId4567222*"; String group = "group3456789*"; String tenant = "tenant4567890"; //mock total count when(databaseOperate.queryOne(anyString(), eq(new Object[] {tenant, dataId.replaceAll("\\*", "%"), group.replaceAll("\\*", "%"), appName, content, "tags", "tag2"}), eq(Integer.class))).thenReturn(new Integer(9)); //mock page list List<ConfigInfo> result = new ArrayList<>(); result.add(createMockConfigInfo(0)); result.add(createMockConfigInfo(1)); result.add(createMockConfigInfo(2)); when(databaseOperate.queryMany(anyString(), eq(new Object[] {tenant, dataId.replaceAll("\\*", "%"), group.replaceAll("\\*", "%"), appName, content, "tags", "tag2"}), eq(CONFIG_INFO_ROW_MAPPER))).thenReturn(result); Page<ConfigInfo> configInfo4Page = embeddedConfigInfoPersistService.findConfigInfoLike4Page(1, 3, dataId, group, tenant, configAdvanceInfo); assertEquals(result.size(), configInfo4Page.getPageItems().size()); assertEquals(9, configInfo4Page.getTotalCount()); }
// Executes the scenario step by step. beforeRun() is invoked lazily when steps
// are not yet initialized; skipped scenarios return early. Step results are
// accumulated (null results occur for debug step-back or hook skips). Any crash
// is recorded as a fake step result; afterRun() always runs for non-skipped
// scenarios (and may abort the suite on failure), and a non-shared log appender
// is closed to reclaim memory.
@Override public void run() { try { // make sure we call afterRun() even on crashes // and operate countdown latches, else we may hang the parallel runner if (steps == null) { beforeRun(); } if (skipped) { return; } int count = steps.size(); int index = 0; while ((index = nextStepIndex()) < count) { currentStep = steps.get(index); execute(currentStep); if (currentStepResult != null) { // can be null if debug step-back or hook skip result.addStepResult(currentStepResult); } } } catch (Exception e) { if (currentStepResult != null) { result.addStepResult(currentStepResult); } logError("scenario [run] failed\n" + StringUtils.throwableToString(e)); currentStepResult = result.addFakeStepResult("scenario [run] failed", e); } finally { if (!skipped) { afterRun(); if (isFailed() && engine.getConfig().isAbortSuiteOnFailure()) { featureRuntime.suite.abort(); } } if (caller.isNone()) { logAppender.close(); // reclaim memory } } }
// Verifies karate.xmlPath extraction of a subtree from an XML variable.
@Test void testXmlPath() { run( "def foo = <bar><a><b>c</b></a></bar>", "def res1 = karate.xmlPath(foo, '/bar/a')" ); matchVar("res1", "<a><b>c</b></a>"); }
// Delegates the instance-status query to the underlying client.
@Override public String getInstanceStatus() { return client.getInstanceStatus(); }
// Verifies that the register passes the client's status through unchanged.
@Test public void getInstanceStatus() { Assert.assertEquals(status, nacosRegister.getInstanceStatus()); }
public QueueConnection queueConnection(QueueConnection connection) { // It is common to implement both interfaces if (connection instanceof XAQueueConnection) { return xaQueueConnection((XAQueueConnection) connection); } return TracingConnection.create(connection, this); }
// Verifies that a connection implementing both interfaces is wrapped so the
// result still exposes XAQueueConnection.
@Test void queueConnection_wrapsXaInput() { abstract class Both implements XAQueueConnection, QueueConnection { } assertThat(jmsTracing.queueConnection(mock(Both.class))) .isInstanceOf(XAQueueConnection.class); }
// Builds the ordered credential retrievers for the target (to) registry from
// CLI options. The username/password and credential-helper options are mutually
// exclusive (enforced by the CLI layer), so at most one of the setters fires.
public static List<CredentialRetriever> getToCredentialRetrievers( CommonCliOptions commonCliOptions, DefaultCredentialRetrievers defaultCredentialRetrievers) throws FileNotFoundException { // these are all mutually exclusive as enforced by the CLI commonCliOptions .getUsernamePassword() .ifPresent( credential -> defaultCredentialRetrievers.setKnownCredential( credential, "--username/--password")); commonCliOptions .getToUsernamePassword() .ifPresent( credential -> defaultCredentialRetrievers.setKnownCredential( credential, "--to-username/--to-password")); commonCliOptions .getCredentialHelper() .ifPresent(defaultCredentialRetrievers::setCredentialHelper); commonCliOptions .getToCredentialHelper() .ifPresent(defaultCredentialRetrievers::setCredentialHelper); return defaultCredentialRetrievers.asList(); }
// Parameterized check: credential-helper style args set helper "abc" and
// produce the retriever list with no other interactions.
@Test @Parameters(method = "paramsToCredHelper") public void testGetToCredentialRetriever_credHelper(String[] args) throws FileNotFoundException { CommonCliOptions commonCliOptions = CommandLine.populateCommand(new CommonCliOptions(), ArrayUtils.addAll(DEFAULT_ARGS, args)); Credentials.getToCredentialRetrievers(commonCliOptions, defaultCredentialRetrievers); verify(defaultCredentialRetrievers).setCredentialHelper("abc"); verify(defaultCredentialRetrievers).asList(); verifyNoMoreInteractions(defaultCredentialRetrievers); }
// Sample entry point: parses each file path given on the command line with a
// default-configured Tika instance and prints the extracted text to stdout.
// NOTE(review): output goes through System.out, whose charset is
// platform-dependent on older JDKs — confirm UTF-8 is acceptable for callers.
public static void main(String[] args) throws Exception { // Create a Tika instance with the default configuration Tika tika = new Tika(); // Parse all given files and print out the extracted // text content for (String file : args) { String text = tika.parseToString(new File(file)); System.out.print(text); } }
// Verifies UTF-8 text extraction by redirecting stdout to a buffer and running
// main on a temp file containing a known English message.
@Test public void testSimpleTextExtractor() throws Exception { String message = "This is Tika - Hello, World! This is simple UTF-8 text" + " content written in English to test autodetection of" + " the character encoding of the input stream."; ByteArrayOutputStream buffer = new ByteArrayOutputStream(); PrintStream out = System.out; System.setOut(new PrintStream(buffer, true, UTF_8.name())); File file = new File("target", "test.txt"); FileUtils.writeStringToFile(file, message, UTF_8); SimpleTextExtractor.main(new String[]{file.getPath()}); file.delete(); System.setOut(out); assertContains(message, buffer .toString(UTF_8.name()) .trim()); }
/**
 * Collects the classes exposed by the wrapped JAX-RS Application: the explicitly
 * registered classes plus the runtime classes of any singletons, excluding any
 * class name rejected by {@code isIgnored}.
 */
@Override
public Set<Class<?>> classes() {
    Set<Class<?>> result = new HashSet<>();
    if (application == null) {
        return result;
    }

    // Explicitly registered resource/provider classes.
    Set<Class<?>> registered = application.getClasses();
    if (registered != null) {
        for (Class<?> clazz : registered) {
            if (!isIgnored(clazz.getName())) {
                result.add(clazz);
            }
        }
    }

    // Singleton instances contribute their concrete runtime class.
    Set<Object> singletons = application.getSingletons();
    if (singletons != null) {
        for (Object singleton : singletons) {
            Class<?> clazz = singleton.getClass();
            if (!isIgnored(clazz.getName())) {
                result.add(clazz);
            }
        }
    }
    return result;
}
@Test(description = "scan classes from Application when is not set")
public void shouldScanForClassesWhenApplicationNotSet() throws Exception {
    // With no Application configured, the scanner must report an empty class set.
    scanner.application(null);
    Set<Class<?>> classes = scanner.classes();
    assertTrue(classes.isEmpty());
}
/**
 * Builds the full launch context for one component instance's container:
 * resolves the artifact, substitutes config tokens, localizes config and static
 * files, assembles the launch command, and configures container retry.
 *
 * @param launcher launch-context builder being populated
 * @param service the service definition
 * @param instance the component instance this container belongs to
 * @param fileSystem filesystem used for localized resources
 * @param yarnConf YARN configuration
 * @param container the allocated container
 * @param compLaunchContext per-component launch settings
 * @return resolved launch parameters accumulated while localizing resources
 * @throws IOException on filesystem or localization failures
 * @throws SliderException on service-definition problems
 */
public ResolvedLaunchParams buildContainerLaunchContext(
    AbstractLauncher launcher, Service service, ComponentInstance instance,
    SliderFileSystem fileSystem, Configuration yarnConf, Container container,
    ContainerLaunchService.ComponentLaunchContext compLaunchContext)
    throws IOException, SliderException {
  ResolvedLaunchParams resolved = new ResolvedLaunchParams();
  // Resolve the component artifact first; later steps depend on it.
  processArtifact(launcher, instance, fileSystem, service, compLaunchContext);

  ServiceContext context = instance.getComponent().getScheduler().getContext();
  // Generate tokens (key-value pair) for config substitution.
  Map<String, String> tokensForSubstitution =
      buildContainerTokens(instance, container, compLaunchContext);

  // Setup launch context environment (tokens feed into env values).
  buildContainerEnvironment(launcher, service, instance, fileSystem, yarnConf,
      container, compLaunchContext, tokensForSubstitution);

  // create config file on hdfs and addResolvedRsrcPath local resource
  ProviderUtils.createConfigFileAndAddLocalResource(launcher, fileSystem,
      compLaunchContext, tokensForSubstitution, instance, context, resolved);

  // handles static files (like normal file / archive file) for localization.
  ProviderUtils.handleStaticFilesForLocalization(launcher, fileSystem,
      compLaunchContext, resolved);

  // replace launch command with token specific information
  buildContainerLaunchCommand(launcher, service, instance, fileSystem, yarnConf,
      container, compLaunchContext, tokensForSubstitution);

  // Setup container retry settings
  buildContainerRetry(launcher, yarnConf, compLaunchContext, instance);
  return resolved;
}
@Test
public void testBuildContainerLaunchContext() throws Exception {
  AbstractProviderService providerService = new DockerProviderService();

  // Use the first component of the test service with an entry-point launch command.
  Component comp = serviceContext.scheduler.getAllComponents().entrySet()
      .iterator().next().getValue();
  ContainerLaunchService.ComponentLaunchContext launchContext =
      createEntryPointCLCFor(testService, comp, "sleep,9000");
  ComponentInstance componentInstance =
      comp.getAllComponentInstances().iterator().next();

  // Mock a container with a deterministic id.
  Container mockContainer = mock(Container.class);
  ContainerId cid = ContainerId.newContainerId(
      ApplicationAttemptId.newInstance(
          ApplicationId.newInstance(System.currentTimeMillis(), 1), 1), 1L);
  when(mockContainer.getId()).thenReturn(cid);

  providerService.buildContainerLaunchContext(launcher, testService,
      componentInstance, rule.getFs(), serviceContext.scheduler.getConfig(),
      mockContainer, launchContext);

  Assert.assertEquals("artifact", launchContext.getArtifact().getId(),
      launcher.getDockerImage());
}
/**
 * Intercepts ApplicationConfig setters before execution: caches the app name on
 * {@code setName}, otherwise injects routing parameters into the map argument.
 *
 * @param context the intercepted execution context
 * @return the (possibly modified) execution context
 */
@Override
public ExecuteContext before(ExecuteContext context) {
    Object[] arguments = context.getArguments();
    // Nothing to do when the intercepted call carries no arguments.
    if (arguments == null || arguments.length == 0) {
        return context;
    }
    Object firstArg = arguments[0];
    String methodName = context.getMethod().getName();
    if ("setName".equals(methodName)) {
        // Only act on a String (or null) name; other types are left untouched.
        if (firstArg == null || firstArg instanceof String) {
            setAppNameAndPutParameters(context.getObject(), (String) firstArg);
        }
        return context;
    }
    // Any other intercepted setter: enrich the parameter map with router config.
    if (firstArg == null || firstArg instanceof Map<?, ?>) {
        arguments[0] = ParametersUtils.putParameters((Map<String, String>) firstArg, routerConfig);
    }
    return context;
}
@Test public void testSetName() throws NoSuchMethodException { Object[] args = new Object[1]; args[0] = ""; ApplicationConfig applicationConfig = new ApplicationConfig(); ExecuteContext context = ExecuteContext.forMemberMethod(applicationConfig, ApplicationConfig.class.getMethod("setName", String.class), args, null, null); // the app name is empty interceptor.before(context); Assert.assertNull(DubboCache.INSTANCE.getAppName()); Assert.assertNull(applicationConfig.getParameters()); // the app name is not empty args[0] = "foo"; interceptor.before(context); Assert.assertEquals("foo", DubboCache.INSTANCE.getAppName()); Map<String, String> parameters = applicationConfig.getParameters(); Assert.assertNotNull(parameters); Assert.assertEquals(0, parameters.size()); }
/**
 * Looks up a social client record by its primary key.
 *
 * @param id the client's id
 * @return the matching record as loaded by the mapper
 */
@Override
public SocialClientDO getSocialClient(Long id) {
    // Pure delegation to the persistence layer.
    final SocialClientDO client = socialClientMapper.selectById(id);
    return client;
}
@Test public void testGetSocialClient() { // mock 数据 SocialClientDO dbSocialClient = randomPojo(SocialClientDO.class); socialClientMapper.insert(dbSocialClient);// @Sql: 先插入出一条存在的数据 // 准备参数 Long id = dbSocialClient.getId(); // 调用 SocialClientDO socialClient = socialClientService.getSocialClient(id); // 校验数据正确 assertPojoEquals(dbSocialClient, socialClient); }
/**
 * Routes a precise sharding value by delegating to the user-configured
 * standard sharding algorithm instance.
 */
@SuppressWarnings("unchecked")
@Override
public String doSharding(final Collection<String> availableTargetNames,
                         final PreciseShardingValue<Comparable<?>> shardingValue) {
    // Pure delegation: the wrapped algorithm performs the actual routing.
    final String target = standardShardingAlgorithm.doSharding(availableTargetNames, shardingValue);
    return target;
}
@Test
void assertRangeDoSharding() {
    // Build the CLASS_BASED algorithm backed by the standard fixture implementation.
    ClassBasedShardingAlgorithm algorithm = (ClassBasedShardingAlgorithm) TypedSPILoader.getService(
            ShardingAlgorithm.class, "CLASS_BASED",
            PropertiesBuilder.build(
                    new Property("strategy", "standard"),
                    new Property("algorithmClassName",
                            ClassBasedStandardShardingAlgorithmFixture.class.getName())));
    Collection<String> targets = Arrays.asList("t_order_0", "t_order_1", "t_order_2", "t_order_3");

    // A closed range covering ids 2..15 is expected to hit every shard.
    Collection<String> actual = algorithm.doSharding(targets,
            new RangeShardingValue<>("t_order", "order_id",
                    new DataNodeInfo("t_order_", 1, '0'), Range.closed(2, 15)));
    assertThat(actual.size(), is(4));
}
@VisibleForTesting static ParallelInstruction forParallelInstruction( ParallelInstruction input, boolean replaceWithByteArrayCoder) throws Exception { try { ParallelInstruction instruction = clone(input, ParallelInstruction.class); if (instruction.getRead() != null) { Source cloudSource = instruction.getRead().getSource(); cloudSource.setCodec(forCodec(cloudSource.getCodec(), replaceWithByteArrayCoder)); } else if (instruction.getWrite() != null) { com.google.api.services.dataflow.model.Sink cloudSink = instruction.getWrite().getSink(); cloudSink.setCodec(forCodec(cloudSink.getCodec(), replaceWithByteArrayCoder)); } else if (instruction.getParDo() != null) { instruction.setParDo( forParDoInstruction(instruction.getParDo(), replaceWithByteArrayCoder)); } else if (instruction.getPartialGroupByKey() != null) { PartialGroupByKeyInstruction pgbk = instruction.getPartialGroupByKey(); pgbk.setInputElementCodec(forCodec(pgbk.getInputElementCodec(), replaceWithByteArrayCoder)); } else if (instruction.getFlatten() != null) { // FlattenInstructions have no codecs to wrap. } else { throw new RuntimeException("Unknown parallel instruction: " + input); } return instruction; } catch (IOException e) { throw new RuntimeException( String.format( "Failed to replace unknown coder with " + "LengthPrefixCoder for : {%s}", input), e); } }
@Test public void testLengthPrefixParDoInstructionCoder() throws Exception { ParDoInstruction parDo = new ParDoInstruction(); CloudObject spec = CloudObject.forClassName(MERGE_BUCKETS_DO_FN); spec.put( WorkerPropertyNames.INPUT_CODER, CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null)); parDo.setUserFn(spec); instruction.setParDo(parDo); ParallelInstruction prefixedInstruction = forParallelInstruction(instruction, false); assertEqualsAsJson( CloudObjects.asCloudObject(prefixedWindowedValueCoder, /*sdkComponents=*/ null), prefixedInstruction.getParDo().getUserFn().get(WorkerPropertyNames.INPUT_CODER)); // Should not mutate the instruction. assertEqualsAsJson( CloudObjects.asCloudObject(windowedValueCoder, /*sdkComponents=*/ null), parDo.getUserFn().get(WorkerPropertyNames.INPUT_CODER)); }
/**
 * Returns a wrapper over a snapshot of the currently registered listeners;
 * listeners added or removed afterwards do not affect the returned object.
 */
ProducerListeners listeners() {
    HollowProducerEventListener[] snapshot =
            eventListeners.toArray(new HollowProducerEventListener[0]);
    return new ProducerListeners(snapshot);
}
@Test
public void testRemoveDuringCycle() {
    ProducerListenerSupport ls = new ProducerListenerSupport();

    // Plain counting listener: records how often each cycle event fires.
    class SecondCycleListener implements CycleListener {
        int cycleStart;
        int cycleComplete;

        @Override
        public void onCycleSkip(CycleSkipReason reason) {
        }

        @Override
        public void onNewDeltaChain(long version) {
        }

        @Override
        public void onCycleStart(long version) {
            cycleStart++;
        }

        @Override
        public void onCycleComplete(Status status, HollowProducer.ReadState rs, long version,
                Duration elapsed) {
            cycleComplete++;
        }
    }

    // Counting listener that additionally removes the second listener from the
    // support object as a side effect of the cycle-start event.
    class FirstCycleListener extends SecondCycleListener {
        private SecondCycleListener scl;

        private FirstCycleListener(SecondCycleListener scl) {
            this.scl = scl;
        }

        @Override
        public void onCycleStart(long version) {
            super.onCycleStart(version);
            ls.removeListener(scl);
        }
    }

    SecondCycleListener scl = new SecondCycleListener();
    FirstCycleListener fcl = new FirstCycleListener(scl);
    ls.addListener(fcl);
    ls.addListener(scl);

    // First cycle: both listeners were registered when listeners() snapshotted,
    // so both see start AND complete — removal mid-cycle must not affect the
    // already-taken snapshot.
    ProducerListenerSupport.ProducerListeners s = ls.listeners();
    s.fireCycleStart(1);
    s.fireCycleComplete(new Status.StageWithStateBuilder());

    Assert.assertEquals(1, fcl.cycleStart);
    Assert.assertEquals(1, fcl.cycleComplete);
    Assert.assertEquals(1, fcl.scl.cycleStart);
    Assert.assertEquals(1, fcl.scl.cycleComplete);

    // Second cycle: the removal performed during the first cycle is now visible,
    // so only the first listener's counters advance.
    s = ls.listeners();
    s.fireCycleStart(1);
    s.fireCycleComplete(new Status.StageWithStateBuilder());

    Assert.assertEquals(2, fcl.cycleStart);
    Assert.assertEquals(2, fcl.cycleComplete);
    Assert.assertEquals(1, fcl.scl.cycleStart);
    Assert.assertEquals(1, fcl.scl.cycleComplete);
}
/**
 * Records that the given file produced the given read error, grouping files by
 * the error's data kind for later reporting.
 *
 * @param file the component (file) the error occurred in; must not be null
 * @param readError the error observed while reading the file; must not be null
 * @throws NullPointerException if {@code file} or {@code readError} is null
 */
public void addWarning(Component file, LineReader.ReadError readError) {
    checkNotCommitted();
    requireNonNull(file, "file can't be null");
    requireNonNull(readError, "readError can't be null");

    // computeIfAbsent is the idiomatic, simpler equivalent of the original
    // compute()-with-null-check: create the set on first sight of this data
    // kind, then accumulate the file into it.
    fileErrorsPerData.computeIfAbsent(readError.data(), data -> new HashSet<>()).add(file);
}
@Test
public void addWarning_fails_with_NPE_if_file_is_null() {
    // A null file component must be rejected eagerly with a descriptive NPE.
    LineReader.ReadError error = new LineReader.ReadError(HIGHLIGHTING, 2);

    assertThatThrownBy(() -> underTest.addWarning(null, error))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("file can't be null");
}
/**
 * Factory for the collect_set UDAF: aggregates distinct values of a column into
 * a single array.
 */
@UdafFactory(description = "collect distinct values of a Bigint field into a single Array")
public static <T> Udaf<T, List<T>, List<T>> createCollectSetT() {
  // The generic Collect aggregator implements the distinct-collection semantics.
  final Udaf<T, List<T>, List<T>> udaf = new Collect<>();
  return udaf;
}
@Test
public void shouldCollectDistinctDates() {
    final Udaf<Date, List<Date>, List<Date>> udaf = CollectSetUdaf.createCollectSetT();

    // Feed two distinct dates through the aggregator, one at a time.
    List<Date> aggregate = udaf.initialize();
    for (final Date value : new Date[]{new Date(1), new Date(2)}) {
        aggregate = udaf.aggregate(value, aggregate);
    }

    // Both values survive, in insertion order.
    assertThat(aggregate, contains(new Date(1), new Date(2)));
}