focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Encodes the given instruction into its JSON representation.
 *
 * @param instruction instruction to encode; must not be null
 * @param context     codec context used during encoding
 * @return JSON object describing the instruction
 */
@Override
public ObjectNode encode(Instruction instruction, CodecContext context) {
    checkNotNull(instruction, "Instruction cannot be null");
    // Delegate the per-type encoding work to the helper.
    final EncodeInstructionCodecHelper helper =
            new EncodeInstructionCodecHelper(instruction, context);
    return helper.encode();
}
// Verifies that a ModMplsLabel instruction (label 99) round-trips through the
// codec: the encoded JSON must match the original instruction.
@Test
public void modMplsLabelInstructionTest() {
    final L2ModificationInstruction.ModMplsLabelInstruction instruction =
            (L2ModificationInstruction.ModMplsLabelInstruction) Instructions.modMplsLabel(MplsLabel.mplsLabel(99));
    final ObjectNode instructionJson = instructionCodec.encode(instruction, context);
    // Custom hamcrest matcher compares JSON fields against the instruction.
    assertThat(instructionJson, matchesInstruction(instruction));
}
/**
 * Resolves the concrete {@link MessageOutput} instances configured for a stream.
 * Outputs that the registry cannot resolve (returns null) are silently skipped.
 *
 * @param stream stream whose outputs should be resolved
 * @return set of resolved message outputs, possibly empty
 */
protected Set<MessageOutput> getMessageOutputsForStream(Stream stream) {
    final Set<MessageOutput> messageOutputs = new HashSet<>();
    for (Output streamOutput : stream.getOutputs()) {
        final MessageOutput resolved =
                outputRegistry.getOutputForIdAndStream(streamOutput.getId(), stream);
        if (resolved == null) {
            // Registry has no output for this id/stream combination — skip it.
            continue;
        }
        messageOutputs.add(resolved);
    }
    return messageOutputs;
}
// A stream with no configured outputs must resolve to an empty output collection.
@Test
public void testGetMessageOutputsForEmptyStream() throws Exception {
    final Stream stream = mock(Stream.class);
    final OutputRouter outputRouter = new OutputRouter(defaultMessageOutput, outputRegistry);
    final Collection<MessageOutput> messageOutputs = outputRouter.getMessageOutputsForStream(stream);
    assertEquals(0, messageOutputs.size());
}
Queue<String> prepareRollingOrder(List<String> podNamesToConsider, List<Pod> pods) { Deque<String> rollingOrder = new ArrayDeque<>(); for (String podName : podNamesToConsider) { Pod matchingPod = pods.stream().filter(pod -> podName.equals(pod.getMetadata().getName())).findFirst().orElse(null); if (matchingPod == null || !Readiness.isPodReady(matchingPod)) { // Non-existing or unready pods are handled first // This helps to avoid rolling all pods into some situation where they would be all failing rollingOrder.addFirst(podName); } else { // Ready pods are rolled only at the end rollingOrder.addLast(podName); } } return rollingOrder; }
// When every pod is ready, the rolling order must preserve the original pod
// name order (all ready pods are appended at the tail in input order).
@Test
public void testRollingOrderWithAllPodsReady() {
    List<Pod> pods = List.of(
            renamePod(READY_POD, "my-connect-connect-0"),
            renamePod(READY_POD, "my-connect-connect-1"),
            renamePod(READY_POD, "my-connect-connect-2")
    );
    KafkaConnectRoller roller = new KafkaConnectRoller(RECONCILIATION, CLUSTER, 1_000L, null);
    Queue<String> rollingOrder = roller.prepareRollingOrder(POD_NAMES, pods);
    assertThat(rollingOrder.size(), is(3));
    assertThat(rollingOrder.poll(), is("my-connect-connect-0"));
    assertThat(rollingOrder.poll(), is("my-connect-connect-1"));
    assertThat(rollingOrder.poll(), is("my-connect-connect-2"));
}
// Removes the replica for {bpid, block} from the map, but only when both the
// block identity and the generation stamp match the stored replica exactly.
// Returns the removed ReplicaInfo, or null when there is no exact match.
// NOTE(review): mutation (m.remove) happens under a *read* lock
// (LockLevel.BLOCK_POOl) — presumably safe because of how callers serialize
// writes or because the GSet tolerates it; confirm before relying on it.
ReplicaInfo remove(String bpid, Block block) {
    checkBlockPool(bpid);
    checkBlock(block);
    try (AutoCloseDataSetLock l = lockManager.readLock(LockLevel.BLOCK_POOl, bpid)) {
        LightWeightResizableGSet<Block, ReplicaInfo> m = map.get(bpid);
        if (m != null) {
            ReplicaInfo replicaInfo = m.get(block);
            // Generation stamp must also match; a stale stamp is treated as "not found".
            if (replicaInfo != null &&
                block.getGenerationStamp() == replicaInfo.getGenerationStamp()) {
                return m.remove(block);
            }
        }
    }
    return null;
}
// Exercises every branch of ReplicaMap.remove: null argument, generation-stamp
// mismatch, block-id mismatch, successful removal, and removal by block id.
@Test
public void testRemove() {
    // Test 1: null argument throws invalid argument exception
    try {
        map.remove(bpid, null);
        fail("Expected exception not thrown");
    } catch (IllegalArgumentException expected) {
    }
    // Test 2: remove failure - generation stamp mismatch
    Block b = new Block(block);
    b.setGenerationStamp(0);
    assertNull(map.remove(bpid, b));
    // Test 3: remove failure - blockID mismatch
    b.setGenerationStamp(block.getGenerationStamp());
    b.setBlockId(0);
    assertNull(map.remove(bpid, b));
    // Test 4: remove success
    assertNotNull(map.remove(bpid, block));
    // Test 5: remove failure - invalid blockID
    assertNull(map.remove(bpid, 0));
    // Test 6: remove success
    map.add(bpid, new FinalizedReplica(block, null, null));
    assertNotNull(map.remove(bpid, block.getBlockId()));
}
/**
 * Tells whether the plugin is bundled with a commercial SonarSource edition:
 * it must come from the SonarSource organization AND carry one of the known
 * commercial licenses. Both comparisons are case-insensitive.
 *
 * @param plugin plugin to inspect
 * @return true when the plugin is edition-bundled
 */
public static boolean isEditionBundled(Plugin plugin) {
    if (!SONARSOURCE_ORGANIZATION.equalsIgnoreCase(plugin.getOrganization())) {
        return false;
    }
    return Arrays.stream(SONARSOURCE_COMMERCIAL_LICENSES)
            .anyMatch(license -> license.equalsIgnoreCase(plugin.getLicense()));
}
// Randomizes the case of both organization and license to prove the
// edition-bundled check is case-insensitive.
@Test
public void isEditionBundled_on_Plugin_returns_true_for_organization_SonarSource_and_license_Commercial_case_insensitive() {
    Plugin plugin = newPlugin(randomizeCase("SonarSource"), randomizeCase("Commercial"));
    assertThat(EditionBundledPlugins.isEditionBundled(plugin)).isTrue();
}
// Validates a project ID: length must be within [MIN_PROJECT_ID_LENGTH,
// MAX_PROJECT_ID_LENGTH] and the ID may contain only the allowed characters.
// Throws IllegalArgumentException on any violation.
// NOTE(review): the first message says "cannot be empty" — if
// MIN_PROJECT_ID_LENGTH is greater than 1 this message is misleading for
// short-but-non-empty IDs; confirm the constant's value.
public static void checkValidProjectId(String idToCheck) {
    if (idToCheck.length() < MIN_PROJECT_ID_LENGTH) {
        throw new IllegalArgumentException("Project ID " + idToCheck + " cannot be empty.");
    }
    if (idToCheck.length() > MAX_PROJECT_ID_LENGTH) {
        throw new IllegalArgumentException(
            "Project ID " + idToCheck + " cannot be longer than " + MAX_PROJECT_ID_LENGTH + " characters.");
    }
    // Pattern match finds any disallowed character anywhere in the ID.
    if (ILLEGAL_PROJECT_CHARS.matcher(idToCheck).find()) {
        throw new IllegalArgumentException(
            "Project ID " + idToCheck
                + " is not a valid ID. Only letters, numbers, hyphens, single quotes, colon, dot and"
                + " exclamation points are allowed.");
    }
}
// IDs containing illegal characters ('%', '$') must be rejected.
@Test
public void testCheckValidProjectIdWhenIdContainsIllegalCharacter() {
    assertThrows(IllegalArgumentException.class, () -> checkValidProjectId("%pr$oject-id%"));
}
/**
 * Creates a baggage field with the given name, bound to the default
 * extra-baggage context.
 *
 * @param name field name (case preserved as given)
 * @return a new BaggageField
 */
public static BaggageField create(String name) {
    return new BaggageField(name, ExtraBaggageContext.get());
}
// toString must contain only the field name, with its case preserved.
@Test
void toString_onlyHasName() {
    assertThat(BaggageField.create("Foo"))
        .hasToString("BaggageField{Foo}"); // case preserved as that's the field name
}
/**
 * Sends a request to the given API with the supplied parameters and no body,
 * delegating to the four-argument overload with an empty body map.
 *
 * @param api    API path
 * @param params request parameters
 * @param method HTTP method
 * @return raw response string
 * @throws NacosException on request failure
 */
public String reqApi(String api, Map<String, String> params, String method) throws NacosException {
    // Collections.emptyMap() is the type-safe replacement for the raw-typed
    // Collections.EMPTY_MAP constant (avoids an unchecked conversion).
    return reqApi(api, params, Collections.emptyMap(), method);
}
// Calling reqApi against a configured Nacos domain is expected to fail with
// NacosException (no reachable server in the test environment).
// NOTE(review): method name says "RegApi" — likely a typo for "ReqApi".
@Test
void testRegApiForDomain() throws NacosException {
    assertThrows(NacosException.class, () -> {
        Map<String, String> params = new HashMap<>();
        when(mgr.isDomain()).thenReturn(true);
        when(mgr.getNacosDomain()).thenReturn("http://test.nacos.domain");
        clientProxy.reqApi("api", params, Collections.emptyMap(), Collections.emptyList(), HttpMethod.GET);
    });
}
/**
 * Concatenates a relative path onto a prefix, producing prefix + path.
 *
 * @param prefix base resource id
 * @param path   relative path to append; must not itself be an absolute path
 *               (i.e. must not contain the root node)
 * @return combined resource id
 * @throws IllegalArgumentException when path is absolute or cannot be copied
 */
public static ResourceId concat(ResourceId prefix, ResourceId path) {
    checkArgument(!path.nodeKeys().contains(DeviceResourceIds.ROOT_NODE),
                  "%s was already absolute path", path);
    try {
        return prefix.copyBuilder().append(path).build();
    } catch (CloneNotSupportedException e) {
        // copyBuilder() clones internally; surface the failure as an argument error.
        log.error("Could not copy {}", path, e);
        throw new IllegalArgumentException("Could not copy " + path, e);
    }
}
// Concatenating the "devices" branch onto the root id must yield the canonical
// DEVICES resource id.
@Test
public void testConcat() {
    ResourceId devices = ResourceId.builder()
            .addBranchPointSchema(DeviceResourceIds.DEVICES_NAME, DCS_NAMESPACE)
            .build();
    assertEquals(DEVICES, ResourceIds.concat(ResourceIds.ROOT_ID, devices));
}
/**
 * Parses a 3-digit octal permission string (e.g. "755") into FilePermissions.
 *
 * @param octalPermissions exactly three octal digits, "000" through "777"
 * @return parsed permissions
 * @throws IllegalArgumentException when the string is not 3 octal digits
 */
public static FilePermissions fromOctalString(String octalPermissions) {
    final boolean validOctal = octalPermissions.matches(OCTAL_PATTERN);
    if (!validOctal) {
        throw new IllegalArgumentException(
            "octalPermissions must be a 3-digit octal number (000-777)");
    }
    // Radix 8: each digit is an rwx triplet.
    final int permissionBits = Integer.parseInt(octalPermissions, 8);
    return new FilePermissions(permissionBits);
}
// Valid 3-digit octal strings must parse to the matching permission bits;
// malformed strings (non-octal, wrong length, signed) must be rejected with
// the documented error message.
@Test
public void testFromOctalString() {
    Assert.assertEquals(new FilePermissions(0777), FilePermissions.fromOctalString("777"));
    Assert.assertEquals(new FilePermissions(0000), FilePermissions.fromOctalString("000"));
    Assert.assertEquals(new FilePermissions(0123), FilePermissions.fromOctalString("123"));
    Assert.assertEquals(new FilePermissions(0755), FilePermissions.fromOctalString("755"));
    Assert.assertEquals(new FilePermissions(0644), FilePermissions.fromOctalString("644"));
    ImmutableList<String> badStrings = ImmutableList.of("abc", "-123", "777444333", "987", "3");
    for (String badString : badStrings) {
        try {
            FilePermissions.fromOctalString(badString);
            Assert.fail();
        } catch (IllegalArgumentException ex) {
            Assert.assertEquals(
                "octalPermissions must be a 3-digit octal number (000-777)", ex.getMessage());
        }
    }
}
// Returns the user's permissions: self-edit permissions (unless this is a
// service account) merged with any permissions stored in the backing fields
// map. The HashSet de-duplicates overlapping entries.
@Override
public List<String> getPermissions() {
    final Set<String> permissionSet = isServiceAccount()
            ? new HashSet<>()
            : new HashSet<>(this.permissions.userSelfEditPermissions(getName()));
    // Fields map stores permissions as an untyped list; cast is unchecked by design.
    @SuppressWarnings("unchecked")
    final List<String> permissions = (List<String>) fields.get(PERMISSIONS);
    if (permissions != null) {
        permissionSet.addAll(permissions);
    }
    return new ArrayList<>(permissionSet);
}
// Even with no stored permissions, a regular user must still receive the
// implicit self-edit permissions for their own account.
@Test
public void getPermissionsWorksWithEmptyPermissions() throws Exception {
    final Permissions permissions = new Permissions(Collections.emptySet());
    final Map<String, Object> fields = Collections.singletonMap(UserImpl.USERNAME, "foobar");
    user = createUserImpl(passwordAlgorithmFactory, permissions, fields);
    assertThat(user.getPermissions()).containsAll(permissions.userSelfEditPermissions("foobar"));
}
/**
 * Parses a natural-language date expression relative to the current moment.
 * Delegates to the two-argument overload with {@code new Date()} as "now".
 * NOTE(review): using the wall clock directly makes this overload hard to
 * test deterministically; callers needing determinism should use the overload
 * that accepts an explicit reference date.
 *
 * @throws DateNotParsableException when the string cannot be parsed
 */
public Result parse(final String string) throws DateNotParsableException {
    return this.parse(string, new Date());
}
// Expressions that align to the start of a day must produce from/to timestamps
// whose time-of-day component is exactly 00:00:00.
// NOTE(review): method name says "EuropeBerlin" but the parser is constructed
// with "Antarctica/Palmer" — the name and the zone under test disagree.
@Test
public void testParseAlignToStartOfDayEuropeBerlin() throws Exception {
    final NaturalDateParser naturalDateParser = new NaturalDateParser("Antarctica/Palmer");
    final DateTimeFormatter df = DateTimeFormat.forPattern("HH:mm:ss");
    for (String test : testsThatAlignToStartOfDay) {
        NaturalDateParser.Result result = naturalDateParser.parse(test);
        assertNotNull(result.getFrom());
        assertNotNull(result.getTo());
        assertThat(df.print(result.getFrom())).as("time part of date should equal 00:00:00 in").isEqualTo("00:00:00");
        assertThat(df.print(result.getTo())).as("time part of date should equal 00:00:00 in").isEqualTo("00:00:00");
    }
}
/**
 * Normalises free text: trims surrounding whitespace, lower-cases, removes
 * all punctuation, and collapses internal whitespace runs to single spaces.
 *
 * @param text text to normalise; must be non-null and non-empty
 * @return normalised text
 * @throws IllegalArgumentException when text is null or empty
 */
@Override
public String normalise(String text) {
    // Plain null check is the idiomatic form; Objects.isNull is meant for
    // predicate references, not direct conditions.
    if (text == null || text.isEmpty()) {
        throw new IllegalArgumentException("Text cannot be null or empty");
    }
    return text.trim()
            .toLowerCase()
            .replaceAll("\\p{Punct}", "")
            .replaceAll("\\s+", " ");
}
// Already-normalised input must pass through unchanged (idempotence check).
@Description("Normalise, when text is already lowercased and trimmed, then return the same text")
@Test
void normalise_WhenTextIsAlreadyLowercasedAndTrimmed_ThenReturnTheSameText() {
    // When
    var result = textNormaliser.normalise("hello world");
    // Then
    assertThat(result).isEqualTo("hello world");
}
// Builds the root URL of the web application on the given service, always
// ending in "/". Non-web services fall back to a plain http:// URL of the
// endpoint authority.
// NOTE(review): the non-web fallback hardcodes "http://" — presumably an
// acceptable default for scanning, but confirm for TLS-only endpoints.
public static String buildWebApplicationRootUrl(NetworkService networkService) {
    checkNotNull(networkService);
    if (!isWebService(networkService)) {
        return "http://" + NetworkEndpointUtils.toUriAuthority(networkService.getNetworkEndpoint()) + "/";
    }
    // Scheme chosen by whether the service speaks plain HTTP or HTTPS;
    // application root path comes from the service context.
    String rootUrl = (isPlainHttp(networkService) ? "http://" : "https://")
            + buildWebUriAuthority(networkService)
            + buildWebAppRootPath(networkService);
    // Normalize to a trailing slash.
    return rootUrl.endsWith("/") ? rootUrl : rootUrl + "/";
}
// An http service with an application root of "/test_root" must yield
// "http://host:port/test_root/" (trailing slash appended).
@Test
public void buildWebApplicationRootUrl_whenHttpWithRootPath_buildsUrlWithExpectedRoot() {
    assertThat(
            NetworkServiceUtils.buildWebApplicationRootUrl(
                NetworkService.newBuilder()
                    .setNetworkEndpoint(forIpAndPort("127.0.0.1", 8080))
                    .setServiceName("http")
                    .setServiceContext(
                        ServiceContext.newBuilder()
                            .setWebServiceContext(
                                WebServiceContext.newBuilder().setApplicationRoot("/test_root")))
                    .build()))
        .isEqualTo("http://127.0.0.1:8080/test_root/");
}
/**
 * Returns the annotation of the given type declared on the target class,
 * or null when the class does not carry it.
 *
 * @param targetClass     class to inspect
 * @param annotationClass annotation type to look up
 * @return the annotation instance, or null when absent
 */
@Nullable
public static <T extends Annotation> T extract(Class<?> targetClass, Class<T> annotationClass) {
    // isAnnotationPresent(a) is specified as getAnnotation(a) != null, so the
    // original null-checks nested inside the "present" branch could never
    // fire; a single lookup with a debug log on absence is equivalent and live.
    T annotation = targetClass.getAnnotation(annotationClass);
    if (annotation == null && logger.isDebugEnabled()) {
        logger.debug("TargetClass has no annotation '{}'", annotationClass.getSimpleName());
    }
    return annotation;
}
// Extracting an annotation from a class that does not carry it must return null.
@Test
public void testExtract2() {
    CircuitBreaker circuitBreaker = AnnotationExtractor
        .extract(NotAnnotatedClass.class, CircuitBreaker.class);
    assertThat(circuitBreaker).isNull();
}
// Returns the base image(s) if *all* of their layers are present in the local
// cache; otherwise returns an empty list (meaning a pull is required).
// For a single-manifest cache entry, one image is returned; for a cached
// manifest list, every requested platform must resolve to a fully-cached
// manifest or the whole lookup fails.
@VisibleForTesting
List<Image> getCachedBaseImages()
    throws IOException, CacheCorruptedException, BadContainerConfigurationFormatException,
        LayerCountMismatchException, UnlistedPlatformInManifestListException,
        PlatformNotFoundInBaseImageException {
    ImageReference baseImage = buildContext.getBaseImageConfiguration().getImage();
    Optional<ImageMetadataTemplate> metadata =
        buildContext.getBaseImageLayersCache().retrieveMetadata(baseImage);
    if (!metadata.isPresent()) {
        // Nothing cached at all for this reference.
        return Collections.emptyList();
    }
    ManifestTemplate manifestList = metadata.get().getManifestList();
    List<ManifestAndConfigTemplate> manifestsAndConfigs = metadata.get().getManifestsAndConfigs();
    if (manifestList == null) {
        // Single-platform cache entry: exactly one manifest/config pair expected.
        Verify.verify(manifestsAndConfigs.size() == 1);
        ManifestAndConfigTemplate manifestAndConfig = manifestsAndConfigs.get(0);
        Optional<Image> cachedImage = getBaseImageIfAllLayersCached(manifestAndConfig, true);
        if (!cachedImage.isPresent()) {
            return Collections.emptyList();
        }
        return Collections.singletonList(cachedImage.get());
    }
    // Manifest list cached. Identify matching platforms and check if all of them are cached.
    ImmutableList.Builder<Image> images = ImmutableList.builder();
    for (Platform platform : buildContext.getContainerConfiguration().getPlatforms()) {
        String manifestDigest =
            lookUpPlatformSpecificImageManifest((ManifestListTemplate) manifestList, platform);
        Optional<ManifestAndConfigTemplate> manifestAndConfigFound =
            manifestsAndConfigs.stream()
                .filter(entry -> manifestDigest.equals(entry.getManifestDigest()))
                .findFirst();
        if (!manifestAndConfigFound.isPresent()) {
            // Platform listed in the manifest list but its manifest isn't cached.
            return Collections.emptyList();
        }
        Optional<Image> cachedImage = getBaseImageIfAllLayersCached(manifestAndConfigFound.get(), false);
        if (!cachedImage.isPresent()) {
            // Some layer of this platform's image is missing from the cache.
            return Collections.emptyList();
        }
        images.add(cachedImage.get());
    }
    return images.build();
}
// With a cached manifest list whose two platform-specific manifests both have
// all layers cached, getCachedBaseImages must return both images in platform
// order, each built from its own container configuration.
@Test
public void testGetCachedBaseImages_manifestListCached()
    throws InvalidImageReferenceException, IOException, CacheCorruptedException,
        UnlistedPlatformInManifestListException, BadContainerConfigurationFormatException,
        LayerCountMismatchException, PlatformNotFoundInBaseImageException {
    ImageReference imageReference = ImageReference.parse("cat");
    Mockito.when(buildContext.getBaseImageConfiguration())
        .thenReturn(ImageConfiguration.builder(imageReference).build());
    ContainerConfigurationTemplate containerConfigJson1 = new ContainerConfigurationTemplate();
    ContainerConfigurationTemplate containerConfigJson2 = new ContainerConfigurationTemplate();
    containerConfigJson1.setContainerUser("user1");
    containerConfigJson2.setContainerUser("user2");
    ManifestListTemplate manifestList = Mockito.mock(ManifestListTemplate.class);
    Mockito.when(manifestList.getDigestsForPlatform("arch1", "os1"))
        .thenReturn(Arrays.asList("sha256:digest1"));
    Mockito.when(manifestList.getDigestsForPlatform("arch2", "os2"))
        .thenReturn(Arrays.asList("sha256:digest2"));
    ImageMetadataTemplate imageMetadata =
        new ImageMetadataTemplate(
            manifestList,
            Arrays.asList(
                new ManifestAndConfigTemplate(
                    Mockito.mock(BuildableManifestTemplate.class), containerConfigJson1, "sha256:digest1"),
                new ManifestAndConfigTemplate(
                    Mockito.mock(BuildableManifestTemplate.class), containerConfigJson2, "sha256:digest2")));
    Mockito.when(cache.retrieveMetadata(imageReference)).thenReturn(Optional.of(imageMetadata));
    // Both platform manifests report all layers cached.
    Mockito.when(
            cache.areAllLayersCached(imageMetadata.getManifestsAndConfigs().get(0).getManifest()))
        .thenReturn(true);
    Mockito.when(
            cache.areAllLayersCached(imageMetadata.getManifestsAndConfigs().get(1).getManifest()))
        .thenReturn(true);
    Mockito.when(containerConfig.getPlatforms())
        .thenReturn(ImmutableSet.of(new Platform("arch1", "os1"), new Platform("arch2", "os2")));
    List<Image> images = pullBaseImageStep.getCachedBaseImages();
    Assert.assertEquals(2, images.size());
    Assert.assertEquals("user1", images.get(0).getUser());
    Assert.assertEquals("user2", images.get(1).getUser());
}
/**
 * Merges this stream with the given stream into one, using an auto-generated
 * processor name (delegates to the named overload with an empty name).
 */
@Override
public KStream<K, V> merge(final KStream<K, V> stream) {
    return merge(stream, NamedInternal.empty());
}
// Merging with a null stream must be rejected with an NPE carrying the
// documented message, even when a processor name is provided.
@Test
public void shouldNotAllowNullKStreamOnMergeWithNamed() {
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.merge(null, Named.as("merge")));
    assertThat(exception.getMessage(), equalTo("stream can't be null"));
}
/**
 * Encodes with the default (non-pretty/compact) mode — delegates to
 * {@code encode(false)}.
 */
@Override
public String encode() {
    return this.encode(false);
}
// Smoke test: runs topic/queue/file-segment sub-tests, then exercises all
// three encode() entry points.
// NOTE(review): the encode() calls are not asserted — they only verify that
// encoding does not throw; consider asserting on the returned strings.
@Test
public void basicTest() {
    this.testTopic();
    this.testQueue();
    this.testFileSegment();
    ((DefaultMetadataStore) metadataStore).encode();
    ((DefaultMetadataStore) metadataStore).encode(false);
    ((DefaultMetadataStore) metadataStore).encode(true);
}
/** Returns the history server's client-facing RPC service. */
@Private
public HistoryClientService getClientService() {
    return this.clientService;
}
// End-to-end check of the JobHistory client protocol: runs a 1x1 MR job to
// completion, starts a history server over it, and verifies task-attempt
// reports, task reports, invalid-job handling, completion events, and
// diagnostics through the MRClientProtocol.
@Test (timeout= 50000 )
public void testReports() throws Exception {
    Configuration config = new Configuration();
    config.setClass(
        CommonConfigurationKeysPublic.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
        MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(config);
    MRApp app = new MRAppWithHistory(1, 1, true, this.getClass().getName(), true);
    app.submit(config);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    app.waitForState(job, JobState.SUCCEEDED);
    historyServer = new JobHistoryServer();
    historyServer.init(config);
    historyServer.start();
    // search JobHistory service
    JobHistory jobHistory = null;
    for (Service service : historyServer.getServices()) {
        if (service instanceof JobHistory) {
            jobHistory = (JobHistory) service;
        }
    };
    Map<JobId, Job> jobs = jobHistory.getAllJobs();
    assertEquals(1, jobs.size());
    assertEquals("job_0_0000", jobs.keySet().iterator().next().toString());
    Task task = job.getTasks().values().iterator().next();
    TaskAttempt attempt = task.getAttempts().values().iterator().next();
    HistoryClientService historyService = historyServer.getClientService();
    MRClientProtocol protocol = historyService.getClientHandler();
    GetTaskAttemptReportRequest gtarRequest = recordFactory
        .newRecordInstance(GetTaskAttemptReportRequest.class);
    // test getTaskAttemptReport
    TaskAttemptId taId = attempt.getID();
    taId.setTaskId(task.getID());
    taId.getTaskId().setJobId(job.getID());
    gtarRequest.setTaskAttemptId(taId);
    GetTaskAttemptReportResponse response = protocol
        .getTaskAttemptReport(gtarRequest);
    assertEquals("container_0_0000_01_000000", response.getTaskAttemptReport()
        .getContainerId().toString());
    assertTrue(response.getTaskAttemptReport().getDiagnosticInfo().isEmpty());
    // counters
    assertNotNull(response.getTaskAttemptReport().getCounters()
        .getCounter(TaskCounter.PHYSICAL_MEMORY_BYTES));
    assertEquals(taId.toString(), response.getTaskAttemptReport()
        .getTaskAttemptId().toString());
    // test getTaskReport
    GetTaskReportRequest request = recordFactory
        .newRecordInstance(GetTaskReportRequest.class);
    TaskId taskId = task.getID();
    taskId.setJobId(job.getID());
    request.setTaskId(taskId);
    GetTaskReportResponse reportResponse = protocol.getTaskReport(request);
    assertEquals("", reportResponse.getTaskReport().getDiagnosticsList()
        .iterator().next());
    // progress
    assertEquals(1.0f, reportResponse.getTaskReport().getProgress(), 0.01);
    // report has corrected taskId
    assertEquals(taskId.toString(), reportResponse.getTaskReport().getTaskId()
        .toString());
    // Task state should be SUCCEEDED
    assertEquals(TaskState.SUCCEEDED, reportResponse.getTaskReport()
        .getTaskState());
    // For invalid jobid, throw IOException
    GetTaskReportsRequest gtreportsRequest =
        recordFactory.newRecordInstance(GetTaskReportsRequest.class);
    gtreportsRequest.setJobId(TypeConverter.toYarn(JobID
        .forName("job_1415730144495_0001")));
    gtreportsRequest.setTaskType(TaskType.REDUCE);
    try {
        protocol.getTaskReports(gtreportsRequest);
        fail("IOException not thrown for invalid job id");
    } catch (IOException e) {
        // Expected
    }
    // test getTaskAttemptCompletionEvents
    GetTaskAttemptCompletionEventsRequest taskAttemptRequest = recordFactory
        .newRecordInstance(GetTaskAttemptCompletionEventsRequest.class);
    taskAttemptRequest.setJobId(job.getID());
    GetTaskAttemptCompletionEventsResponse taskAttemptCompletionEventsResponse = protocol
        .getTaskAttemptCompletionEvents(taskAttemptRequest);
    assertEquals(0, taskAttemptCompletionEventsResponse.getCompletionEventCount());
    // test getDiagnostics
    GetDiagnosticsRequest diagnosticRequest = recordFactory
        .newRecordInstance(GetDiagnosticsRequest.class);
    diagnosticRequest.setTaskAttemptId(taId);
    GetDiagnosticsResponse diagnosticResponse = protocol
        .getDiagnostics(diagnosticRequest);
    // it is strange : why one empty string ?
    assertEquals(1, diagnosticResponse.getDiagnosticsCount());
    assertEquals("", diagnosticResponse.getDiagnostics(0));
}
// Copies a DMN simple type's value constraints onto an OpenAPI schema:
// allowed-values and type-constraints are each translated (when non-empty)
// into their corresponding x-dmn-* schema extension.
static void populateSchemaWithConstraints(Schema toPopulate, SimpleTypeImpl t) {
    if (t.getAllowedValues() != null && !t.getAllowedValues().isEmpty()) {
        parseSimpleType(DMNOASConstants.X_DMN_ALLOWED_VALUES, toPopulate,
                        t.getAllowedValuesFEEL(), t.getAllowedValues());
    }
    if (t.getTypeConstraint() != null && !t.getTypeConstraint().isEmpty()) {
        parseSimpleType(DMNOASConstants.X_DMN_TYPE_CONSTRAINTS, toPopulate,
                        t.getTypeConstraintFEEL(), t.getTypeConstraint());
    }
}
// Allowed values must surface both as the schema's enumeration (strings and
// numbers, with numbers materialized as BigDecimal) and as the raw
// x-dmn-allowedValues extension string.
@Test
void populateSchemaWithConstraintsForAllowedValues() {
    List<String> enumBase = Arrays.asList("DMN", "PMML", "JBPMN", "DRL");
    List<Object> toEnum = enumBase.stream().map(toMap -> String.format("\"%s\"", toMap)).collect(Collectors.toUnmodifiableList());
    String allowedValuesString = String.join(",", toEnum.stream().map(toMap -> String.format("%s", toMap)).toList());
    SimpleTypeImpl toRead = getSimpleType(allowedValuesString, null, FEEL_STRING, BuiltInType.STRING);
    AtomicReference<Schema> toPopulate = new AtomicReference<>(getSchemaForSimpleType(toRead));
    DMNTypeSchemas.populateSchemaWithConstraints(toPopulate.get(), toRead);
    assertEquals(enumBase.size(), toPopulate.get().getEnumeration().size());
    enumBase.forEach(en -> assertTrue(toPopulate.get().getEnumeration().contains(en)));
    assertTrue(toPopulate.get().getExtensions().containsKey(DMNOASConstants.X_DMN_ALLOWED_VALUES));
    String retrieved = ((String) toPopulate.get().getExtensions().get(DMNOASConstants.X_DMN_ALLOWED_VALUES)).replace(" ", "");
    assertEquals(allowedValuesString, retrieved);
    // Repeat with numeric allowed values (stored as BigDecimal in the enumeration).
    toEnum = Arrays.asList(1, 3, 6, 78);
    allowedValuesString = String.join(",", toEnum.stream().map(toMap -> String.format("%s", toMap)).toList());
    toRead = getSimpleType(allowedValuesString, null, FEEL_NUMBER, BuiltInType.NUMBER);
    toPopulate.set(getSchemaForSimpleType(toRead));
    DMNTypeSchemas.populateSchemaWithConstraints(toPopulate.get(), toRead);
    assertEquals(toEnum.size(), toPopulate.get().getEnumeration().size());
    toEnum.stream().map(i -> BigDecimal.valueOf((int) i)).forEach(en -> assertTrue(toPopulate.get().getEnumeration().contains(en)));
    assertTrue(toPopulate.get().getExtensions().containsKey(DMNOASConstants.X_DMN_ALLOWED_VALUES));
    retrieved = ((String) toPopulate.get().getExtensions().get(DMNOASConstants.X_DMN_ALLOWED_VALUES)).replace(" " , "");
    assertEquals(allowedValuesString, retrieved);
}
/** Returns the provider's shared default class loader. */
@Override
public ClassLoader getDefaultClassLoader() {
    return DEFAULT_CLASS_LOADER;
}
// The default class loader must resolve JDK classes both in the normal case
// and when the thread's context class loader is null (fallback path).
@Test
public void loadClass_found() {
    runWithClassloader(provider -> {
        try {
            provider.getDefaultClassLoader().loadClass(Object.class.getName());
        } catch (ClassNotFoundException e) {
            Assert.fail("", e);
        }
    });
    // Exercise the null-context-class-loader fallback; always restore the
    // original loader afterwards.
    var classLoader = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(null);
    try (var provider = new CaffeineCachingProvider()) {
        provider.getDefaultClassLoader().loadClass(Object.class.getName());
    } catch (ClassNotFoundException e) {
        Assert.fail("", e);
    } finally {
        Thread.currentThread().setContextClassLoader(classLoader);
    }
}
public boolean audit(HttpServletRequest request, long consumerId) { //ignore GET request if ("GET".equalsIgnoreCase(request.getMethod())) { return true; } String uri = request.getRequestURI(); if (!Strings.isNullOrEmpty(request.getQueryString())) { uri += "?" + request.getQueryString(); } ConsumerAudit consumerAudit = new ConsumerAudit(); Date now = new Date(); consumerAudit.setConsumerId(consumerId); consumerAudit.setUri(uri); consumerAudit.setMethod(request.getMethod()); consumerAudit.setDataChangeCreatedTime(now); consumerAudit.setDataChangeLastModifiedTime(now); //throw away audits if exceeds the max size return this.audits.offer(consumerAudit); }
// Auditing a non-GET request must enqueue one entry whose uri includes the
// query string and whose method/consumerId match the request.
@Test
public void audit() throws Exception {
    long someConsumerId = 1;
    String someUri = "someUri";
    String someQuery = "someQuery";
    String someMethod = "someMethod";
    when(request.getRequestURI()).thenReturn(someUri);
    when(request.getQueryString()).thenReturn(someQuery);
    when(request.getMethod()).thenReturn(someMethod);
    // Capture the batch passed to createConsumerAudits asynchronously.
    SettableFuture<List<ConsumerAudit>> result = SettableFuture.create();
    doAnswer((Answer<Void>) invocation -> {
        Object[] args = invocation.getArguments();
        result.set((List<ConsumerAudit>) args[0]);
        return null;
    }).when(consumerService).createConsumerAudits(anyCollection());
    consumerAuditUtil.audit(request, someConsumerId);
    List<ConsumerAudit> audits = result.get(batchTimeout * 5, batchTimeUnit);
    assertEquals(1, audits.size());
    ConsumerAudit audit = audits.get(0);
    assertEquals(String.format("%s?%s", someUri, someQuery), audit.getUri());
    assertEquals(someMethod, audit.getMethod());
    assertEquals(someConsumerId, audit.getConsumerId());
}
/**
 * Replaces this table's index list, lazily creating the TableIndexes holder
 * on first use.
 */
public void setIndexes(List<Index> indexes) {
    if (this.indexes == null) {
        this.indexes = new TableIndexes(null);
    }
    this.indexes.setIndexes(indexes);
}
// Exercises OLAP table index assignment and related-materialized-view
// add/remove bookkeeping on a freshly created test database.
@Test
public void testTableWithLocalTablet() throws IOException {
    new MockUp<GlobalStateMgr>() {
        @Mock
        int getCurrentStateJournalVersion() {
            return FeConstants.META_VERSION;
        }
    };
    Database db = UnitTestUtil.createDb(1, 2, 3, 4, 5, 6, 7, KeysType.AGG_KEYS);
    List<Table> tables = db.getTables();
    for (Table table : tables) {
        if (table.getType() != TableType.OLAP) {
            continue;
        }
        OlapTable tbl = (OlapTable) table;
        tbl.setIndexes(Lists.newArrayList(new Index("index", Lists.newArrayList(ColumnId.create("col")),
                IndexDef.IndexType.BITMAP, "xxxxxx")));
        System.out.println("orig table id: " + tbl.getId());
        MvId mvId1 = new MvId(db.getId(), 10L);
        tbl.addRelatedMaterializedView(mvId1);
        MvId mvId2 = new MvId(db.getId(), 20L);
        tbl.addRelatedMaterializedView(mvId2);
        MvId mvId3 = new MvId(db.getId(), 30L);
        tbl.addRelatedMaterializedView(mvId3);
        // All three MV ids must be registered...
        Assert.assertEquals(Sets.newHashSet(10L, 20L, 30L),
                tbl.getRelatedMaterializedViews().stream().map(mvId -> mvId.getId()).collect(Collectors.toSet()));
        tbl.removeRelatedMaterializedView(mvId1);
        tbl.removeRelatedMaterializedView(mvId2);
        // ...and removals must leave only the remaining id.
        Assert.assertEquals(Sets.newHashSet(30L),
                tbl.getRelatedMaterializedViews().stream().map(mvId -> mvId.getId()).collect(Collectors.toSet()));
        tbl.removeRelatedMaterializedView(mvId3);
        Assert.assertEquals(Sets.newHashSet(), tbl.getRelatedMaterializedViews());
    }
}
// Converts a "type" or "type=matcher"-style string into a MatchType. Without
// the delimiter the whole string is the rate-limit type and the matcher is null.
// NOTE(review): split(DELIMITER) treats the delimiter as a regex — confirm
// DELIMITER contains no regex metacharacters; matchType[1] will also throw
// ArrayIndexOutOfBoundsException for input like "url=" (trailing delimiter).
@Override
public MatchType convert(@NotNull String type) {
    if (type.contains(DELIMITER)) {
        String[] matchType = type.split(DELIMITER);
        return new MatchType(RateLimitType.valueOf(matchType[0].toUpperCase()), matchType[1]);
    }
    return new MatchType(RateLimitType.valueOf(type.toUpperCase()), null);
}
// A delimiter-less string must map to the matching RateLimitType with a null matcher.
@Test
public void testConvertStringTypeOnly() {
    MatchType matchType = target.convert("url");
    assertThat(matchType).isNotNull();
    assertThat(matchType.getType()).isEqualByComparingTo(RateLimitType.URL);
    assertThat(matchType.getMatcher()).isNull();
}
// Builds a HostnameVerifier that skips hostname verification for certificates
// signed by the RM's own CA, falling back to the standard verifier otherwise.
// NOTE(review): the "certs.length == 2" check presumably identifies the RM CA's
// two-element chain (leaf + CA); confirm this invariant holds for all chains
// the proxy issues.
private HostnameVerifier createHostnameVerifier() {
    HostnameVerifier defaultHostnameVerifier =
        new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault());
    return new HostnameVerifier() {
        @Override
        public boolean verify(String host, SSLSession sslSession) {
            try {
                Certificate[] certs = sslSession.getPeerCertificates();
                if (certs.length == 2) {
                    // Make sure this is one of our certs. More thorough checking would
                    // have already been done by the SSLContext
                    certs[0].verify(caKeyPair.getPublic());
                    LOG.debug("Verified certificate signed by RM CA, " +
                        "skipping hostname verification");
                    return true;
                }
            } catch (SSLPeerUnverifiedException e) {
                // No certificate
                return false;
            } catch (CertificateException | NoSuchAlgorithmException
                | InvalidKeyException | SignatureException | NoSuchProviderException e) {
                // fall back to normal verifier below
                LOG.debug("Could not verify certificate with RM CA, " +
                    "falling back to default hostname verification", e);
            }
            return defaultHostnameVerifier.verify(host, sslSession);
        }
    };
}
// A peer chain issued by the proxy CA itself must pass hostname verification
// for any host name (CA-signed certs bypass the hostname check).
@Test
void testCreateHostnameVerifier() throws Exception {
    ProxyCA proxyCA = new ProxyCA();
    proxyCA.init();
    HostnameVerifier verifier = proxyCA.getHostnameVerifier();
    SSLSession sslSession = Mockito.mock(SSLSession.class);
    Mockito.when(sslSession.getPeerCertificates()).thenReturn(
        KeyStoreTestUtil.bytesToKeyStore(
            proxyCA.createChildKeyStore(
                ApplicationId.newInstance(System.currentTimeMillis(), 1),
                "password"), "password").getCertificateChain("server"));
    assertTrue(verifier.verify("foo", sslSession));
}
// Windows the incoming row flow into batches (time-based, size-based, or both)
// and pushes each non-empty batch through the sub-transformation in parallel,
// yielding the per-batch Results as a blocking iterable. Operator order is
// significant: filtering happens both before and after bufferFilter so empty
// batches never reach the subtrans.
@Override
public Iterable<Result> buffer( Flowable<I> flowable ) {
    // Choose the buffering strategy: time+size, time-only, or size-only.
    Flowable<List<I>> buffer = millis > 0
        ? batchSize > 0
            ? flowable.buffer( millis, MILLISECONDS, Schedulers.io(), batchSize, ArrayList::new, true )
            : flowable.buffer( millis, MILLISECONDS )
        : flowable.buffer( batchSize );
    return buffer
        .parallel( parallelism, rxBatchCount )
        // Run batches on the shared pool when configured, else the IO scheduler.
        .runOn( sharedStreamingBatchPoolSize > 0
            ? Schedulers.from( sharedStreamingBatchPool ) : Schedulers.io(), rxBatchCount )
        .filter( list -> !list.isEmpty() )
        .map( this.bufferFilter ) // apply any filtering for data that should no longer be processed
        .filter( list -> !list.isEmpty() ) // ensure at least one record is left before sending to subtrans
        .map( this::sendBufferToSubtrans )
        .filter( Optional::isPresent )
        .map( Optional::get )
        .sequential()
        .doOnNext( this::failOnError )
        .doOnNext( postProcessor )
        .map( Map.Entry::getValue )
        .blockingIterable();
}
// Each windowed batch executed by the subtrans must surface its Result back
// to the parent via the window's blocking iterable.
@Test
public void resultsComeBackToParent() throws KettleException {
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( new ValueMetaString( "field" ) );
    Result mockResult = new Result();
    mockResult.setRows( Arrays.asList(
        new RowMetaAndData( rowMeta, "queen" ), new RowMetaAndData( rowMeta, "king" ) ) );
    when( subtransExecutor.execute( any() ) ).thenReturn( Optional.of( mockResult ) );
    when( subtransExecutor.getPrefetchCount() ).thenReturn( 10 );
    FixedTimeStreamWindow<List> window =
        new FixedTimeStreamWindow<>( subtransExecutor, rowMeta, 0, 2, 1 );
    window.buffer( Flowable.fromIterable( singletonList( asList( "v1", "v2" ) ) ) )
        .forEach( result -> assertEquals( mockResult, result ) );
}
/**
 * Decides, from column-chunk statistics alone, whether a row group can be
 * skipped entirely for the given predicate.
 *
 * @param pred    filter predicate; must not be null
 * @param columns column chunk metadata for the row group; must not be null
 * @return true when statistics prove no row can match (safe to drop)
 */
public static boolean canDrop(FilterPredicate pred, List<ColumnChunkMetaData> columns) {
    Objects.requireNonNull(pred, "pred cannot be null");
    Objects.requireNonNull(columns, "columns cannot be null");
    // Visitor evaluates the predicate against per-column min/max/null statistics.
    return pred.accept(new StatisticsFilter(columns));
}
// Greater-than pruning: droppable only when the literal is >= the column max,
// when the column is all-null, or when the column is missing entirely;
// columns without min/max stats can never be dropped.
@Test
public void testGt() {
    assertFalse(canDrop(gt(intColumn, 9), columnMetas));
    assertFalse(canDrop(gt(intColumn, 10), columnMetas));
    assertTrue(canDrop(gt(intColumn, 100), columnMetas));
    assertTrue(canDrop(gt(intColumn, 101), columnMetas));
    assertTrue(canDrop(gt(intColumn, 0), nullColumnMetas));
    assertTrue(canDrop(gt(intColumn, 7), nullColumnMetas));
    assertTrue(canDrop(gt(missingColumn, fromString("any")), columnMetas));
    assertFalse(canDrop(gt(intColumn, 0), missingMinMaxColumnMetas));
    assertFalse(canDrop(gt(doubleColumn, 0.0), missingMinMaxColumnMetas));
}
// Parses "HH:mm:ss" into java.sql.Time via Time.valueOf; on failure, rethrows
// as a runtime parse exception that names the expected SQL_TIME_FORMAT.
static java.sql.Time parseSqlTime(final String value) {
    try {
        // JDK format in Time.valueOf is compatible with DATE_FORMAT
        return Time.valueOf(value);
    } catch (IllegalArgumentException e) {
        return throwRuntimeParseException(value, new ParseException(value, 0), SQL_TIME_FORMAT);
    }
}
// Round-trip: a Time formatted back to string must parse to an equal Time.
@Test
public void testTimeWithTrailingZeros() throws Exception {
    // Given
    Time expectedTime = new Time(
        new SimpleDateFormat(SQL_TIME_FORMAT)
            .parse("10:20:30")
            .getTime()
    );
    // When
    Time actualTime = DateHelper.parseSqlTime(expectedTime.toString());
    // Then
    assertSqlTimesEqual(expectedTime, actualTime);
}
// Starts a workflow instance inside a retryable transaction, honoring the
// given run strategy. Returns 0 for a duplicate request; otherwise the count
// reported by the strategy-specific insert/start path. Terminal-status
// instances bypass the strategy and are persisted as already terminated.
public int startWithRunStrategy(
    @NotNull WorkflowInstance instance, @NotNull RunStrategy runStrategy) {
    return withMetricLogError(
        () ->
            withRetryableTransaction(
                conn -> {
                    // Reserve the next instance id up front; only published
                    // below if this call actually created that instance.
                    final long nextInstanceId = getLatestInstanceId(conn, instance.getWorkflowId()) + 1;
                    if (isDuplicated(conn, instance)) {
                        return 0;
                    }
                    completeInstanceInit(conn, nextInstanceId, instance);
                    int res;
                    if (instance.getStatus().isTerminal()) {
                        // Save it directly and send a terminate event
                        res = addTerminatedInstance(conn, instance);
                    } else {
                        switch (runStrategy.getRule()) {
                            case SEQUENTIAL:
                            case PARALLEL:
                            case STRICT_SEQUENTIAL:
                                res = insertInstance(conn, instance, true, null);
                                break;
                            case FIRST_ONLY:
                                res = startFirstOnlyInstance(conn, instance);
                                break;
                            case LAST_ONLY:
                                res = startLastOnlyInstance(conn, instance);
                                break;
                            default:
                                throw new MaestroInternalError(
                                    "When start, run strategy [%s] is not supported.", runStrategy);
                        }
                    }
                    // Publish the new latest id only when this instance claimed it.
                    if (instance.getWorkflowInstanceId() == nextInstanceId) {
                        updateLatestInstanceId(conn, instance.getWorkflowId(), nextInstanceId);
                    }
                    return res;
                }),
        "startWithRunStrategy",
        "Failed to start a workflow [{}][{}] with run strategy [{}]",
        instance.getWorkflowId(),
        instance.getWorkflowUuid(),
        runStrategy);
}
// Starting an instance of a workflow that has been removed must surface
// MaestroNotFoundException rather than silently creating an instance.
@Test public void testStartWithRunStrategyForDeletedWorkflow() { MaestroTestHelper.removeWorkflow(dataSource, TEST_WORKFLOW_ID); wfi.setWorkflowInstanceId(0L); wfi.setWorkflowRunId(0L); wfi.setWorkflowUuid("test-uuid"); AssertHelper.assertThrows( "cannot start a deleted workflow", MaestroNotFoundException.class, "Cannot find workflow [sample-dag-test-3]", () -> runStrategyDao.startWithRunStrategy(wfi, Defaults.DEFAULT_RUN_STRATEGY)); }
/**
 * Parses a multi-value config-server parameter string into an array of
 * {@code ConfigServer}, one element per entry in the parameter.
 */
static ConfigServer[] toConfigServers(String configserversString) {
    return multiValueParameterStream(configserversString)
            .map(CloudConfigInstallVariables::toConfigServer)
            .toArray(ConfigServer[]::new);
}
// A single hostname string must parse into exactly one ConfigServer entry.
@Test public void test_configserver_parsing() { CloudConfigOptions.ConfigServer[] parsed = toConfigServers("myhost.mydomain.com"); assertEquals(1, parsed.length); }
/**
 * Reports whether the given JWT is expired.
 * A token whose expiration claim is before "now" is expired; if claim parsing
 * itself fails with an EXPIRED_TOKEN error, the token is treated as expired,
 * while any other JwtErrorException is propagated to the caller.
 */
@Override
public boolean isTokenExpired(String token) {
    try {
        return getClaimsFromToken(token).getExpiration().before(new Date());
    } catch (JwtErrorException e) {
        if (JwtErrorCode.EXPIRED_TOKEN.equals(e.getErrorCode())) {
            return true;
        }
        throw e;
    }
}
@Test @DisplayName("토큰의 만료되지 않았을 때 false를 반환한다.") public void isTokenExpired() { // given String token = jwtProvider.generateToken(jwtClaims); // when jwtProvider.isTokenExpired(token); // then assertFalse(jwtProvider.isTokenExpired(token)); }
/**
 * Reads the next encoded value from the stream and decodes it into a
 * BouncyCastle {@link ASN1Primitive}, verifying it is an instance of the
 * requested type before returning it.
 *
 * @throws Asn1Exception if decoding fails or the decoded primitive is not of
 *                       the expected type
 */
@Override
public ASN1Primitive deserialize(Asn1ObjectInputStream in, Class<? extends ASN1Primitive> type, Asn1ObjectMapper mapper) {
    final ASN1Primitive value;
    try {
        value = ASN1Primitive.fromByteArray(in.advanceToByteArray());
    } catch (IOException e) {
        throw new Asn1Exception("Could not decode ASN1Primitive", e);
    }
    if (!type.isInstance(value)) {
        // Bug fix: the expected type was passed as a second constructor argument
        // (a stray comma instead of '+'), splitting the diagnostic message in two.
        throw new Asn1Exception("Property is a " + value.getClass() + " expected " + type);
    }
    return value;
}
// DER bytes 06 03 2a 03 04 (OBJECT IDENTIFIER, length 3) must decode to OID 1.2.3.4.
@Test public void shouldDeserialize() { assertEquals(new ASN1ObjectIdentifier("1.2.3.4"), deserialize( new BouncyCastlePrimitiveConverter(), ASN1ObjectIdentifier.class, new byte[] { 0x06, 3, 0x2a, 0x03, 0x04 }, true )); }
/**
 * Streams the hits produced by this search.
 * With zero posting lists no document can match, so an empty stream is
 * returned without constructing a spliterator.
 */
public Stream<Hit> stream() {
    return nPostingLists == 0
            ? Stream.empty()
            : StreamSupport.stream(new PredicateSpliterator(), false);
}
// A document is only emitted as a hit once every posting list the predicate
// requires has matched it; here six lists all cover doc 0.
@Test void requireThatMatchCanRequireMultiplePostingLists() { PredicateSearch search = createPredicateSearch( new byte[]{6}, postingList(SubqueryBitmap.ALL_SUBQUERIES, entry(0, 0x00010001)), postingList(SubqueryBitmap.ALL_SUBQUERIES, entry(0, 0x0002000b, 0x00030003)), postingList(SubqueryBitmap.ALL_SUBQUERIES, entry(0, 0x00040003)), postingList(SubqueryBitmap.ALL_SUBQUERIES, entry(0, 0x00050004)), postingList(SubqueryBitmap.ALL_SUBQUERIES, entry(0, 0x00010008, 0x00060006)), postingList(SubqueryBitmap.ALL_SUBQUERIES, entry(0, 0x00020002, 0x000700ff))); assertEquals(List.of(new Hit(0)).toString(), search.stream().toList().toString()); }
// Serializes a MultipartPayload into MIME wire format (RFC 2046 style):
// the preamble (if any) is always written first; when at least one body part
// exists, each part is framed by "--boundary" CRLF, followed by its headers,
// a blank line, and its content (nested multiparts are serialized recursively),
// then the message is closed with "--boundary--" and the optional epilogue.
// Unknown BodyPartPayload subclasses indicate a programming error (AssertionError).
public static ByteArrayOutputStream getPayload(MultipartPayload multipartPayload) throws IOException { final ByteArrayOutputStream os = new ByteArrayOutputStream(); final String preamble = multipartPayload.getPreamble(); if (preamble != null) { os.write((preamble + "\r\n").getBytes()); } final List<BodyPartPayload> bodyParts = multipartPayload.getBodyParts(); if (!bodyParts.isEmpty()) { final String boundary = multipartPayload.getBoundary(); final byte[] startBoundary = ("--" + boundary + "\r\n").getBytes(); for (BodyPartPayload bodyPart : bodyParts) { os.write(startBoundary); final Map<String, String> bodyPartHeaders = bodyPart.getHeaders(); if (bodyPartHeaders != null) { for (Map.Entry<String, String> header : bodyPartHeaders.entrySet()) { os.write((header.getKey() + ": " + header.getValue() + "\r\n").getBytes()); } } os.write("\r\n".getBytes()); if (bodyPart instanceof MultipartPayload) { getPayload((MultipartPayload) bodyPart).writeTo(os); } else if (bodyPart instanceof ByteArrayBodyPartPayload) { final ByteArrayBodyPartPayload byteArrayBodyPart = (ByteArrayBodyPartPayload) bodyPart; os.write(byteArrayBodyPart.getPayload(), byteArrayBodyPart.getOff(), byteArrayBodyPart.getLen()); } else { throw new AssertionError(bodyPart.getClass()); } os.write("\r\n".getBytes()); //CRLF for the next (starting or closing) boundary } os.write(("--" + boundary + "--").getBytes()); final String epilogue = multipartPayload.getEpilogue(); if (epilogue != null) { os.write(("\r\n" + epilogue).getBytes()); } } return os; }
// End-to-end check of nested multipart serialization against an RFC 2046-style
// sample message: a multipart/mixed outer payload with a preamble, plain and
// typed text parts, a nested multipart/parallel payload (audio + image parts
// with explicit headers), and enriched/rfc822 parts — compared byte-for-byte
// against the expected wire format, including boundaries and the epilogue-free close.
@Test public void testComplexMultipartPayload() throws IOException { final MultipartPayload mP = new MultipartPayload("mixed", "unique-boundary-1"); mP.setPreamble("This is the preamble area of a multipart message.\n" + "Mail readers that understand multipart format\n" + "should ignore this preamble.\n" + "\n" + "If you are reading this text, you might want to\n" + "consider changing to a mail reader that understands\n" + "how to properly display multipart messages.\n"); mP.addBodyPart(new ByteArrayBodyPartPayload("... Some text appears here ...".getBytes())); mP.addBodyPart(new ByteArrayBodyPartPayload(("This could have been part of the previous part, but\n" + "illustrates explicit versus implicit typing of body\n" + "parts.\n").getBytes(), "text/plain; charset=US-ASCII")); final MultipartPayload innerMP = new MultipartPayload("parallel", "unique-boundary-2"); mP.addBodyPart(innerMP); final Map<String, String> audioHeaders = new LinkedHashMap<>(); audioHeaders.put("Content-Type", "audio/basic"); audioHeaders.put("Content-Transfer-Encoding", "base64"); innerMP.addBodyPart(new ByteArrayBodyPartPayload(("... base64-encoded 8000 Hz single-channel\n" + " mu-law-format audio data goes here ...").getBytes(), audioHeaders)); final Map<String, String> imageHeaders = new LinkedHashMap<>(); imageHeaders.put("Content-Type", "image/jpeg"); imageHeaders.put("Content-Transfer-Encoding", "base64"); innerMP.addBodyPart(new ByteArrayBodyPartPayload("... 
base64-encoded image data goes here ...".getBytes(), imageHeaders)); mP.addBodyPart(new ByteArrayBodyPartPayload(("This is <bold><italic>enriched.</italic></bold>\n" + "<smaller>as defined in RFC 1896</smaller>\n" + "\n" + "Isn't it\n" + "<bigger><bigger>cool?</bigger></bigger>\n").getBytes(), "text/enriched")); mP.addBodyPart(new ByteArrayBodyPartPayload(("From: (mailbox in US-ASCII)\n" + "To: (address in US-ASCII)\n" + "Subject: (subject in US-ASCII)\n" + "Content-Type: Text/plain; charset=ISO-8859-1\n" + "Content-Transfer-Encoding: Quoted-printable\n" + "\n" + "... Additional text in ISO-8859-1 goes here ...\n").getBytes(), "message/rfc822")); final StringBuilder headersString = new StringBuilder(); for (Map.Entry<String, String> header : mP.getHeaders().entrySet()) { headersString.append(header.getKey()) .append(": ") .append(header.getValue()) .append("\r\n"); } assertEquals("Content-Type: multipart/mixed; boundary=\"unique-boundary-1\"\r\n", headersString.toString()); assertEquals("This is the preamble area of a multipart message.\n" + "Mail readers that understand multipart format\n" + "should ignore this preamble.\n" + "\n" + "If you are reading this text, you might want to\n" + "consider changing to a mail reader that understands\n" + "how to properly display multipart messages.\n" + "\r\n--unique-boundary-1\r\n" + "\r\n" + "... Some text appears here ..." + "\r\n--unique-boundary-1\r\n" + "Content-Type: text/plain; charset=US-ASCII\r\n" + "\r\n" + "This could have been part of the previous part, but\n" + "illustrates explicit versus implicit typing of body\n" + "parts.\n" + "\r\n--unique-boundary-1\r\n" + "Content-Type: multipart/parallel; boundary=\"unique-boundary-2\"\r\n" + "\r\n--unique-boundary-2\r\n" + "Content-Type: audio/basic\r\n" + "Content-Transfer-Encoding: base64\r\n" + "\r\n" + "... base64-encoded 8000 Hz single-channel\n" + " mu-law-format audio data goes here ..." 
+ "\r\n--unique-boundary-2\r\n" + "Content-Type: image/jpeg\r\n" + "Content-Transfer-Encoding: base64\r\n" + "\r\n" + "... base64-encoded image data goes here ..." + "\r\n--unique-boundary-2--" + "\r\n--unique-boundary-1\r\n" + "Content-Type: text/enriched\r\n" + "\r\n" + "This is <bold><italic>enriched.</italic></bold>\n" + "<smaller>as defined in RFC 1896</smaller>\n" + "\n" + "Isn't it\n" + "<bigger><bigger>cool?</bigger></bigger>\n" + "\r\n--unique-boundary-1\r\n" + "Content-Type: message/rfc822\r\n" + "\r\n" + "From: (mailbox in US-ASCII)\n" + "To: (address in US-ASCII)\n" + "Subject: (subject in US-ASCII)\n" + "Content-Type: Text/plain; charset=ISO-8859-1\n" + "Content-Transfer-Encoding: Quoted-printable\n" + "\n" + "... Additional text in ISO-8859-1 goes here ...\n" + "\r\n--unique-boundary-1--", MultipartUtils.getPayload(mP).toString()); }
// Returns up to appsNum application reports whose start time falls within
// [appStartedTimeBegin, appStartedTimeEnd]. Apps outside the window do not
// count toward the limit. NOTE(review): when more than appsNum apps qualify,
// which ones are returned depends on the iteration order of the history map.
@Override public Map<ApplicationId, ApplicationReport> getApplications(long appsNum, long appStartedTimeBegin, long appStartedTimeEnd) throws IOException { Map<ApplicationId, ApplicationHistoryData> histData = historyStore.getAllApplications(); HashMap<ApplicationId, ApplicationReport> applicationsReport = new HashMap<ApplicationId, ApplicationReport>(); int count = 0; for (Entry<ApplicationId, ApplicationHistoryData> entry : histData .entrySet()) { if (count == appsNum) { break; } long appStartTime = entry.getValue().getStartTime(); if (appStartTime < appStartedTimeBegin || appStartTime > appStartedTimeEnd) { continue; } applicationsReport.put(entry.getKey(), convertToApplicationReport(entry.getValue())); count++; } return applicationsReport; }
// Only apps started within [2000, 5000] are returned, capped at 2.
// Bug fix: the map is keyed by ApplicationId, so the original lookups with
// String keys ("1", "2", "3") were always null, making those assertions
// vacuous. The assertions now use the real ApplicationId keys: appId1
// (started at 1000) is filtered out by the time window, appId2 and appId3
// (3000 and 4000) are both inside it.
@Test void testApplications() throws IOException {
    ApplicationId appId1 = ApplicationId.newInstance(0, 1);
    ApplicationId appId2 = ApplicationId.newInstance(0, 2);
    ApplicationId appId3 = ApplicationId.newInstance(0, 3);
    writeApplicationStartData(appId1, 1000);
    writeApplicationFinishData(appId1);
    writeApplicationStartData(appId2, 3000);
    writeApplicationFinishData(appId2);
    writeApplicationStartData(appId3, 4000);
    writeApplicationFinishData(appId3);
    Map<ApplicationId, ApplicationReport> reports =
        applicationHistoryManagerImpl.getApplications(2, 2000L, 5000L);
    assertNotNull(reports);
    assertEquals(2, reports.size());
    assertNull(reports.get(appId1));
    assertNotNull(reports.get(appId2));
    assertNotNull(reports.get(appId3));
}
/**
 * Reports whether a znode exists at the given path.
 * Any client or connection failure is deliberately treated as "does not exist"
 * rather than being propagated.
 */
@Override
public boolean checkExists(String path) {
    try {
        // A non-null Stat from the Curator client means the node exists.
        return client.checkExists().forPath(path) != null;
    } catch (Exception ignored) {
        // Best-effort semantics: failures are reported as absence.
        return false;
    }
}
// checkExists must be true for a path that was just created and false for a
// non-existent child of it.
@Test void testCheckExists() { String path = "/dubbo/org.apache.dubbo.demo.DemoService/providers"; curatorClient.create(path, false, true); assertThat(curatorClient.checkExists(path), is(true)); assertThat(curatorClient.checkExists(path + "/noneexits"), is(false)); }
// Synchronously sends an UPDATE_AND_CREATE_SUBSCRIPTIONGROUP request (with the
// serialized config as body) to the broker, going through the VIP channel when
// enabled. Returns normally on SUCCESS; any other response code is surfaced as
// MQClientException carrying the broker's code and remark.
public void createSubscriptionGroup(final String addr, final SubscriptionGroupConfig config, final long timeoutMillis) throws RemotingException, InterruptedException, MQClientException { RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.UPDATE_AND_CREATE_SUBSCRIPTIONGROUP, null); byte[] body = RemotingSerializable.encode(config); request.setBody(body); RemotingCommand response = this.remotingClient.invokeSync(MixAll.brokerVIPChannel(this.clientConfig.isVipChannelEnabled(), addr), request, timeoutMillis); assert response != null; switch (response.getCode()) { case ResponseCode.SUCCESS: { return; } default: break; } throw new MQClientException(response.getCode(), response.getRemark()); }
// When the broker answers SUCCESS, createSubscriptionGroup completes without throwing.
@Test public void testCreateSubscriptionGroup_Success() throws Exception { doAnswer((Answer<RemotingCommand>) mock -> { RemotingCommand request = mock.getArgument(1); RemotingCommand response = RemotingCommand.createResponseCommand(null); response.setCode(ResponseCode.SUCCESS); response.setOpaque(request.getOpaque()); return response; }).when(remotingClient).invokeSync(anyString(), any(RemotingCommand.class), anyLong()); mqClientAPI.createSubscriptionGroup(brokerAddr, new SubscriptionGroupConfig(), 10000); }
/**
 * Writes a single row mutation by delegating to the batch variant with a
 * one-element list.
 *
 * @throws BigtableResourceManagerException if the underlying batch write fails
 */
public void write(RowMutation tableRow) throws BigtableResourceManagerException {
    write(ImmutableList.of(tableRow));
}
// Writing through a manager whose Bigtable instance was never created must fail
// with IllegalStateException, for both the single-row and batch overloads.
@Test public void testWriteShouldThrowErrorWhenInstanceDoesNotExist() { assertThrows( IllegalStateException.class, () -> testManager.write(RowMutation.create(TABLE_ID, "sample-key"))); assertThrows( IllegalStateException.class, () -> testManager.write(ImmutableList.of(RowMutation.create(TABLE_ID, "sample-key")))); }
/**
 * Binds this URL's concrete path segments to the parameter placeholders of the
 * given template (e.g. ":state" in "/api/jobs/:state") and returns a RequestUrl
 * carrying the captured parameter map.
 */
public RequestUrl toRequestUrl(String matchUrl) {
    Map<String, String> params = new HashMap<>();
    Iterator<UrlPathPart> templateParts = new MatchUrl(matchUrl).pathParts.iterator();
    Iterator<UrlPathPart> concreteParts = pathParts.iterator();
    // Walk both paths in lockstep; wherever the template declares a parameter,
    // capture the corresponding concrete segment.
    while (templateParts.hasNext() && concreteParts.hasNext()) {
        UrlPathPart templatePart = templateParts.next();
        UrlPathPart concretePart = concreteParts.next();
        if (templatePart instanceof UrlParamPathPart) {
            params.put(templatePart.part(), concretePart.part());
        }
    }
    return new RequestUrl(url, params);
}
// The ":state" template parameter must be captured from the concrete path even
// when the URL carries a query string.
@Test void testToRequestUrlWithQueryParams() { RequestUrl requestUrl = new MatchUrl("/api/jobs/enqueued?offset=2").toRequestUrl("/api/jobs/:state"); assertThat(requestUrl.param(":state")).isEqualTo("enqueued"); }
// DirectRunner entry point: round-trips the pipeline options through JSON (to
// validate serializability), rewrites the pipeline, builds the direct-execution
// graph and keyed-PValue tracking via traversals, validates display data, then
// starts the parallel executor backed by a daemonless metrics thread pool.
// When blockOnRun is set, waits for completion and unwraps UserCodeException
// causes into PipelineExecutionException. Metrics support is enabled for the
// duration of the run and reset in the finally block.
@Override public DirectPipelineResult run(Pipeline pipeline) { try { options = MAPPER .readValue(MAPPER.writeValueAsBytes(options), PipelineOptions.class) .as(DirectOptions.class); } catch (IOException e) { throw new IllegalArgumentException( "PipelineOptions specified failed to serialize to JSON.", e); } performRewrites(pipeline); MetricsEnvironment.setMetricsSupported(true); try { DirectGraphVisitor graphVisitor = new DirectGraphVisitor(); pipeline.traverseTopologically(graphVisitor); @SuppressWarnings("rawtypes") KeyedPValueTrackingVisitor keyedPValueVisitor = KeyedPValueTrackingVisitor.create(); pipeline.traverseTopologically(keyedPValueVisitor); DisplayDataValidator.validatePipeline(pipeline); DisplayDataValidator.validateOptions(options); ExecutorService metricsPool = Executors.newCachedThreadPool( new ThreadFactoryBuilder() .setThreadFactory(MoreExecutors.platformThreadFactory()) .setDaemon(false) // otherwise you say you want to leak, please don't! .setNameFormat("direct-metrics-counter-committer") .build()); DirectGraph graph = graphVisitor.getGraph(); EvaluationContext context = EvaluationContext.create( clockSupplier.get(), Enforcement.bundleFactoryFor(enabledEnforcements, graph), graph, keyedPValueVisitor.getKeyedPValues(), metricsPool); TransformEvaluatorRegistry registry = TransformEvaluatorRegistry.javaSdkNativeRegistry(context, options); PipelineExecutor executor = ExecutorServiceParallelExecutor.create( options.getTargetParallelism(), registry, Enforcement.defaultModelEnforcements(enabledEnforcements), context, metricsPool); executor.start(graph, RootProviderRegistry.javaNativeRegistry(context, options)); DirectPipelineResult result = new DirectPipelineResult(executor, context); if (options.isBlockOnRun()) { try { result.waitUntilFinish(); } catch (UserCodeException userException) { throw new PipelineExecutionException(userException.getCause()); } catch (Throwable t) { if (t instanceof RuntimeException) { throw (RuntimeException) t; } throw new 
RuntimeException(t); } } return result; } finally { MetricsEnvironment.setMetricsSupported(false); } }
// @Teardown must complete before waitUntilFinish returns: the teardown records
// its timestamp (after a deliberate sleep so a fast pipeline cannot mask a
// missing shutdown callback), and the test asserts it ran, and ran before the
// post-finish timestamp.
@Test public void tearsDownFnsBeforeFinishing() { TEARDOWN_CALL.set(-1); final Pipeline pipeline = getPipeline(); pipeline .apply(Create.of("a")) .apply( ParDo.of( new DoFn<String, String>() { @ProcessElement public void onElement(final ProcessContext ctx) { // no-op } @Teardown public void teardown() { // just to not have a fast execution hiding an issue until we have a shutdown // callback try { Thread.sleep(1000); } catch (final InterruptedException e) { throw new AssertionError(e); } TEARDOWN_CALL.set(System.nanoTime()); } })); final PipelineResult pipelineResult = pipeline.run(); pipelineResult.waitUntilFinish(); final long doneTs = System.nanoTime(); final long tearDownTs = TEARDOWN_CALL.get(); assertThat(tearDownTs, greaterThan(0L)); assertThat(doneTs, greaterThan(tearDownTs)); }
/**
 * Static factory: adapts a servlet response (plus the optional originating
 * request and any error thrown while handling it) into an HttpServerResponse.
 */
public static HttpServerResponse create(@Nullable HttpServletRequest request,
    HttpServletResponse response, @Nullable Throwable caught) {
  return new HttpServletResponseWrapper(request, response, caught);
}
// The factory must accept a null request (and null error) without throwing.
@Test void nullRequestOk() { HttpServletResponseWrapper.create(null, response, null); }
/**
 * The token is only generated for INSERT statements that carry an
 * ON DUPLICATE KEY UPDATE clause.
 */
@Override
public boolean isGenerateSQLToken(final SQLStatementContext sqlStatementContext) {
    if (!(sqlStatementContext instanceof InsertStatementContext)) {
        return false;
    }
    return ((InsertStatementContext) sqlStatementContext).getSqlStatement().getOnDuplicateKeyColumns().isPresent();
}
// An INSERT context whose statement reports an ON DUPLICATE KEY UPDATE clause
// must trigger token generation.
@Test void assertIsGenerateSQLToken() { InsertStatementContext insertStatementContext = mock(InsertStatementContext.class); MySQLInsertStatement insertStatement = mock(MySQLInsertStatement.class); when(insertStatementContext.getSqlStatement()).thenReturn(insertStatement); when(insertStatement.getOnDuplicateKeyColumns()).thenReturn(Optional.of(new OnDuplicateKeyColumnsSegment(0, 0, Collections.emptyList()))); assertTrue(generator.isGenerateSQLToken(insertStatementContext)); }
/**
 * Infers a log file's start time by scanning its first {@code TRY_PARSE_LOG_ROWS}
 * lines and returning the first timestamp {@code parseDateTime} can extract,
 * or {@code null} when no line in that prefix parses.
 *
 * @throws FileNotFoundException if the file cannot be opened
 */
public static LocalDateTime inferFileStartTime(File f) throws FileNotFoundException {
    int linesTried = 0;
    try (Scanner scanner = new Scanner(f)) {
        while (scanner.hasNextLine() && linesTried < TRY_PARSE_LOG_ROWS) {
            LocalDateTime parsed = parseDateTime(scanner.nextLine());
            if (parsed != null) {
                return parsed;
            }
            linesTried++;
        }
    }
    // No parsable timestamp found within the probed prefix.
    return null;
}
// Verifies timestamp inference across many real-world log formats: Alluxio
// default, YARN application logs (timestamp appears only after ~10 lines of
// untimestamped header), YARN RM, ZooKeeper (multi-line stack traces between
// timestamped lines), HDFS, Presto (ISO-8601 with offset), and ParNew/CMS GC
// logs (timestamp preceded by JVM banner lines).
@Test public void inferDateFromLog() throws Exception { // A piece of Alluxio log with default format String alluxioLog = "2020-03-19 11:58:10,104 WARN Configuration - Reloaded properties\n" + "2020-03-19 11:58:10,106 WARN Configuration - Loaded hostname localhost\n" + "2020-03-19 11:58:10,591 WARN RetryUtils - Failed to load cluster default.."; File alluxioLogFile = new File(mTestDir, "alluxio-worker.log"); writeToFile(alluxioLogFile, alluxioLog); LocalDateTime alluxioLogDatetime = CollectLogCommand.inferFileStartTime(alluxioLogFile); LocalDateTime expectedDatetime = LocalDateTime.of(2020, 3, 19, 11, 58, 10, 104 * MILLISEC_TO_NANOSEC); assertThat(String.format("Expected datetime is %s but inferred %s from file%n", expectedDatetime, alluxioLogDatetime), alluxioLogDatetime, new DatetimeMatcher().setDatetime(expectedDatetime)); // A piece of sample Yarn application log with default format // The first >20 lines are un-timestamped information about the job String yarnAppLog = "\nLogged in as: user\nApplication\nAbout\nJobs\nTools\n" + "Log Type: container-localizer-syslog\n\n" + "Log Upload Time: Mon May 18 16:11:22 +0800 2020\n\n" + "Log Length: 0\n\n\nLog Type: stderr\n\n" + "Log Upload Time: Mon May 18 16:11:22 +0800 2020\n\n" + "Log Length: 3616\n\n" + "20/05/18 16:11:18 INFO util.SignalUtils: Registered signal handler for TERM\n" + "20/05/18 16:11:18 INFO util.SignalUtils: Registered signal handler for HUP\n" + "20/05/18 16:11:18 INFO util.SignalUtils: Registered signal handler for INT"; File yarnAppLogFile = new File(mTestDir, "yarn-application.log"); writeToFile(yarnAppLogFile, yarnAppLog); LocalDateTime yarnAppDatetime = CollectLogCommand.inferFileStartTime(yarnAppLogFile); expectedDatetime = LocalDateTime.of(2020, 5, 18, 16, 11, 18); assertThat(String.format("Expected datetime is %s but inferred %s from file%n", expectedDatetime, yarnAppDatetime), yarnAppDatetime, new DatetimeMatcher().setDatetime(expectedDatetime)); // A piece of Yarn log with default format 
String yarnLog = "2020-05-16 02:02:25,855 INFO org.apache.hadoop.yarn.server.resourcemanager" + ".rmcontainer.RMContainerImpl: container_e103_1584954066020_230169_01_000004 " + "Container Transitioned from ALLOCATED to ACQUIRED\n" + "2020-05-16 02:02:25,909 INFO org.apache.hadoop.yarn.server.resourcemanager" + ".scheduler.AppSchedulingInfo: checking for deactivate... \n" + "2020-05-16 02:02:26,006 INFO org.apache.hadoop.yarn.server.resourcemanager" + ".rmcontainer.RMContainerImpl: container_e103_1584954066020_230168_01_000047 " + "Container Transitioned from ALLOCATED to ACQUIRED"; File yarnLogFile = new File(mTestDir, "yarn-rm.log"); writeToFile(yarnLogFile, yarnLog); LocalDateTime yarnDatetime = CollectLogCommand.inferFileStartTime(yarnLogFile); expectedDatetime = LocalDateTime.of(2020, 5, 16, 2, 2, 25, 855 * MILLISEC_TO_NANOSEC); assertThat(String.format("Expected datetime is %s but inferred %s from file%n", expectedDatetime, yarnDatetime), yarnDatetime, new DatetimeMatcher().setDatetime(expectedDatetime)); // A piece of ZK log with default format String zkLog = "2020-05-14 21:05:53,822 WARN org.apache.zookeeper.server.NIOServerCnxn: " + "caught end of stream exception\n" + "EndOfStreamException: Unable to read additional data from client sessionid.., " + "likely client has closed socket\n" + "\tat org.apache.zookeeper.server.NIOServerCnxn.doIO(NIOServerCnxn.java:241)\n" + "\tat org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory..)\n" + "\tat java.lang.Thread.run(Thread.java:748)\n" + "2020-05-14 21:05:53,823 INFO org.apache.zookeeper.server.NIOServerCnxn: " + "Closed socket connection for client /10.64.23.190:50120 which had sessionid..\n" + "2020-05-14 21:05:53,911 WARN org.apache.zookeeper.server.NIOServerCnxn: " + "caught end of stream exception\n" + "EndOfStreamException: Unable to read additional data from client sessionid.." 
+ "likely client has closed socket\n"; File zkLogFile = new File(mTestDir, "zk.log"); writeToFile(zkLogFile, zkLog); LocalDateTime zkDatetime = CollectLogCommand.inferFileStartTime(zkLogFile); expectedDatetime = LocalDateTime.of(2020, 5, 14, 21, 5, 53, 822 * MILLISEC_TO_NANOSEC); assertThat(String.format("Expected datetime is %s but inferred %s from file%n", expectedDatetime, zkDatetime), zkDatetime, new DatetimeMatcher().setDatetime(expectedDatetime)); // A piece of sample HDFS log with default format String hdfsLog = "2020-05-15 22:02:27,878 INFO BlockStateChange: BLOCK* addStoredBlock: " + "blockMap updated: 10.64.23.184:1025 is added to blk_1126197354_52572663 size 0..\n" + "2020-05-15 22:02:27,878 INFO BlockStateChange: BLOCK* addStoredBlock: blockMap " + "updated: 10.70.22.117:1025 is added to blk_1126197354_52572663 size 0.."; File hdfsLogFile = new File(mTestDir, "hdfs.log"); writeToFile(hdfsLogFile, hdfsLog); LocalDateTime hdfsDatetime = CollectLogCommand.inferFileStartTime(hdfsLogFile); expectedDatetime = LocalDateTime.of(2020, 5, 15, 22, 2, 27, 878 * MILLISEC_TO_NANOSEC); assertThat(String.format("Expected datetime is %s but inferred %s from file%n", expectedDatetime, hdfsDatetime), hdfsDatetime, new DatetimeMatcher().setDatetime(expectedDatetime)); // A piece of sample Presto log wtih default format String prestoLog = "2020-05-16T00:00:01.059+0800\tINFO\tdispatcher-query-7960" + "\tio.prestosql.event.QueryMonitor\tTIMELINE: Query 20200515_155959_06700_6r6b4" + " :: Transaction:[4d30e960-c319-439c-84dd-022ddab6fa5e] :: elapsed 1208ms :: " + "planning 0ms :: waiting 0ms :: scheduling 1208ms :: running 0ms :: finishing 1208ms" + " :: begin 2020-05-15T23:59:59.850+08:00 :: end 2020-05-16T00:00:01.058+08:00\n" + "2020-05-16T00:00:03.530+0800\tINFO\tdispatcher-query-7948\t" + "io.prestosql.event.QueryMonitor\tTIMELINE: Query 20200515_160001_06701_6r6b4" + " :: Transaction:[be9b396e-6697-4ecd-9782-2ca1174c1be1] :: elapsed 2316ms" + " :: planning 0ms :: waiting 
0ms :: scheduling 2316ms :: running 0ms" + " :: finishing 2316ms :: begin 2020-05-16T00:00:01.212+08:00" + " :: end 2020-05-16T00:00:03.528+08:00"; File prestoLogFile = new File(mTestDir, "presto.log"); writeToFile(prestoLogFile, prestoLog); LocalDateTime prestoDatetime = CollectLogCommand.inferFileStartTime(prestoLogFile); expectedDatetime = LocalDateTime.of(2020, 5, 16, 0, 0, 1, 59 * MILLISEC_TO_NANOSEC); assertThat(String.format("Expected datetime is %s but inferred %s from file%n", expectedDatetime, prestoDatetime), prestoDatetime, new DatetimeMatcher().setDatetime(expectedDatetime)); // ParNew/CMS GC log default format String gcLog = "Java HotSpot(TM) 64-Bit Server VM (25.151-b12) for linux-amd64 JRE (1.8.0), " + "built on Sep 5 2017 19:20:58 by \"java_re\" with gcc 4.3.0 20080428..\n" + "Memory: 4k page, physical 1979200k(1940096k free), swap 332799k(332799k free)\n" + "CommandLine flags: -XX:CMSInitiatingOccupancyFraction=65 -XX:InitialHeapSize=..\n" + "2020-05-07T10:01:11.409+0800: 4.304: [GC (GCLocker Initiated GC) " + "2020-05-07T10:01:11.410+0800: 4.304: [ParNew: 25669137K->130531K(28311552K), " + "0.4330954 secs] 25669137K->130531K(147849216K), 0.4332463 secs] " + "[Times: user=4.50 sys=0.08, real=0.44 secs] "; File gcLogFile = new File(mTestDir, "gc.log"); writeToFile(gcLogFile, gcLog); LocalDateTime gcDatetime = CollectLogCommand.inferFileStartTime(gcLogFile); expectedDatetime = LocalDateTime.of(2020, 5, 7, 10, 1, 11, 409 * MILLISEC_TO_NANOSEC); assertThat(String.format("Expected datetime is %s but inferred %s from file%n", expectedDatetime, gcDatetime), gcDatetime, new DatetimeMatcher().setDatetime(expectedDatetime)); }
/**
 * Static factory for a LogMonitor bound to the given Spark load app handle.
 */
public static LogMonitor createLogMonitor(SparkLoadAppHandle handle) {
    return new LogMonitor(handle);
}
// Replays a recorded spark-launcher log (via `cat`) through the monitor and
// verifies every field parsed into the handle (app id, state, queue, start
// time, final status, tracking URL, user) plus the redirected log file.
@Test public void testLogMonitorNormal() { URL log = getClass().getClassLoader().getResource("spark_launcher_monitor.log"); String cmd = "cat " + log.getPath(); SparkLoadAppHandle handle = null; try { Process process = Runtime.getRuntime().exec(cmd); handle = new SparkLoadAppHandle(process); SparkLauncherMonitor.LogMonitor logMonitor = SparkLauncherMonitor.createLogMonitor(handle); logMonitor.setRedirectLogPath(logPath); logMonitor.start(); try { logMonitor.join(); } catch (InterruptedException e) { } } catch (IOException e) { Assert.fail(); } // check values Assert.assertEquals(appId, handle.getAppId()); Assert.assertEquals(state, handle.getState()); Assert.assertEquals(queue, handle.getQueue()); Assert.assertEquals(startTime, handle.getStartTime()); Assert.assertEquals(finalApplicationStatus, handle.getFinalStatus()); Assert.assertEquals(trackingUrl, handle.getUrl()); Assert.assertEquals(user, handle.getUser()); // check log File file = new File(logPath); Assert.assertTrue(file.exists()); }
// Builds the config map for one UDF: function-specific options (keys under
// "ksql.functions.<name>.", matched case-insensitively on the function name)
// merged with the global function options. NOTE(review): the second argument
// to originalsWithPrefix is presumably "strip prefix" = false, so both key
// sets retain their full prefixes and the putAll cannot clobber
// function-specific entries — confirm against the AbstractConfig contract.
public Map<String, Object> getKsqlFunctionsConfigProps(final String functionName) { final Map<String, Object> udfProps = originalsWithPrefix( KSQL_FUNCTIONS_PROPERTY_PREFIX + functionName.toLowerCase(), false); final Map<String, Object> globals = originalsWithPrefix( KSQ_FUNCTIONS_GLOBAL_PROPERTY_PREFIX, false); udfProps.putAll(globals); return udfProps; }
// A global function setting must appear (under its full prefixed key) in the
// config map of any function, even one with no settings of its own.
@Test public void shouldReturnGlobalUdfConfig() { // Given: final String globalConfigName = KsqlConfig.KSQ_FUNCTIONS_GLOBAL_PROPERTY_PREFIX + ".some-setting"; final KsqlConfig config = new KsqlConfig(ImmutableMap.of( globalConfigName, "global" )); // When: final Map<String, ?> udfProps = config.getKsqlFunctionsConfigProps("what-eva"); // Then: assertThat(udfProps.get(globalConfigName), is("global")); }
/**
 * Routes the request to the handler registered under the best (longest)
 * matching URI prefix; requests with no matching handler are ignored.
 */
@Override
public void handle(String target, Request baseRequest, HttpServletRequest request,
                   HttpServletResponse response) throws IOException, ServletException {
    final Handler matched = handlers.getBest(baseRequest.getRequestURI());
    if (matched != null) {
        matched.handle(target, baseRequest, request, response);
    }
}
// Only the handler with the longest matching URI prefix may receive the
// request; the shorter-prefix handler must never be invoked.
@Test void routesToTheLongestPrefixMatch() throws Exception { when(baseRequest.getRequestURI()).thenReturn("/admin/woo"); handler.handle("/admin/woo", baseRequest, request, response); verify(handler1, never()).handle("/admin/woo", baseRequest, request, response); verify(handler2).handle("/admin/woo", baseRequest, request, response); }
// Chooses an upstream for a Spring Cloud service. Plain discovery-based
// selection is used when: no instances exist (returns null), gray release is
// disabled for the selector, no gray upstreams are cached, or no cached
// upstream matches a live instance. Otherwise selection is restricted to
// healthy gray upstreams that correspond to registered instances.
// NOTE(review): upstream/instance matching compares Upstream.getUrl() with the
// instance URI's raw authority (host:port).
public Upstream choose(final String serviceId, final String selectorId, final String ip, final String loadbalancer) { // load service instance by serviceId List<ServiceInstance> available = this.getServiceInstance(serviceId); if (CollectionUtils.isEmpty(available)) { LOG.info("choose return 1"); return null; } final SpringCloudSelectorHandle springCloudSelectorHandle = SpringCloudPluginDataHandler.SELECTOR_CACHED.get().obtainHandle(selectorId); // not gray flow if (!springCloudSelectorHandle.getGray()) { // load service from register center return this.doSelect(serviceId, ip, loadbalancer); } List<Upstream> divideUpstreams = UpstreamCacheManager.getInstance().findUpstreamListBySelectorId(selectorId); // gray flow,but upstream is null if (CollectionUtils.isEmpty(divideUpstreams)) { return this.doSelect(serviceId, ip, loadbalancer); } // select server from available to choose final List<Upstream> choose = new ArrayList<>(available.size()); for (ServiceInstance serviceInstance : available) { divideUpstreams.stream() .filter(Upstream::isStatus) .filter(upstream -> Objects.equals(upstream.getUrl(), serviceInstance.getUri().getRawAuthority())) .findFirst().ifPresent(choose::add); } if (CollectionUtils.isEmpty(choose)) { return this.doSelect(serviceId, ip, loadbalancer); } // select by divideUpstreams return this.doSelect(choose, loadbalancer, ip); }
// With two registered instances and a roundRobin balancer (gray release off),
// two consecutive choices must return different upstreams.
@Test public void testLoadBalancer() { final List<DefaultServiceInstance> serviceInstances = new ArrayList<>(); DefaultServiceInstance defaultServiceInstance = new DefaultServiceInstance(); defaultServiceInstance.setServiceId("serviceId"); defaultServiceInstance.setUri(URI.create("http://localhost:8081")); defaultServiceInstance.setInstanceId("serviceId"); defaultServiceInstance.setPort(8081); defaultServiceInstance.setHost("localhost"); DefaultServiceInstance defaultServiceInstance2 = new DefaultServiceInstance(); defaultServiceInstance2.setServiceId("serviceId"); defaultServiceInstance2.setUri(URI.create("http://localhost:8080")); defaultServiceInstance2.setInstanceId("serviceId"); defaultServiceInstance2.setPort(8080); defaultServiceInstance2.setHost("localhost"); serviceInstances.add(defaultServiceInstance); serviceInstances.add(defaultServiceInstance2); SimpleDiscoveryProperties simpleDiscoveryProperties = new SimpleDiscoveryProperties(); Map<String, List<DefaultServiceInstance>> serviceInstanceMap = new HashMap<>(); serviceInstanceMap.put(defaultServiceInstance.getInstanceId(), serviceInstances); simpleDiscoveryProperties.setInstances(serviceInstanceMap); final SimpleDiscoveryClient simpleDiscoveryClient = new SimpleDiscoveryClient(simpleDiscoveryProperties); final ShenyuSpringCloudServiceChooser shenyuServiceChoose = new ShenyuSpringCloudServiceChooser(simpleDiscoveryClient); final String ip = "0.0.0.0"; final String selectorId = "1"; final String loadbalancer = "roundRobin"; final SpringCloudSelectorHandle springCloudSelectorHandle = SpringCloudSelectorHandle.builder() .serviceId("serviceId") .gray(false) .build(); final SelectorData selectorData = SelectorData.builder() .handle(GsonUtils.getInstance().toJson(springCloudSelectorHandle)) .id("1") .build(); springCloudPluginDataHandler.handlerSelector(selectorData); Upstream upstream1 = shenyuServiceChoose.choose("serviceId", selectorId, ip, loadbalancer); Upstream upstream2 = 
shenyuServiceChoose.choose("serviceId", selectorId, ip, loadbalancer); // if roundRobin, upstream1 not equals upstream2 Assertions.assertNotEquals(upstream1, upstream2); }
// Existence check for Azure storage paths. Root always exists; containers are
// checked via container.exists; files and placeholders via a server-side blob
// lookup (a 404 on a placeholder falls through to a common-prefix listing
// instead of failing); anything else is probed by listing the path as a
// directory, where a cancelled listing still proves a common prefix exists.
// StorageException maps through AzureExceptionMappingService; a bad URI or a
// NotfoundException yields false.
@Override public boolean find(Path file, final ListProgressListener listener) throws BackgroundException { if(file.isRoot()) { return true; } try { try { final boolean found; if(containerService.isContainer(file)) { final CloudBlobContainer container = session.getClient().getContainerReference(containerService.getContainer(file).getName()); return container.exists(null, null, context); } if(file.isFile() || file.isPlaceholder()) { try { final CloudBlob blob = session.getClient().getContainerReference(containerService.getContainer(file).getName()) .getBlobReferenceFromServer(containerService.getKey(file)); return blob.exists(null, null, context); } catch(StorageException e) { switch(e.getHttpStatusCode()) { case HttpStatus.SC_NOT_FOUND: if(file.isPlaceholder()) { // Ignore failure and look for common prefix break; } default: throw e; } } } if(log.isDebugEnabled()) { log.debug(String.format("Search for common prefix %s", file)); } // Check for common prefix try { new AzureObjectListService(session, context).list(file, new CancellingListProgressListener()); return true; } catch(ListCanceledException l) { // Found common prefix return true; } } catch(StorageException e) { throw new AzureExceptionMappingService().map("Failure to read attributes of {0}", e, file); } catch(URISyntaxException e) { return false; } } catch(NotfoundException e) { return false; } }
@Test public void testFindHome() throws Exception { assertTrue(new AzureFindFeature(session, null).find(new DefaultHomeFinderService(session).find())); }
static Properties resolveConsumerProperties(Map<String, String> options, Object keySchema, Object valueSchema) { Properties properties = from(options); withSerdeConsumerProperties(true, options, keySchema, properties); withSerdeConsumerProperties(false, options, valueSchema, properties); return properties; }
@Test public void test_consumerProperties_absentFormat() { assertThat(resolveConsumerProperties(emptyMap())) .containsExactlyEntriesOf(Map.of(KEY_DESERIALIZER, ByteArrayDeserializer.class.getCanonicalName())); }
@Override public Rule getByKey(RuleKey key) { verifyKeyArgument(key); ensureInitialized(); Rule rule = rulesByKey.get(key); checkArgument(rule != null, "Can not find rule for key %s. This rule does not exist in DB", key); return rule; }
@Test public void getByKey_throws_IAE_if_rules_does_not_exist_in_DB() { expectIAERuleNotFound(() -> underTest.getByKey(AC_RULE_KEY), AC_RULE_KEY); }
public static URI createRemainingURI(URI originalURI, Map<String, Object> params) throws URISyntaxException { String s = createQueryString(params); if (s.isEmpty()) { s = null; } return createURIWithQuery(originalURI, s); }
@Test public void testCreateRemainingURI() throws Exception { URI original = new URI("http://camel.apache.org"); Map<String, Object> param = new HashMap<>(); param.put("foo", "123"); URI newUri = URISupport.createRemainingURI(original, param); assertNotNull(newUri); String s = newUri.toString(); assertEquals("http://camel.apache.org?foo=123", s); }
public static int toIntSize(long size) { assert size >= 0 : "Invalid size value: " + size; return size > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) size; }
@Test public void toIntSize_whenGreaterThanIntMax() { long size = Integer.MAX_VALUE + 1L; assertEquals(Integer.MAX_VALUE, toIntSize(size)); }
public MethodBuilder executes(Integer executes) { this.executes = executes; return getThis(); }
@Test void executes() { MethodBuilder builder = MethodBuilder.newBuilder(); builder.executes(1); Assertions.assertEquals(1, builder.build().getExecutes()); }
@Override public void onEvent(Event event) { if (event instanceof ServiceEvent.ServiceChangedEvent) { // If service changed, push to all subscribers. ServiceEvent.ServiceChangedEvent serviceChangedEvent = (ServiceEvent.ServiceChangedEvent) event; Service service = serviceChangedEvent.getService(); delayTaskEngine.addTask(service, new PushDelayTask(service, PushConfig.getInstance().getPushTaskDelay())); MetricsMonitor.incrementServiceChangeCount(service); } else if (event instanceof ServiceEvent.ServiceSubscribedEvent) { // If service is subscribed by one client, only push this client. ServiceEvent.ServiceSubscribedEvent subscribedEvent = (ServiceEvent.ServiceSubscribedEvent) event; Service service = subscribedEvent.getService(); delayTaskEngine.addTask(service, new PushDelayTask(service, PushConfig.getInstance().getPushTaskDelay(), subscribedEvent.getClientId())); } }
@Test void onEvent() { subscriberService.onEvent(new ServiceEvent.ServiceChangedEvent(service)); verify(delayTaskEngine).addTask(eq(service), any(PushDelayTask.class)); }
public static KubernetesJobManagerSpecification buildKubernetesJobManagerSpecification( FlinkPod podTemplate, KubernetesJobManagerParameters kubernetesJobManagerParameters) throws IOException { FlinkPod flinkPod = Preconditions.checkNotNull(podTemplate).copy(); List<HasMetadata> accompanyingResources = new ArrayList<>(); final List<KubernetesStepDecorator> stepDecorators = new ArrayList<>( Arrays.asList( new InitJobManagerDecorator(kubernetesJobManagerParameters), new EnvSecretsDecorator(kubernetesJobManagerParameters), new MountSecretsDecorator(kubernetesJobManagerParameters), new CmdJobManagerDecorator(kubernetesJobManagerParameters), new InternalServiceDecorator(kubernetesJobManagerParameters), new ExternalServiceDecorator(kubernetesJobManagerParameters))); Configuration configuration = kubernetesJobManagerParameters.getFlinkConfiguration(); if (configuration.get(KUBERNETES_HADOOP_CONF_MOUNT_DECORATOR_ENABLED)) { stepDecorators.add(new HadoopConfMountDecorator(kubernetesJobManagerParameters)); } if (configuration.get(KUBERNETES_KERBEROS_MOUNT_DECORATOR_ENABLED)) { stepDecorators.add(new KerberosMountDecorator(kubernetesJobManagerParameters)); } stepDecorators.addAll( Arrays.asList( new FlinkConfMountDecorator(kubernetesJobManagerParameters), new PodTemplateMountDecorator(kubernetesJobManagerParameters))); for (KubernetesStepDecorator stepDecorator : stepDecorators) { flinkPod = stepDecorator.decorateFlinkPod(flinkPod); accompanyingResources.addAll(stepDecorator.buildAccompanyingKubernetesResources()); } final Deployment deployment = createJobManagerDeployment(flinkPod, kubernetesJobManagerParameters); return new KubernetesJobManagerSpecification(deployment, accompanyingResources); }
@Test void testPodSpec() throws IOException { kubernetesJobManagerSpecification = KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification( flinkPod, kubernetesJobManagerParameters); final PodSpec resultPodSpec = this.kubernetesJobManagerSpecification .getDeployment() .getSpec() .getTemplate() .getSpec(); assertThat(resultPodSpec.getContainers()).hasSize(1); assertThat(resultPodSpec.getServiceAccountName()).isEqualTo(SERVICE_ACCOUNT_NAME); assertThat(resultPodSpec.getVolumes()).hasSize(4); final Container resultedMainContainer = resultPodSpec.getContainers().get(0); assertThat(resultedMainContainer.getName()).isEqualTo(Constants.MAIN_CONTAINER_NAME); assertThat(resultedMainContainer.getImage()).isEqualTo(CONTAINER_IMAGE); assertThat(resultedMainContainer.getImagePullPolicy()) .isEqualTo(CONTAINER_IMAGE_PULL_POLICY.name()); assertThat(resultedMainContainer.getEnv()).hasSize(3); assertThat(resultedMainContainer.getEnv().stream()) .anyMatch(envVar -> envVar.getName().equals("key1")); assertThat(resultedMainContainer.getPorts()).hasSize(3); final Map<String, Quantity> requests = resultedMainContainer.getResources().getRequests(); assertThat(requests.get("cpu").getAmount()).isEqualTo(Double.toString(JOB_MANAGER_CPU)); assertThat(requests.get("memory").getAmount()) .isEqualTo(String.valueOf(JOB_MANAGER_MEMORY)); assertThat(resultedMainContainer.getCommand()).hasSize(1); // The args list is [bash, -c, 'java -classpath $FLINK_CLASSPATH ...']. assertThat(resultedMainContainer.getArgs()).hasSize(3); assertThat(resultedMainContainer.getVolumeMounts()).hasSize(4); }
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) { return api.send(request); }
@Test public void uploadStickerFile() throws IOException { byte[] bytes = Files.readAllBytes(stickerFile.toPath()); GetFileResponse response = bot.execute(new UploadStickerFile(chatId, bytes, Sticker.Format.Static)); FileTest.check(response.file(), false); response = bot.execute(new UploadStickerFile(chatId, bytes, Sticker.Format.Static)); FileTest.check(response.file(), false); }
public Analysis analyze(Statement statement) { return analyze(statement, false); }
@Test public void testLambdaInSubqueryContext() { analyze("SELECT apply(x, i -> i * i) FROM (SELECT 10 x)"); analyze("SELECT apply((SELECT 10), i -> i * i)"); // with capture analyze("SELECT apply(x, i -> i * x) FROM (SELECT 10 x)"); analyze("SELECT apply(x, y -> y * x) FROM (SELECT 10 x, 3 y)"); analyze("SELECT apply(x, z -> y * x) FROM (SELECT 10 x, 3 y)"); }
public static Predicate parse(String expression) { final Stack<Predicate> predicateStack = new Stack<>(); final Stack<Character> operatorStack = new Stack<>(); final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll(""); final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true); boolean isTokenMode = true; while (true) { final Character operator; final String token; if (isTokenMode) { if (tokenizer.hasMoreTokens()) { token = tokenizer.nextToken(); } else { break; } if (OPERATORS.contains(token)) { operator = token.charAt(0); } else { operator = null; } } else { operator = operatorStack.pop(); token = null; } isTokenMode = true; if (operator == null) { try { predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance()); } catch (ClassCastException e) { throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e); } catch (Exception e) { throw new RuntimeException(e); } } else { if (operatorStack.empty() || operator == '(') { operatorStack.push(operator); } else if (operator == ')') { while (operatorStack.peek() != '(') { evaluate(predicateStack, operatorStack); } operatorStack.pop(); } else { if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek())) { evaluate(predicateStack, operatorStack); isTokenMode = false; } operatorStack.push(operator); } } } while (!operatorStack.empty()) { evaluate(predicateStack, operatorStack); } if (predicateStack.size() > 1) { throw new RuntimeException("Invalid logical expression"); } return predicateStack.pop(); }
@Test public void testAndParenOr() { final Predicate parsed = PredicateExpressionParser.parse("com.linkedin.data.it.AlwaysTruePredicate & (com.linkedin.data.it.AlwaysTruePredicate | com.linkedin.data.it.AlwaysFalsePredicate)"); Assert.assertEquals(parsed.getClass(), AndPredicate.class); final List<Predicate> andChildren = ((AndPredicate) parsed).getChildPredicates(); Assert.assertEquals(andChildren.get(0).getClass(), AlwaysTruePredicate.class); Assert.assertEquals(andChildren.get(1).getClass(), OrPredicate.class); final List<Predicate> orChildren = ((OrPredicate) andChildren.get(1)).getChildPredicates(); Assert.assertEquals(orChildren.get(0).getClass(), AlwaysTruePredicate.class); Assert.assertEquals(orChildren.get(1).getClass(), AlwaysFalsePredicate.class); }
@Override public HttpHeaders add(HttpHeaders headers) { if (headers instanceof DefaultHttpHeaders) { this.headers.add(((DefaultHttpHeaders) headers).headers); return this; } else { return super.add(headers); } }
@Test public void emptyHeaderNameNotAllowed() { assertThrows(IllegalArgumentException.class, new Executable() { @Override public void execute() { new DefaultHttpHeaders().add(StringUtil.EMPTY_STRING, "foo"); } }); }
public static AccessTokenRetriever create(Map<String, ?> configs, Map<String, Object> jaasConfig) { return create(configs, null, jaasConfig); }
@Test public void testConfigureRefreshingFileAccessTokenRetrieverWithInvalidFile() throws Exception { // Should fail because the while the parent path exists, the file itself doesn't. File tmpDir = createTempDir("this-directory-does-exist"); File accessTokenFile = new File(tmpDir, "this-file-does-not-exist.json"); Map<String, ?> configs = getSaslConfigs(SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL, accessTokenFile.toURI().toString()); Map<String, Object> jaasConfig = Collections.emptyMap(); assertThrowsWithMessage(ConfigException.class, () -> AccessTokenRetrieverFactory.create(configs, jaasConfig), "that doesn't exist"); }
public void addChild(Entry entry) { childEntries.add(entry); entry.setParent(this); }
@Test public void returnsNullIfAncestorComponentIsNotAvailable(){ Entry structureWithEntry = new Entry(); final Entry child = new Entry(); structureWithEntry.addChild(child); assertThat(new EntryAccessor().getAncestorComponent(child), CoreMatchers.nullValue()); }
public Result check(IndexSetTemplate indexSetTemplate) { return indexSetTemplateRequirements.stream() .sorted(Comparator.comparing(IndexSetTemplateRequirement::priority)) .map(indexSetTemplateRequirement -> indexSetTemplateRequirement.check(indexSetTemplate)) .filter(result -> !result.fulfilled()) .findFirst() .orElse(new Result(true, "")); }
@Test void testPriorityOrder() { when(requirement1.check(any())).thenReturn(new Result(true, "")); when(requirement2.check(any())).thenReturn(new Result(true, "")); underTest.check(indexSetTemplate); requirements.verify(requirement1).check(any()); requirements.verify(requirement2).check(any()); }
public Optional<UfsStatus[]> listFromUfsThenCache(String path, boolean isRecursive) throws IOException { // Recursive listing results are not cached. if (mListStatusCache == null || isRecursive) { return listFromUfs(path, isRecursive); } try { ListStatusResult cached = mListStatusCache.get(path, (k) -> { try { Optional<UfsStatus[]> listResults = listFromUfs(path, false); return listResults.map( ufsStatuses -> new ListStatusResult( System.nanoTime(), ufsStatuses, ufsStatuses.length == 1 && ufsStatuses[0].isFile() )) // This cache also serves as absent cache, so we persist a NULL (not empty) result, // if the path not found or is not a directory. .orElseGet(() -> new ListStatusResult(System.nanoTime(), null, false)); } catch (Exception e) { throw new RuntimeException(e); } }); if (cached == null) { return Optional.empty(); } else { return Optional.ofNullable(cached.mUfsStatuses); } } catch (RuntimeException e) { Throwable cause = e.getCause(); if (cause instanceof IOException) { throw (IOException) cause; } throw new RuntimeException(e); } }
@Test public void listFromUfsThenCacheWhenGetFail() throws IOException { UnderFileSystem system = mock(UnderFileSystem.class); doThrow(new IOException()).when(system).listStatus(anyString(), any()); doReturn(system).when(mDoraUfsManager).getOrAdd(any(), any()); assertThrows(IOException.class, () -> { mManager.listFromUfsThenCache("/test", false); }); }
public static String getOSIconName(String os) { final String tmp = os.toLowerCase(Locale.ENGLISH); for (final String anOS : OS) { if (tmp.contains(anOS)) { return anOS + ".png"; } } return null; }
@Test public void testGetOSIconName() { assertNotNull("getOSIconName", HtmlJavaInformationsReport.getOSIconName("Linux")); assertNull("getOSIconName", HtmlJavaInformationsReport.getOSIconName("unknown")); }
public EvaluationResult evaluate(Condition condition, Measure measure) { checkArgument(SUPPORTED_METRIC_TYPE.contains(condition.getMetric().getType()), "Conditions on MetricType %s are not supported", condition.getMetric().getType()); Comparable measureComparable = parseMeasure(measure); if (measureComparable == null) { return new EvaluationResult(Measure.Level.OK, null); } return evaluateCondition(condition, measureComparable) .orElseGet(() -> new EvaluationResult(Measure.Level.OK, measureComparable)); }
@Test public void condition_is_always_ok_when_measure_is_noValue() { for (MetricType metricType : from(asList(values())).filter(not(in(ImmutableSet.of(BOOL, DATA, DISTRIB, STRING))))) { Metric metric = createMetric(metricType); Measure measure = newMeasureBuilder().createNoValue(); assertThat(underTest.evaluate(createCondition(metric, LESS_THAN, "10.2"), measure)).hasLevel(OK); } }
@Override public InterpreterResult interpret(String cmd, InterpreterContext interpreterContext) { try { logger.info(cmd); this.writer.getBuffer().setLength(0); this.scriptingContainer.runScriptlet(cmd); this.writer.flush(); logger.debug(writer.toString()); return new InterpreterResult(InterpreterResult.Code.SUCCESS, writer.getBuffer().toString()); } catch (Throwable t) { logger.error("Can not run '" + cmd + "'", t); return new InterpreterResult(InterpreterResult.Code.ERROR, t.getMessage()); } }
@Test void putsTest() { InterpreterResult result = hbaseInterpreter.interpret("puts \"Hello World\"", null); assertEquals(InterpreterResult.Code.SUCCESS, result.code()); assertEquals(InterpreterResult.Type.TEXT, result.message().get(0).getType()); assertEquals("Hello World\n", result.message().get(0).getData()); }
@Override public Set<Router> routers() { return osRouterStore.routers(); }
@Test public void testGetRouters() { createBasicRouters(); assertEquals("Number of router did not match", 1, target.routers().size()); }
@Override public void createService(String serviceName) throws NacosException { createService(serviceName, Constants.DEFAULT_GROUP); }
@Test void testCreateService5() throws NacosException { //given String serviceName = "service1"; String groupName = "groupName"; float protectThreshold = 0.1f; String expression = "k=v"; //when nacosNamingMaintainService.createService(serviceName, groupName, protectThreshold, expression); //then verify(serverProxy, times(1)).createService(argThat(new ArgumentMatcher<Service>() { @Override public boolean matches(Service service) { return service.getName().equals(serviceName) && service.getGroupName().equals(groupName) && Math.abs(service.getProtectThreshold() - protectThreshold) < 0.1f && service.getMetadata().size() == 0; } }), argThat(o -> ((ExpressionSelector) o).getExpression().equals(expression))); }
@Override public List<String> assignSegment(String segmentName, Map<String, Map<String, String>> currentAssignment, InstancePartitions instancePartitions, InstancePartitionsType instancePartitionsType) { int numPartitions = instancePartitions.getNumPartitions(); checkReplication(instancePartitions, _replication, _tableName); int partitionId; if (_partitionColumn == null || numPartitions == 1) { partitionId = 0; } else { // Uniformly spray the segment partitions over the instance partitions if (_tableConfig.getTableType() == TableType.OFFLINE) { partitionId = SegmentAssignmentUtils .getOfflineSegmentPartitionId(segmentName, _tableName, _helixManager, _partitionColumn) % numPartitions; } else { partitionId = SegmentAssignmentUtils .getRealtimeSegmentPartitionId(segmentName, _tableName, _helixManager, _partitionColumn) % numPartitions; } } return SegmentAssignmentUtils.assignSegmentWithReplicaGroup(currentAssignment, instancePartitions, partitionId); }
@Test public void testBootstrapTableWithoutPartition() { Map<String, Map<String, String>> currentAssignment = new TreeMap<>(); for (String segmentName : SEGMENTS) { List<String> instancesAssigned = _segmentAssignmentWithoutPartition .assignSegment(segmentName, currentAssignment, _instancePartitionsMapWithoutPartition); currentAssignment .put(segmentName, SegmentAssignmentUtils.getInstanceStateMap(instancesAssigned, SegmentStateModel.ONLINE)); } // Bootstrap table should reassign all segments based on their alphabetical order RebalanceConfig rebalanceConfig = new RebalanceConfig(); rebalanceConfig.setBootstrap(true); Map<String, Map<String, String>> newAssignment = _segmentAssignmentWithoutPartition.rebalanceTable(currentAssignment, _instancePartitionsMapWithoutPartition, null, null, rebalanceConfig); assertEquals(newAssignment.size(), NUM_SEGMENTS); List<String> sortedSegments = new ArrayList<>(SEGMENTS); sortedSegments.sort(null); for (int i = 0; i < NUM_SEGMENTS; i++) { assertEquals(newAssignment.get(sortedSegments.get(i)), currentAssignment.get(SEGMENTS.get(i))); } }
@Override public Map<String, String> convertToEntityAttribute(String dbData) { return GSON.fromJson(dbData, TYPE); }
@Test void convertToEntityAttribute_null_oneElement() throws IOException { Map<String, String> map = new HashMap<>(8); map.put("a", "1"); String content = readAllContentOf("json/converter/element.1.json"); assertEquals(map, this.converter.convertToEntityAttribute(content)); }
public ContentPackInstallation insert(final ContentPackInstallation installation) { final WriteResult<ContentPackInstallation, ObjectId> writeResult = dbCollection.insert(installation); return writeResult.getSavedObject(); }
@Test public void insert() { final ContentPackInstallation contentPackInstallation = ContentPackInstallation.builder() .contentPackId(ModelId.of("content-pack-id")) .contentPackRevision(1) .parameters(ImmutableMap.of()) .entities(ImmutableSet.of()) .comment("Comment") .createdAt(ZonedDateTime.of(2018, 7, 16, 14, 0, 0, 0, ZoneOffset.UTC).toInstant()) .createdBy("username") .build(); final ContentPackInstallation savedContentPack = persistenceService.insert(contentPackInstallation); assertThat(savedContentPack.id()).isNotNull(); assertThat(savedContentPack).isEqualToIgnoringGivenFields(contentPackInstallation, "id"); }
@Override public DataSerializableFactory createFactory() { return new Factory(); }
@Test public void testExistingTypes() { MetricsDataSerializerHook hook = new MetricsDataSerializerHook(); IdentifiedDataSerializable readMetricsOperation = hook.createFactory() .create(MetricsDataSerializerHook.READ_METRICS); assertTrue(readMetricsOperation instanceof ReadMetricsOperation); }
@Override @Nullable public byte[] readByteArray() throws IOException { int len = readInt(); if (len == NULL_ARRAY_LENGTH) { return null; } if (len > 0) { byte[] b = new byte[len]; readFully(b); return b; } return EMPTY_BYTE_ARRAY; }
@Test public void testReadByteArray() throws Exception { byte[] bytesBE = {0, 0, 0, 0, 0, 0, 0, 1, 1, 9, -1, -1, -1, -1}; byte[] bytesLE = {0, 0, 0, 0, 1, 0, 0, 0, (byte) 1, 9, -1, -1, -1, -1}; in.init((byteOrder == BIG_ENDIAN ? bytesBE : bytesLE), 0); in.position(10); byte[] theNullArray = in.readByteArray(); in.position(0); byte[] theZeroLengthArray = in.readByteArray(); in.position(4); byte[] bytes = in.readByteArray(); assertNull(theNullArray); assertArrayEquals(new byte[0], theZeroLengthArray); assertArrayEquals(new byte[]{1}, bytes); }
@Override public Long clusterCountKeysInSlot(int slot) { RedisClusterNode node = clusterGetNodeForSlot(slot); MasterSlaveEntry entry = executorService.getConnectionManager().getEntry(new InetSocketAddress(node.getHost(), node.getPort())); RFuture<Long> f = executorService.readAsync(entry, StringCodec.INSTANCE, RedisCommands.CLUSTER_COUNTKEYSINSLOT, slot); return syncFuture(f); }
@Test public void testClusterCountKeysInSlot() { Long t = connection.clusterCountKeysInSlot(1); assertThat(t).isZero(); }
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException { XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata); Last5 l5 = new Last5(); int read; // Try to get the creation date, which is YYYYMMDDhhmm byte[] header = new byte[30]; IOUtils.readFully(stream, header); byte[] date = new byte[12]; IOUtils.readFully(stream, date); String dateStr = new String(date, US_ASCII); if (dateStr.startsWith("19") || dateStr.startsWith("20")) { String formattedDate = dateStr.substring(0, 4) + "-" + dateStr.substring(4, 6) + "-" + dateStr.substring(6, 8) + "T" + dateStr.substring(8, 10) + ":" + dateStr.substring(10, 12) + ":00"; metadata.set(TikaCoreProperties.CREATED, formattedDate); // TODO Metadata.DATE is used as modified, should it be here? metadata.set(TikaCoreProperties.CREATED, formattedDate); } metadata.set(Metadata.CONTENT_TYPE, PRT_MIME_TYPE); // The description, if set, is the next up-to-500 bytes byte[] desc = new byte[500]; IOUtils.readFully(stream, desc); String description = extractText(desc, true); if (description.length() > 0) { metadata.set(TikaCoreProperties.DESCRIPTION, description); } // Now look for text while ((read = stream.read()) > -1) { if (read == 0xe0 || read == 0xe3 || read == 0xf0) { int nread = stream.read(); if (nread == 0x3f || nread == 0xbf) { // Looks promising, check back for a suitable value if (read == 0xe3 && nread == 0x3f) { if (l5.is33()) { // Bingo, note text handleNoteText(stream, xhtml); } } else if (l5.is00()) { // Likely view name handleViewName(read, nread, stream, xhtml, l5); } } } else { l5.record(read); } } }
@Test public void testPRTParserComplex() throws Exception { try (InputStream input = getResourceAsStream("/test-documents/testCADKEY2.prt")) { Metadata metadata = new Metadata(); ContentHandler handler = new BodyContentHandler(); new PRTParser().parse(input, handler, metadata, new ParseContext()); assertEquals("application/x-prt", metadata.get(Metadata.CONTENT_TYPE)); // File has both a date and a description assertEquals("1997-04-01T08:59:00", metadata.get(TikaCoreProperties.CREATED)); assertEquals("TIKA TEST PART DESCRIPTION INFORMATION\r\n", metadata.get(TikaCoreProperties.DESCRIPTION)); String contents = handler.toString(); assertContains("ITEM", contents); assertContains("REQ.", contents); assertContains("DESCRIPTION", contents); assertContains("MAT'L", contents); assertContains("TOLERANCES UNLESS", contents); assertContains("FRACTIONS", contents); assertContains("ANGLES", contents); assertContains("Acme Corporation", contents); assertContains("DATE", contents); assertContains("CHANGE", contents); assertContains("DRAWN BY", contents); assertContains("SCALE", contents); assertContains("TIKA TEST DRAWING", contents); assertContains("TIKA LETTERS", contents); assertContains("5.82", contents); assertContains("112" + '\u00b0', contents); // Degrees assertContains("TIKA TEST LETTER", contents); assertContains("17.11", contents); assertContains('\u00d8' + "\ufffd2.000", contents); // Diameter assertContains("Diameter", contents); assertContains("The Apache Tika toolkit", contents); } }
public void createRole( IRole newRole ) throws KettleException { normalizeRoleInfo( newRole ); if ( !validateRoleInfo( newRole ) ) { throw new KettleException( BaseMessages.getString( PurRepositorySecurityManager.class, "PurRepositorySecurityManager.ERROR_0001_INVALID_NAME" ) ); } userRoleDelegate.createRole( newRole ); }
@Test( expected = KettleException.class ) public void createRole_NormalizesInfo_FailsIfStillBreaches() throws Exception { IRole info = new EERoleInfo( " ", "" ); manager.createRole( info ); }
public void moralize() { for (GraphNode<BayesVariable> v : graph ) { for ( Edge e1 : v.getInEdges() ) { GraphNode pV1 = graph.getNode(e1.getOutGraphNode().getId()); moralize(v, pV1); } } }
@Test public void testMoralize1() { Graph<BayesVariable> graph = new BayesNetwork(); GraphNode x0 = addNode(graph); GraphNode x1 = addNode(graph); GraphNode x2 = addNode(graph); GraphNode x3 = addNode(graph); GraphNode x4 = addNode(graph); connectParentToChildren(x2, x1); connectParentToChildren(x3, x1); connectParentToChildren(x4, x1); JunctionTreeBuilder jtBuilder = new JunctionTreeBuilder( graph ); assertLinkedNode(jtBuilder, 1, 2, 3, 4); assertLinkedNode(jtBuilder, 2, 1); assertLinkedNode(jtBuilder, 3, 1); assertLinkedNode(jtBuilder, 4, 1); jtBuilder.moralize(); assertLinkedNode(jtBuilder, 1, 2, 3, 4); assertLinkedNode(jtBuilder, 2, 1, 3, 4); assertLinkedNode(jtBuilder, 3, 1, 2, 4); assertLinkedNode(jtBuilder, 4, 1, 2, 3); }
public static CDCResponse succeed(final String requestId) { return succeed(requestId, ResponseCase.RESPONSE_NOT_SET, null); }
@Test void assertSucceedWhenResponseNotSet() { CDCResponse actualResponse = CDCResponseUtils.succeed("request_id_1"); assertThat(actualResponse.getStatus(), is(CDCResponse.Status.SUCCEED)); assertThat(actualResponse.getRequestId(), is("request_id_1")); }
@Override public <VO, VR> KStream<K, VR> leftJoin(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows) { return leftJoin(otherStream, toValueJoinerWithKey(joiner), windows); }
@Test public void shouldNotAllowNullValueJoinerOnLeftJoinWithGlobalTableWithNamed() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.leftJoin( testGlobalTable, MockMapper.selectValueMapper(), (ValueJoiner<? super String, ? super String, ?>) null, Named.as("name"))); assertThat(exception.getMessage(), equalTo("joiner can't be null")); }
public void updateStatusCount(final String state, final int change) { updateStatusCount(KafkaStreams.State.valueOf(state), change); }
@Test public void shouldImplementHashCodeAndEqualsCorrectly() { final QueryStatusCount queryStatusCount1 = new QueryStatusCount(Collections.singletonMap(KsqlQueryStatus.ERROR, 2)); final QueryStatusCount queryStatusCount2 = new QueryStatusCount(Collections.singletonMap(KsqlQueryStatus.RUNNING, 1)); queryStatusCount2.updateStatusCount(KafkaStreams.State.ERROR, 2); final QueryStatusCount queryStatusCount3 = new QueryStatusCount(); queryStatusCount3.updateStatusCount(KafkaStreams.State.ERROR, 2); new EqualsTester() .addEqualityGroup(queryStatusCount, queryStatusCount) .addEqualityGroup(queryStatusCount1, queryStatusCount3) .addEqualityGroup(queryStatusCount2) .testEquals(); }
static DatabaseInput toDatabaseInput( Namespace namespace, Map<String, String> metadata, boolean skipNameValidation) { DatabaseInput.Builder builder = DatabaseInput.builder().name(toDatabaseName(namespace, skipNameValidation)); Map<String, String> parameters = Maps.newHashMap(); metadata.forEach( (k, v) -> { if (GLUE_DESCRIPTION_KEY.equals(k)) { builder.description(v); } else if (GLUE_DB_LOCATION_KEY.equals(k)) { builder.locationUri(v); } else { parameters.put(k, v); } }); return builder.parameters(parameters).build(); }
@Test public void testToDatabaseInputEmptyDescription() { Map<String, String> properties = ImmutableMap.of(IcebergToGlueConverter.GLUE_DB_LOCATION_KEY, "s3://location", "key", "val"); DatabaseInput databaseInput = IcebergToGlueConverter.toDatabaseInput(Namespace.of("ns"), properties, false); assertThat(databaseInput.locationUri()).as("Location should be set").isEqualTo("s3://location"); assertThat(databaseInput.description()).as("Description should not be set").isNull(); assertThat(databaseInput.parameters()) .as("Parameters should be set") .isEqualTo(ImmutableMap.of("key", "val")); assertThat(databaseInput.name()).as("Database name should be set").isEqualTo("ns"); }
@Override public boolean canSerialize(String topic, Target type) { String subject = schemaSubject(topic, type); return getSchemaBySubject(subject).isPresent(); }
@Test void canSerializeReturnsFalseIfSubjectDoesNotExist() { String topic = RandomString.make(10); assertThat(serde.canSerialize(topic, Serde.Target.KEY)).isFalse(); assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isFalse(); }
public long get() { return lvVal(); }
@Test public void testConstructor_withValue() { PaddedAtomicLong counter = new PaddedAtomicLong(20); assertEquals(20, counter.get()); }
public static Result<Boolean> isRecordsEquals(TableRecords beforeImage, TableRecords afterImage) { if (beforeImage == null) { return Result.build(afterImage == null, null); } else { if (afterImage == null) { return Result.build(false, null); } if (beforeImage.getTableName().equalsIgnoreCase(afterImage.getTableName()) && CollectionUtils.isSizeEquals(beforeImage.getRows(), afterImage.getRows())) { //when image is EmptyTableRecords, getTableMeta will throw an exception if (CollectionUtils.isEmpty(beforeImage.getRows())) { return Result.ok(); } return compareRows(beforeImage.getTableMeta(), beforeImage.getRows(), afterImage.getRows()); } else { return Result.build(false, null); } } }
@Test
public void isRecordsEquals() {
    // Table meta with a single primary key column "pk".
    TableMeta tableMeta = Mockito.mock(TableMeta.class);
    Mockito.when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList(new String[]{"pk"}));
    Mockito.when(tableMeta.getTableName()).thenReturn("table_name");

    // Before-image: one row with pk=12345, age=18.
    TableRecords beforeImage = new TableRecords();
    beforeImage.setTableName("table_name");
    beforeImage.setTableMeta(tableMeta);
    List<Row> rows = new ArrayList<>();
    Row row = new Row();
    Field field01 = addField(row, "pk", 1, "12345");
    Field field02 = addField(row, "age", 1, "18");
    rows.add(row);
    beforeImage.setRows(rows);

    // Null on either side of a non-null image is never equal.
    Assertions.assertFalse(DataCompareUtils.isRecordsEquals(beforeImage, null).getResult());
    Assertions.assertFalse(DataCompareUtils.isRecordsEquals(null, beforeImage).getResult());

    // Wrong table name fails even before rows are compared.
    TableRecords afterImage = new TableRecords();
    afterImage.setTableName("table_name1"); // wrong table name
    afterImage.setTableMeta(tableMeta);
    Assertions.assertFalse(DataCompareUtils.isRecordsEquals(beforeImage, afterImage).getResult());

    // Same table name but no rows on the after side: row counts differ.
    afterImage.setTableName("table_name");
    Assertions.assertFalse(DataCompareUtils.isRecordsEquals(beforeImage, afterImage).getResult());

    // Identical row content: images are equal.
    List<Row> rows2 = new ArrayList<>();
    Row row2 = new Row();
    Field field11 = addField(row2, "pk", 1, "12345");
    Field field12 = addField(row2, "age", 1, "18");
    rows2.add(row2);
    afterImage.setRows(rows2);
    Assertions.assertTrue(DataCompareUtils.isRecordsEquals(beforeImage, afterImage).getResult());

    // Different primary key value: not equal.
    field11.setValue("23456");
    Assertions.assertFalse(DataCompareUtils.isRecordsEquals(beforeImage, afterImage).getResult());

    // Same pk but a differently named field: not equal.
    field11.setValue("12345");
    field12.setName("sex");
    Assertions.assertFalse(DataCompareUtils.isRecordsEquals(beforeImage, afterImage).getResult());

    // Same field name, different value: not equal.
    field12.setName("age");
    field12.setValue("19");
    Assertions.assertFalse(DataCompareUtils.isRecordsEquals(beforeImage, afterImage).getResult());

    // NOTE(review): setName("18") looks like a typo (probably meant setName("age")
    // or setValue("18")); the following assertion still holds because the extra
    // row below makes the row counts differ — TODO confirm intent.
    field12.setName("18");
    Field field3 = new Field("pk", 1, "12346");
    Row row3 = new Row();
    row3.add(field3);
    rows2.add(row3);
    // Unequal row counts: not equal.
    Assertions.assertFalse(DataCompareUtils.isRecordsEquals(beforeImage, afterImage).getResult());

    // Two empty (but non-null) row sets are equal.
    beforeImage.setRows(new ArrayList<>());
    afterImage.setRows(new ArrayList<>());
    Assertions.assertTrue(DataCompareUtils.isRecordsEquals(beforeImage, afterImage).getResult());
}
/**
 * Returns the configured JCE provider name, auto-registering the Bouncy Castle
 * security provider when the configured name is "BC" and auto-add is enabled.
 *
 * @param conf configuration supplying the provider name and the auto-add flag
 * @return the trimmed provider name from the configuration (possibly empty)
 */
public static String getJceProvider(Configuration conf) {
    final String provider = conf.getTrimmed(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY, "");
    final boolean autoAdd = conf.getBoolean(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_KEY,
        CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_DEFAULT);

    // For backward compatible, auto-add BOUNCY_CASTLE_PROVIDER_CLASS when the provider is "BC".
    if (autoAdd && BOUNCY_CASTLE_PROVIDER_NAME.equals(provider)) {
      try {
        // Use reflection in order to avoid statically loading the class.
        final Class<?> clazz = Class.forName(BOUNCY_CASTLE_PROVIDER_CLASS);
        Security.addProvider((Provider) clazz.getConstructor().newInstance());
        LOG.debug("Successfully added security provider {}", provider);
        if (LOG.isTraceEnabled()) {
          // Capture a stack trace so callers registering the provider can be traced.
          LOG.trace("Trace", new Throwable());
        }
      } catch (ClassNotFoundException e) {
        // The BC jar is simply not on the classpath.
        LOG_FAILED_TO_LOAD_CLASS.warn("Failed to load " + BOUNCY_CASTLE_PROVIDER_CLASS, e);
      } catch (Exception e) {
        // Reflection/registration failed for some other reason; provider name is still returned.
        LOG_FAILED_TO_ADD_PROVIDER.warn("Failed to add security provider for {}", provider, e);
      }
    }
    return provider;
}
@Test(timeout = 5_000)
public void testAutoAddEnabled() {
    // Start from a clean slate: BC must not already be registered.
    assertRemoveProvider();
    final Configuration conf = new Configuration();
    // The default configuration must have auto-add enabled.
    Assertions.assertThat(conf.get(HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_KEY))
        .describedAs("conf: " + HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_KEY)
        .isEqualToIgnoringCase("true");
    Assert.assertTrue(HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_DEFAULT);
    // Requesting the "BC" provider should transparently register Bouncy Castle.
    conf.set(HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY, CryptoUtils.BOUNCY_CASTLE_PROVIDER_NAME);
    final String providerFromConf = CryptoUtils.getJceProvider(conf);
    Assert.assertEquals(CryptoUtils.BOUNCY_CASTLE_PROVIDER_NAME, providerFromConf);
    final Provider provider = Security.getProvider(BouncyCastleProvider.PROVIDER_NAME);
    Assertions.assertThat(provider)
        .isInstanceOf(BouncyCastleProvider.class);
    // Clean up so other tests are unaffected.
    assertRemoveProvider();
}
/**
 * Combines the creation, update, and access attributes via {@link Objects#hash};
 * should stay in sync with the fields considered by {@code equals}.
 */
@Override
public int hashCode() {
    return Objects.hash(creation, update, access);
}
@Test
public void hash() {
    // Fixtures with different timestamp attributes are expected to hash differently.
    // NOTE(review): distinct hash codes are not guaranteed by the hashCode contract;
    // this relies on Objects.hash not colliding for these particular fixtures.
    assertThat(eternal.hashCode()).isNotEqualTo(temporal.hashCode());
}
/**
 * Creates a new {@link XMLInputFactory} passed through {@code getProtectedFactory},
 * which applies the library's hardening (per the accompanying tests, this disables
 * external entity resolution / XXE).
 */
public static XMLInputFactory getXMLInputFactory() {
    return getProtectedFactory(XMLInputFactory.newInstance());
}
@Test
public void testGetXmlInputFactory() {
    XMLInputFactory xmlInputFactory = XmlUtil.getXMLInputFactory();
    assertNotNull(xmlInputFactory);
    // check if the XXE protection is enabled
    assertThrows(XMLStreamException.class,
        () -> staxReadEvents(xmlInputFactory.createXMLEventReader(new StringReader(server.getTestXml()))));
    // The external-entity server must never have been contacted.
    assertEquals(0, server.getHits());
    // Unknown properties fail loudly by default ...
    assertThrows(IllegalArgumentException.class,
        () -> XmlUtil.setProperty(xmlInputFactory, "test://no-such-property", false));
    // ... and still fail when the ignore-failure system property is explicitly false ...
    ignoreXxeFailureProp.setOrClearProperty("false");
    assertThrows(IllegalArgumentException.class,
        () -> XmlUtil.setProperty(xmlInputFactory, "test://no-such-property", false));
    // ... but are swallowed when the ignore-failure property is true.
    ignoreXxeFailureProp.setOrClearProperty("true");
    XmlUtil.setProperty(xmlInputFactory, "test://no-such-feature", false);
}
/**
 * Looks up the localized string for the given key in the default
 * "Localizable" table (convenience overload of the two-argument variant).
 *
 * @param key localization key
 */
public static String localizedString(final String key) {
    return localizedString(key, "Localizable");
}
@Test
public void testLocalizedString() {
    // The lookup doubles single quotes (MessageFormat-style escaping) —
    // note the '' in the expected value for the d' in the input.
    assertEquals("La clé d''hôte fournie est {0}.",
        LocaleFactory.localizedString("La clé d'hôte fournie est {0}.", "Localizable"));
}
/**
 * Key-less {@code flatMapValues} overload: adapts the {@link ValueMapper} to a
 * key-aware mapper via {@code withKey} and delegates to the key-aware variant,
 * so all validation and processing logic lives in one place.
 */
@Override
public <VR> KStream<K, VR> flatMapValues(final ValueMapper<? super V, ? extends Iterable<? extends VR>> mapper) {
    return flatMapValues(withKey(mapper));
}
@Test
public void shouldNotAllowNullMapperOnFlatMapValuesWithNamed() {
    // A null mapper must be rejected eagerly with a descriptive NPE,
    // even when a Named is supplied.
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.flatMapValues(
            (ValueMapper<Object, Iterable<Object>>) null,
            Named.as("flatValueMapper")));
    assertThat(exception.getMessage(), equalTo("valueMapper can't be null"));
}
/**
 * Builds one result row per metadata entry of each requested table that exists in
 * the database's default schema. Table names are matched and reported in lower
 * case; tables absent from the schema are silently skipped.
 */
@Override
public Collection<LocalDataQueryResultRow> getRows(final ShowTableMetaDataStatement sqlStatement, final ContextManager contextManager) {
    String defaultSchemaName = new DatabaseTypeRegistry(database.getProtocolType()).getDefaultSchemaName(database.getName());
    ShardingSphereSchema defaultSchema = database.getSchema(defaultSchemaName);
    return sqlStatement.getTableNames().stream()
            .map(String::toLowerCase)
            .filter(defaultSchema.getAllTableNames()::contains)
            .flatMap(tableName -> buildTableRows(database.getName(), defaultSchema, tableName).stream())
            .collect(Collectors.toList());
}
@Test
void assertExecute() {
    ShardingSphereDatabase database = mockDatabase();
    ShowTableMetaDataExecutor executor = new ShowTableMetaDataExecutor();
    executor.setDatabase(database);
    Collection<LocalDataQueryResultRow> actual = executor.getRows(createSqlStatement(), mock(ContextManager.class));
    // One row for the column metadata plus one for the index metadata of t_order.
    assertThat(actual.size(), is(2));
    Iterator<LocalDataQueryResultRow> iterator = actual.iterator();
    // Column row: database, table, type, name, JSON-serialized column details.
    LocalDataQueryResultRow row = iterator.next();
    assertThat(row.getCell(1), is("foo_db"));
    assertThat(row.getCell(2), is("t_order"));
    assertThat(row.getCell(3), is("COLUMN"));
    assertThat(row.getCell(4), is("order_id"));
    assertThat(row.getCell(5), is("{\"name\":\"order_id\",\"dataType\":0,\"primaryKey\":false,\"generated\":false,\"caseSensitive\":false,\"visible\":true,\"unsigned\":false,\"nullable\":false}"));
    // Index row: same shape with index name and JSON-serialized index details.
    row = iterator.next();
    assertThat(row.getCell(1), is("foo_db"));
    assertThat(row.getCell(2), is("t_order"));
    assertThat(row.getCell(3), is("INDEX"));
    assertThat(row.getCell(4), is("primary"));
    assertThat(row.getCell(5), is("{\"name\":\"primary\",\"columns\":[],\"unique\":false}"));
}
/**
 * Returns the sectoral identity providers the user can choose from, as
 * advertised by the federation master (passed through unmodified).
 */
@NonNull
@Override
public List<IdpEntry> fetchIdpOptions() {
    return fedMasterClient.listAvailableIdps();
}
@Test
void fetchIdpOptions() {
    // given: a federation master advertising exactly one IdP
    var fedmasterClient = mock(FederationMasterClient.class);
    var sut = new SelectSectoralIdpStepImpl(
        null, fedmasterClient, null, null, null, null, null, null, null);
    var entries = List.of(new IdpEntry("https://tk.example.com", "Techniker KK", null));
    when(fedmasterClient.listAvailableIdps()).thenReturn(entries);

    // when
    var idps = sut.fetchIdpOptions();

    // then: the list is passed through unchanged
    assertEquals(entries, idps);
}
/**
 * Resolves the tenant bound to the current context and hands it to the given
 * handler; does nothing when the multi-tenant feature is disabled.
 *
 * @param handler callback receiving the resolved tenant
 */
@Override
public void handleTenantInfo(TenantInfoHandler handler) {
    // Skip entirely when the tenant feature is disabled.
    if (isTenantDisable()) {
        return;
    }
    // Load the tenant bound to the current context (id must be present).
    TenantDO tenant = getTenant(TenantContextHolder.getRequiredTenantId());
    // Delegate to the handler.
    handler.handle(tenant);
}
@Test
public void testHandleTenantInfo_success() {
    // Prepare a mock handler to capture the tenant passed in.
    TenantInfoHandler handler = mock(TenantInfoHandler.class);
    // Mock the tenant feature as enabled.
    when(tenantProperties.getEnable()).thenReturn(true);
    // Persist a tenant row and bind its id to the current context.
    TenantDO dbTenant = randomPojo(TenantDO.class);
    tenantMapper.insert(dbTenant); // @Sql: insert an existing row first
    TenantContextHolder.setTenantId(dbTenant.getId());
    // Invoke the method under test.
    tenantService.handleTenantInfo(handler);
    // Assert the handler received exactly the persisted tenant.
    verify(handler).handle(argThat(argument -> {
        assertPojoEquals(dbTenant, argument);
        return true;
    }));
}
/**
 * Decides whether {@code filePath} is filtered OUT (returns {@code true}) or kept
 * (returns {@code false}). A path is kept only when it matches at least one
 * include pattern and no exclude pattern; with no patterns configured at all,
 * everything is kept.
 */
@Override
public boolean filterPath(Path filePath) {
    // No patterns configured: keep everything.
    if (getIncludeMatchers().isEmpty() && getExcludeMatchers().isEmpty()) {
        return false;
    }

    // compensate for the fact that Flink paths are slashed
    final String path = filePath.hasWindowsDrive()
        ? filePath.getPath().substring(1)
        : filePath.getPath();
    final java.nio.file.Path nioPath = Paths.get(path);

    // Matched an include pattern: keep unless an exclude pattern also matches.
    for (PathMatcher matcher : getIncludeMatchers()) {
        if (matcher.matches(nioPath)) {
            return shouldExclude(nioPath);
        }
    }

    // Matched no include pattern: filter out.
    return true;
}
@Test
void testIncludeFileWithCharacterSetMatcher() {
    // "[acd]" matches exactly one of the characters a, c, or d.
    GlobFilePathFilter matcher = new GlobFilePathFilter(
        Collections.singletonList("dir/[acd].txt"),
        Collections.emptyList());

    // Files whose name is in the character set are kept (filterPath == false) ...
    assertThat(matcher.filterPath(new Path("dir/a.txt"))).isFalse();
    assertThat(matcher.filterPath(new Path("dir/c.txt"))).isFalse();
    assertThat(matcher.filterPath(new Path("dir/d.txt"))).isFalse();
    // ... while anything outside the set is filtered out.
    assertThat(matcher.filterPath(new Path("dir/z.txt"))).isTrue();
}
/**
 * Parses an ISO-8601 date-time string into a {@link Date}, interpreting the
 * parsed local date-time as UTC.
 *
 * @param input ISO-8601 formatted date-time string
 * @throws InvalidDateException when the input does not conform to ISO-8601
 */
@Override
public Date parse(final String input) throws InvalidDateException {
    try {
        return Date.from(LocalDateTime.parse(input, DateTimeFormatter.ISO_DATE_TIME).toInstant(ZoneOffset.UTC));
    }
    catch(DateTimeParseException e) {
        // Wrap the parser error so callers deal with a single exception type.
        throw new InvalidDateException(e.getMessage(), e);
    }
}
@Test
public void testParseWithoutMilliseconds() throws Exception {
    final ISO8601DateFormatter formatter = new ISO8601DateFormatter();
    // An empty string is not valid ISO-8601 and must be rejected.
    assertThrows(InvalidDateException.class, () -> formatter.parse("").getTime());
    // A second-precision UTC timestamp (no fractional seconds) parses to the expected epoch millis.
    assertEquals(1667567722000L, formatter.parse("2022-11-04T13:15:22Z").getTime(), 0L);
}
/**
 * Resolves the socket address a channel should bind to under managed
 * wildcard-port allocation.
 *
 * <p>An explicitly requested port is recorded in {@code portSet} (presumably so
 * the allocator avoids handing it out — see {@code allocateOpenPort}); a wildcard
 * (port 0) request is replaced with an allocated port unless OS-level wildcard
 * binding is in effect, or the channel is a sender publication without explicit
 * control (which should keep an ephemeral port for its control channel).
 *
 * @param udpChannel channel being bound
 * @param bindAddress requested bind address; port 0 means "allocate for me"
 * @return the address to actually bind, possibly with an allocated port
 * @throws BindException when no open port can be allocated
 */
public InetSocketAddress getManagedPort(
    final UdpChannel udpChannel, final InetSocketAddress bindAddress) throws BindException {
    InetSocketAddress address = bindAddress;

    if (bindAddress.getPort() != 0) {
        portSet.add(bindAddress.getPort());
    }
    else if (!isOsWildcard) {
        // do not map if not a subscription and does not have a control address. We want to use an ephemeral port
        // for the control channel on publications.
        if (!isSender || udpChannel.hasExplicitControl()) {
            address = new InetSocketAddress(bindAddress.getAddress(), allocateOpenPort());
        }
    }

    return address;
}
@Test
void shouldAllocateForPubWithExplicitControlAddress() throws BindException {
    // Port 0 requests a managed allocation.
    final InetSocketAddress bindAddress = new InetSocketAddress("localhost", 0);
    final WildcardPortManager manager = new WildcardPortManager(portRange, true);

    // A sender publication WITH an explicit control address must get a port from the
    // managed range (20000 — presumably the first port of portRange; see fixture).
    assertThat(manager.getManagedPort(
        udpChannelPubControl, bindAddress), is(new InetSocketAddress("localhost", 20000)));
}
/**
 * Handles the platform's inline-suggestions response (Android R+): when the
 * response carries suggestions, forwards them to the inline-suggestion strip
 * action and makes the actions strip visible.
 *
 * @param response the autofill inline-suggestions response
 * @return {@code true} when at least one suggestion was consumed
 */
@RequiresApi(Build.VERSION_CODES.R)
@Override
public boolean onInlineSuggestionsResponse(@NonNull InlineSuggestionsResponse response) {
    final List<InlineSuggestion> inlineSuggestions = response.getInlineSuggestions();
    // Compute the result once; previously the guard used size() > 0 while the
    // return used !isEmpty() — same meaning, now expressed consistently.
    final boolean hasSuggestions = !inlineSuggestions.isEmpty();
    if (hasSuggestions) {
        mInlineSuggestionAction.onNewSuggestions(inlineSuggestions);
        getInputViewContainer().addStripAction(mInlineSuggestionAction, true);
        getInputViewContainer().setActionsStripVisibility(true);
    }
    return hasSuggestions;
}
@Test
public void testActionStripAddedForGeneric() {
    simulateOnStartInputFlow();
    // Delivering one inline suggestion should add the suggestions strip action ...
    mAnySoftKeyboardUnderTest.onInlineSuggestionsResponse(
        mockResponse(Mockito.mock(InlineContentView.class)));
    // ... whose icon is the generic inline-suggestions drawable.
    ImageView icon =
        mAnySoftKeyboardUnderTest
            .getInputViewContainer()
            .findViewById(R.id.inline_suggestions_strip_icon);
    Assert.assertEquals(
        R.drawable.ic_inline_suggestions,
        Shadows.shadowOf(icon.getDrawable()).getCreatedFromResId());
}
/**
 * Parses a string as a SQL boolean, returning {@code true} only when it matches
 * a recognized true spelling per {@code booleanStringMatches}; everything else —
 * including padded or partial values (see the accompanying tests) — is false.
 */
public static boolean parseBoolean(final String value) {
    return booleanStringMatches(value, true);
}
@Test
public void shouldParseAnythingElseAsFalse() {
    // Anything that is not an exact recognized "true" spelling is false —
    // including empty, padded, and unrelated text.
    assertThat(SqlBooleans.parseBoolean(""), is(false));
    assertThat(SqlBooleans.parseBoolean(" true "), is(false));
    assertThat(SqlBooleans.parseBoolean("yes "), is(false));
    assertThat(SqlBooleans.parseBoolean("false"), is(false));
    assertThat(SqlBooleans.parseBoolean("what ever"), is(false));
}
/**
 * Creates the sharding route engine appropriate for the SQL statement in the
 * query context. Dispatch order is significant: TCL, then DDL (with a special
 * case for cursor-capable statements), then DAL, then DCL, with everything
 * else routed as a query (DQL).
 *
 * @return the route engine to execute this statement with
 */
public static ShardingRouteEngine newInstance(final ShardingRule shardingRule, final ShardingSphereDatabase database, final QueryContext queryContext,
                                              final ShardingConditions shardingConditions, final ConfigurationProperties props, final ConnectionContext connectionContext) {
    SQLStatementContext sqlStatementContext = queryContext.getSqlStatementContext();
    SQLStatement sqlStatement = sqlStatementContext.getSqlStatement();
    // Transaction control statements are broadcast to every database.
    if (sqlStatement instanceof TCLStatement) {
        return new ShardingDatabaseBroadcastRoutingEngine();
    }
    if (sqlStatement instanceof DDLStatement) {
        // Cursor-capable DDL has dedicated routing.
        if (sqlStatementContext instanceof CursorAvailable) {
            return getCursorRouteEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props);
        }
        return getDDLRoutingEngine(shardingRule, database, sqlStatementContext);
    }
    if (sqlStatement instanceof DALStatement) {
        return getDALRoutingEngine(shardingRule, database, sqlStatementContext, connectionContext);
    }
    if (sqlStatement instanceof DCLStatement) {
        return getDCLRoutingEngine(shardingRule, database, sqlStatementContext);
    }
    // Everything else is treated as a query.
    return getDQLRoutingEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props, connectionContext);
}
@Test
void assertNewInstanceForDALWithoutTables() {
    // A DAL statement referencing no tables should route via the
    // data-source-group broadcast engine.
    DALStatement dalStatement = mock(DALStatement.class);
    when(sqlStatementContext.getSqlStatement()).thenReturn(dalStatement);
    QueryContext queryContext = new QueryContext(sqlStatementContext, "", Collections.emptyList(), new HintValueContext(),
        mockConnectionContext(), mock(ShardingSphereMetaData.class));
    ShardingRouteEngine actual = ShardingRouteEngineFactory.newInstance(shardingRule, database, queryContext, shardingConditions, props,
        new ConnectionContext(Collections::emptySet));
    assertThat(actual, instanceOf(ShardingDataSourceGroupBroadcastRoutingEngine.class));
}
/**
 * Decodes a raw GELF payload into a {@link Message}.
 *
 * <p>The payload is decompressed/extracted as JSON, validated against the GELF
 * structural rules, and mapped onto a message: {@code short_message} and
 * {@code host} become the message text and source, the client timestamp is used
 * when present (receive time otherwise), well-known fields (full_message, file,
 * line, level, facility) are copied when set, and all remaining JSON fields are
 * added with the GELF underscore prefix stripped.
 *
 * @param rawMessage raw bytes plus transport metadata
 * @return the decoded message
 * @throws IllegalStateException when the payload is not parseable JSON
 * @throws IllegalArgumentException when the JSON violates the GELF format
 */
@Nullable
@Override
public Message decode(@Nonnull final RawMessage rawMessage) {
    final GELFMessage gelfMessage = new GELFMessage(rawMessage.getPayload(), rawMessage.getRemoteAddress());
    final String json = gelfMessage.getJSON(decompressSizeLimit, charset);

    // Parse the payload; a null tree (e.g. empty payload) is treated like a parse failure.
    final JsonNode node;
    try {
        node = objectMapper.readTree(json);
        if (node == null) {
            throw new IOException("null result");
        }
    } catch (final Exception e) {
        log.error("Could not parse JSON, first 400 characters: " + StringUtils.abbreviate(json, 403), e);
        throw new IllegalStateException("JSON is null/could not be parsed (invalid JSON)", e);
    }

    // Enforce GELF structural requirements before building the message.
    try {
        validateGELFMessage(node, rawMessage.getId(), rawMessage.getRemoteAddress());
    } catch (IllegalArgumentException e) {
        log.trace("Invalid GELF message <{}>", node);
        throw e;
    }

    // Timestamp.
    final double messageTimestamp = timestampValue(node);
    final DateTime timestamp;
    if (messageTimestamp <= 0) {
        // Missing/invalid client timestamp: fall back to the receive time.
        timestamp = rawMessage.getTimestamp();
    } else {
        // we treat this as a unix timestamp
        timestamp = Tools.dateTimeFromDouble(messageTimestamp);
    }

    final Message message = messageFactory.createMessage(
        stringValue(node, "short_message"),
        stringValue(node, "host"),
        timestamp
    );

    message.addField(Message.FIELD_FULL_MESSAGE, stringValue(node, "full_message"));

    final String file = stringValue(node, "file");
    if (file != null && !file.isEmpty()) {
        message.addField("file", file);
    }

    final long line = longValue(node, "line");
    if (line > -1) {
        message.addField("line", line);
    }

    // Level is set by server if not specified by client.
    final int level = intValue(node, "level");
    if (level > -1) {
        message.addField("level", level);
    }

    // Facility is set by server if not specified by client.
    final String facility = stringValue(node, "facility");
    if (facility != null && !facility.isEmpty()) {
        message.addField("facility", facility);
    }

    // Add additional data if there is some.
    final Iterator<Map.Entry<String, JsonNode>> fields = node.fields();

    while (fields.hasNext()) {
        final Map.Entry<String, JsonNode> entry = fields.next();

        String key = entry.getKey();

        // Do not index useless GELF "version" field.
        if ("version".equals(key)) {
            continue;
        }

        // Don't include GELF syntax underscore in message field key.
        if (key.startsWith("_") && key.length() > 1) {
            key = key.substring(1);
        }

        // We already set short_message and host as message and source. Do not add as fields again.
        if ("short_message".equals(key) || "host".equals(key)) {
            continue;
        }

        // Skip standard or already set fields.
        if (message.getField(key) != null || Message.RESERVED_FIELDS.contains(key) && !Message.RESERVED_SETTABLE_FIELDS.contains(key)) {
            continue;
        }

        // Convert JSON containers to Strings, and pick a suitable number representation.
        final JsonNode value = entry.getValue();

        final Object fieldValue;
        if (value.isContainerNode()) {
            fieldValue = value.toString();
        } else if (value.isFloatingPointNumber()) {
            fieldValue = value.asDouble();
        } else if (value.isIntegralNumber()) {
            fieldValue = value.asLong();
        } else if (value.isNull()) {
            log.debug("Field [{}] is NULL. Skipping.", key);
            continue;
        } else if (value.isTextual()) {
            fieldValue = value.asText();
        } else {
            log.debug("Field [{}] has unknown value type. Skipping.", key);
            continue;
        }

        message.addField(key, fieldValue);
    }

    return message;
}
// NOTE(review): despite the method name, the expected outcome IS an exception —
// just an IllegalStateException (unparseable JSON) rather than an
// IllegalArgumentException (GELF validation failure).
@Test(expected = IllegalStateException.class)
public void decodeDoesNotThrowIllegalArgumentExceptionIfJsonIsInvalid() throws Exception {
    // this fails gelf parsing, but empty Payloads are now ok.
    final RawMessage rawMessage = new RawMessage(new byte[0]);
    codec.decode(rawMessage);
}
/**
 * Finds the public, non-static field named {@code fieldName} on {@code clazz}.
 *
 * @param clazz class to inspect
 * @param fieldName name of the candidate property field
 * @return the matching {@link Field}, or {@code null} when no such field exists
 *     or it is not a public instance field
 */
@Nullable
public static Field findPropertyField(Class<?> clazz, String fieldName) {
    final Field candidate;
    try {
        candidate = clazz.getField(fieldName);
    } catch (NoSuchFieldException e) {
        // No publicly accessible field with that name.
        return null;
    }
    final int modifiers = candidate.getModifiers();
    return Modifier.isPublic(modifiers) && !Modifier.isStatic(modifiers) ? candidate : null;
}
@Test
public void when_findPropertyField_publicStatic_then_returnsNull() {
    // Static fields are not bean properties, even when public.
    assertNull(findPropertyField(JavaFields.class, "publicStaticField"));
}