Dataset columns: focal_method (string, lengths 13–60.9k), test_case (string, lengths 25–109k).
/**
 * Checks whether any stored recurring or scheduled jobs reference methods that
 * no longer exist in the application and, if so, logs a warning listing them.
 * Any unexpected failure is logged as an error rather than propagated.
 */
@Override
public void run() {
    try {
        final Set<String> missingRecurring = getDistinctRecurringJobSignaturesThatDoNotExistAnymore();
        final Set<String> missingScheduled = getDistinctScheduledJobSignaturesThatDoNotExistAnymore();
        Set<String> allMissingSignatures = asSet(missingRecurring, missingScheduled);
        boolean anythingMissing = !missingRecurring.isEmpty() || !missingScheduled.isEmpty();
        if (anythingMissing) {
            String jobStateThatIsNotFound = jobTypeNotFoundLabel(missingRecurring, missingScheduled);
            String formattedSignatures = allMissingSignatures.stream()
                    .map(sign -> "\n\t" + sign + ",")
                    .collect(Collectors.joining());
            LOGGER.warn("JobRunr found {} jobs that do not exist anymore in your code. These jobs will fail with a JobNotFoundException (due to a ClassNotFoundException or a MethodNotFoundException)."
                            + "\n\tBelow you can find the method signatures of the jobs that cannot be found anymore: {}",
                    jobStateThatIsNotFound,
                    formattedSignatures);
        }
    } catch (Exception e) {
        // This task must never kill its scheduler thread.
        LOGGER.error("Unexpected exception running `CheckIfAllJobsExistTask`", shouldNotHappenException(e));
    }
}
// Verifies that the task logs one warning listing every SCHEDULED job whose
// class no longer exists on the classpath, and emits no error logs.
@Test
void onRunItLogsAllScheduledJobsThatDoNotExist() {
    // No recurring jobs; two scheduled signatures, one referencing a missing class.
    when(storageProvider.getRecurringJobs()).thenReturn(new RecurringJobsResult());
    when(storageProvider.getDistinctJobSignatures(SCHEDULED)).thenReturn(Set.of(
            getJobSignature(defaultJobDetails().build()),
            getJobSignature(classThatDoesNotExistJobDetails().build())
    ));

    checkIfAllJobsExistTask.run();

    assertThat(logger)
            .hasWarningMessageContaining("JobRunr found SCHEDULED jobs that do not exist anymore")
            .hasWarningMessageContaining("i.dont.exist.Class.notImportant(java.lang.Integer)")
            .hasNoErrorLogMessages();
}
/**
 * Loads the project repository settings for the given project (and optional
 * branch) from the server.
 *
 * <p>IO errors while reading the response body are wrapped in an
 * {@link IllegalStateException}. Runtime failures are rethrown only when
 * {@code shouldThrow} approves them (presumably non-404 errors — TODO
 * confirm); otherwise an empty {@link SingleProjectRepository} is returned.
 */
@Override
public ProjectRepositories load(String projectKey, @Nullable String branchBase) {
    GetRequest request = new GetRequest(getUrl(projectKey, branchBase));
    try (WsResponse response = wsClient.call(request)) {
        try (InputStream is = response.contentStream()) {
            return processStream(is);
        } catch (IOException e) {
            // NOTE(review): this IllegalStateException propagates through the
            // outer catch below, so it is still subject to shouldThrow(e).
            throw new IllegalStateException("Couldn't load project repository for " + projectKey, e);
        }
    } catch (RuntimeException e) {
        if (shouldThrow(e)) {
            throw e;
        }
        LOG.debug("Project repository not available - continuing without it");
        return new SingleProjectRepository();
    }
}
/**
 * The loader must propagate non-HTTP-404 failures from the web-service call
 * instead of silently falling back to an empty repository.
 *
 * <p>With {@code @Test(expected = ...)} no statement after the throwing call
 * ever executes, so the previous post-call assertions were dead code and have
 * been removed.
 */
@Test(expected = IllegalStateException.class)
public void failOnNonHttp404Exception() {
    when(wsClient.call(any())).thenThrow(IllegalStateException.class);
    loader.load(PROJECT_KEY, null);
}
/**
 * Shuts down all task executors in two phases: first request shutdown from
 * every executor and wake any that are blocked, then wait (bounded by the
 * given duration) for each of them to terminate.
 */
public void shutdown(final Duration duration) {
    // Phase 1: ask every executor to stop.
    for (final TaskExecutor executor : taskExecutors) {
        executor.requestShutdown();
    }
    // Wake up executors that may be blocked waiting for work.
    signalTaskExecutors();
    // Phase 2: wait for each executor to actually finish.
    for (final TaskExecutor executor : taskExecutors) {
        executor.awaitShutdown(duration);
    }
}
// Verifies that a thread blocked in the await loop returns promptly when it
// is interrupted instead of hanging until the timeout expires.
@Test
public void shouldReturnFromAwaitOnInterruption() throws InterruptedException {
    final AwaitingRunnable awaitingRunnable = new AwaitingRunnable();
    final Thread awaitingThread = new Thread(awaitingRunnable);
    awaitingThread.start();
    // Wait until the runnable is actually polling activeTasks() (i.e. blocked in await).
    verify(tasks, timeout(VERIFICATION_TIMEOUT).atLeastOnce()).activeTasks();
    awaitingThread.interrupt();
    assertTrue(awaitingRunnable.awaitDone.await(VERIFICATION_TIMEOUT, TimeUnit.MILLISECONDS));
    awaitingRunnable.shutdown();
}
/**
 * Returns the cached conversion function that maps a Beam {@code Row} back to
 * an instance of {@code T}.
 */
public SerializableFunction<Row, T> getFromRowFunction() {
    return fromRowFunction;
}
// Round-trips Row values through the from-row function for a proto whose
// oneof fields are declared in reversed order, covering int32, bool, string
// and primitive cases. Comparison is done on the proto's string form.
@Test
public void testReversedOneOfRowToProto() {
    ProtoDynamicMessageSchema schemaProvider = schemaFromDescriptor(ReversedOneOf.getDescriptor());
    SerializableFunction<Row, DynamicMessage> fromRow = schemaProvider.getFromRowFunction();
    assertEquals(
        REVERSED_ONEOF_PROTO_INT32.toString(), fromRow.apply(REVERSED_ONEOF_ROW_INT32).toString());
    assertEquals(
        REVERSED_ONEOF_PROTO_BOOL.toString(), fromRow.apply(REVERSED_ONEOF_ROW_BOOL).toString());
    assertEquals(
        REVERSED_ONEOF_PROTO_STRING.toString(), fromRow.apply(REVERSED_ONEOF_ROW_STRING).toString());
    assertEquals(
        REVERSED_ONEOF_PROTO_PRIMITIVE.toString(),
        fromRow.apply(REVERSED_ONEOF_ROW_PRIMITIVE).toString());
}
/**
 * Creates a JSON serde for the given schema.
 *
 * <p>When the schema-registry flavour (JSON_SR) is enabled, an explicit
 * schema id may be resolved to a physical schema used for translation;
 * otherwise the logical schema is used as-is. A serializer instance is built
 * eagerly once as a sanity check before wrapping it in a thread-local.
 */
@Override
public <T> Serde<T> createSerde(
    final Schema schema,
    final KsqlConfig ksqlConfig,
    final Supplier<SchemaRegistryClient> srFactory,
    final Class<T> targetType,
    final boolean isKey
) {
    validateSchema(schema);
    final Optional<Schema> physicalSchema;
    if (useSchemaRegistryFormat) {
        // Only resolve a physical schema when a schema id was configured.
        physicalSchema = properties.getSchemaId().isPresent() ? Optional.of(
            SerdeUtils.getAndTranslateSchemaById(srFactory, properties.getSchemaId()
                .get(), new JsonSchemaTranslator())) : Optional.empty();
    } else {
        physicalSchema = Optional.empty();
    }
    final Converter converter = useSchemaRegistryFormat
        ? getSchemaRegistryConverter(srFactory.get(), ksqlConfig, properties.getSchemaId(), isKey)
        : getConverter();

    // The translators are used in the serializer & deserializer only for JSON_SR formats
    final ConnectDataTranslator dataTranslator = physicalSchema.isPresent()
        ? new ConnectSRSchemaDataTranslator(physicalSchema.get())
        : new ConnectDataTranslator(schema);

    final Supplier<Serializer<T>> serializer = () -> createSerializer(
        targetType,
        dataTranslator,
        converter
    );
    final Deserializer<T> deserializer = createDeserializer(
        ksqlConfig,
        schema,
        targetType,
        dataTranslator,
        converter
    );

    // Sanity check: fail fast here rather than on first use.
    serializer.get();

    return Serdes.serdeFrom(
        new ThreadLocalSerializer<>(serializer),
        deserializer
    );
}
// When the JSON_SR converter-deserializer feature flag is on, the factory
// must produce the KsqlConnectDeserializer-based deserializer.
@Test
public void shouldUseNewJsonSchemaDeserializerOnJsonSrWhenJsonSchemaConverterIsEnabled() {
    // Given:
    final ConnectSchema connectSchema = (ConnectSchema) SchemaBuilder.string().build();
    when(config.getBoolean(KsqlConfig.KSQL_JSON_SR_CONVERTER_DESERIALIZER_ENABLED))
        .thenReturn(true);

    // When:
    final Serde<String> serde =
        jsonFactory.createSerde(connectSchema, config, srFactory, String.class, false);

    // Then:
    assertThat(serde.deserializer(), is(instanceOf(KsqlConnectDeserializer.class)));
}
/**
 * Converts each config entry into a runtime {@code EnvironmentVariable} and
 * collects them into a new {@code EnvironmentVariables} instance.
 */
public static EnvironmentVariables toEnvironmentVariables(EnvironmentVariablesConfig environmentVariableConfigs) {
    final EnvironmentVariables result = new EnvironmentVariables();
    environmentVariableConfigs.forEach(config -> result.add(new EnvironmentVariable(config)));
    return result;
}
// Converts a config holding one plain and one secure variable and checks both
// appear in the result with the secure flag preserved.
@Test
void toEnvironmentVariables_shouldConvertEnvironmentVariablesConfigToEnvironmentVariable() {
    final EnvironmentVariablesConfig environmentVariableConfigs = new EnvironmentVariablesConfig(Arrays.asList(
            new EnvironmentVariableConfig("foo", "bar"),
            new EnvironmentVariableConfig(new GoCipher(), "baz", "car", true)
    ));

    final EnvironmentVariables environmentVariables = EnvironmentVariables.toEnvironmentVariables(environmentVariableConfigs);

    assertThat(environmentVariables).contains(new EnvironmentVariable("foo", "bar", false),
            new EnvironmentVariable("baz", "car", true));
}
/**
 * Wraps an iterable in a view that hides the concrete type of {@code base}.
 * Each iterator obtained from the view simply delegates {@code hasNext},
 * {@code next} and {@code remove} to an iterator of the wrapped iterable.
 */
public static <T> Iterable<T> wrap(final Iterable<T> base) {
    return () -> {
        final Iterator<T> delegate = base.iterator();
        return new Iterator<T>() {
            @Override
            public boolean hasNext() {
                return delegate.hasNext();
            }

            @Override
            public T next() {
                return delegate.next();
            }

            @Override
            public void remove() {
                // Removal is forwarded, so the view supports mutation iff the
                // underlying iterable does.
                delegate.remove();
            }
        };
    };
}
// The wrapped view must not expose the underlying List type, while still
// iterating the same elements in the same order.
@Test
public void wrap() {
    List<Integer> lst = Iterators.sequence(1, 4);
    Iterable<Integer> wrapped = Iterators.wrap(lst);
    assertThat(wrapped, not(instanceOf(List.class)));
    Iterator<Integer> iter = wrapped.iterator();
    assertTrue(iter.hasNext());
    assertEquals(1, (int) iter.next());
    assertTrue(iter.hasNext());
    assertEquals(2, (int) iter.next());
    assertTrue(iter.hasNext());
    assertEquals(3, (int) iter.next());
    assertFalse(iter.hasNext());
}
/**
 * Chooses a random node holding the given storage type, under a read lock.
 * A scope starting with "~" is an exclusion: pick from the whole tree except
 * that scope; otherwise pick from within the scope.
 */
public Node chooseRandomWithStorageType(final String scope,
    final Collection<Node> excludedNodes, StorageType type) {
    netlock.readLock().lock();
    try {
        final boolean isExclusionScope = scope.startsWith("~");
        if (isExclusionScope) {
            // Search everywhere except the scope (drop the leading "~").
            return chooseRandomWithStorageType(
                NodeBase.ROOT, scope.substring(1), excludedNodes, type);
        }
        // Search inside the scope with no excluded scope.
        return chooseRandomWithStorageType(scope, null, excludedNodes, type);
    } finally {
        netlock.readLock().unlock();
    }
}
// Exercises chooseRandomWithStorageType with excluded nodes and excluded
// scopes, verifying both the non-null selections and the expected-null case.
@Test
public void testChooseRandomWithStorageTypeWithExcluded() throws Exception {
    Node n;
    DatanodeDescriptor dd;
    // below test choose random with exclude, for /l2/d3, every rack has exactly
    // one host
    // /l2/d3 has five racks r[1~5] but only r4 and r5 have ARCHIVE
    // host12 is the one under "/l2/d3/r4", host13 is the one under "/l2/d3/r5"
    n = CLUSTER.chooseRandomWithStorageType("/l2/d3/r4", null, null,
        StorageType.ARCHIVE);
    HashSet<Node> excluded = new HashSet<>();
    // exclude the host on r4 (since there is only one host, no randomness here)
    excluded.add(n);
    for (int i = 0; i < 10; i++) {
        n = CLUSTER.chooseRandomWithStorageType("/l2/d3", null, null,
            StorageType.ARCHIVE);
        assertTrue(n instanceof DatanodeDescriptor);
        dd = (DatanodeDescriptor) n;
        // NOTE(review): comments above mention host12/host13 but assertions use
        // host13/host14 — confirm against the topology fixture.
        assertTrue(dd.getHostName().equals("host13") ||
            dd.getHostName().equals("host14"));
    }
    // test exclude nodes
    for (int i = 0; i < 10; i++) {
        n = CLUSTER.chooseRandomWithStorageType("/l2/d3", null, excluded,
            StorageType.ARCHIVE);
        assertTrue(n instanceof DatanodeDescriptor);
        dd = (DatanodeDescriptor) n;
        assertTrue(dd.getHostName().equals("host14"));
    }
    // test exclude scope
    for (int i = 0; i < 10; i++) {
        n = CLUSTER.chooseRandomWithStorageType("/l2/d3", "/l2/d3/r4", null,
            StorageType.ARCHIVE);
        assertTrue(n instanceof DatanodeDescriptor);
        dd = (DatanodeDescriptor) n;
        assertTrue(dd.getHostName().equals("host14"));
    }
    // test exclude scope + excluded node with expected null return node
    for (int i = 0; i < 10; i++) {
        n = CLUSTER.chooseRandomWithStorageType("/l2/d3", "/l2/d3/r5", excluded,
            StorageType.ARCHIVE);
        assertNull(n);
    }
    // test exclude scope + excluded node with expected non-null return node
    n = CLUSTER.chooseRandomWithStorageType("/l1/d2", null, null,
        StorageType.DISK);
    dd = (DatanodeDescriptor)n;
    assertEquals("host6", dd.getHostName());
    // exclude the host on r4 (since there is only one host, no randomness here)
    excluded.add(n);
    Set<String> expectedSet = new HashSet<>(Arrays.asList("host4", "host5"));
    for (int i = 0; i < 10; i++) {
        // under l1, there are four hosts with DISK:
        // /l1/d1/r1/host2, /l1/d1/r2/host4, /l1/d1/r2/host5 and /l1/d2/r3/host6
        // host6 is excludedNode, host2 is under excluded range scope /l1/d1/r1
        // so should always return r4 or r5
        n = CLUSTER.chooseRandomWithStorageType(
            "/l1", "/l1/d1/r1", excluded, StorageType.DISK);
        dd = (DatanodeDescriptor) n;
        assertTrue(expectedSet.contains(dd.getHostName()));
    }
}
/**
 * Clears the request tag from the current thread once request processing
 * completes, so stale data cannot leak into the next request handled by this
 * (possibly pooled) thread.
 */
@Override
public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object obj,
        Exception ex) {
    ThreadLocalUtils.removeRequestTag();
}
// Verifies afterCompletion releases the request tag stored in the thread-local.
@Test
public void testAfterCompletion() {
    ThreadLocalUtils.addRequestTag(Collections.singletonMap("bar", Collections.singletonList("foo")));
    Assert.assertNotNull(ThreadLocalUtils.getRequestTag());

    // Test afterCompletion to verify if thread variables are released
    interceptor.afterCompletion(new MockHttpServletRequest(), new MockHttpServletResponse(), new Object(), null);
    Assert.assertNull(ThreadLocalUtils.getRequestTag());
}
/**
 * Scans a single file for dependency information. Convenience overload that
 * delegates to the two-argument variant with a {@code null} second argument
 * (presumably "no parent/package path" — confirm against the overload's doc).
 */
protected Dependency scanFile(@NotNull final File file) {
    return scanFile(file, null);
}
// Scanning the same jar twice must not create a duplicate dependency entry.
@Test
public void testScanFile() throws DatabaseException {
    try (Engine instance = new Engine(getSettings())) {
        instance.addFileTypeAnalyzer(new JarAnalyzer());
        File file = BaseTest.getResourceAsFile(this, "dwr.jar");
        // NOTE(review): 'dwr' and 'secondDwr' are captured but never asserted on.
        Dependency dwr = instance.scanFile(file);
        file = BaseTest.getResourceAsFile(this, "org.mortbay.jmx.jar");
        instance.scanFile(file);
        assertEquals(2, instance.getDependencies().length);

        // Re-scan of an already-scanned file: count stays at 2.
        file = BaseTest.getResourceAsFile(this, "dwr.jar");
        Dependency secondDwr = instance.scanFile(file);
        assertEquals(2, instance.getDependencies().length);
    }
}
/**
 * Counts the number of fields produced by splitting the value on the
 * configured (escaped) delimiter. Null or empty input counts as zero.
 */
@Override
public Object convert(String value) {
    // Missing input means no fields at all.
    if (value == null || value.isEmpty()) {
        return 0;
    }
    final String[] parts = value.split(splitByEscaped);
    return parts.length;
}
// A null split pattern must be rejected at construction time.
// NOTE(review): since the constructor is expected to throw, the surrounding
// assertEquals never evaluates its result — it only hosts the throwing call.
@Test(expected = ConfigurationException.class)
public void testWithNullConfig() throws Exception {
    assertEquals(null, new SplitAndCountConverter(config(null)).convert("foo"));
}
/**
 * Returns the traversal cost for the link; the resource context is ignored.
 */
@Override
public double cost(Link link, ResourceContext context) {
    // explicitly call a method not depending on LinkResourceService
    return cost(link);
}
// ORDER cost type: a link's cost is its 1-based position in the tier order
// list, and a tier absent from the list yields -1.
@Test
public void testOrderCost() {
    TierConstraint constraint32 = new TierConstraint(true, TierConstraint.CostType.ORDER, 3, 2);
    assertThat(constraint32.cost(link1, resourceContext), is(-1.0));
    assertThat(constraint32.cost(link2, resourceContext), is(2.0));
    assertThat(constraint32.cost(link3, resourceContext), is(1.0));

    TierConstraint constraint123 = new TierConstraint(true, TierConstraint.CostType.ORDER, 1, 2, 3);
    assertThat(constraint123.cost(link1, resourceContext), is(1.0));
    assertThat(constraint123.cost(link2, resourceContext), is(2.0));
    assertThat(constraint123.cost(link3, resourceContext), is(3.0));

    TierConstraint constraint231 = new TierConstraint(true, TierConstraint.CostType.ORDER, 2, 3, 1);
    assertThat(constraint231.cost(link1, resourceContext), is(3.0));
    assertThat(constraint231.cost(link2, resourceContext), is(1.0));
    assertThat(constraint231.cost(link3, resourceContext), is(2.0));

    TierConstraint constraint312 = new TierConstraint(true, TierConstraint.CostType.ORDER, 3, 1, 2);
    assertThat(constraint312.cost(link1, resourceContext), is(2.0));
    assertThat(constraint312.cost(link2, resourceContext), is(3.0));
    assertThat(constraint312.cost(link3, resourceContext), is(1.0));
}
@Transactional public void deleteApp(App app, String operator) { String appId = app.getAppId(); logger.info("{} is deleting App:{}", operator, appId); List<Cluster> managedClusters = clusterService.findParentClusters(appId); // 1. delete clusters if (Objects.nonNull(managedClusters)) { for (Cluster cluster : managedClusters) { clusterService.delete(cluster.getId(), operator); } } // 2. delete appNamespace appNamespaceService.batchDelete(appId, operator); // 3. delete app appService.delete(app.getId(), operator); }
// Creates a real app (with its default cluster and namespace), deletes it,
// and verifies that clusters, app namespaces and namespaces are all gone.
@Test
public void testDeleteApp() {
    String appId = "someAppId";
    App app = new App();
    app.setAppId(appId);
    app.setName("someAppName");
    String owner = "someOwnerName";
    app.setOwnerName(owner);
    app.setOwnerEmail("someOwnerName@ctrip.com");
    app.setDataChangeCreatedBy(owner);
    app.setDataChangeLastModifiedBy(owner);
    app.setDataChangeCreatedTime(new Date());

    app = adminService.createNewApp(app);

    // Creation side effects: one namespace, one cluster, one default namespace.
    Assert.assertEquals(appId, app.getAppId());
    Assert.assertEquals(1, appNamespaceService.findByAppId(appId).size());
    Assert.assertEquals(1, clusterService.findClusters(appId).size());
    Assert.assertEquals(1, namespaceService.findNamespaces(appId, ConfigConsts.CLUSTER_NAME_DEFAULT).size());

    adminService.deleteApp(app, owner);

    // Everything owned by the app must be removed.
    Assert.assertEquals(0, appNamespaceService.findByAppId(appId).size());
    Assert.assertEquals(0, clusterService.findClusters(appId).size());
    Assert
        .assertEquals(0, namespaceService.findByAppIdAndNamespaceName(appId, ConfigConsts.CLUSTER_NAME_DEFAULT).size());
}
/**
 * Returns the JDBC URL of this connection: the fixed driver prefix followed
 * by the server URI backing the connection.
 */
@Override
public String getURL() throws SQLException {
    final String serverUri = connection.getURI().toString();
    return JDBC_URL_START + serverUri;
}
/**
 * The metadata URL must be the driver prefix plus the server address.
 *
 * <p>Fix: JUnit's {@code assertEquals(expected, actual)} had its arguments
 * reversed, which produces misleading "expected X but was Y" messages on
 * failure; the expected value now comes first.
 */
@Test
public void testGetUrl() throws Exception {
    DatabaseMetaData metaData = connection.getMetaData();
    assertEquals("jdbc:presto://" + server.getAddress(), metaData.getURL());
}
/**
 * Parses command-line options into a {@link CommandLineOptions}.
 *
 * <p>{@code @file} params-files are expanded first. The first argument not
 * starting with {@code -} is treated as a file name, and all remaining
 * arguments are consumed as files. Flags may be written {@code --flag=value}
 * or {@code --flag value}.
 *
 * @throws IllegalArgumentException on an unrecognized flag
 */
static CommandLineOptions parse(Iterable<String> options) {
  CommandLineOptions.Builder optionsBuilder = CommandLineOptions.builder();
  List<String> expandedOptions = new ArrayList<>();
  expandParamsFiles(options, expandedOptions);
  Iterator<String> it = expandedOptions.iterator();
  while (it.hasNext()) {
    String option = it.next();
    if (!option.startsWith("-")) {
      // First non-flag argument: everything from here on is a file name.
      optionsBuilder.filesBuilder().add(option).addAll(it);
      break;
    }
    // Split "--flag=value" into flag and value; bare flags get value == null
    // and may consume the next argument via getValue(...).
    String flag;
    String value;
    int idx = option.indexOf('=');
    if (idx >= 0) {
      flag = option.substring(0, idx);
      value = option.substring(idx + 1);
    } else {
      flag = option;
      value = null;
    }
    // NOTE: update usage information in UsageException when new flags are added
    switch (flag) {
      case "-i":
      case "-r":
      case "-replace":
      case "--replace":
        optionsBuilder.inPlace(true);
        break;
      case "--lines":
      case "-lines":
      case "--line":
      case "-line":
        parseRangeSet(optionsBuilder.linesBuilder(), getValue(flag, it, value));
        break;
      case "--offset":
      case "-offset":
        optionsBuilder.addOffset(parseInteger(it, flag, value));
        break;
      case "--length":
      case "-length":
        optionsBuilder.addLength(parseInteger(it, flag, value));
        break;
      case "--aosp":
      case "-aosp":
      case "-a":
        optionsBuilder.aosp(true);
        break;
      case "--version":
      case "-version":
      case "-v":
        optionsBuilder.version(true);
        break;
      case "--help":
      case "-help":
      case "-h":
        optionsBuilder.help(true);
        break;
      case "--fix-imports-only":
        optionsBuilder.fixImportsOnly(true);
        break;
      case "--skip-sorting-imports":
        optionsBuilder.sortImports(false);
        break;
      case "--skip-removing-unused-imports":
        optionsBuilder.removeUnusedImports(false);
        break;
      case "--skip-reflowing-long-strings":
        optionsBuilder.reflowLongStrings(false);
        break;
      case "--skip-javadoc-formatting":
        optionsBuilder.formatJavadoc(false);
        break;
      case "-":
        // A lone dash means "read source from stdin".
        optionsBuilder.stdin(true);
        break;
      case "-n":
      case "--dry-run":
        optionsBuilder.dryRun(true);
        break;
      case "--set-exit-if-changed":
        optionsBuilder.setExitIfChanged(true);
        break;
      case "-assume-filename":
      case "--assume-filename":
        optionsBuilder.assumeFilename(getValue(flag, it, value));
        break;
      default:
        throw new IllegalArgumentException("unexpected flag: " + flag);
    }
  }
  return optionsBuilder.build();
}
// --skip-sorting-imports must turn off import sorting.
@Test
public void skipSortingImports() {
    assertThat(
            CommandLineOptionsParser.parse(Arrays.asList("--skip-sorting-imports")).sortImports())
        .isFalse();
}
@Override public void afterPropertiesSet() throws Exception { if (0 == vgroupMapping.size()) { vgroupMapping.put(DEFAULT_TX_GROUP, DEFAULT_TC_CLUSTER); //compatible with old value, will remove next version vgroupMapping.put(DEFAULT_TX_GROUP_OLD, DEFAULT_TC_CLUSTER); } if (0 == grouplist.size()) { grouplist.put(DEFAULT_TC_CLUSTER, DEFAULT_GROUPLIST); } }
// Defaults are applied when nothing is configured, and left alone when the
// user supplied explicit mappings.
@Test
public void testAfterPropertiesSet() throws Exception {
    // Case 1: empty properties get the default (and legacy) mappings.
    ServiceProperties serviceProperties = new ServiceProperties();
    serviceProperties.afterPropertiesSet();
    Assertions.assertEquals("default", serviceProperties.getVgroupMapping().get("default_tx_group"));
    Assertions.assertEquals("default", serviceProperties.getVgroupMapping().get("my_test_tx_group"));
    Assertions.assertEquals("127.0.0.1:8091", serviceProperties.getGrouplist().get("default"));

    // Case 2: user-provided mappings are preserved unchanged.
    serviceProperties = new ServiceProperties();
    Map<String, String> vGroupMapping = new HashMap<>();
    vGroupMapping.put("default_tx_group", "default");
    serviceProperties.setVgroupMapping(vGroupMapping);
    Map<String, String> groupList = new HashMap<>();
    groupList.put("default", "127.0.0.1:8091");
    serviceProperties.setGrouplist(groupList);
    serviceProperties.afterPropertiesSet();
    Assertions.assertEquals("default", serviceProperties.getVgroupMapping().get("default_tx_group"));
    Assertions.assertEquals("127.0.0.1:8091", serviceProperties.getGrouplist().get("default"));
}
/**
 * Returns the REST endpoint path template for task manager metrics; the task
 * manager id is a path parameter.
 */
@Override
public String getTargetRestEndpointURL() {
    final String prefix = "/taskmanagers/:";
    return prefix + TaskManagerIdPathParameter.KEY + "/metrics";
}
// Pins the endpoint URL template, including the task manager id parameter.
@Test
void testUrl() {
    assertThat(taskManagerMetricsHeaders.getTargetRestEndpointURL())
            .isEqualTo("/taskmanagers/:" + TaskManagerIdPathParameter.KEY + "/metrics");
}
/**
 * Human-readable description of this input: the field delimiter (control
 * characters made visible) and the configured file paths.
 */
@Override
public String toString() {
    final String delimiter = StringUtils.showControlCharacters(String.valueOf(getFieldDelimiter()));
    final String paths = Arrays.toString(getFilePaths());
    return "CSV Input (" + delimiter + ") " + paths;
}
// Reads a CSV with extra NODATA columns; the boolean mask selects only the
// columns that map onto PojoItem fields.
@Test
void testPojoTypeWithPartialFieldInCSV() throws Exception {
    File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
    tempFile.deleteOnExit();
    tempFile.setWritable(true);

    OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
    wrt.write("123,NODATA,AAA,NODATA,3.123,BBB\n");
    wrt.write("456,NODATA,BBB,NODATA,1.123,AAA\n");
    wrt.close();

    @SuppressWarnings("unchecked")
    PojoTypeInfo<PojoItem> typeInfo =
            (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
    CsvInputFormat<PojoItem> inputFormat =
            new PojoCsvInputFormat<>(
                    new Path(tempFile.toURI().toString()),
                    typeInfo,
                    // true = include column, false = skip the NODATA columns.
                    new boolean[] {true, false, true, false, true, true});

    inputFormat.configure(new Configuration());
    FileInputSplit[] splits = inputFormat.createInputSplits(1);

    inputFormat.open(splits[0]);

    validatePojoItem(inputFormat);
}
/**
 * Merges two NDV (number-of-distinct-values) estimates for a column.
 *
 * <p>{@code estimators} must hold exactly two entries: the accumulated ("old")
 * estimator at index 0 and the incoming ("new") one at index 1; either may be
 * null. When both bitvector estimators are present and compatible, they are
 * merged in place (into index 0) for an accurate estimate; otherwise the
 * scalar counts are combined via {@code mergeNumDVs}.
 *
 * @return the merged NDV estimate
 * @throws IllegalArgumentException if the list is null or not of size 2
 */
protected long mergeNumDistinctValueEstimator(String columnName,
    List<NumDistinctValueEstimator> estimators, long oldNumDVs, long newNumDVs) {
  if (estimators == null || estimators.size() != 2) {
    throw new IllegalArgumentException("NDV estimators list must be set and contain exactly two elements, " +
        "found " + (estimators == null ? "null" :
            estimators.stream().map(NumDistinctValueEstimator::toString).collect(Collectors.joining(", "))));
  }
  NumDistinctValueEstimator oldEst = estimators.get(0);
  NumDistinctValueEstimator newEst = estimators.get(1);
  if (oldEst == null && newEst == null) {
    // Neither side has a bitvector: fall back to the scalar merge.
    return mergeNumDVs(oldNumDVs, newNumDVs);
  }
  if (oldEst == null) {
    // Promote the incoming estimator to the accumulated slot for later merges.
    estimators.set(0, newEst);
    return mergeNumDVs(oldNumDVs, newEst.estimateNumDistinctValues());
  }
  final long ndv;
  if (oldEst.canMerge(newEst)) {
    oldEst.mergeEstimators(newEst);
    ndv = oldEst.estimateNumDistinctValues();
    // NOTE(review): returning here skips the LOG.debug below, so only the
    // non-mergeable fallback path is logged — confirm whether that is intended.
    return ndv;
  } else {
    ndv = mergeNumDVs(oldNumDVs, newNumDVs);
  }
  LOG.debug("Use bitvector to merge column {}'s ndvs of {} and {} to be {}", columnName, oldNumDVs, newNumDVs, ndv);
  return ndv;
}
// When the accumulated estimator slot (index 0) is null, the merger should
// adopt the incoming estimator into that slot and use its NDV estimate.
@Test
public void testMergeNDVEstimatorsFirstNull() {
    NumDistinctValueEstimator estimator2 = NumDistinctValueEstimatorFactory.getNumDistinctValueEstimator(HLL_2.serialize());
    for (ColumnStatsMerger<?> MERGER : MERGERS) {
        List<NumDistinctValueEstimator> estimatorList = Arrays.asList(null, estimator2);
        long computedNDV = MERGER.mergeNumDistinctValueEstimator("", estimatorList, 1, 2);
        assertEquals(estimator2, estimatorList.get(0));
        assertEquals(2, computedNDV);
    }
}
/**
 * Looks up the path (list of links) for the given connectivity id.
 * Returns an empty Optional — and logs at info level — when the id is unknown.
 */
@Override
public Optional<List<Link>> getPath(OpticalConnectivityId id) {
    Versioned<OpticalConnectivity> connectivity = connectivityMap.get(id);
    if (connectivity != null) {
        // Defensive copy so callers cannot mutate the stored path.
        return Optional.of(ImmutableList.copyOf(connectivity.value().links()));
    }
    log.info("OpticalConnectivity with id {} not found.", id);
    return Optional.empty();
}
// After setting up connectivity, getPath must return the exact link sequence.
@Test
public void testGetPath() {
    Bandwidth bandwidth = Bandwidth.bps(100);
    Duration latency = Duration.ofMillis(10);
    List<Link> links = Stream.of(LINK1, LINK2, LINK3, LINK4, LINK5, LINK6)
            .collect(Collectors.toList());

    OpticalConnectivityId cid = target.setupConnectivity(CP12, CP71, bandwidth, latency);
    Optional<List<Link>> path = target.getPath(cid);

    // Checks returned path is as expected
    assertTrue(path.isPresent());
    assertEquals(links, path.get());
}
/**
 * Updates an existing resource.
 *
 * @param id          id of the resource to update; must reference an existing
 *                    resource (enforced by {@code @Existed})
 * @param resourceDTO new resource payload; its id is overwritten with the
 *                    path variable so the body cannot target another resource
 * @return a success result wrapping the service's update return value
 */
@PutMapping("/{id}")
@RequiresPermissions(value = {"system:resource:editMenu", "system:resource:editButton"}, logical = Logical.OR)
public ShenyuAdminResult updateResource(@PathVariable("id")
                                        @Valid
                                        @Existed(provider = ResourceMapper.class,
                                                message = "resource not existed") final String id,
                                        @RequestBody final ResourceDTO resourceDTO) {
    resourceDTO.setId(id);
    return ShenyuAdminResult.success(ShenyuResultMessage.UPDATE_SUCCESS, resourceService.update(resourceDTO));
}
// PUT /resource/{id} must delegate to the service and return a success body
// containing the update count.
@Test
public void testUpdateResource() throws Exception {
    final String mockId = "mock-id";
    final ResourceDTO resourceDTO = new ResourceDTO();
    resourceDTO.setId(mockId);
    fill(resourceDTO);
    SpringBeanUtils.getInstance().setApplicationContext(mock(ConfigurableApplicationContext.class));
    given(resourceService.update(resourceDTO)).willReturn(1);
    this.mockMvc.perform(MockMvcRequestBuilders.put("/resource/" + mockId)
            .contentType(MediaType.APPLICATION_JSON)
            .content(GsonUtils.getInstance().toJson(resourceDTO)))
            .andExpect(content().json(GsonUtils.getInstance().toJson(ShenyuAdminResult.success(ShenyuResultMessage.UPDATE_SUCCESS, 1))))
            .andReturn();
}
/**
 * Instructs the sentinel to start monitoring the given master, issuing
 * SENTINEL MONITOR &lt;name&gt; &lt;host&gt; &lt;port&gt; &lt;quorum&gt; synchronously.
 */
@Override
public void monitor(RedisServer master) {
    final String name = master.getName();
    final String host = master.getHost();
    final int port = master.getPort().intValue();
    final int quorum = master.getQuorum().intValue();
    connection.sync(RedisCommands.SENTINEL_MONITOR, name, host, port, quorum);
}
// Exercises SENTINEL MONITOR against a live sentinel connection.
// NOTE(review): this test has no assertions — it only verifies the command
// completes without throwing.
@Test
public void testMonitor() {
    Collection<RedisServer> masters = connection.masters();
    RedisServer master = masters.iterator().next();
    // Use a distinct name so we don't clash with the already-monitored master.
    master.setName(master.getName() + ":");
    connection.monitor(master);
}
/**
 * Parses the given JSON string into a {@code JsonElement} parse tree by
 * delegating to {@code parseReader}.
 *
 * @throws JsonSyntaxException if the text is not valid JSON
 */
public static JsonElement parseString(String json) throws JsonSyntaxException {
    return parseReader(new StringReader(json));
}
// A quoted whitespace-only string is a valid JSON primitive whose value is
// preserved exactly (not trimmed).
@Test
public void testParseEmptyString() {
    JsonElement e = JsonParser.parseString("\" \"");
    assertThat(e.isJsonPrimitive()).isTrue();
    assertThat(e.getAsString()).isEqualTo(" ");
}
/**
 * Derives the API super path from the Shenyu SOFA client annotation: the
 * annotation's path when present and non-blank, otherwise the empty string.
 */
@Override
protected String buildApiSuperPath(final Class<?> clazz, final ShenyuSofaClient beanShenyuClient) {
    if (Objects.isNull(beanShenyuClient) || StringUtils.isBlank(beanShenyuClient.path())) {
        return "";
    }
    return beanShenyuClient.path();
}
// A non-blank annotation path is returned as the super path.
// The times(2) verification pins the implementation detail that path() is
// read once for the blank check and once for the return value.
@Test
public void testBuildApiSuperPath() {
    Class<?> clazz = Class.class;
    given(shenyuSofaClient.path()).willReturn(PATH);

    String realSuperPath = sofaServiceEventListener.buildApiSuperPath(clazz, shenyuSofaClient);

    verify(shenyuSofaClient, times(2)).path();
    assertEquals(PATH, realSuperPath);
}
/**
 * Scans for resources at the given URI. {@code classpath:} URIs are resolved
 * via classpath scanning; any other scheme is scanned directly.
 *
 * @param classpathResourceUri the URI to scan; must not be null
 * @return the resources found under the URI
 */
public List<R> scanForResourcesUri(URI classpathResourceUri) {
    requireNonNull(classpathResourceUri, "classpathResourceUri must not be null");
    if (CLASSPATH_SCHEME.equals(classpathResourceUri.getScheme())) {
        return scanForClasspathResource(resourceName(classpathResourceUri), NULL_FILTER);
    }
    return findResourcesForUri(classpathResourceUri, DEFAULT_PACKAGE_NAME, NULL_FILTER, createUriResource());
}
// A jar nested inside another jar cannot be scanned; the scanner must fail
// with a clear CucumberException rather than an obscure IO error.
@Test
void scanForResourcesNestedJarUri() {
    URI jarFileUri = new File("src/test/resources/io/cucumber/core/resource/test/spring-resource.jar").toURI();
    // Build a jar-in-jar URI: outer jar -> BOOT-INF/lib inner jar -> resource.
    URI resourceUri = URI.create("jar:file://" + jarFileUri.getSchemeSpecificPart()
            + "!/BOOT-INF/lib/jar-resource.jar!/com/example/package-jar-resource.txt");
    CucumberException exception = assertThrows(
        CucumberException.class,
        () -> resourceScanner.scanForResourcesUri(resourceUri));
    assertThat(exception.getMessage(),
        containsString("Cucumber currently doesn't support classpath scanning in nested jars."));
}
/**
 * Verifies the CMS signed message at the given validation time and returns
 * its encapsulated content, checking the content type against the given OID.
 *
 * @param date the validation time used during signature verification
 * @param oid  the expected content-type OID of the encapsulated data
 */
public byte[] verifyMessage(ContentInfo signedMessage, Date date, String oid) {
    return encapsulatedData(verify(signedMessage, date), oid);
}
// Verifying with an OID that does not match the message's content type must
// fail with an Asn1Exception naming the unexpected type.
@Test
public void verifyValidCmsWithOidShouldFailIfOidDoesNotMatch() throws Exception {
    final ContentInfo signedMessage = ContentInfo.getInstance(fixture());
    thrown.expect(Asn1Exception.class);
    thrown.expectMessage("Unexpected content type 2.23.136.1.1.1");
    new CmsVerifier(new CertificateVerifier.None()).verifyMessage(signedMessage, "2.23.136.1.2.3");
}
/**
 * Subscribes to the given service with an empty cluster list (i.e. no
 * cluster filtering), delegating to the cluster-aware overload.
 */
@Override
public void subscribe(String serviceName, EventListener listener) throws NacosException {
    subscribe(serviceName, new ArrayList<>(), listener);
}
// Subscribing without clusters must register a selector wrapper for the
// default group and forward the subscription to the proxy with an empty
// cluster string.
@Test
void testSubscribe1() throws NacosException {
    //given
    String serviceName = "service1";
    EventListener listener = event -> {
    };
    //when
    client.subscribe(serviceName, listener);
    NamingSelectorWrapper wrapper = new NamingSelectorWrapper(serviceName, Constants.DEFAULT_GROUP, Constants.NULL,
            NamingSelectorFactory.newClusterSelector(Collections.emptyList()), listener);
    //then
    verify(changeNotifier, times(1)).registerListener(Constants.DEFAULT_GROUP, serviceName, wrapper);
    verify(proxy, times(1)).subscribe(serviceName, Constants.DEFAULT_GROUP, "");
}
/**
 * Aggregates per-partition long-column statistics into a single {@link ColumnStatisticsObj}.
 *
 * Strategy (as implemented below):
 * 1. Scan all partition stats once to decide whether every partition carries an NDV
 *    estimator and whether those estimators can be merged with each other.
 * 2. If stats exist for every requested partition (or there are fewer than 2 entries),
 *    merge them directly; NDV comes from merged estimators when possible, otherwise from
 *    the density function or the lower/upper-bound tuner.
 * 3. Otherwise extrapolate: build an index of partitions, merge adjacent mergeable
 *    estimators into "pseudo partitions", and hand the adjusted maps to extrapolate().
 *
 * @param colStatsWithSourceInfo per-partition stats for one column (never empty; validated below)
 * @param partNames              all partition names that were requested
 * @param areAllPartsFound       whether stats for all partitions were found (unused here;
 *                               doAllPartitionContainStats is recomputed from sizes)
 * @throws MetaException on invalid statistics input
 */
@Override
public ColumnStatisticsObj aggregate(List<ColStatsObjWithSourceInfo> colStatsWithSourceInfo,
    List<String> partNames, boolean areAllPartsFound) throws MetaException {
  checkStatisticsList(colStatsWithSourceInfo);
  ColumnStatisticsObj statsObj = null;
  String colType;
  String colName = null;
  // check if all the ColumnStatisticsObjs contain stats and all the ndv are
  // bitvectors
  boolean doAllPartitionContainStats = partNames.size() == colStatsWithSourceInfo.size();
  NumDistinctValueEstimator ndvEstimator = null;
  boolean areAllNDVEstimatorsMergeable = true;
  for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
    ColumnStatisticsObj cso = csp.getColStatsObj();
    if (statsObj == null) {
      // First entry fixes the column name/type and the result object's stats kind.
      colName = cso.getColName();
      colType = cso.getColType();
      statsObj = ColumnStatsAggregatorFactory.newColumnStaticsObj(colName, colType,
          cso.getStatsData().getSetField());
      LOG.trace("doAllPartitionContainStats for column: {} is: {}", colName,
          doAllPartitionContainStats);
    }
    LongColumnStatsDataInspector columnStatsData = longInspectorFromStats(cso);
    // check if we can merge NDV estimators
    if (columnStatsData.getNdvEstimator() == null) {
      areAllNDVEstimatorsMergeable = false;
      break;
    } else {
      NumDistinctValueEstimator estimator = columnStatsData.getNdvEstimator();
      if (ndvEstimator == null) {
        ndvEstimator = estimator;
      } else {
        if (!ndvEstimator.canMerge(estimator)) {
          // e.g. FM sketch vs. HLL, or incompatible parameters
          areAllNDVEstimatorsMergeable = false;
          break;
        }
      }
    }
  }
  if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
    // Start from an empty estimator of the same kind so the merge loop below
    // does not double-count the first partition.
    ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
  }
  LOG.debug("all of the bit vectors can merge for {} is {}", colName, areAllNDVEstimatorsMergeable);
  ColumnStatisticsData columnStatisticsData = initColumnStatisticsData();
  if (doAllPartitionContainStats || colStatsWithSourceInfo.size() < 2) {
    // --- Direct merge path: stats present for every partition (or a single entry). ---
    LongColumnStatsDataInspector aggregateData = null;
    // lowerBound = max individual NDV (merged NDV cannot be smaller);
    // higherBound = sum of NDVs (merged NDV cannot be larger).
    long lowerBound = 0;
    long higherBound = 0;
    double densityAvgSum = 0.0;
    LongColumnStatsMerger merger = new LongColumnStatsMerger();
    for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
      ColumnStatisticsObj cso = csp.getColStatsObj();
      LongColumnStatsDataInspector newData = longInspectorFromStats(cso);
      lowerBound = Math.max(lowerBound, newData.getNumDVs());
      higherBound += newData.getNumDVs();
      densityAvgSum += ((double) (newData.getHighValue() - newData.getLowValue())) / newData.getNumDVs();
      if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
        ndvEstimator.mergeEstimators(newData.getNdvEstimator());
      }
      if (aggregateData == null) {
        aggregateData = newData.deepCopy();
      } else {
        aggregateData.setLowValue(merger.mergeLowValue(
            merger.getLowValue(aggregateData), merger.getLowValue(newData)));
        aggregateData.setHighValue(merger.mergeHighValue(
            merger.getHighValue(aggregateData), merger.getHighValue(newData)));
        aggregateData.setNumNulls(merger.mergeNumNulls(aggregateData.getNumNulls(), newData.getNumNulls()));
        aggregateData.setNumDVs(merger.mergeNumDVs(aggregateData.getNumDVs(), newData.getNumDVs()));
      }
    }
    if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
      // if all the ColumnStatisticsObjs contain bitvectors, we do not need to
      // use uniform distribution assumption because we can merge bitvectors
      // to get a good estimation.
      aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
    } else {
      long estimation;
      if (useDensityFunctionForNDVEstimation) {
        // We have estimation, lowerbound and higherbound. We use estimation
        // if it is between lowerbound and higherbound.
        double densityAvg = densityAvgSum / partNames.size();
        estimation = (long) ((aggregateData.getHighValue() - aggregateData.getLowValue()) / densityAvg);
        if (estimation < lowerBound) {
          estimation = lowerBound;
        } else if (estimation > higherBound) {
          estimation = higherBound;
        }
      } else {
        // Linear interpolation between the bounds, controlled by ndvTuner in [0, 1].
        estimation = (long) (lowerBound + (higherBound - lowerBound) * ndvTuner);
      }
      aggregateData.setNumDVs(estimation);
    }
    columnStatisticsData.setLongStats(aggregateData);
  } else {
    // --- Extrapolation path: stats missing for some partitions. ---
    // TODO: bail out if missing stats are over a certain threshold
    // we need extrapolation
    LOG.debug("start extrapolation for {}", colName);
    Map<String, Integer> indexMap = new HashMap<>();
    for (int index = 0; index < partNames.size(); index++) {
      indexMap.put(partNames.get(index), index);
    }
    Map<String, Double> adjustedIndexMap = new HashMap<>();
    Map<String, ColumnStatisticsData> adjustedStatsMap = new HashMap<>();
    // while we scan the css, we also get the densityAvg, lowerbound and
    // higherbound when useDensityFunctionForNDVEstimation is true.
    double densityAvgSum = 0.0;
    if (!areAllNDVEstimatorsMergeable) {
      // if not every partition uses bitvector for ndv, we just fall back to
      // the traditional extrapolation methods.
      for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
        ColumnStatisticsObj cso = csp.getColStatsObj();
        String partName = csp.getPartName();
        LongColumnStatsData newData = cso.getStatsData().getLongStats();
        if (useDensityFunctionForNDVEstimation) {
          densityAvgSum += ((double) (newData.getHighValue() - newData.getLowValue())) / newData.getNumDVs();
        }
        adjustedIndexMap.put(partName, (double) indexMap.get(partName));
        adjustedStatsMap.put(partName, cso.getStatsData());
      }
    } else {
      // we first merge all the adjacent bitvectors that we could merge and
      // derive new partition names and index.
      StringBuilder pseudoPartName = new StringBuilder();
      double pseudoIndexSum = 0;
      int length = 0;
      int curIndex = -1;
      LongColumnStatsDataInspector aggregateData = null;
      for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
        ColumnStatisticsObj cso = csp.getColStatsObj();
        String partName = csp.getPartName();
        LongColumnStatsDataInspector newData = longInspectorFromStats(cso);
        // newData.isSetBitVectors() should be true for sure because we
        // already checked it before.
        if (indexMap.get(partName) != curIndex) {
          // There is bitvector, but it is not adjacent to the previous ones.
          if (length > 0) {
            // Flush the accumulated pseudo partition: set its NDV from the merged
            // estimator and record it under the concatenated partition name.
            // we have to set ndv
            adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
            aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
            ColumnStatisticsData csd = new ColumnStatisticsData();
            csd.setLongStats(aggregateData);
            adjustedStatsMap.put(pseudoPartName.toString(), csd);
            if (useDensityFunctionForNDVEstimation) {
              densityAvgSum += ((double) (aggregateData.getHighValue() - aggregateData.getLowValue())) / aggregateData.getNumDVs();
            }
            // reset everything
            pseudoPartName = new StringBuilder();
            pseudoIndexSum = 0;
            length = 0;
            ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
          }
          aggregateData = null;
        }
        curIndex = indexMap.get(partName);
        pseudoPartName.append(partName);
        pseudoIndexSum += curIndex;
        length++;
        curIndex++;
        if (aggregateData == null) {
          aggregateData = newData.deepCopy();
        } else {
          aggregateData.setLowValue(Math.min(aggregateData.getLowValue(), newData.getLowValue()));
          aggregateData.setHighValue(Math.max(aggregateData.getHighValue(), newData.getHighValue()));
          aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
        }
        ndvEstimator.mergeEstimators(newData.getNdvEstimator());
      }
      if (length > 0) {
        // Flush the final pseudo partition (same as above).
        // we have to set ndv
        adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
        aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
        ColumnStatisticsData csd = new ColumnStatisticsData();
        csd.setLongStats(aggregateData);
        adjustedStatsMap.put(pseudoPartName.toString(), csd);
        if (useDensityFunctionForNDVEstimation) {
          densityAvgSum += ((double) (aggregateData.getHighValue() - aggregateData.getLowValue())) / aggregateData.getNumDVs();
        }
      }
    }
    extrapolate(columnStatisticsData, partNames.size(), colStatsWithSourceInfo.size(),
        adjustedIndexMap, adjustedStatsMap, densityAvgSum / adjustedStatsMap.size());
  }
  LOG.debug(
      "Ndv estimation for {} is {}. # of partitions requested: {}. # of partitions found: {}",
      colName, columnStatisticsData.getLongStats().getNumDVs(), partNames.size(),
      colStatsWithSourceInfo.size());
  // Histograms are merged independently of the NDV path and attached when available.
  KllHistogramEstimator mergedKllHistogramEstimator = mergeHistograms(colStatsWithSourceInfo);
  if (mergedKllHistogramEstimator != null) {
    columnStatisticsData.getLongStats().setHistogram(mergedKllHistogramEstimator.serialize());
  }
  statsObj.setStatsData(columnStatisticsData);
  return statsObj;
}
@Test public void testAggregateMultiStatsOnlySomeAvailableButUnmergeableBitVector() throws MetaException { List<String> partitions = Arrays.asList("part1", "part2", "part3"); ColumnStatisticsData data1 = new ColStatsBuilder<>(long.class).numNulls(1).numDVs(3) .low(1L).high(6L).fmSketch(1, 2, 6).kll(1, 2, 6).build(); ColumnStatisticsData data3 = new ColStatsBuilder<>(long.class).numNulls(3).numDVs(1) .low(7L).high(7L).hll(7).kll(7).build(); List<ColStatsObjWithSourceInfo> statsList = Arrays.asList( createStatsWithInfo(data1, TABLE, COL, partitions.get(0)), createStatsWithInfo(data3, TABLE, COL, partitions.get(2))); LongColumnStatsAggregator aggregator = new LongColumnStatsAggregator(); ColumnStatisticsObj computedStatsObj = aggregator.aggregate(statsList, partitions, false); // hll in case of missing stats is left as null, only numDVs is updated ColumnStatisticsData expectedStats = new ColStatsBuilder<>(long.class).numNulls(6).numDVs(3) .low(1L).high(7L).kll(1, 2, 6, 7).build(); assertEqualStatistics(expectedStats, computedStatsObj.getStatsData()); aggregator.useDensityFunctionForNDVEstimation = true; computedStatsObj = aggregator.aggregate(statsList, partitions, false); // the use of the density function leads to a different estimation for numNDV expectedStats = new ColStatsBuilder<>(long.class).numNulls(6).numDVs(4) .low(1L).high(7L).kll(1, 2, 6, 7).build(); assertEqualStatistics(expectedStats, computedStatsObj.getStatsData()); }
@Override public synchronized void addFunction(final KsqlScalarFunction ksqlFunction) { final UdfFactory udfFactory = udfs.get(ksqlFunction.name().text().toUpperCase()); if (udfFactory == null) { throw new KsqlException("Unknown function factory: " + ksqlFunction.name()); } udfFactory.addFunction(ksqlFunction); }
@Test public void shouldAddFunctionWithSameNameButDifferentReturnTypes() { // Given: givenUdfFactoryRegistered(); functionRegistry.addFunction(func); final KsqlScalarFunction func2 = KsqlScalarFunction.createLegacyBuiltIn( SqlTypes.BIGINT, Collections.singletonList(ParamTypes.LONG), FunctionName.of("func"), Func1.class); // When: functionRegistry.addFunction(func2); // Then: no exception thrown. }
@Override public EntityExcerpt createExcerpt(ViewSummaryDTO nativeEntity) { return EntityExcerpt.builder() .id(ModelId.of(nativeEntity.id())) .type(getModelType()) .title(nativeEntity.title()) .build(); }
@Test @MongoDBFixtures("ViewFacadeTest.json") public void itShouldCreateAEntityExcerpt() { final ViewSummaryDTO viewSummaryDTO = viewSummaryService.get(viewId) .orElseThrow(() -> new NotFoundException("Missing view with id: " + viewId)); final EntityExcerpt entityExcerpt = facade.createExcerpt(viewSummaryDTO); assertThat(entityExcerpt.id().id()).isEqualTo(viewId); assertThat(entityExcerpt.type()).isEqualTo(ModelTypes.SEARCH_V1); assertThat(entityExcerpt.title()).isEqualTo(viewSummaryDTO.title()); }
public static boolean useCrossRepositoryBlobMounts() { return System.getProperty(CROSS_REPOSITORY_BLOB_MOUNTS) == null || Boolean.getBoolean(CROSS_REPOSITORY_BLOB_MOUNTS); }
@Test public void testUseBlobMounts_false() { System.setProperty(JibSystemProperties.CROSS_REPOSITORY_BLOB_MOUNTS, "false"); Assert.assertFalse(JibSystemProperties.useCrossRepositoryBlobMounts()); }
public CompletableFuture<JobClient> submitJob( JobGraph jobGraph, ClassLoader userCodeClassloader) throws Exception { MiniClusterConfiguration miniClusterConfig = getMiniClusterConfig(jobGraph.getMaximumParallelism()); MiniCluster miniCluster = miniClusterFactory.apply(miniClusterConfig); miniCluster.start(); return miniCluster .submitJob(jobGraph) .thenApplyAsync( FunctionUtils.uncheckedFunction( submissionResult -> { org.apache.flink.client.ClientUtils .waitUntilJobInitializationFinished( () -> miniCluster .getJobStatus( submissionResult .getJobID()) .get(), () -> miniCluster .requestJobResult( submissionResult .getJobID()) .get(), userCodeClassloader); return submissionResult; })) .thenApply( result -> new MiniClusterJobClient( result.getJobID(), miniCluster, userCodeClassloader, MiniClusterJobClient.JobFinalizationBehavior .SHUTDOWN_CLUSTER)) .whenComplete( (ignored, throwable) -> { if (throwable != null) { // We failed to create the JobClient and must shutdown to ensure // cleanup. shutDownCluster(miniCluster); } }) .thenApply(Function.identity()); }
@Test void testJobExecution() throws Exception { PerJobMiniClusterFactory perJobMiniClusterFactory = initializeMiniCluster(); JobClient jobClient = perJobMiniClusterFactory .submitJob(getNoopJobGraph(), ClassLoader.getSystemClassLoader()) .get(); JobExecutionResult jobExecutionResult = jobClient.getJobExecutionResult().get(); assertThat(jobExecutionResult).isNotNull(); Map<String, Object> actual = jobClient.getAccumulators().get(); assertThat(actual).isNotNull(); assertThatMiniClusterIsShutdown(); }
public Result resolve(List<PluginDescriptor> plugins) { // create graphs dependenciesGraph = new DirectedGraph<>(); dependentsGraph = new DirectedGraph<>(); // populate graphs Map<String, PluginDescriptor> pluginByIds = new HashMap<>(); for (PluginDescriptor plugin : plugins) { addPlugin(plugin); pluginByIds.put(plugin.getPluginId(), plugin); } log.debug("Graph: {}", dependenciesGraph); // get a sorted list of dependencies List<String> sortedPlugins = dependenciesGraph.reverseTopologicalSort(); log.debug("Plugins order: {}", sortedPlugins); // create the result object Result result = new Result(sortedPlugins); resolved = true; if (sortedPlugins != null) { // no cyclic dependency // detect not found dependencies for (String pluginId : sortedPlugins) { if (!pluginByIds.containsKey(pluginId)) { result.addNotFoundDependency(pluginId); } } } // check dependencies versions for (PluginDescriptor plugin : plugins) { String pluginId = plugin.getPluginId(); String existingVersion = plugin.getVersion(); List<String> dependents = getDependents(pluginId); while (!dependents.isEmpty()) { String dependentId = dependents.remove(0); PluginDescriptor dependent = pluginByIds.get(dependentId); String requiredVersion = getDependencyVersionSupport(dependent, pluginId); boolean ok = checkDependencyVersion(requiredVersion, existingVersion); if (!ok) { result.addWrongDependencyVersion(new WrongDependencyVersion(pluginId, dependentId, existingVersion, requiredVersion)); } } } return result; }
@Test void resolve() { PluginDescriptor pd1 = new DefaultPluginDescriptor() .setPluginId("p1") .setDependencies("p2"); PluginDescriptor pd2 = new DefaultPluginDescriptor() .setPluginId("p2"); List<PluginDescriptor> plugins = Arrays.asList(pd1, pd2); DependencyResolver.Result result = resolver.resolve(plugins); assertFalse(result.hasCyclicDependency()); assertTrue(result.getNotFoundDependencies().isEmpty()); assertTrue(result.getWrongVersionDependencies().isEmpty()); }
/**
 * Validates a Pub/Sub message against Google Cloud Pub/Sub resource limits and
 * returns its estimated encoded size (payload + attributes + per-attribute overhead).
 *
 * Checks, in order: payload size, attribute count, per-attribute key size,
 * per-attribute value size, then the combined total against {@code maxPublishBatchSize}.
 *
 * @throws SizeLimitExceededException if any limit is exceeded
 */
static int validatePubsubMessageSize(PubsubMessage message, int maxPublishBatchSize)
    throws SizeLimitExceededException {
  int payloadSize = message.getPayload().length;
  if (payloadSize > PUBSUB_MESSAGE_DATA_MAX_BYTES) {
    throw new SizeLimitExceededException(
        "Pubsub message data field of length "
            + payloadSize
            + " exceeds maximum of "
            + PUBSUB_MESSAGE_DATA_MAX_BYTES
            + " bytes. See https://cloud.google.com/pubsub/quotas#resource_limits");
  }
  int totalSize = payloadSize;

  @Nullable Map<String, String> attributes = message.getAttributeMap();
  if (attributes != null) {
    if (attributes.size() > PUBSUB_MESSAGE_MAX_ATTRIBUTES) {
      throw new SizeLimitExceededException(
          "Pubsub message contains "
              + attributes.size()
              + " attributes which exceeds the maximum of "
              + PUBSUB_MESSAGE_MAX_ATTRIBUTES
              + ". See https://cloud.google.com/pubsub/quotas#resource_limits");
    }

    // Consider attribute encoding overhead, so it doesn't go over the request limits
    totalSize += attributes.size() * PUBSUB_MESSAGE_ATTRIBUTE_ENCODE_ADDITIONAL_BYTES;

    for (Map.Entry<String, String> attribute : attributes.entrySet()) {
      String key = attribute.getKey();
      // Sizes are measured in UTF-8 bytes, not chars.
      int keySize = key.getBytes(StandardCharsets.UTF_8).length;
      if (keySize > PUBSUB_MESSAGE_ATTRIBUTE_MAX_KEY_BYTES) {
        throw new SizeLimitExceededException(
            "Pubsub message attribute key '"
                + key
                + "' exceeds the maximum of "
                + PUBSUB_MESSAGE_ATTRIBUTE_MAX_KEY_BYTES
                + " bytes. See https://cloud.google.com/pubsub/quotas#resource_limits");
      }
      totalSize += keySize;

      String value = attribute.getValue();
      int valueSize = value.getBytes(StandardCharsets.UTF_8).length;
      if (valueSize > PUBSUB_MESSAGE_ATTRIBUTE_MAX_VALUE_BYTES) {
        // Only a 256-char prefix of the value is echoed in the error message.
        throw new SizeLimitExceededException(
            "Pubsub message attribute value for key '"
                + key
                + "' starting with '"
                + value.substring(0, Math.min(256, value.length()))
                + "' exceeds the maximum of "
                + PUBSUB_MESSAGE_ATTRIBUTE_MAX_VALUE_BYTES
                + " bytes. See https://cloud.google.com/pubsub/quotas#resource_limits");
      }
      totalSize += valueSize;
    }
  }

  if (totalSize > maxPublishBatchSize) {
    throw new SizeLimitExceededException(
        "Pubsub message of length "
            + totalSize
            + " exceeds maximum of "
            + maxPublishBatchSize
            + " bytes, when considering the payload and attributes. "
            + "See https://cloud.google.com/pubsub/quotas#resource_limits");
  }
  return totalSize;
}
@Test public void testValidatePubsubMessageSizeAttributeValueTooLarge() { byte[] data = new byte[1024]; String attributeKey = "key"; String attributeValue = RandomStringUtils.randomAscii(1025); Map<String, String> attributes = ImmutableMap.of(attributeKey, attributeValue); PubsubMessage message = new PubsubMessage(data, attributes); assertThrows( SizeLimitExceededException.class, () -> PreparePubsubWriteDoFn.validatePubsubMessageSize( message, PUBSUB_MESSAGE_MAX_TOTAL_SIZE)); }
ScriptEngine createScriptEngine(TbContext ctx, TbLogNodeConfiguration config) { return ctx.createScriptEngine(config.getScriptLang(), ScriptLanguage.TBEL.equals(config.getScriptLang()) ? config.getTbelScript() : config.getJsScript()); }
@Test void givenScriptEngineLangJs_whenCreateScriptEngine_thenSupplyJsScript(){ TbLogNodeConfiguration configJs = new TbLogNodeConfiguration().defaultConfiguration(); configJs.setScriptLang(ScriptLanguage.JS); configJs.setJsScript(configJs.getJsScript() + " // This is JS script " + UUID.randomUUID()); TbLogNode node = new TbLogNode(); TbContext ctx = mock(TbContext.class); node.createScriptEngine(ctx, configJs); verify(ctx).createScriptEngine(ScriptLanguage.JS, configJs.getJsScript()); verifyNoMoreInteractions(ctx); }
public static String convertFreshnessToCron(IntervalFreshness intervalFreshness) { switch (intervalFreshness.getTimeUnit()) { case SECOND: return validateAndConvertCron( intervalFreshness, SECOND_CRON_UPPER_BOUND, SECOND_CRON_EXPRESSION_TEMPLATE); case MINUTE: return validateAndConvertCron( intervalFreshness, MINUTE_CRON_UPPER_BOUND, MINUTE_CRON_EXPRESSION_TEMPLATE); case HOUR: return validateAndConvertCron( intervalFreshness, HOUR_CRON_UPPER_BOUND, HOUR_CRON_EXPRESSION_TEMPLATE); case DAY: return validateAndConvertDayCron(intervalFreshness); default: throw new ValidationException( String.format( "Unknown freshness time unit: %s.", intervalFreshness.getTimeUnit())); } }
@Test void testConvertDayFreshnessToCronExpression() { // verify illegal freshness assertThatThrownBy(() -> convertFreshnessToCron(IntervalFreshness.ofDay("2"))) .isInstanceOf(ValidationException.class) .hasMessageContaining( "In full refresh mode, freshness must be 1 when the time unit is DAY."); String actual1 = convertFreshnessToCron(IntervalFreshness.ofDay("1")); assertThat(actual1).isEqualTo("0 0 0 * * ? *"); }
public <T> T getBean(final Class<T> type) { return applicationContext.getBean(type); }
@Test public void testGetBean() { ConfigurableApplicationContext applicationContext = mock(ConfigurableApplicationContext.class); when(applicationContext.getBean(TestBean.class)).thenReturn(new TestBean()); springBeanUtilsUnderTest.setApplicationContext(applicationContext); final TestBean result = springBeanUtilsUnderTest.getBean(TestBean.class); assertNotNull(result); }
/**
 * Downloads the rendered URI to a temp file via a streaming HTTP client and stores
 * the result in task storage.
 *
 * Behavior: captures status code and headers from the first chunk, sums the streamed
 * byte count, cross-checks it against Content-Length when present, honors
 * Content-Disposition for the stored filename, and either fails or warns on an empty
 * body depending on {@code failOnEmptyResponse}.
 */
public Output run(RunContext runContext) throws Exception {
    Logger logger = runContext.logger();
    URI from = new URI(runContext.render(this.uri));

    File tempFile = runContext.workingDir().createTempFile(filenameFromURI(from)).toFile();

    // output
    Output.OutputBuilder builder = Output.builder();

    // do it
    try (
        ReactorStreamingHttpClient client = this.streamingClient(runContext, this.method);
        BufferedOutputStream output = new BufferedOutputStream(new FileOutputStream(tempFile));
    ) {
        @SuppressWarnings("unchecked")
        HttpRequest<String> request = this.request(runContext);

        // Stream chunks to disk; the reduced value is the total number of body bytes.
        Long size = client
            .exchangeStream(request)
            .map(throwFunction(response -> {
                // Capture code/headers once, from the first emitted response.
                if (builder.code == null) {
                    builder
                        .code(response.code())
                        .headers(response.getHeaders().asMap());
                }

                if (response.getBody().isPresent()) {
                    byte[] bytes = response.getBody().get().toByteArray();
                    output.write(bytes);

                    return (long) bytes.length;
                } else {
                    return 0L;
                }
            }))
            .reduce(Long::sum)
            .block();

        if (size == null) {
            size = 0L;
        }

        // Sanity-check the streamed size against the server-declared Content-Length.
        if (builder.headers != null && builder.headers.containsKey("Content-Length")) {
            long length = Long.parseLong(builder.headers.get("Content-Length").getFirst());
            if (length != size) {
                throw new IllegalStateException("Invalid size, got " + size + ", expected " + length);
            }
        }

        output.flush();

        runContext.metric(Counter.of("response.length", size, this.tags(request, null)));

        builder.length(size);

        if (size == 0) {
            if (this.failOnEmptyResponse) {
                throw new HttpClientResponseException("No response from server",
                    HttpResponse.status(HttpStatus.SERVICE_UNAVAILABLE));
            } else {
                logger.warn("File '{}' is empty", from);
            }
        }

        // Prefer the server-provided filename (Content-Disposition) when storing.
        String filename = null;
        if (builder.headers != null && builder.headers.containsKey("Content-Disposition")) {
            String contentDisposition = builder.headers.get("Content-Disposition").getFirst();
            filename = filenameFromHeader(runContext, contentDisposition);
        }
        builder.uri(runContext.storage().putFile(tempFile, filename));

        logger.debug("File '{}' downloaded to '{}'", from, builder.uri);

        return builder.build();
    }
}
@Test void allowNoResponse() throws IOException { EmbeddedServer embeddedServer = applicationContext.getBean(EmbeddedServer.class); embeddedServer.start(); Download task = Download.builder() .id(DownloadTest.class.getSimpleName()) .failOnEmptyResponse(false) .type(DownloadTest.class.getName()) .uri(embeddedServer.getURI() + "/204") .build(); RunContext runContext = TestsUtils.mockRunContext(this.runContextFactory, task, ImmutableMap.of()); Download.Output output = assertDoesNotThrow(() -> task.run(runContext)); assertThat(output.getLength(), is(0L)); assertThat(IOUtils.toString(this.storageInterface.get(null, output.getUri()), StandardCharsets.UTF_8), is("")); }
@Override public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException { XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata); xhtml.startDocument(); EmbeddedDocumentExtractor extractor = EmbeddedDocumentUtil.getEmbeddedDocumentExtractor(context); String mediaType = metadata.get(Metadata.CONTENT_TYPE); if (mediaType != null && mediaType.contains("version=5")) { throw new UnsupportedFormatException("Tika does not yet support rar version 5."); } Archive rar = null; try (TemporaryResources tmp = new TemporaryResources()) { TikaInputStream tis = TikaInputStream.get(stream, tmp, metadata); rar = new Archive(tis.getFile()); if (rar.isEncrypted()) { throw new EncryptedDocumentException(); } //Without this BodyContentHandler does not work xhtml.element("div", " "); FileHeader header = rar.nextFileHeader(); while (header != null && !Thread.currentThread().isInterrupted()) { if (!header.isDirectory()) { Metadata entrydata = PackageParser.handleEntryMetadata(header.getFileName(), header.getCTime(), header.getMTime(), header.getFullUnpackSize(), xhtml); try (InputStream subFile = rar.getInputStream(header)) { if (extractor.shouldParseEmbedded(entrydata)) { extractor.parseEmbedded(subFile, handler, entrydata, true); } } } header = rar.nextFileHeader(); } } catch (RarException e) { throw new TikaException("RarParser Exception", e); } finally { if (rar != null) { rar.close(); } } xhtml.endDocument(); }
/**
 * End-to-end check of RAR parsing: every embedded document (but not the directory)
 * must be surfaced to the tracking extractor, with names and modified timestamps but
 * no content types, and the body text must mention every entry name.
 */
@Test
public void testEmbedded() throws Exception {
    ContentHandler handler = new BodyContentHandler();
    Metadata metadata = new Metadata();

    try (InputStream stream = getResourceAsStream("/test-documents/test-documents.rar")) {
        AUTO_DETECT_PARSER.parse(stream, handler, metadata, trackingContext);
    }

    // Should have found all 9 documents, but not the directory
    assertEquals(9, tracker.filenames.size());
    assertEquals(9, tracker.mediatypes.size());
    assertEquals(9, tracker.modifiedAts.size());

    // Should have names but not content types, as rar doesn't
    // store the content types
    assertEquals("test-documents/testEXCEL.xls", tracker.filenames.get(0));
    assertEquals("test-documents/testHTML.html", tracker.filenames.get(1));
    assertEquals("test-documents/testOpenOffice2.odt", tracker.filenames.get(2));
    assertEquals("test-documents/testPDF.pdf", tracker.filenames.get(3));
    assertEquals("test-documents/testPPT.ppt", tracker.filenames.get(4));
    assertEquals("test-documents/testRTF.rtf", tracker.filenames.get(5));
    assertEquals("test-documents/testTXT.txt", tracker.filenames.get(6));
    assertEquals("test-documents/testWORD.doc", tracker.filenames.get(7));
    assertEquals("test-documents/testXML.xml", tracker.filenames.get(8));
    for (String type : tracker.mediatypes) {
        assertNull(type);
    }
    for (String crt : tracker.createdAts) {
        assertNull(crt);
    }
    // Modified timestamps must be present and look like 20xx dates.
    for (String mod : tracker.modifiedAts) {
        assertNotNull(mod);
        assertTrue(mod.startsWith("20"), "Modified at " + mod);
    }

    // Should have filenames in the content string
    String content = handler.toString();
    assertContains("test-documents/testHTML.html", content);
    assertContains("test-documents/testEXCEL.xls", content);
    assertContains("test-documents/testOpenOffice2.odt", content);
    assertContains("test-documents/testPDF.pdf", content);
    assertContains("test-documents/testPPT.ppt", content);
    assertContains("test-documents/testRTF.rtf", content);
    assertContains("test-documents/testTXT.txt", content);
    assertContains("test-documents/testWORD.doc", content);
    assertContains("test-documents/testXML.xml", content);
}
@Override public List<String> findRolesLikeRoleName(String role) { String sql = "SELECT role FROM roles WHERE role LIKE ? " + SQL_DERBY_ESCAPE_BACK_SLASH_FOR_LIKE; return databaseOperate.queryMany(sql, new String[] {"%" + role + "%"}, String.class); }
@Test void testFindRolesLikeRoleName() { List<String> role = embeddedRolePersistService.findRolesLikeRoleName("role"); assertEquals(0, role.size()); }
public NSBundle bundle() { if(cached != null) { return cached; } if(log.isInfoEnabled()) { log.info("Loading application bundle resources"); } final NSBundle main = NSBundle.mainBundle(); if(null == main) { cached = null; } else { final Local executable = new FinderLocal(main.executablePath()); cached = this.bundle(main, executable); } return cached; }
@Test @Ignore public void testSymbolicLink() { final NSBundle bundle = new BundleApplicationResourcesFinder().bundle(NSBundle.bundleWithPath("."), new Local("/usr/bin/java")); assertNotNull(bundle); assertEquals(NSBundle.bundleWithPath("/System/Library/Frameworks/JavaVM.framework/Versions/A"), bundle); }
public static Map<String, String> rebuildCreateTableProperties(Map<String, String> createProperties) { ImmutableMap.Builder<String, String> tableProperties = ImmutableMap.builder(); createProperties.entrySet().forEach(tableProperties::put); String fileFormat = createProperties.getOrDefault(FILE_FORMAT, TableProperties.DEFAULT_FILE_FORMAT_DEFAULT); String compressionCodec = null; if ("parquet".equalsIgnoreCase(fileFormat)) { tableProperties.put(TableProperties.DEFAULT_FILE_FORMAT, "parquet"); compressionCodec = createProperties.getOrDefault(COMPRESSION_CODEC, TableProperties.PARQUET_COMPRESSION_DEFAULT); tableProperties.put(TableProperties.PARQUET_COMPRESSION, compressionCodec); } else if ("avro".equalsIgnoreCase(fileFormat)) { tableProperties.put(TableProperties.DEFAULT_FILE_FORMAT, "avro"); compressionCodec = createProperties.getOrDefault(COMPRESSION_CODEC, TableProperties.AVRO_COMPRESSION_DEFAULT); tableProperties.put(TableProperties.AVRO_COMPRESSION, compressionCodec); } else if ("orc".equalsIgnoreCase(fileFormat)) { tableProperties.put(TableProperties.DEFAULT_FILE_FORMAT, "orc"); compressionCodec = createProperties.getOrDefault(COMPRESSION_CODEC, TableProperties.ORC_COMPRESSION_DEFAULT); tableProperties.put(TableProperties.ORC_COMPRESSION, compressionCodec); } else if (fileFormat != null) { throw new IllegalArgumentException("Unsupported format in USING: " + fileFormat); } if (!PARQUET_COMPRESSION_TYPE_MAP.containsKey(compressionCodec.toLowerCase(Locale.ROOT))) { throw new IllegalArgumentException("Unsupported compression codec in USING: " + compressionCodec); } tableProperties.put(TableProperties.FORMAT_VERSION, "1"); return tableProperties.build(); }
@Test public void testRebuildCreateTableProperties() { Map<String, String> source = ImmutableMap.of("file_format", "orc"); Map<String, String> target = IcebergApiConverter.rebuildCreateTableProperties(source); Assert.assertEquals("orc", target.get(DEFAULT_FILE_FORMAT)); source = ImmutableMap.of("file_format", "orc", "compression_codec", "snappy"); target = IcebergApiConverter.rebuildCreateTableProperties(source); Assert.assertEquals("snappy", target.get(ORC_COMPRESSION)); source = ImmutableMap.of("file_format", "parquet", "compression_codec", "snappy"); target = IcebergApiConverter.rebuildCreateTableProperties(source); Assert.assertEquals("snappy", target.get(PARQUET_COMPRESSION)); source = ImmutableMap.of("file_format", "avro", "compression_codec", "zstd"); target = IcebergApiConverter.rebuildCreateTableProperties(source); Assert.assertEquals("zstd", target.get(AVRO_COMPRESSION)); }
synchronized void add(int splitCount) { int pos = count % history.length; history[pos] = splitCount; count += 1; }
@Test public void testExactFullHistory() { EnumerationHistory history = new EnumerationHistory(3); history.add(1); history.add(2); history.add(3); int[] expectedHistorySnapshot = {1, 2, 3}; testHistory(history, expectedHistorySnapshot); }
static boolean isNewDatabase(String uppercaseProductName) { if (SUPPORTED_DATABASE_NAMES.contains(uppercaseProductName)) { return false; } return DETECTED_DATABASE_NAMES.add(uppercaseProductName); }
@Test public void test_notSupportedDB() { String dbName = "cassandra"; boolean newDB = SupportedDatabases.isNewDatabase(dbName); assertThat(newDB).isTrue(); }
public double calculateAveragePercentageUsedBy(NormalizedResources used, double totalMemoryMb, double usedMemoryMb) { int skippedResourceTypes = 0; double total = 0.0; if (usedMemoryMb > totalMemoryMb) { throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb); } if (totalMemoryMb != 0.0) { total += usedMemoryMb / totalMemoryMb; } else { skippedResourceTypes++; } double totalCpu = getTotalCpu(); if (used.getTotalCpu() > getTotalCpu()) { throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb); } if (totalCpu != 0.0) { total += used.getTotalCpu() / getTotalCpu(); } else { skippedResourceTypes++; } if (used.otherResources.length > otherResources.length) { throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb); } for (int i = 0; i < otherResources.length; i++) { double totalValue = otherResources[i]; double usedValue; if (i >= used.otherResources.length) { //Resources missing from used are using none of that resource usedValue = 0.0; } else { usedValue = used.otherResources[i]; } if (usedValue > totalValue) { throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb); } if (totalValue == 0.0) { //Skip any resources where the total is 0, the percent used for this resource isn't meaningful. //We fall back to prioritizing by cpu, memory and any other resources by ignoring this value skippedResourceTypes++; continue; } total += usedValue / totalValue; } //Adjust the divisor for the average to account for any skipped resources (those where the total was 0) int divisor = 2 + otherResources.length - skippedResourceTypes; if (divisor == 0) { /* * This is an arbitrary choice to make the result consistent with calculateMin. Any value would be valid here, becase there are * no (non-zero) resources in the total set of resources, so we're trying to average 0 values. */ return 100.0; } else { return (total * 100.0) / divisor; } }
@Test public void testCalculateAvgWithTooLittleResourceInTotal() { Map<String, Double> allResourcesMap = new HashMap<>(); allResourcesMap.put(Constants.COMMON_CPU_RESOURCE_NAME, 2.0); allResourcesMap.put(gpuResourceName, 1.0); NormalizedResources resources = new NormalizedResources(normalize(allResourcesMap)); Map<String, Double> usedResourcesMap = new HashMap<>(); usedResourcesMap.put(Constants.COMMON_CPU_RESOURCE_NAME, 1.0); usedResourcesMap.put(gpuResourceName, 5.0); NormalizedResources usedResources = new NormalizedResources(normalize(usedResourcesMap)); assertThrows(IllegalArgumentException.class, () -> resources.calculateAveragePercentageUsedBy(usedResources, 4, 1)); }
@SuppressWarnings("unchecked") public static <T> TypeInformation<T> convert(String jsonSchema) { Preconditions.checkNotNull(jsonSchema, "JSON schema"); final ObjectMapper mapper = JacksonMapperFactory.createObjectMapper(); mapper.getFactory() .enable(JsonParser.Feature.ALLOW_COMMENTS) .enable(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES) .enable(JsonParser.Feature.ALLOW_SINGLE_QUOTES); final JsonNode node; try { node = mapper.readTree(jsonSchema); } catch (IOException e) { throw new IllegalArgumentException("Invalid JSON schema.", e); } return (TypeInformation<T>) convertType("<root>", node, node); }
@Test void testMissingType() { assertThatThrownBy(() -> JsonRowSchemaConverter.convert("{ }")) .isInstanceOf(IllegalArgumentException.class); }
public ReencryptionPendingInodeIdCollector getTraverser() { return traverser; }
@Test public void testThrottle() throws Exception { final Configuration conf = new Configuration(); conf.setDouble(DFS_NAMENODE_REENCRYPT_THROTTLE_LIMIT_HANDLER_RATIO_KEY, 0.5); final ReencryptionHandler rh = mockReencryptionhandler(conf); // mock StopWatches so all = 30s, locked = 20s. With ratio = .5, throttle // should wait for 30 * 0.5 - 20 = 5s. final StopWatch mockAll = Mockito.mock(StopWatch.class); Mockito.when(mockAll.now(TimeUnit.MILLISECONDS)).thenReturn((long) 30000); Mockito.when(mockAll.reset()).thenReturn(mockAll); final StopWatch mockLocked = Mockito.mock(StopWatch.class); Mockito.when(mockLocked.now(TimeUnit.MILLISECONDS)) .thenReturn((long) 20000); Mockito.when(mockLocked.reset()).thenReturn(mockLocked); final BlockingQueue<Runnable> queue = new LinkedBlockingQueue<>(); Whitebox.setInternalState(rh, "throttleTimerAll", mockAll); Whitebox.setInternalState(rh, "throttleTimerLocked", mockLocked); Whitebox.setInternalState(rh, "taskQueue", queue); final StopWatch sw = new StopWatch().start(); rh.getTraverser().throttle(); sw.stop(); assertTrue("should have throttled for at least 8 second", sw.now(TimeUnit.MILLISECONDS) > 8000); assertTrue("should have throttled for at most 12 second", sw.now(TimeUnit.MILLISECONDS) < 12000); }
public static ParsedCommand parse( // CHECKSTYLE_RULES.ON: CyclomaticComplexity final String sql, final Map<String, String> variables) { validateSupportedStatementType(sql); final String substituted; try { substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables); } catch (ParseFailedException e) { throw new MigrationException(String.format( "Failed to parse the statement. Statement: %s. Reason: %s", sql, e.getMessage())); } final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted) .get(0).getStatement(); final boolean isStatement = StatementType.get(statementContext.statement().getClass()) == StatementType.STATEMENT; return new ParsedCommand(substituted, isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY) .buildStatement(statementContext))); }
// A CREATE SOURCE CONNECTOR statement parses into exactly one command whose AST is a
// CreateConnector with SOURCE type and all six WITH(...) config entries preserved.
@Test
public void shouldParseCreateConnectorStatement() {
    // Given:
    final String createConnector = "CREATE SOURCE CONNECTOR `jdbc-connector` WITH(\n"
        + " \"connector.class\"='io.confluent.connect.jdbc.JdbcSourceConnector',\n"
        + " \"connection.url\"='jdbc:postgresql://localhost:5432/my.db',\n"
        + " \"mode\"='bulk',\n"
        + " \"topic.prefix\"='jdbc-',\n"
        + " \"table.whitelist\"='users',\n"
        + " \"key\"='username');";
    // When:
    List<CommandParser.ParsedCommand> commands = parse(createConnector);
    // Then:
    assertThat(commands.size(), is(1));
    assertThat(commands.get(0).getStatement().isPresent(), is (true));
    assertThat(commands.get(0).getStatement().get(), instanceOf(CreateConnector.class));
    assertThat(commands.get(0).getCommand(), is(createConnector));
    assertThat(((CreateConnector) commands.get(0).getStatement().get()).getName(), is("jdbc-connector"));
    assertThat(((CreateConnector) commands.get(0).getStatement().get()).getType() == CreateConnector.Type.SOURCE, is(true));
    // All six config entries must survive parsing with their exact values.
    assertThat(((CreateConnector) commands.get(0).getStatement().get()).getConfig().size(), is(6));
    assertThat(((CreateConnector) commands.get(0).getStatement().get()).getConfig().get("connector.class").getValue(), is("io.confluent.connect.jdbc.JdbcSourceConnector"));
    assertThat(((CreateConnector) commands.get(0).getStatement().get()).getConfig().get("connection.url").getValue(), is("jdbc:postgresql://localhost:5432/my.db"));
    assertThat(((CreateConnector) commands.get(0).getStatement().get()).getConfig().get("mode").getValue(), is("bulk"));
    assertThat(((CreateConnector) commands.get(0).getStatement().get()).getConfig().get("topic.prefix").getValue(), is("jdbc-"));
    assertThat(((CreateConnector) commands.get(0).getStatement().get()).getConfig().get("table.whitelist").getValue(), is("users"));
    assertThat(((CreateConnector) commands.get(0).getStatement().get()).getConfig().get("key").getValue(), is("username"));
}
/**
 * Persists one rule-change row per plugin rule update, then bulk-inserts a
 * quality-profile change entry for every profile that references the updated rule.
 *
 * NOTE(review): insertRuleChange(...) runs as a side effect inside the stream's
 * flatMap, so the DB writes only happen when toList() consumes the stream; consider
 * an explicit loop to make the write ordering obvious — TODO confirm before changing.
 *
 * @param dbSession          the open DB session used for all inserts
 * @param pluginRuleUpdates  the rule updates detected during plugin loading
 */
public void createQprofileChangesForRuleUpdates(DbSession dbSession, Set<PluginRuleUpdate> pluginRuleUpdates) {
    List<QProfileChangeDto> changesToPersist = pluginRuleUpdates.stream()
        .flatMap(pluginRuleUpdate -> {
            RuleChangeDto ruleChangeDto = createNewRuleChange(pluginRuleUpdate);
            insertRuleChange(dbSession, ruleChangeDto);
            return findQualityProfilesForRule(dbSession, pluginRuleUpdate.getRuleUuid()).stream()
                .map(qualityProfileUuid -> buildQprofileChangeDtoForRuleChange(qualityProfileUuid, ruleChangeDto));
        }).toList();
    // Skip the bulk insert entirely when no profile references any updated rule.
    if (!changesToPersist.isEmpty()) {
        dbClient.qProfileChangeDao().bulkInsert(dbSession, changesToPersist);
    }
}
// A clean-code-attribute change alone must produce a rule-change insert whose
// old/new attributes match the update and that carries no impact changes.
@Test
public void updateWithoutCommit_whenOneRuleChangedItsAttribute_thenInsertRuleChangeButNotImpactChange() {
    PluginRuleUpdate pluginRuleUpdate = new PluginRuleUpdate();
    pluginRuleUpdate.setNewCleanCodeAttribute(CleanCodeAttribute.CLEAR);
    pluginRuleUpdate.setOldCleanCodeAttribute(CleanCodeAttribute.TESTED);
    pluginRuleUpdate.setRuleUuid(RULE_UUID);
    underTest.createQprofileChangesForRuleUpdates(dbSession, Set.of(pluginRuleUpdate));
    verify(ruleChangeDao).insert(argThat(dbSession::equals),
        argThat(ruleChangeDto -> ruleChangeDto.getNewCleanCodeAttribute() == CleanCodeAttribute.CLEAR
            && ruleChangeDto.getOldCleanCodeAttribute() == CleanCodeAttribute.TESTED
            && ruleChangeDto.getRuleUuid().equals(RULE_UUID)
            && ruleChangeDto.getRuleImpactChanges().isEmpty()));
}
// This implementation is unconditionally supported.
@Override
public boolean isSupported() {
    return true;
}
// SamsungImpl must always report itself as supported.
@Test
public void isSupported() {
    SamsungImpl samsung = new SamsungImpl(mApplication);
    Assert.assertTrue(samsung.isSupported());
}
/**
 * Closes the given resource, swallowing any exception thrown by close().
 * A {@code null} argument is a no-op.
 *
 * @param closeable the resource to close; may be {@code null}
 */
@SuppressWarnings("java:S108")
public static void closeQuietly(AutoCloseable closeable) {
    if (closeable != null) {
        try {
            closeable.close();
        } catch (Exception ignored) {
            // Best-effort close: failures are deliberately discarded.
        }
    }
}
// closeQuietly must tolerate a null argument without throwing.
@Test
public void test_closeQuietly_whenNull() {
    closeQuietly(null);
}
/**
 * Two Either instances are equal when they carry the same side flag and
 * equal wrapped values; subclasses never compare equal (strict class match).
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (o == null || o.getClass() != getClass()) {
        return false;
    }
    final Either<?, ?> that = (Either<?, ?>) o;
    return right == that.right && Objects.equals(value, that.value);
}
// equals/hashCode must agree for both sides of Either, and isRightEqual must only
// match the wrapped value on a right-hand instance.
@Test
void testEquals() {
    assertEquals(Either.<String, Integer>ofRight(1), right);
    assertNotEquals(Either.<String, Integer>ofRight(2), right);
    assertEquals(Either.<String, Integer>ofRight(1).hashCode(), right.hashCode());
    assertNotEquals(Either.<String, Integer>ofRight(2).hashCode(), right.hashCode());
    assertTrue(right.isRightEqual(1));
    assertFalse(right.isRightEqual(2));
    assertEquals(Either.<String, Integer>ofLeft("A"), left);
    assertNotEquals(Either.<String, Integer>ofLeft("B"), left);
    assertEquals(Either.<String, Integer>ofLeft("A").hashCode(), left.hashCode());
    assertNotEquals(Either.<String, Integer>ofLeft("B").hashCode(), left.hashCode());
    // A left-hand Either never right-equals anything.
    assertFalse(left.isRightEqual(1));
    assertFalse(left.isRightEqual(2));
}
// Delegates to the shared helper: skip this plugin for everything except
// HTTP-like exchanges.
@Override
public boolean skip(final ServerWebExchange exchange) {
    return skipExceptHttpLike(exchange);
}
// The plugin must skip exchanges without an HTTP-like RPC type, and must NOT skip
// HTTP or Spring Cloud exchanges.
@Test
public void testSkip() {
    ServerWebExchange exchangeNormal = generateServerWebExchange();
    assertTrue(nettyHttpClientPlugin.skip(exchangeNormal));
    ServerWebExchange exchangeHttp = generateServerWebExchange();
    when(((ShenyuContext) exchangeHttp.getAttributes().get(Constants.CONTEXT)).getRpcType())
        .thenReturn(RpcTypeEnum.HTTP.getName());
    assertFalse(nettyHttpClientPlugin.skip(exchangeHttp));
    ServerWebExchange exchangeSpringCloud = generateServerWebExchange();
    when(((ShenyuContext) exchangeSpringCloud.getAttributes().get(Constants.CONTEXT)).getRpcType())
        .thenReturn(RpcTypeEnum.SPRING_CLOUD.getName());
    assertFalse(nettyHttpClientPlugin.skip(exchangeSpringCloud));
}
/**
 * Returns the backing array itself, not a copy.
 *
 * NOTE(review): this intentionally leaks the internal array — callers share
 * mutations with the list (the unit test pins exactly this aliasing behavior).
 * Do not "fix" by copying without updating the callers and the test.
 */
@Override
public Object[] toArray() {
    return mElements;
}
// toArray() must expose the backing array: mutating the original array is
// visible through subsequent toArray() calls (documented aliasing behavior).
@Test
public void toArray() {
    String[] array = new String[]{"a", "b", "c", "d"};
    UnmodifiableArrayList<String> list = new UnmodifiableArrayList<>(array);
    assertArrayEquals(array, list.toArray());
    array[0] = "c";
    assertArrayEquals(array, list.toArray());
}
/**
 * Returns the next cached record, blocking until one is available.
 * Returns {@code null} when the iterator is closed and the cache is empty.
 * All queue access happens under the shared lock via supplyWithLock.
 */
@Override
public IN next() {
    return supplyWithLock(
        () -> {
            IN record;
            if (cacheQueue.size() > 0) {
                // Cache was full: wake producers blocked on cacheNotFull before draining.
                if (!closed && cacheQueue.size() == DEFAULT_MAX_CACHE_NUM) {
                    cacheNotFull.signalAll();
                }
                record = cacheQueue.poll();
                return record;
            } else {
                if (closed) {
                    return null;
                }
                // Wait for a producer; poll() may still yield null if closed while waiting.
                waitCacheNotEmpty();
                return cacheQueue.poll();
            }
        });
}
// Exercises MapPartitionIterator.next() in three situations: cache non-empty,
// cache empty (consumer blocks until a record arrives), and iterator closed
// (blocked consumer is released with null).
@Test
void testNext() throws ExecutionException, InterruptedException {
    CompletableFuture<List<String>> result = new CompletableFuture<>();
    CompletableFuture<Object> udfFinishTrigger = new CompletableFuture<>();
    MapPartitionIterator<String> iterator =
        new MapPartitionIterator<>(
            inputIterator -> {
                List<String> strings = new ArrayList<>();
                for (int index = 0; index < RECORD_NUMBER; ++index) {
                    strings.add(inputIterator.next());
                }
                result.complete(strings);
                try {
                    udfFinishTrigger.get();
                } catch (InterruptedException | ExecutionException e) {
                    ExceptionUtils.rethrow(e);
                }
            });
    // 1.Test next() when the cache is not empty in the MapPartitionIterator.
    addRecordToIterator(RECORD_NUMBER, iterator);
    List<String> results = result.get();
    assertThat(results.size()).isEqualTo(RECORD_NUMBER);
    assertThat(results.get(0)).isEqualTo(RECORD);
    assertThat(results.get(1)).isEqualTo(RECORD);
    assertThat(results.get(2)).isEqualTo(RECORD);
    // 2.Test next() when the cache is empty in the MapPartitionIterator.
    CompletableFuture<Object> mockedUDFThread1 = new CompletableFuture<>();
    CompletableFuture<String> nextFinishIdentifier1 = new CompletableFuture<>();
    mockedUDFThread1.thenRunAsync(
        () -> {
            String next = iterator.next();
            nextFinishIdentifier1.complete(next);
        });
    mockedUDFThread1.complete(null);
    assertThat(nextFinishIdentifier1).isNotCompleted();
    iterator.addRecord(RECORD);
    nextFinishIdentifier1.get();
    assertThat(nextFinishIdentifier1).isCompletedWithValue(RECORD);
    // 3.Test next() when the MapPartitionIterator is closed.
    CompletableFuture<Object> mockedUDFThread2 = new CompletableFuture<>();
    CompletableFuture<String> nextFinishIdentifier2 = new CompletableFuture<>();
    mockedUDFThread2.thenRunAsync(
        () -> {
            String next = iterator.next();
            nextFinishIdentifier2.complete(next);
            udfFinishTrigger.complete(null);
        });
    mockedUDFThread2.complete(null);
    assertThat(nextFinishIdentifier2).isNotCompleted();
    iterator.close();
    assertThat(nextFinishIdentifier2).isCompletedWithValue(null);
    assertThat(udfFinishTrigger).isCompleted();
}
/**
 * Deletes every stored search that is both older than the configured maximum age
 * and not referenced by any view.
 */
@Override
public void doRun() {
    // Anything created before this instant is past the retention window.
    final Instant expirationThreshold = Instant.now().minus(maximumSearchAge);
    searchDbService
        .getExpiredSearches(findReferencedSearchIds(), expirationThreshold)
        .forEach(searchDbService::delete);
}
// The cleanup job must pass every in-use search id (from views, resolvers and
// static references) to getExpiredSearches so referenced searches are never deleted.
@Test
public void testForReferencedSearches() {
    final ViewSummaryDTO view = ViewSummaryDTO.builder()
        .title("my-view")
        .searchId(IN_USE_SEARCH_ID)
        .build();
    when(viewService.streamAll()).thenReturn(Stream.of(view));
    final Search search = Search.builder()
        .id(IN_USE_SEARCH_ID)
        .createdAt(DateTime.now(DateTimeZone.UTC).minus(Duration.standardDays(30)))
        .build();
    when(searchDbService.streamAll()).thenReturn(Stream.of(search));
    this.searchesCleanUpJob.doRun();
    // Verify that search ids for standard views and resolved views are passed in as neverDeleteIds.
    final ArgumentCaptor<HashSet<String>> searchIdsCaptor = ArgumentCaptor.forClass((Class) Set.class);
    verify(searchDbService, times(1)).getExpiredSearches(searchIdsCaptor.capture(), any());
    assertThat(searchIdsCaptor.getValue().contains(IN_USE_SEARCH_ID)).isTrue();
    assertThat(searchIdsCaptor.getValue().contains(IN_USE_RESOLVER_SEARCH_ID)).isTrue();
    assertThat(searchIdsCaptor.getValue().contains(IN_USE_STATIC_SEARCH_ID)).isTrue();
    verify(searchDbService, never()).delete(any());
}
/**
 * Builds the Kafka AdminClient configuration from the supplied connection details.
 *
 * <p>Caller-provided entries win: defaults (timeouts, retries, security protocol)
 * are applied with putIfAbsent, while TLS trust/key material always overwrites.
 *
 * @param bootstrapHostnames bootstrap servers string for the AdminClient
 * @param kafkaCaTrustSet    PEM trust set enabling TLS encryption, or null for plaintext
 * @param authIdentity       PEM client identity enabling mTLS, or null for no client auth
 * @param config             base properties to augment; must not be null
 * @return the same Properties instance, augmented
 * @throws InvalidConfigurationException if {@code config} is null
 */
static Properties adminClientConfiguration(String bootstrapHostnames, PemTrustSet kafkaCaTrustSet, PemAuthIdentity authIdentity, Properties config) {
    if (config == null) {
        throw new InvalidConfigurationException("The config parameter should not be null");
    }
    config.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapHostnames);
    // configuring TLS encryption if requested
    if (kafkaCaTrustSet != null) {
        config.putIfAbsent(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL");
        config.setProperty(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "PEM");
        config.setProperty(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG, kafkaCaTrustSet.trustedCertificatesString());
    }
    // configuring TLS client authentication
    if (authIdentity != null) {
        config.putIfAbsent(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL");
        config.setProperty(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "PEM");
        config.setProperty(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG, authIdentity.certificateChainAsPem());
        config.setProperty(SslConfigs.SSL_KEYSTORE_KEY_CONFIG, authIdentity.privateKeyAsPem());
    }
    // Sensible defaults, only applied when the caller has not set them already.
    config.putIfAbsent(AdminClientConfig.METADATA_MAX_AGE_CONFIG, "30000");
    config.putIfAbsent(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "10000");
    config.putIfAbsent(AdminClientConfig.RETRIES_CONFIG, "3");
    config.putIfAbsent(AdminClientConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, "40000");
    return config;
}
// Client auth without an explicit trust set must still enable SSL and configure the
// PEM keystore with the identity's certificate chain and private key.
@Test
public void testMTlsWithPublicCAConnection() {
    Properties config = DefaultAdminClientProvider.adminClientConfiguration("my-kafka:9092", null, mockPemAuthIdentity(), new Properties());
    assertThat(config.size(), is(9));
    assertDefaultConfigs(config);
    assertThat(config.get(AdminClientConfig.SECURITY_PROTOCOL_CONFIG), is("SSL"));
    assertThat(config.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG).toString(), is("PEM"));
    assertThat(config.get(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG).toString(), is("user-cert"));
    assertThat(config.get(SslConfigs.SSL_KEYSTORE_KEY_CONFIG).toString(), is("user-key"));
}
// Returns an immutable snapshot of the currently cached local sessions.
public Collection<LocalSession> getLocalSessions() {
    return List.copyOf(localSessionCache.values());
}
// A deployment that fails model building must leave the set of local sessions
// exactly as it was before the attempt.
@Test
public void require_that_new_invalid_application_throws_exception() throws Exception {
    MockModelFactory failingFactory = new MockModelFactory();
    failingFactory.vespaVersion = new Version(1, 2, 0);
    failingFactory.throwOnLoad = true;
    MockModelFactory okFactory = new MockModelFactory();
    okFactory.vespaVersion = new Version(1, 1, 0);
    okFactory.throwOnLoad = false;
    setup(new ModelFactoryRegistry(List.of(okFactory, failingFactory)));
    Collection<LocalSession> sessions = sessionRepository.getLocalSessions();
    try {
        deploy();
        fail("deployment should have failed");
    } catch (InvalidApplicationException e) {
        assertEquals(sessions, sessionRepository.getLocalSessions());
    }
}
/**
 * Appends a segment of dictionary indexes, choosing the narrowest storage width
 * (byte / short / int) that can represent the segment's maximum index.
 *
 * @param maxIndex          the largest index in this segment; must be monotonically
 *                          non-decreasing across calls
 * @param dictionaryIndexes source indexes (only the first {@code indexCount} are used)
 * @param indexCount        number of valid entries in {@code dictionaryIndexes}
 */
public void addIndexes(int maxIndex, int[] dictionaryIndexes, int indexCount) {
    if (indexCount == 0 && indexRetainedBytes > 0) {
        // Ignore empty segment, since there are other segments present.
        return;
    }
    checkState(maxIndex >= lastMaxIndex, "LastMax is greater than the current max");
    lastMaxIndex = maxIndex;
    if (maxIndex <= Byte.MAX_VALUE) {
        byte[] byteIndexes = new byte[indexCount];
        for (int i = 0; i < indexCount; i++) {
            byteIndexes[i] = (byte) dictionaryIndexes[i];
        }
        appendByteIndexes(byteIndexes);
    } else if (maxIndex <= Short.MAX_VALUE) {
        short[] shortIndexes = new short[indexCount];
        for (int i = 0; i < indexCount; i++) {
            shortIndexes[i] = (short) dictionaryIndexes[i];
        }
        appendShortIndexes(shortIndexes);
    } else {
        int[] intIndexes = Arrays.copyOf(dictionaryIndexes, indexCount);
        appendIntegerIndexes(intIndexes);
    }
}
// Indexes whose maximum fits a short must be stored losslessly in the short path,
// for empty, partial, and full segment lengths.
@Test
public void testShortIndexes() {
    int[] dictionaryIndexes = createIndexArray(Short.MAX_VALUE + 1, MAX_DICTIONARY_INDEX);
    for (int length : ImmutableList.of(0, 10, dictionaryIndexes.length)) {
        DictionaryRowGroupBuilder rowGroupBuilder = new DictionaryRowGroupBuilder();
        rowGroupBuilder.addIndexes(Short.MAX_VALUE, dictionaryIndexes, length);
        short[] shortIndexes = getShortIndexes(rowGroupBuilder);
        assertEquals(length, shortIndexes.length);
        for (int i = 0; i < length; i++) {
            assertEquals(dictionaryIndexes[i], shortIndexes[i]);
        }
    }
}
// Static facade over the shared decoder; see decodeFunctionResult for semantics.
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) {
    return decoder.decodeFunctionResult(rawInput, outputParameters);
}
// Decoding four dynamically-located utf8 strings from ABI-encoded output must yield
// the four expected values in order.
@Test
public void testDecodeMultipleStringValues() {
    Function function = new Function(
        "function",
        Collections.<Type>emptyList(),
        Arrays.asList(
            new TypeReference<Utf8String>() {},
            new TypeReference<Utf8String>() {},
            new TypeReference<Utf8String>() {},
            new TypeReference<Utf8String>() {}));
    assertEquals(
        FunctionReturnDecoder.decode(
            "0x0000000000000000000000000000000000000000000000000000000000000080"
                + "00000000000000000000000000000000000000000000000000000000000000c0"
                + "0000000000000000000000000000000000000000000000000000000000000100"
                + "0000000000000000000000000000000000000000000000000000000000000140"
                + "0000000000000000000000000000000000000000000000000000000000000004"
                + "6465663100000000000000000000000000000000000000000000000000000000"
                + "0000000000000000000000000000000000000000000000000000000000000004"
                + "6768693100000000000000000000000000000000000000000000000000000000"
                + "0000000000000000000000000000000000000000000000000000000000000004"
                + "6a6b6c3100000000000000000000000000000000000000000000000000000000"
                + "0000000000000000000000000000000000000000000000000000000000000004"
                + "6d6e6f3200000000000000000000000000000000000000000000000000000000",
            function.getOutputParameters()),
        (Arrays.asList(
            new Utf8String("def1"),
            new Utf8String("ghi1"),
            new Utf8String("jkl1"),
            new Utf8String("mno2"))));
}
/**
 * Seeds a theta-sketch union from a raw column value, accepting a serialized
 * sketch, an array of serialized sketches, or any single raw item.
 * Also tracks the largest union size seen so far in {@code _maxByteSize}.
 */
@Override
public Object getInitialAggregatedValue(Object rawValue) {
    Union thetaUnion = _setOperationBuilder.buildUnion();
    if (rawValue instanceof byte[]) {
        // Serialized Sketch
        byte[] bytes = (byte[]) rawValue;
        Sketch sketch = deserializeAggregatedValue(bytes);
        thetaUnion.union(sketch);
    } else if (rawValue instanceof byte[][]) {
        // Multiple Serialized Sketches
        byte[][] serializedSketches = (byte[][]) rawValue;
        for (byte[] sketchBytes : serializedSketches) {
            thetaUnion.union(deserializeAggregatedValue(sketchBytes));
        }
    } else {
        // Any other type is fed into the union as a single raw item.
        singleItemUpdate(thetaUnion, rawValue);
    }
    _maxByteSize = Math.max(_maxByteSize, thetaUnion.getCurrentBytes());
    return thetaUnion;
}
// A single raw value of any supported numeric type seeds a sketch with estimate 1;
// unsupported types must throw.
@Test
public void getInitialValueShouldSupportDifferentTypes() {
    DistinctCountThetaSketchValueAggregator agg = new DistinctCountThetaSketchValueAggregator();
    assertEquals(toSketch(agg.getInitialAggregatedValue(12345)).getEstimate(), 1.0);
    assertEquals(toSketch(agg.getInitialAggregatedValue(12345L)).getEstimate(), 1.0);
    assertEquals(toSketch(agg.getInitialAggregatedValue(12.345f)).getEstimate(), 1.0);
    assertEquals(toSketch(agg.getInitialAggregatedValue(12.345d)).getEstimate(), 1.0);
    assertThrows(() -> agg.getInitialAggregatedValue(new Object()));
}
/**
 * Looks up the reference registered for the given uuid.
 *
 * @throws NullPointerException  if {@code uuid} is null
 * @throws IllegalStateException if no reference is registered for {@code uuid}
 */
@Override
public long getRef(String uuid) {
    requireNonNull(uuid, "uuid can not be null");
    final Long ref = refsByUuid.get(uuid);
    checkState(ref != null, "No reference registered in the repository for uuid '%s'", uuid);
    return ref;
}
// A null uuid must fail fast with an NPE carrying the documented message.
@Test
public void getRef_throws_NPE_if_uuid_is_null() {
    assertThatThrownBy(() -> underTest.getRef(null))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("uuid can not be null");
}
/**
 * Converts a CLI string (case-insensitive, surrounding whitespace ignored) into a
 * FieldValueSuggestionMode.
 *
 * @throws ParameterException if the value matches none of the enum constants
 */
@Override
public FieldValueSuggestionMode convertFrom(final String value) {
    final String normalized = value.trim().toUpperCase(Locale.ROOT);
    try {
        return FieldValueSuggestionMode.valueOf(normalized);
    } catch (IllegalArgumentException e) {
        throw new ParameterException("Parameter should have one of the allowed values: "
            + Arrays.toString(FieldValueSuggestionMode.values()) + " (found: " + value + ")");
    }
}
// The converter must accept all enum names, ignore case and surrounding whitespace,
// and reject unknown values with a ParameterException.
@Test
void convertFrom() {
    Assertions.assertThat(converter.convertFrom("ON")).isEqualTo(FieldValueSuggestionMode.ON);
    Assertions.assertThat(converter.convertFrom("OFF")).isEqualTo(FieldValueSuggestionMode.OFF);
    Assertions.assertThat(converter.convertFrom("TEXTUAL_ONLY")).isEqualTo(FieldValueSuggestionMode.TEXTUAL_ONLY);
    // case sensitivity
    Assertions.assertThat(converter.convertFrom("on")).isEqualTo(FieldValueSuggestionMode.ON);
    // whitespace around
    Assertions.assertThat(converter.convertFrom(" on ")).isEqualTo(FieldValueSuggestionMode.ON);
    Assertions.assertThatThrownBy(() -> converter.convertFrom("nonsence"))
        .isInstanceOf(ParameterException.class)
        .hasMessageContaining("Parameter should have one of the allowed values");
}
/**
 * Counts the edges pointing into the given vertex by scanning every potential
 * source vertex in the graph.
 *
 * @param vertex the target vertex
 * @return the number of incoming edges (a self-loop counts once)
 */
@Override
public int getIndegree(int vertex) {
    int incoming = 0;
    for (int source = 0; source < graph.length; source++) {
        if (hasEdge(source, vertex)) {
            incoming++;
        }
    }
    return incoming;
}
// Indegree must reflect incoming edges across several graph fixtures, including
// self-loops added mid-test (each self-loop adds one to the vertex's indegree).
@Test
public void testGetIndegree() {
    System.out.println("getIndegree");
    assertEquals(0, g1.getIndegree(1));
    assertEquals(1, g2.getIndegree(1));
    g2.addEdge(1, 1);
    assertEquals(2, g2.getIndegree(1));
    assertEquals(2, g3.getIndegree(1));
    assertEquals(2, g3.getIndegree(2));
    assertEquals(2, g3.getIndegree(3));
    assertEquals(1, g4.getIndegree(4));
    assertEquals(0, g5.getIndegree(1));
    assertEquals(1, g6.getIndegree(1));
    g6.addEdge(1, 1);
    assertEquals(2, g6.getIndegree(1));
    assertEquals(2, g7.getIndegree(1));
    assertEquals(2, g7.getIndegree(2));
    assertEquals(2, g7.getIndegree(3));
    assertEquals(2, g8.getIndegree(4));
}
/**
 * CLI entry point: reads every flow YAML under the configured directory, expands
 * includes, concatenates them into one multi-document YAML body, and POSTs it to
 * the server's namespace update endpoint (optionally deleting flows not present).
 *
 * @return 0 on success, 1 when the server rejects the flows or validation fails
 */
@SuppressWarnings("deprecation")
@Override
public Integer call() throws Exception {
    super.call();
    try (var files = Files.walk(directory)) {
        // Collect all valid flow files, expanding include directives relative to each file.
        List<String> flows = files
            .filter(Files::isRegularFile)
            .filter(YamlFlowParser::isValidExtension)
            .map(path -> {
                try {
                    return IncludeHelperExpander.expand(Files.readString(path, Charset.defaultCharset()), path.getParent());
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            })
            .toList();
        String body = "";
        if (flows.isEmpty()) {
            stdOut("No flow found on '{}'", directory.toFile().getAbsolutePath());
        } else {
            // Multi-document YAML: one document per flow.
            body = String.join("\n---\n", flows);
        }
        try(DefaultHttpClient client = client()) {
            MutableHttpRequest<String> request = HttpRequest
                .POST(apiUri("/flows/") + namespace + "?delete=" + delete, body).contentType(MediaType.APPLICATION_YAML);
            List<UpdateResult> updated = client.toBlocking().retrieve(
                this.requestOptions(request),
                Argument.listOf(UpdateResult.class)
            );
            stdOut(updated.size() + " flow(s) for namespace '" + namespace + "' successfully updated !");
            updated.forEach(flow -> stdOut("- " + flow.getNamespace() + "." + flow.getId()));
        } catch (HttpClientResponseException e){
            FlowValidateCommand.handleHttpException(e, "flow");
            return 1;
        }
    } catch (ConstraintViolationException e) {
        FlowValidateCommand.handleException(e, "flow");
        return 1;
    }
    return 0;
}
// Pushing a directory of invalid flows must exit with code 1 and print the
// server's parse/validation errors to stderr.
@Test
void invalid() {
    URL directory = FlowNamespaceUpdateCommandTest.class.getClassLoader().getResource("invalids");
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    System.setErr(new PrintStream(out));
    try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
        EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class);
        embeddedServer.start();
        String[] args = {
            "--server",
            embeddedServer.getURL().toString(),
            "--user",
            "myuser:pass:word",
            "io.kestra.tests",
            directory.getPath(),
        };
        Integer call = PicocliRunner.call(FlowNamespaceUpdateCommand.class, ctx, args);
        assertThat(call, is(1));
        assertThat(out.toString(), containsString("Unable to parse flows"));
        assertThat(out.toString(), containsString("must not be empty"));
    }
}
/**
 * Masks an email address by hiding everything between the first character and the
 * '@' sign (e.g. "duandazhi@126.com" -> "d********@126.com").
 *
 * <p>Blank input yields the empty string; addresses whose local part is a single
 * character (or that contain no '@') are returned unchanged.
 *
 * @param email the address to mask; may be blank
 * @return the masked address
 */
public static String email(String email) {
    if (StrUtil.isBlank(email)) {
        return StrUtil.EMPTY;
    }
    final int atIndex = StrUtil.indexOf(email, '@');
    return atIndex <= 1 ? email : StrUtil.hide(email, 1, atIndex);
}
// Masking must hide the local part after its first character while keeping the
// domain intact, regardless of local-part length.
@Test
public void emailTest() {
    assertEquals("d********@126.com", DesensitizedUtil.email("duandazhi@126.com"));
    assertEquals("d********@gmail.com.cn", DesensitizedUtil.email("duandazhi@gmail.com.cn"));
    assertEquals("d*************@gmail.com.cn", DesensitizedUtil.email("duandazhi-jack@gmail.com.cn"));
}
/**
 * Computes sample completeness over the window range covering [from, to], clamped
 * to the windows this aggregator actually holds.
 *
 * <p>Runs under the window-rolling lock so the window range and aggregator state
 * stay consistent while completeness is evaluated. Returns an empty result when
 * the requested range does not overlap the retained windows.
 */
public MetricSampleCompleteness<G, E> completeness(long from, long to, AggregationOptions<G, E> options) {
    _windowRollingLock.lock();
    try {
        // Clamp to the retained window range; the current (in-progress) window is excluded.
        long fromWindowIndex = Math.max(windowIndex(from), _oldestWindowIndex);
        long toWindowIndex = Math.min(windowIndex(to), _currentWindowIndex - 1);
        if (fromWindowIndex > _currentWindowIndex || toWindowIndex < _oldestWindowIndex) {
            return new MetricSampleCompleteness<>(generation(), _windowMs);
        }
        maybeUpdateAggregatorState();
        return _aggregatorState.completeness(fromWindowIndex,
            toWindowIndex,
            interpretAggregationOptions(options),
            generation());
    } finally {
        _windowRollingLock.unlock();
    }
}
// With entity-group granularity and minValidEntityRatio lowered to 0.3, only
// ENTITY3 (and its group) remains valid, and windows 3, 4 and 20 drop out.
@Test
public void testAggregationOption6() {
    MetricSampleAggregator<String, IntegerEntity> aggregator = prepareCompletenessTestEnv();
    // Change the option to use entity group granularity and reduce the minValidEntityRatio to 0.3. This will
    // include ENTITY3 except in window 3, 4, 20.
    AggregationOptions<String, IntegerEntity> options =
        new AggregationOptions<>(0.3, 0.0, NUM_WINDOWS, 5,
            new HashSet<>(Arrays.asList(ENTITY1, ENTITY2, ENTITY3)),
            AggregationOptions.Granularity.ENTITY_GROUP, true);
    MetricSampleCompleteness<String, IntegerEntity> completeness =
        aggregator.completeness(-1, Long.MAX_VALUE, options);
    assertEquals(17, completeness.validWindowIndices().size());
    assertFalse(completeness.validWindowIndices().contains(3L));
    assertFalse(completeness.validWindowIndices().contains(4L));
    assertFalse(completeness.validWindowIndices().contains(20L));
    assertEquals(1, completeness.validEntities().size());
    assertTrue(completeness.validEntities().contains(ENTITY3));
    assertEquals(1, completeness.validEntityGroups().size());
    assertTrue(completeness.validEntityGroups().contains(ENTITY3.group()));
    assertCompletenessByWindowIndex(completeness);
}
/**
 * Moves the given file to the macOS Trash via NSFileManager.
 *
 * @param file the local file or directory to trash
 * @throws LocalAccessDeniedException if the native call fails; the NSError's
 *         localized description is used when available
 */
@Override
public void trash(final Local file) throws LocalAccessDeniedException {
    if(log.isDebugEnabled()) {
        log.debug(String.format("Move %s to Trash", file));
    }
    final ObjCObjectByReference error = new ObjCObjectByReference();
    if(!NSFileManager.defaultManager().trashItemAtURL_resultingItemURL_error(
        NSURL.fileURLWithPath(file.getAbsolute()), null, error)) {
        final NSError f = error.getValueAs(NSError.class);
        // A failure without a populated NSError still surfaces as access denied.
        if(null == f) {
            throw new LocalAccessDeniedException(file.getAbsolute());
        }
        throw new LocalAccessDeniedException(String.format("%s", f.localizedDescription()));
    }
}
// Trashing a directory must succeed even while a file inside it is held open for
// writing (the try-with-resources deliberately keeps the stream open during trash()).
@Test
public void testTrashOpenFile() throws Exception {
    final Trash trash = new FileManagerTrashFeature();
    final SupportDirectoryFinder finder = new TemporarySupportDirectoryFinder();
    final Local temp = finder.find();
    final Local directory = LocalFactory.get(temp, UUID.randomUUID().toString());
    directory.mkdir();
    final Local sub = LocalFactory.get(directory, UUID.randomUUID().toString());
    sub.mkdir();
    final Local file = LocalFactory.get(sub, UUID.randomUUID().toString());
    final Touch touch = LocalTouchFactory.get();
    touch.touch(file);
    try (final OutputStream stream = file.getOutputStream(false)) {
        trash.trash(directory);
    }
}
/**
 * Determines whether the given user may view the template: either the user (or one
 * of their roles) is an explicit view user, or the user is a group administrator
 * and the template allows group admins.
 */
public boolean hasViewAccessToTemplate(PipelineTemplateConfig template, CaseInsensitiveString username, List<Role> roles, boolean isGroupAdministrator) {
    if (template.getAuthorization().isViewUser(username, roles)) {
        return true;
    }
    return template.isAllowGroupAdmins() && isGroupAdministrator;
}
// A group administrator must be able to view a template even without being an
// explicit view user (roles passed as null).
@Test
public void shouldReturnTrueIfGroupAdminCanViewTemplate() {
    CaseInsensitiveString templateViewUser = new CaseInsensitiveString("view");
    String templateName = "template";
    PipelineTemplateConfig template = PipelineTemplateConfigMother.createTemplate(templateName, StageConfigMother.manualStage("stage"));
    TemplatesConfig templates = new TemplatesConfig(template);
    assertThat(templates.hasViewAccessToTemplate(template, templateViewUser, null, true), is(true));
}
/**
 * Reports whether any of the delegated client managers (connection-based,
 * ephemeral ip:port, persistent ip:port — checked in that order) tracks the
 * given client id.
 */
@Override
public boolean contains(String clientId) {
    if (connectionBasedClientManager.contains(clientId)) {
        return true;
    }
    if (ephemeralIpPortClientManager.contains(clientId)) {
        return true;
    }
    return persistentIpPortClientManager.contains(clientId);
}
// The delegate must report clients tracked by the ephemeral ip:port manager.
@Test
void testContainsEphemeralIpPortId() {
    assertTrue(delegate.contains(ephemeralIpPortId));
}
// Returns the table alias parsed from the statement, or null when none was given.
@Override
public String getTableAlias() {
    return ast.getAlias();
}
// An INSERT without an alias yields null; with an alias it yields exactly that alias.
@Test
public void testGetTableAlias() {
    String sql = "INSERT INTO t (id) values (?)";
    SQLStatement ast = getSQLStatement(sql);
    SqlServerInsertRecognizer recognizer = new SqlServerInsertRecognizer(sql, ast);
    Assertions.assertNull(recognizer.getTableAlias());
    sql = "INSERT INTO t t1 (id) values (?)";
    ast = getSQLStatement(sql);
    recognizer = new SqlServerInsertRecognizer(sql, ast);
    Assertions.assertEquals("t1", recognizer.getTableAlias());
}
/**
 * Looks up the value meta registered under the given name, or null when absent.
 *
 * NOTE(review): this acquires the WRITE lock for what looks like a read-only
 * lookup — possibly because indexOfValue mutates an internal index cache; confirm
 * before relaxing to the read lock.
 */
@Override
public ValueMetaInterface searchValueMeta( String valueName ) {
    lock.writeLock().lock();
    try {
        Integer index = indexOfValue( valueName );
        if ( index < 0 ) {
            return null;
        }
        return valueMetaList.get( index );
    } finally {
        lock.writeLock().unlock();
    }
}
// searchValueMeta returns the live ValueMeta instance: renaming it externally must
// be reflected in subsequent lookups under the new name.
@Test
public void testExternalValueMetaModification() {
    ValueMetaInterface vmi = rowMeta.searchValueMeta( "string" );
    vmi.setName( "string2" );
    assertNotNull( rowMeta.searchValueMeta( vmi.getName() ) );
}
/**
 * Executes a configured statement: refuses while the command runner is degraded,
 * runs the injector chain (which may have side effects such as topic creation),
 * then delegates to executeInjected. On any failure the injector's side effects
 * are reverted before the exception is rethrown.
 */
public StatementExecutorResponse execute(
    final ConfiguredStatement<? extends Statement> statement,
    final KsqlExecutionContext executionContext,
    final KsqlSecurityContext securityContext
) {
    // A non-empty warning means the command runner is in a degraded state; refuse work.
    final String commandRunnerWarningString = commandRunnerWarning.get();
    if (!commandRunnerWarningString.equals("")) {
        throw new KsqlServerException("Failed to handle Ksql Statement."
            + System.lineSeparator()
            + commandRunnerWarningString);
    }
    final InjectorWithSideEffects injector = InjectorWithSideEffects.wrap(
        injectorFactory.apply(executionContext, securityContext.getServiceContext()));
    final ConfiguredStatementWithSideEffects<?> injectedWithSideEffects =
        injector.injectWithSideEffects(statement);
    try {
        return executeInjected(
            injectedWithSideEffects.getStatement(),
            statement,
            executionContext,
            securityContext);
    } catch (Exception e) {
        // Undo whatever the injectors created (e.g. topics) before propagating.
        injector.revertSideEffects(injectedWithSideEffects);
        throw e;
    }
}
// When committing the command-topic transaction fails, the distributor must wrap
// the error in a KsqlStatementException and abort the queued command.
@Test
public void shouldAbortOnError_Exception() {
    // Given:
    doThrow(new RuntimeException("Error!")).when(transactionalProducer).commitTransaction();
    // When:
    final KsqlStatementException e = assertThrows(
        KsqlStatementException.class,
        () -> distributor.execute(CONFIGURED_STATEMENT, executionContext, securityContext)
    );
    assertThat(e.getMessage(), containsString("Could not write the statement into the command topic."));
    assertThat(e.getUnloggedMessage(), containsString("Could not write the statement "
        + "'statement' into the command topic."));
    assertThat(e.getSqlStatement(), containsString("statement"));
    // Then:
    verify(queue).abortCommand(IDGEN.getCommandId(CONFIGURED_STATEMENT.getStatement()));
}
// Convenience overload: fence the target with no fencer arguments.
public boolean fence(HAServiceTarget fromSvc) {
    return fence(fromSvc, null);
}
// The arg-less fence() overload must invoke the fencer once with a null argument list.
@Test
public void testArglessFencer() throws BadFencingConfigurationException {
    NodeFencer fencer = setupFencer(AlwaysSucceedFencer.class.getName());
    assertTrue(fencer.fence(MOCK_TARGET));
    // One call to each, since top fencer fails
    assertEquals(1, AlwaysSucceedFencer.fenceCalled);
    assertSame(MOCK_TARGET, AlwaysSucceedFencer.fencedSvc);
    assertEquals(null, AlwaysSucceedFencer.callArgs.get(0));
}
/**
 * Builds the standard "invalid password file" exception for a given line, keeping
 * the original cause attached.
 */
private static RuntimeException invalidFile(int lineNumber, String message, Throwable cause) {
    final String detail = format("Error in password file line %s: %s", lineNumber, message);
    return new PrestoException(CONFIGURATION_INVALID, detail, cause);
}
// Each malformed password-file variant must surface a line-numbered error message:
// missing separator, duplicate user, unknown hash, bad BCrypt, bad PBKDF2.
@Test
public void testInvalidFile() {
    assertThatThrownBy(() -> createStore("", "junk"))
        .hasMessage("Error in password file line 2: Expected two parts for user and password");
    assertThatThrownBy(() -> createStore("abc:" + BCRYPT_PASSWORD, "xyz:" + BCRYPT_PASSWORD, "abc:" + PBKDF2_PASSWORD))
        .hasMessage("Error in password file line 3: Duplicate user: abc");
    assertThatThrownBy(() -> createStore("x:x"))
        .hasMessage("Error in password file line 1: Password hashing algorithm cannot be determined");
    assertThatThrownBy(() -> createStore("x:$2y$xxx"))
        .hasMessage("Error in password file line 1: Invalid BCrypt password");
    assertThatThrownBy(() -> createStore("x:x:x"))
        .hasMessage("Error in password file line 1: Invalid PBKDF2 password");
}
/**
 * Accepts a file only when the wrapped predicate matches AND the file's status is
 * not SAME (i.e. the file changed since the previous analysis). The wrapped
 * predicate is evaluated first, preserving the original short-circuit order.
 */
@Override
public boolean apply(InputFile inputFile) {
    if (!originalPredicate.apply(inputFile)) {
        return false;
    }
    return inputFile.status() != InputFile.Status.SAME;
}
// An unchanged (SAME) file must be rejected even though the wrapped predicate
// matches; both collaborators are consulted exactly once.
// Renamed: the previous name claimed "predicate_is_false" although the stub
// returns true — the rejection is driven solely by the SAME status.
@Test
public void do_not_apply_when_file_is_same_even_if_predicate_is_true() {
    when(inputFile.status()).thenReturn(InputFile.Status.SAME);
    when(predicate.apply(inputFile)).thenReturn(true);
    Assertions.assertThat(underTest.apply(inputFile)).isFalse();
    verify(predicate, times(1)).apply(any());
    verify(inputFile, times(1)).status();
}
/**
 * Renders the schema as "[name(TYPE),name(TYPE),...]".
 *
 * <p>Fix: the previous implementation replaced the trailing comma via setCharAt,
 * which for an empty schema overwrote the opening '[' and produced "]" instead of
 * "[]". Building with a conditional separator keeps non-empty output identical
 * while handling the empty case correctly.
 */
@Override
public String toString() {
    StringBuilder stringBuilder = new StringBuilder("[");
    int numColumns = _columnNames.length;
    for (int i = 0; i < numColumns; i++) {
        if (i > 0) {
            stringBuilder.append(',');
        }
        stringBuilder.append(_columnNames[i]).append('(').append(_columnDataTypes[i]).append(')');
    }
    return stringBuilder.append(']').toString();
}
// The rendered schema must list every column as name(TYPE), comma-separated,
// wrapped in square brackets.
@Test
public void testToString() {
    DataSchema dataSchema = new DataSchema(COLUMN_NAMES, COLUMN_DATA_TYPES);
    Assert.assertEquals(dataSchema.toString(),
        "[int(INT),long(LONG),float(FLOAT),double(DOUBLE),string(STRING),object(OBJECT),int_array(INT_ARRAY),"
            + "long_array(LONG_ARRAY),float_array(FLOAT_ARRAY),double_array(DOUBLE_ARRAY),string_array(STRING_ARRAY),"
            + "boolean_array(BOOLEAN_ARRAY),timestamp_array(TIMESTAMP_ARRAY),bytes_array(BYTES_ARRAY)]");
}
// Encodes a MappingAction to JSON by delegating to the dedicated helper encoder.
@Override
public ObjectNode encode(MappingAction action, CodecContext context) {
    EncodeMappingActionCodecHelper encoder = new EncodeMappingActionCodecHelper(action, context);
    return encoder.encode();
}
// Encoding a no-op mapping action must produce JSON matching the action.
@Test
public void noActionTest() {
    final NoMappingAction action = MappingActions.noAction();
    final ObjectNode actionJson = actionCodec.encode(action, context);
    assertThat(actionJson, matchesAction(action));
}
// This handler only deals with batches that contain indexing failures.
@Override
public boolean supports(FailureBatch failureBatch) {
    return failureBatch.containsIndexingFailures();
}
// Processing-failure batches must be rejected by the indexing-failure handler.
@Test
public void supports_processingFailuresNotSupported() {
    assertThat(underTest.supports(FailureBatch.processingFailureBatch(new ArrayList<>()))).isFalse();
}
// Creates a mutable copy of this B-tree; the current root loggable is marked
// expired because the mutable copy will supersede it when saved.
@Override
@NotNull
public BTreeMutable getMutableCopy() {
    final BTreeMutable result = new BTreeMutable(this);
    result.addExpiredLoggable(rootLoggable);
    return result;
}
// In a duplicates-enabled tree, putting an existing key must ADD a value rather
// than overwrite, and the interleaved key ordering must survive save and reopen.
@Test
public void testPutOverwriteTreeWithDuplicates() {
    // add existing key to tree that supports duplicates
    tm = new BTreeEmpty(log, createTestSplittingPolicy(), true, 1).getMutableCopy();
    for (int i = 0; i < 100; i++) {
        getTreeMutable().put(kv(i, "v" + i));
    }
    checkTree(getTreeMutable(), 100).run();
    // put must add 100 new values
    for (int i = 0; i < 100; i++) {
        final INode ln = kv(i, "vv" + i);
        getTreeMutable().put(ln);
    }
    // expected nodes
    List<INode> l = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
        l.add(kv(i, "v" + i));
        l.add(kv(i, "vv" + i));
    }
    assertMatchesIterator(tm, l);
    long rootAddress = saveTree();
    assertMatchesIterator(tm, l);
    reopen();
    t = new BTree(log, rootAddress, true, 1);
    assertMatchesIterator(tm, l);
}
// Deserializes the plugin's JSON response body into a PublishArtifactResponse.
@Override public PublishArtifactResponse publishArtifactResponse(String responseBody) { return PublishArtifactResponse.fromJSON(responseBody); }
// Verifies that the converter maps the "metadata" JSON object onto the
// response's metadata map, preserving the single artifact-version entry.
@Test public void publishArtifactResponse_shouldDeserializeFromJson() { final ArtifactMessageConverterV2 converter = new ArtifactMessageConverterV2(); final PublishArtifactResponse response = converter.publishArtifactResponse(""" { "metadata": { "artifact-version": "10.12.0" } }"""); MatcherAssert.assertThat(response.getMetadata().size(), is(1)); MatcherAssert.assertThat(response.getMetadata(), hasEntry("artifact-version", "10.12.0")); }
// Blocks on the reactive delegate fetcher and returns its values;
// requireNonNull fails fast if the delegate's Mono completes empty.
@NonNull @Override public Map<String, JsonNode> getValues() { return Objects.requireNonNull(delegateFetcher.getValues().block()); }
// Verifies the fetched values and that repeated getValues() calls reuse the
// cached result: the client's fetch must be invoked exactly once overall.
@Test void getValues() throws JSONException { Map<String, JsonNode> values = settingFetcher.getValues(); verify(client, times(1)).fetch(eq(ConfigMap.class), any()); assertThat(values).hasSize(2); JSONAssert.assertEquals(getSns(), JsonUtils.objectToJson(values.get("sns")), true); // The extensionClient will only be called once Map<String, JsonNode> callAgain = settingFetcher.getValues(); assertThat(callAgain).isNotNull(); verify(client, times(1)).fetch(eq(ConfigMap.class), any()); }
/**
 * Renders the Beam pipeline DAG as a JSON string.
 * Traverses the pipeline topologically; the visitor accumulates JSON as it walks.
 */
public static String toJsonString(Pipeline pipeline, ConfigContext ctx) {
    final PipelineJsonRenderer renderer = new PipelineJsonRenderer(ctx);
    pipeline.traverseTopologically(renderer);
    return renderer.jsonBuilder.toString();
}
// Builds a small windowed-sum pipeline, applies the Samza transform overrides,
// renders it to JSON, and compares against a golden DAG file. Line separators
// are stripped before comparison so the test is platform independent.
@Test public void testCompositePipeline() throws IOException { SamzaPipelineOptions options = PipelineOptionsFactory.create().as(SamzaPipelineOptions.class); options.setRunner(SamzaRunner.class); options.setJobName("TestEnvConfig"); options.setSamzaExecutionEnvironment(SamzaExecutionEnvironment.LOCAL); Pipeline p = Pipeline.create(options); p.apply( Create.timestamped( TimestampedValue.of(KV.of(1, 1), new Instant(1)), TimestampedValue.of(KV.of(2, 2), new Instant(2)))) .apply(Window.into(FixedWindows.of(Duration.millis(10)))) .apply(Sum.integersPerKey()); p.replaceAll(SamzaTransformOverrides.getDefaultOverrides()); final Map<PValue, String> idMap = PViewToIdMapper.buildIdMap(p); final Set<String> nonUniqueStateIds = StateIdParser.scan(p); final ConfigContext ctx = new ConfigContext(idMap, nonUniqueStateIds, options); String jsonDagFileName = "src/test/resources/ExpectedDag.json"; String jsonDag = new String(Files.readAllBytes(Paths.get(jsonDagFileName)), StandardCharsets.UTF_8); String renderedDag = PipelineJsonRenderer.toJsonString(p, ctx); assertEquals( JsonParser.parseString(jsonDag), JsonParser.parseString(renderedDag.replaceAll(System.lineSeparator(), ""))); }
/**
 * Proceeds with the intercepted invocation and wraps its RxJava2 return value
 * with the given time limiter.
 */
@Override
public Object handle(ProceedingJoinPoint proceedingJoinPoint, TimeLimiter timeLimiter, String methodName) throws Throwable {
    TimeLimiterTransformer<?> transformer = TimeLimiterTransformer.of(timeLimiter);
    Object result = proceedingJoinPoint.proceed();
    return executeRxJava2Aspect(transformer, result, methodName);
}
@Test
public void shouldThrowIllegalArgumentExceptionWithNotRxJava2Type() throws Throwable {
    // A plain String return value is not a supported RxJava2 reactive type.
    TimeLimiter timeLimiter = TimeLimiter.ofDefaults("test");
    when(proceedingJoinPoint.proceed()).thenReturn("NOT RXJAVA2 TYPE");
    try {
        rxJava2TimeLimiterAspectExt.handle(proceedingJoinPoint, timeLimiter, "testMethod");
        fail("exception missed");
    } catch (Throwable thrown) {
        assertThat(thrown).isInstanceOf(IllegalReturnTypeException.class)
            .hasMessage(
                "java.lang.String testMethod has unsupported by @TimeLimiter return type. RxJava2 expects Flowable/Single/...");
    }
}
/**
 * Parses a Consul wait-duration string into seconds.
 * Supports an "Ns" suffix (seconds) and an "Nm" suffix (minutes); any other
 * format falls back to the default of 600 seconds (10 minutes).
 *
 * @param wait duration string such as "600s" or "10m"
 * @return the wait time in seconds, or 600 if the suffix is unrecognized
 */
public static int getWaitInSecond(String wait) {
    // Default of 10 minutes is returned when neither suffix matches.
    int w = 600;
    if (wait.endsWith("s")) {
        // Strip the trailing "s"; the numeric part is already in seconds.
        // parseInt avoids the boxing of Integer.valueOf followed by auto-unboxing.
        w = Integer.parseInt(wait.substring(0, wait.length() - 1));
    } else if (wait.endsWith("m")) {
        // Strip the trailing "m" and convert minutes to seconds.
        w = Integer.parseInt(wait.substring(0, wait.length() - 1)) * 60;
    }
    return w;
}
@Test
public void testGetWaitInSecond() {
    // Seconds suffix parses directly; minutes suffix converts to seconds.
    Assert.assertEquals(600, ConsulUtils.getWaitInSecond("600s"));
    Assert.assertEquals(600, ConsulUtils.getWaitInSecond("10m"));
}
// Converts a Flink table expression into an Iceberg filter expression, when a
// mapping exists. Returns Optional.empty() for non-call expressions and for
// function definitions not present in the FILTERS table. EQ/NOT_EQ literals of
// NaN are rewritten to isNaN/notNaN because NaN never compares equal. For the
// ordering comparisons, the second function argument is the mirrored operator
// used when the literal appears on the left-hand side of the field reference.
public static Optional<Expression> convert( org.apache.flink.table.expressions.Expression flinkExpression) { if (!(flinkExpression instanceof CallExpression)) { return Optional.empty(); } CallExpression call = (CallExpression) flinkExpression; Operation op = FILTERS.get(call.getFunctionDefinition()); if (op != null) { switch (op) { case IS_NULL: return onlyChildAs(call, FieldReferenceExpression.class) .map(FieldReferenceExpression::getName) .map(Expressions::isNull); case NOT_NULL: return onlyChildAs(call, FieldReferenceExpression.class) .map(FieldReferenceExpression::getName) .map(Expressions::notNull); case LT: return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call); case LT_EQ: return convertFieldAndLiteral( Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call); case GT: return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call); case GT_EQ: return convertFieldAndLiteral( Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call); case EQ: return convertFieldAndLiteral( (ref, lit) -> { if (NaNUtil.isNaN(lit)) { return Expressions.isNaN(ref); } else { return Expressions.equal(ref, lit); } }, call); case NOT_EQ: return convertFieldAndLiteral( (ref, lit) -> { if (NaNUtil.isNaN(lit)) { return Expressions.notNaN(ref); } else { return Expressions.notEqual(ref, lit); } }, call); case NOT: return onlyChildAs(call, CallExpression.class) .flatMap(FlinkFilters::convert) .map(Expressions::not); case AND: return convertLogicExpression(Expressions::and, call); case OR: return convertLogicExpression(Expressions::or, call); case STARTS_WITH: return convertLike(call); } } return Optional.empty(); }
@Test
public void testOr() {
  // field1 = 1 OR field2 = 2L must convert to an Iceberg Or with matching operands.
  Expression flinkExpr =
      resolve(
          Expressions.$("field1")
              .isEqual(Expressions.lit(1))
              .or(Expressions.$("field2").isEqual(Expressions.lit(2L))));
  Optional<org.apache.iceberg.expressions.Expression> converted = FlinkFilters.convert(flinkExpr);
  assertThat(converted).isPresent();
  Or actualOr = (Or) converted.get();
  Or expectedOr =
      (Or)
          org.apache.iceberg.expressions.Expressions.or(
              org.apache.iceberg.expressions.Expressions.equal("field1", 1),
              org.apache.iceberg.expressions.Expressions.equal("field2", 2L));
  assertPredicatesMatch(expectedOr.left(), actualOr.left());
  assertPredicatesMatch(expectedOr.right(), actualOr.right());
}
@Override public EdgeIteratorState getEdgeIteratorState(int edgeId, int adjNode) { EdgeIteratorStateImpl edge = new EdgeIteratorStateImpl(this); if (edge.init(edgeId, adjNode)) return edge; // if edgeId exists but adjacent nodes do not match return null; }
// An adjacent-node id far outside any valid range must be rejected eagerly.
@Test public void outOfBounds() { BaseGraph graph = createGHStorage(); assertThrows(IllegalArgumentException.class, () -> graph.getEdgeIteratorState(0, Integer.MIN_VALUE)); }
// Static factory: creates a RowCoder for rows conforming to the given schema.
public static RowCoder of(Schema schema) { return new RowCoder(schema); }
// Round-trips a row containing every primitive schema field type through the
// coder (decode(encode(row)) must equal row). The DateTime is pinned to UTC so
// equality does not depend on the machine's default time zone.
@Test public void testPrimitiveTypes() throws Exception { Schema schema = Schema.builder() .addByteField("f_byte") .addInt16Field("f_int16") .addInt32Field("f_int32") .addInt64Field("f_int64") .addDecimalField("f_decimal") .addFloatField("f_float") .addDoubleField("f_double") .addStringField("f_string") .addDateTimeField("f_datetime") .addBooleanField("f_boolean") .build(); DateTime dateTime = new DateTime().withDate(1979, 03, 14).withTime(1, 2, 3, 4).withZone(DateTimeZone.UTC); Row row = Row.withSchema(schema) .addValues( (byte) 0, (short) 1, 2, 3L, new BigDecimal("2.3"), 1.2f, 3.0d, "str", dateTime, false) .build(); CoderProperties.coderDecodeEncodeEqual(RowCoder.of(schema), row); }
public void formatSource(CharSource input, CharSink output) throws FormatterException, IOException { // TODO(cushon): proper support for streaming input/output. Input may // not be feasible (parsing) but output should be easier. output.write(formatSource(input.read())); }
@Test
public void testTrailingCommentWithoutTerminalNewline() throws Exception {
    // The formatter must append the missing terminal newline after a trailing comment.
    String formatted = new Formatter().formatSource("/*\n * my comment */");
    assertThat(formatted).isEqualTo("/*\n * my comment */\n");
}
// Convenience overload: analyzes the statement with the boolean flag defaulted
// to false (see the two-argument overload for its meaning).
public Analysis analyze(Statement statement) { return analyze(statement, false); }
// Each of these queries embeds a scalar subquery (VALUES / SELECT) in a
// projection or predicate position; all must pass analysis without error.
@Test public void testScalarSubQuery() { analyze("SELECT 'a', (VALUES 1) GROUP BY 1"); analyze("SELECT 'a', (SELECT (1))"); analyze("SELECT * FROM t1 WHERE (VALUES 1) = 2"); analyze("SELECT * FROM t1 WHERE (VALUES 1) IN (VALUES 1)"); analyze("SELECT * FROM t1 WHERE (VALUES 1) IN (2)"); analyze("SELECT * FROM (SELECT 1) t1(x) WHERE x IN (SELECT 1)"); }
// Assembles producer/consumer/admin client configs and the config-topic
// definition, then creates the KafkaBasedLog backing the config store.
// When exactly-once source support is enabled, the consumer is forced to
// read_committed isolation so uncommitted task configs are never observed.
// The topic is compacted with a single partition so the log is totally ordered.
KafkaBasedLog<String, byte[]> setupAndCreateKafkaBasedLog(String topic, final WorkerConfig config) { String clusterId = config.kafkaClusterId(); Map<String, Object> originals = config.originals(); Map<String, Object> producerProps = new HashMap<>(baseProducerProps); producerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId); Map<String, Object> consumerProps = new HashMap<>(originals); consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName()); consumerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId); ConnectUtils.addMetricsContextProperties(consumerProps, config, clusterId); if (config.exactlyOnceSourceEnabled()) { ConnectUtils.ensureProperty( consumerProps, ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.toString(), "for the worker's config topic consumer when exactly-once source support is enabled", true ); } Map<String, Object> adminProps = new HashMap<>(originals); ConnectUtils.addMetricsContextProperties(adminProps, config, clusterId); adminProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId); Map<String, Object> topicSettings = config instanceof DistributedConfig ? ((DistributedConfig) config).configStorageTopicSettings() : Collections.emptyMap(); NewTopic topicDescription = TopicAdmin.defineTopic(topic) .config(topicSettings) // first so that we override user-supplied settings as needed .compacted() .partitions(1) .replicationFactor(config.getShort(DistributedConfig.CONFIG_STORAGE_REPLICATION_FACTOR_CONFIG)) .build(); return createKafkaBasedLog(topic, producerProps, consumerProps, new ConsumeCallback(), topicDescription, topicAdminSupplier, config, time); }
// With exactly-once support only "preparing" (not enabled), a user-supplied
// read_uncommitted isolation level must be passed through to the consumer
// unchanged rather than being forced to read_committed.
@Test public void testConsumerPropertiesDoNotOverrideUserSuppliedValuesWithoutExactlyOnceSourceEnabled() { props.put(EXACTLY_ONCE_SOURCE_SUPPORT_CONFIG, "preparing"); props.put(ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_UNCOMMITTED.toString()); createStore(); configStorage.setupAndCreateKafkaBasedLog(TOPIC, config); verifyConfigure(); assertEquals( IsolationLevel.READ_UNCOMMITTED.toString(), capturedConsumerProps.getValue().get(ISOLATION_LEVEL_CONFIG) ); }
// Exposes the underlying Curator client instance.
public CuratorFramework getClient() { return client; }
@Test
void getClient() {
    // The wrapper must hand back the Curator framework it was constructed with.
    CuratorFramework framework = client.getClient();
    assertNotNull(framework);
}
// Tries each configured provider in turn via applyAuthProcessor and returns
// the principal from the first provider that successfully authenticates.
@Override public String authenticate(AuthenticationDataSource authData) throws AuthenticationException { return applyAuthProcessor( providers, provider -> provider.authenticate(authData) ); }
// Tokens signed by either provider (A or B) for either subject must all
// authenticate to the expected subject; delegates to the parameterized helper.
@Test public void testAuthenticate() throws Exception { testAuthenticate(tokenAA, SUBJECT_A); testAuthenticate(tokenAB, SUBJECT_B); testAuthenticate(tokenBA, SUBJECT_A); testAuthenticate(tokenBB, SUBJECT_B); }
/**
 * Renders a PropertiesList as a CLI table, applying any client-side
 * property overrides before the rows are generated.
 */
@Override
public Table buildTable(final PropertiesList entity) {
    final PropertiesList withOverrides = propertiesListWithOverrides(entity);
    return new Builder()
        .withColumnHeaders(HEADERS)
        .withRows(defRowValues(withOverrides))
        .build();
}
@Test public void shouldHandleClientOverwrittenProperties() { // Given: final PropertiesList propList = new PropertiesList("list properties;", ImmutableList.of(new Property(SOME_KEY, "KSQL", "earliest")), ImmutableList.of(SOME_KEY), Collections.emptyList() ); // When: final Table table = builder.buildTable(propList); // Then: assertThat(getRows(table), contains(row(SOME_KEY, "KSQL", "SESSION", "earliest"))); }
// Downloads the agent jar, plugins zip and TFS impl from the server (if stale),
// launches the agent as a child process, pumps its stdout/stderr through log
// appenders (closing the child's stdin to avoid Windows pipe blocking), and
// waits for it to exit. A JVM shutdown hook kills the child if the bootstrapper
// dies first. Returns the agent's exit code, or EXCEPTION_OCCURRED on failure;
// an InterruptedException destroys the agent so the caller can respawn it.
@Override public int run(String launcherVersion, String launcherMd5, ServerUrlGenerator urlGenerator, Map<String, String> env, Map<String, String> context) { int exitValue = 0; LOG.info("Agent launcher is version: {}", CurrentGoCDVersion.getInstance().fullVersion()); String[] command = new String[]{}; try { AgentBootstrapperArgs bootstrapperArgs = AgentBootstrapperArgs.fromProperties(context); ServerBinaryDownloader agentDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs); agentDownloader.downloadIfNecessary(DownloadableFile.AGENT); ServerBinaryDownloader pluginZipDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs); pluginZipDownloader.downloadIfNecessary(DownloadableFile.AGENT_PLUGINS); ServerBinaryDownloader tfsImplDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs); tfsImplDownloader.downloadIfNecessary(DownloadableFile.TFS_IMPL); command = agentInvocationCommand(agentDownloader.getMd5(), launcherMd5, pluginZipDownloader.getMd5(), tfsImplDownloader.getMd5(), env, context, agentDownloader.getExtraProperties()); LOG.info("Launching Agent with command: {}", join(command, " ")); Process agent = invoke(command); // The next lines prevent the child process from blocking on Windows AgentOutputAppender agentOutputAppenderForStdErr = new AgentOutputAppender(GO_AGENT_STDERR_LOG); AgentOutputAppender agentOutputAppenderForStdOut = new AgentOutputAppender(GO_AGENT_STDOUT_LOG); if (new SystemEnvironment().consoleOutToStdout()) { agentOutputAppenderForStdErr.writeTo(AgentOutputAppender.Outstream.STDERR); agentOutputAppenderForStdOut.writeTo(AgentOutputAppender.Outstream.STDOUT); } agent.getOutputStream().close(); AgentConsoleLogThread stdErrThd = new AgentConsoleLogThread(agent.getErrorStream(), agentOutputAppenderForStdErr); stdErrThd.start(); AgentConsoleLogThread stdOutThd = new AgentConsoleLogThread(agent.getInputStream(), agentOutputAppenderForStdOut); stdOutThd.start(); Shutdown shutdownHook = new Shutdown(agent); Runtime.getRuntime().addShutdownHook(shutdownHook); try { exitValue = agent.waitFor(); } catch (InterruptedException ie) { LOG.error("Agent was interrupted. Terminating agent and respawning. {}", ie.toString()); agent.destroy(); } finally { removeShutdownHook(shutdownHook); stdErrThd.stopAndJoin(); stdOutThd.stopAndJoin(); } } catch (Exception e) { LOG.error("Exception while executing command: {} - {}", join(command, " "), e.toString()); exitValue = EXCEPTION_OCCURRED; } return exitValue; }
// Verifies that URL-encoded extra properties advertised by the server (spaces
// encoded as %20, entries separated by a literal space) are decoded and passed
// to the agent JVM as -D system properties, in order, ahead of the md5 flags.
@Test public void shouldAddAnyExtraPropertiesFoundToTheAgentInvocation() throws InterruptedException { final List<String> cmd = new ArrayList<>(); String expectedAgentMd5 = TEST_AGENT.getMd5(); String expectedAgentPluginsMd5 = TEST_AGENT_PLUGINS.getMd5(); String expectedTfsMd5 = TEST_TFS_IMPL.getMd5(); server.setExtraPropertiesHeaderValue("extra.property=value1%20with%20space extra%20property%20with%20space=value2%20with%20space"); AgentProcessParentImpl bootstrapper = createBootstrapper(cmd); int returnCode = bootstrapper.run("launcher_version", "bar", getURLGenerator(), new HashMap<>(), context()); assertThat(returnCode, is(42)); assertThat(cmd.toArray(new String[]{}), equalTo(new String[]{ (getProperty("java.home") + FileSystems.getDefault().getSeparator() + "bin" + FileSystems.getDefault().getSeparator() + "java"), "-Dextra.property=value1 with space", "-Dextra property with space=value2 with space", "-Dagent.plugins.md5=" + expectedAgentPluginsMd5, "-Dagent.binary.md5=" + expectedAgentMd5, "-Dagent.launcher.md5=bar", "-Dagent.tfs.md5=" + expectedTfsMd5, "-Dagent.bootstrapper.version=UNKNOWN", "-jar", "agent.jar", "-serverUrl", "http://localhost:" + server.getPort() + "/go/", "-sslVerificationMode", "NONE", "-rootCertFile", new File("/path/to/cert.pem").getAbsolutePath() })); }
/**
 * Requests a register lease from the master for the given worker, retrying the
 * RPC via retryRPC until it succeeds or the retry policy is exhausted.
 *
 * @param workerId the id of the worker requesting the lease
 * @param estimatedBlockCount the estimated number of blocks to be registered
 * @return the master's lease response
 * @throws IOException if the RPC ultimately fails
 */
private GetRegisterLeasePResponse acquireRegisterLease(
    final long workerId, final int estimatedBlockCount) throws IOException {
  return retryRPC(() -> {
    LOG.info("Requesting lease with workerId {}, blockCount {}", workerId, estimatedBlockCount);
    GetRegisterLeasePRequest request = GetRegisterLeasePRequest.newBuilder()
        .setWorkerId(workerId)
        .setBlockCount(estimatedBlockCount)
        .build();
    return mClient.requestRegisterLease(request);
  }, LOG, "GetRegisterLease", "workerId=%d, estimatedBlockCount=%d", workerId, estimatedBlockCount);
}
// Happy path: delegates to the parameterized helper with "lease granted".
@Test public void acquireRegisterLeaseSuccess() throws Exception { acquireRegisterLease(true); }
/**
 * Parses view metadata from a JSON string, recording the location the
 * metadata was read from.
 */
public static ViewMetadata fromJson(String metadataLocation, String json) {
    return JsonUtil.parse(json, jsonNode -> ViewMetadataParser.fromJson(metadataLocation, jsonNode));
}
// Metadata JSON missing the required "current-version-id" field must fail
// parsing with a precise error message.
@Test public void failReadingViewMetadataMissingVersion() throws Exception { String json = readViewMetadataInputFile("org/apache/iceberg/view/ViewMetadataMissingCurrentVersion.json"); assertThatThrownBy(() -> ViewMetadataParser.fromJson(json)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Cannot parse missing int: current-version-id"); }
/**
 * Exports contacts, resuming from a pagination token when export information
 * is provided.
 *
 * <p>Optional.map yields an empty Optional both when exportInformation is
 * absent and when its pagination data is null — exactly matching the original
 * if/else with Optional.ofNullable.
 */
@Override
public ExportResult<ContactsModelWrapper> export(
    UUID jobId, TokensAndUrlAuthData authData, Optional<ExportInformation> exportInformation) {
  Optional<StringPaginationToken> paginationToken =
      exportInformation.map(info -> (StringPaginationToken) info.getPaginationData());
  return exportContacts(authData, paginationToken);
}
// A subsequent page with no further pages: the page token must be applied to
// the request *before* execute(), and the continuation data must be empty
// (no more resources, null pagination token).
@Test public void exportSubsequentPage() throws IOException { setUpSinglePersonResponse(); // Looking at a subsequent page, with no pages after it PaginationData paginationData = new StringPaginationToken(NEXT_PAGE_TOKEN); ExportInformation exportInformation = new ExportInformation(paginationData, null); listConnectionsResponse.setNextPageToken(null); when(listConnectionsRequest.setPageToken(NEXT_PAGE_TOKEN)).thenReturn(listConnectionsRequest); // Run test ExportResult<ContactsModelWrapper> result = contactsService.export(UUID.randomUUID(), null, Optional.of(exportInformation)); // Verify correct calls were made - i.e., token was added before execution InOrder inOrder = Mockito.inOrder(listConnectionsRequest); inOrder.verify(listConnectionsRequest).setPageToken(NEXT_PAGE_TOKEN); inOrder.verify(listConnectionsRequest).execute(); // Check continuation data ContinuationData continuationData = (ContinuationData) result.getContinuationData(); assertThat(continuationData.getContainerResources()).isEmpty(); assertThat(continuationData.getPaginationData()).isNull(); }
// Awaiting termination is deliberately unsupported on this managed executor.
@Override public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException { throw new UnsupportedOperationException(); }
// The managed executor must reject awaitTermination outright.
@Test(expected = UnsupportedOperationException.class) public void awaitTermination() throws Exception { newManagedExecutorService().awaitTermination(1, TimeUnit.SECONDS); }
@Override @SuppressWarnings("rawtypes") public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) { final long timestamp = clock.getTime() / 1000; // oh it'd be lovely to use Java 7 here try { graphite.connect(); for (Map.Entry<String, Gauge> entry : gauges.entrySet()) { reportGauge(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Counter> entry : counters.entrySet()) { reportCounter(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Histogram> entry : histograms.entrySet()) { reportHistogram(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Meter> entry : meters.entrySet()) { reportMetered(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Timer> entry : timers.entrySet()) { reportTimer(entry.getKey(), entry.getValue(), timestamp); } graphite.flush(); } catch (IOException e) { LOGGER.warn("Unable to report to Graphite", graphite, e); } finally { try { graphite.close(); } catch (IOException e1) { LOGGER.warn("Error closing Graphite", graphite, e1); } } }
// Even when connect() fails, the reporter must still close the Graphite
// connection (finally block) and must not attempt any reporting calls.
@Test public void closesConnectionIfGraphiteIsUnavailable() throws Exception { doThrow(new UnknownHostException("UNKNOWN-HOST")).when(graphite).connect(); reporter.report(map("gauge", gauge(1)), map(), map(), map(), map()); final InOrder inOrder = inOrder(graphite); inOrder.verify(graphite).connect(); inOrder.verify(graphite).close(); verifyNoMoreInteractions(graphite); }