Dataset schema:
  focal_method — string column, lengths 13 to 60.9k characters
  test_case   — string column, lengths 25 to 109k characters
/**
 * Parses command-line options into a {@code CommandLineOptions}.
 *
 * <p>Params files are expanded first; the first token not starting with "-" is treated as a file
 * name, and it plus all remaining tokens are collected as files (parsing of flags stops there).
 * Flags may be given as {@code --flag=value} or {@code --flag value}. Unknown flags raise
 * {@link IllegalArgumentException}. Branch order in the switch is significant — do not reorder.
 *
 * @param options raw command-line tokens
 * @return the populated options object
 * @throws IllegalArgumentException on an unrecognized flag
 */
static CommandLineOptions parse(Iterable<String> options) { CommandLineOptions.Builder optionsBuilder = CommandLineOptions.builder(); List<String> expandedOptions = new ArrayList<>(); expandParamsFiles(options, expandedOptions); Iterator<String> it = expandedOptions.iterator(); while (it.hasNext()) { String option = it.next(); if (!option.startsWith("-")) { optionsBuilder.filesBuilder().add(option).addAll(it); break; } String flag; String value; int idx = option.indexOf('='); if (idx >= 0) { flag = option.substring(0, idx); value = option.substring(idx + 1); } else { flag = option; value = null; } // NOTE: update usage information in UsageException when new flags are added switch (flag) { case "-i": case "-r": case "-replace": case "--replace": optionsBuilder.inPlace(true); break; case "--lines": case "-lines": case "--line": case "-line": parseRangeSet(optionsBuilder.linesBuilder(), getValue(flag, it, value)); break; case "--offset": case "-offset": optionsBuilder.addOffset(parseInteger(it, flag, value)); break; case "--length": case "-length": optionsBuilder.addLength(parseInteger(it, flag, value)); break; case "--aosp": case "-aosp": case "-a": optionsBuilder.aosp(true); break; case "--version": case "-version": case "-v": optionsBuilder.version(true); break; case "--help": case "-help": case "-h": optionsBuilder.help(true); break; case "--fix-imports-only": optionsBuilder.fixImportsOnly(true); break; case "--skip-sorting-imports": optionsBuilder.sortImports(false); break; case "--skip-removing-unused-imports": optionsBuilder.removeUnusedImports(false); break; case "--skip-reflowing-long-strings": optionsBuilder.reflowLongStrings(false); break; case "--skip-javadoc-formatting": optionsBuilder.formatJavadoc(false); break; case "-": optionsBuilder.stdin(true); break; case "-n": case "--dry-run": optionsBuilder.dryRun(true); break; case "--set-exit-if-changed": optionsBuilder.setExitIfChanged(true); break; case "-assume-filename": case "--assume-filename": 
optionsBuilder.assumeFilename(getValue(flag, it, value)); break; default: throw new IllegalArgumentException("unexpected flag: " + flag); } } return optionsBuilder.build(); }
// Verifies that the --skip-reflowing-long-strings flag turns reflowLongStrings off.
@Test public void skipReflowLongStrings() { assertThat( CommandLineOptionsParser.parse(Arrays.asList("--skip-reflowing-long-strings")) .reflowLongStrings()) .isFalse(); }
/**
 * Subscribes to all clusters of the given service by delegating to the overload
 * with an empty (mutable) cluster list.
 */
@Override public void subscribe(String serviceName, EventListener listener) throws NacosException { subscribe(serviceName, new ArrayList<>(), listener); }
// A null listener must not register anything: neither the change notifier nor the proxy
// should be invoked.
@Test void testSubscribeWithNullListener() throws NacosException { String serviceName = "service1"; String groupName = "group1"; //when client.subscribe(serviceName, groupName, null); //then verify(changeNotifier, never()).registerListener(groupName, serviceName, new NamingSelectorWrapper(NamingSelectorFactory.newIpSelector(""), null)); verify(proxy, never()).subscribe(serviceName, groupName, ""); }
/**
 * Locates the hosts that can serve each partition relevant to a pull query.
 *
 * <p>Key-based metadata lookup is used only for exactly one single-column key that is not a
 * range scan (see issue #7174); otherwise metadata for all partitions is fetched. Throws
 * {@link MaterializationException} when no metadata is available (e.g. the persistent query is
 * restarting).
 *
 * <p>NOTE(review): the error message reads "view the status of your by issuing" — a word seems
 * to be missing, but the exact text is pinned by an existing test, so it is left untouched here.
 *
 * @throws IllegalStateException if a range scan is requested with no keys
 */
@Override public List<KsqlPartitionLocation> locate( final List<KsqlKey> keys, final RoutingOptions routingOptions, final RoutingFilterFactory routingFilterFactory, final boolean isRangeScan ) { if (isRangeScan && keys.isEmpty()) { throw new IllegalStateException("Query is range scan but found no range keys."); } final ImmutableList.Builder<KsqlPartitionLocation> partitionLocations = ImmutableList.builder(); final Set<Integer> filterPartitions = routingOptions.getPartitions(); final Optional<Set<KsqlKey>> keySet = keys.isEmpty() ? Optional.empty() : Optional.of(Sets.newHashSet(keys)); // Depending on whether this is a key-based lookup, determine which metadata method to use. // If we don't have keys, find the metadata for all partitions since we'll run the query for // all partitions of the state store rather than a particular one. //For issue #7174. Temporarily turn off metadata finding for a partition with keys //if there are more than one key. final List<PartitionMetadata> metadata; if (keys.size() == 1 && keys.get(0).getKey().size() == 1 && !isRangeScan) { metadata = getMetadataForKeys(keys, filterPartitions); } else { metadata = getMetadataForAllPartitions(filterPartitions, keySet); } if (metadata.isEmpty()) { final MaterializationException materializationException = new MaterializationException( "Cannot determine which host contains the required partitions to serve the pull query. \n" + "The underlying persistent query may be restarting (e.g. as a result of " + "ALTER SYSTEM) view the status of your by issuing <DESCRIBE foo>."); LOG.debug(materializationException.getMessage()); throw materializationException; } // Go through the metadata and group them by partition. 
for (PartitionMetadata partitionMetadata : metadata) { LOG.debug("Handling pull query for partition {} of state store {}.", partitionMetadata.getPartition(), storeName); final HostInfo activeHost = partitionMetadata.getActiveHost(); final Set<HostInfo> standByHosts = partitionMetadata.getStandbyHosts(); final int partition = partitionMetadata.getPartition(); final Optional<Set<KsqlKey>> partitionKeys = partitionMetadata.getKeys(); LOG.debug("Active host {}, standby {}, partition {}.", activeHost, standByHosts, partition); // For a given partition, find the ordered, filtered list of hosts to consider final List<KsqlNode> filteredHosts = getFilteredHosts(routingOptions, routingFilterFactory, activeHost, standByHosts, partition); partitionLocations.add(new PartitionLocation(partitionKeys, partition, filteredHosts)); } return partitionLocations.build(); }
// locate() must throw MaterializationException (with the exact message) when no
// partition metadata is available for the store.
@Test public void shouldThrowIfMetadataIsEmpty() { // Given: getActiveAndStandbyMetadata(); when(topology.describe()).thenReturn(description); when(description.subtopologies()).thenReturn(ImmutableSet.of(sub1)); when(sub1.nodes()).thenReturn(ImmutableSet.of(source, processor)); when(source.topicSet()).thenReturn(ImmutableSet.of(TOPIC_NAME)); when(processor.stores()).thenReturn(ImmutableSet.of(STORE_NAME)); // When: final Exception e = assertThrows( MaterializationException.class, () -> locator.locate(Collections.emptyList(), routingOptions, routingFilterFactoryActive, false) ); // Then: assertThat(e.getMessage(), is( "Cannot determine which host contains the required partitions to serve the pull query. \n" + "The underlying persistent query may be restarting (e.g. as a result of" + " ALTER SYSTEM) view the status of your by issuing <DESCRIBE foo>.")); }
/** Installs all four font styles (regular, italic, bold, bold-italic). */
public static void install() { installStyle( STYLE_REGULAR ); installStyle( STYLE_ITALIC ); installStyle( STYLE_BOLD ); installStyle( STYLE_BOLD_ITALIC ); }
// After install(), every style of the JetBrains Mono family must resolve correctly.
@Test void testFont() { FlatJetBrainsMonoFont.install(); testFont( FlatJetBrainsMonoFont.FAMILY, Font.PLAIN, 13 ); testFont( FlatJetBrainsMonoFont.FAMILY, Font.ITALIC, 13 ); testFont( FlatJetBrainsMonoFont.FAMILY, Font.BOLD, 13 ); testFont( FlatJetBrainsMonoFont.FAMILY, Font.BOLD | Font.ITALIC, 13 ); }
/**
 * Returns whether {@code sourceHost} is an allowed CORS origin.
 *
 * <p>Accepted when the configured allowed-origins contain "*", when the origin's host equals
 * the local host name or "localhost", or when the raw origin string itself is in the
 * allowed-origins list.
 *
 * @param sourceHost the Origin header value; may be null or empty (treated as no host)
 * @param zConf configuration providing the allowed-origins list
 * @throws UnknownHostException if the local host name cannot be resolved
 * @throws URISyntaxException if {@code sourceHost} is not a valid URI
 */
public static boolean isValidOrigin(String sourceHost, ZeppelinConfiguration zConf)
    throws UnknownHostException, URISyntaxException {
  String sourceUriHost = "";
  if (sourceHost != null && !sourceHost.isEmpty()) {
    sourceUriHost = new URI(sourceHost).getHost();
    // getHost() may be null for opaque/host-less URIs; normalize to "" and lower-case.
    sourceUriHost = (sourceUriHost == null) ? "" : sourceUriHost.toLowerCase();
  }
  // (A redundant second toLowerCase() was removed here: sourceUriHost is already
  // either "" or lower-cased at this point.)
  String currentHost = InetAddress.getLocalHost().getHostName().toLowerCase();
  return zConf.getAllowedOrigins().contains("*")
      || currentHost.equals(sourceUriHost)
      || "localhost".equals(sourceUriHost)
      || zConf.getAllowedOrigins().contains(sourceHost);
}
// A null Origin header must never be accepted.
@Test void nullOrigin() throws URISyntaxException, UnknownHostException { assertFalse(CorsUtils.isValidOrigin(null, ZeppelinConfiguration.load("zeppelin-site.xml"))); }
/** Asserts on the value of the (single) fact with the given key; delegates to doFactValue. */
public StringSubject factValue(String key) { return doFactValue(key, null); }
// Requesting a fact key that is absent must fail with the expected key list.
@Test public void factValueIntFailNoSuchKey() { Object unused = expectFailureWhenTestingThat(fact("foo", "the foo")).factValue("bar", 0); assertFailureKeys("expected to contain fact", "but contained only"); assertFailureValue("expected to contain fact", "bar"); assertFailureValue("but contained only", "[foo]"); }
/**
 * Adapts an {@link Enumeration} to a Traverser. The traverser is single-use (it consumes the
 * enumeration) and rejects null elements, since null is the traverser's end-of-data marker.
 */
@Nonnull public static <T> Traverser<T> traverseEnumeration(@Nonnull Enumeration<T> enumeration) { return () -> enumeration.hasMoreElements() ? requireNonNull(enumeration.nextElement(), "Enumeration contains a null element") : null; }
// The traverser must yield every element of the backing enumeration, in order.
@Test public void when_traverseEnumeration_then_seeAllItems() { validateTraversal(traverseEnumeration(new Vector<>(asList(1, 2)).elements())); }
/** Builds a Statement from the parse tree, supplying the tree's sources as context. */
public Statement buildStatement(final ParserRuleContext parseTree) { return build(Optional.of(getSources(parseTree)), parseTree); }
// A CREATE TABLE AS SELECT without an explicit EMIT clause defaults to EMIT CHANGES
// (i.e. a push query, not a pull query).
@Test public void shouldDefaultToEmitChangesForCtas() { // Given: final SingleStatementContext stmt = givenQuery("CREATE TABLE X AS SELECT COUNT(1) FROM TEST1 GROUP BY ROWKEY;"); // When: final Query result = ((QueryContainer) builder.buildStatement(stmt)).getQuery(); // Then: assertThat("Should be push", result.isPullQuery(), is(false)); assertThat(result.getRefinement().get().getOutputRefinement(), is(OutputRefinement.CHANGES)); }
/**
 * Parses the input string into a Joda {@link DateTime} using the configured pattern and locale.
 * Returns null for null/empty input. The default year is taken from "now" in the configured
 * time zone. When the pattern itself contains a time-zone token ({@code containsTimeZone}),
 * {@code withZone} is deliberately NOT applied so the parsed zone wins; otherwise the
 * configured zone is forced onto the result.
 */
@Override @Nullable public Object convert(@Nullable String value) { if (isNullOrEmpty(value)) { return null; } LOG.debug("Trying to parse date <{}> with pattern <{}>, locale <{}>, and timezone <{}>.", value, dateFormat, locale, timeZone); final DateTimeFormatter formatter; if (containsTimeZone) { formatter = DateTimeFormat .forPattern(dateFormat) .withDefaultYear(YearMonth.now(timeZone).getYear()) .withLocale(locale); } else { formatter = DateTimeFormat .forPattern(dateFormat) .withDefaultYear(YearMonth.now(timeZone).getYear()) .withLocale(locale) .withZone(timeZone); } return DateTime.parse(value, formatter); }
// With a null locale the converter must still parse English month names ("May").
@Test public void convertUsesEnglishIfLocaleIsNull() throws Exception { final Converter c = new DateConverter(config("dd/MMM/YYYY HH:mm:ss Z", null, null)); final DateTime dateTime = (DateTime) c.convert("11/May/2017 15:10:48 +0200"); assertThat(dateTime).isEqualTo("2017-05-11T13:10:48.000Z"); }
/**
 * Returns the cached invoker address identifiers for the given service method, or null when
 * nothing is cached yet.
 *
 * <p>NOTE(review): returning null instead of an empty collection is an anti-pattern, but
 * existing callers/tests may distinguish null from empty — confirm before changing.
 */
protected <T> Collection<String> getInvokerAddrList(List<Invoker<T>> invokers, Invocation invocation) { String key = invokers.get(0).getUrl().getServiceKey() + "." + RpcUtils.getMethodName(invocation); Map<String, WeightedRoundRobin> map = methodWeightMap.get(key); if (map != null) { return map.keySet(); } return null; }
// The round-robin node cache must retain an entry for a removed invoker (no eager recycle):
// after removing the temp invoker, the cached address list size no longer matches.
@Test void testNodeCacheShouldNotRecycle() { int loop = 10000; // tmperately add a new invoker weightInvokers.add(weightInvokerTmp); try { Map<Invoker, InvokeResult> resultMap = getWeightedInvokeResult(loop, RoundRobinLoadBalance.NAME); assertStrictWRRResult(loop, resultMap); // inner nodes cache judgement RoundRobinLoadBalance lb = (RoundRobinLoadBalance) getLoadBalance(RoundRobinLoadBalance.NAME); Assertions.assertEquals( weightInvokers.size(), lb.getInvokerAddrList(weightInvokers, weightTestInvocation).size()); weightInvokers.remove(weightInvokerTmp); resultMap = getWeightedInvokeResult(loop, RoundRobinLoadBalance.NAME); assertStrictWRRResult(loop, resultMap); Assertions.assertNotEquals( weightInvokers.size(), lb.getInvokerAddrList(weightInvokers, weightTestInvocation).size()); } finally { // prevent other UT's failure weightInvokers.remove(weightInvokerTmp); } }
/** Registers non-heap memory usage gauges on the given metric group, sampled live from the JVM MemoryMXBean. */
@VisibleForTesting static void instantiateNonHeapMemoryMetrics(final MetricGroup metricGroup) { instantiateMemoryUsageMetrics( metricGroup, () -> ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage()); }
// The non-heap "used" gauge must be live (re-sampled), not a static snapshot:
// loading new classes should eventually change its value.
@Test void testNonHeapMetricUsageNotStatic() throws Exception { final InterceptingOperatorMetricGroup nonHeapMetrics = new InterceptingOperatorMetricGroup(); MetricUtils.instantiateNonHeapMemoryMetrics(nonHeapMetrics); @SuppressWarnings("unchecked") final Gauge<Long> used = (Gauge<Long>) nonHeapMetrics.get(MetricNames.MEMORY_USED); runUntilMetricChanged("Non-heap", 10, MetricUtilsTest::redefineDummyClass, used); }
/** Appends "VALUES (?,...,?)" with one placeholder per table column. */
void appendValuesClause(StringBuilder sb) { sb.append("VALUES "); appendValues(sb, jdbcTable.dbFieldNames().size()); }
// For a two-column table the VALUES clause must contain exactly two placeholders.
@Test void testAppendValuesClause() { PostgresUpsertQueryBuilder builder = new PostgresUpsertQueryBuilder(jdbcTable, dialect); StringBuilder sb = new StringBuilder(); builder.appendValuesClause(sb); String valuesClause = sb.toString(); assertThat(valuesClause).isEqualTo("VALUES (?,?)"); }
/**
 * Removes "." and ".." segments from the URL path per RFC 3986 section 5.2.4
 * (remove_dot_segments) and replaces the first occurrence of the old path in the URL with the
 * normalized one. The branch order below mirrors the RFC's steps A–E exactly — do not reorder.
 *
 * @return this normalizer, for chaining
 */
public URLNormalizer removeDotSegments() { String path = toURL().getPath().trim(); // (Bulleted comments are from RFC3986, section-5.2.4) // 1. The input buffer is initialized with the now-appended path // components and the output buffer is initialized to the empty // string. StringBuilder in = new StringBuilder(path); StringBuilder out = new StringBuilder(); // 2. While the input buffer is not empty, loop as follows: while (in.length() > 0) { // A. If the input buffer begins with a prefix of "../" or "./", // then remove that prefix from the input buffer; otherwise, if (startsWith(in, "../")) { deleteStart(in, "../"); } else if (startsWith(in, "./")) { deleteStart(in, "./"); } // B. if the input buffer begins with a prefix of "/./" or "/.", // where "." is a complete path segment, then replace that // prefix with "/" in the input buffer; otherwise, else if (startsWith(in, "/./")) { replaceStart(in, "/./", "/"); } else if (equalStrings(in, "/.")) { replaceStart(in, "/.", "/"); } // C. if the input buffer begins with a prefix of "/../" or "/..", // where ".." is a complete path segment, then replace that // prefix with "/" in the input buffer and remove the last // segment and its preceding "/" (if any) from the output // buffer; otherwise, else if (startsWith(in, "/../")) { replaceStart(in, "/../", "/"); removeLastSegment(out); } else if (equalStrings(in, "/..")) { replaceStart(in, "/..", "/"); removeLastSegment(out); } // D. if the input buffer consists only of "." or "..", then remove // that from the input buffer; otherwise, else if (equalStrings(in, "..")) { deleteStart(in, ".."); } else if (equalStrings(in, ".")) { deleteStart(in, "."); } // E. move the first path segment in the input buffer to the end of // the output buffer, including the initial "/" character (if // any) and any subsequent characters up to, but not including, // the next "/" character or the end of the input buffer. 
else { int nextSlashIndex = in.indexOf("/", 1); if (nextSlashIndex > -1) { out.append(in.substring(0, nextSlashIndex)); in.delete(0, nextSlashIndex); } else { out.append(in); in.setLength(0); } } } // 3. Finally, the output buffer is returned as the result of // remove_dot_segments. url = StringUtils.replaceOnce(url, path, out.toString()); return this; }
// Exercises removeDotSegments() against hand-picked cases plus the full normal/abnormal
// example tables from RFC 3986 section 5.4.
@Test public void testRemoveDotSegments() { s = "http://www.example.com/../a/b/../c/./d.html"; t = "http://www.example.com/a/c/d.html"; assertEquals(t, n(s).removeDotSegments().toString()); s = "http://www.example.com/a/../b/../c/./d.html"; t = "http://www.example.com/c/d.html"; assertEquals(t, n(s).removeDotSegments().toString()); // From ticket #173: s = "http://www.example.com/a/../../../../b/c/d/e/f.jpg"; t = "http://www.example.com/b/c/d/e/f.jpg"; assertEquals(t, n(s).removeDotSegments().toString()); //--- Tests from http://tools.ietf.org/html/rfc3986#section-5.4 --- String urlRoot = "http://a.com"; Map<String, String> m = new HashMap<>(); // 5.4.1 Normal Examples m.put("/b/c/." , "/b/c/"); m.put("/b/c/./" , "/b/c/"); m.put("/b/c/.." , "/b/"); m.put("/b/c/../" , "/b/"); m.put("/b/c/../g" , "/b/g"); m.put("/b/c/../.." , "/"); m.put("/b/c/../../" , "/"); m.put("/b/c/../../g" , "/g"); // 5.4.2. Abnormal Examples m.put("/b/c/../../../g" , "/g"); m.put("/b/c/../../../../g" , "/g"); m.put("/./g" , "/g"); m.put("/../g" , "/g"); m.put("/b/c/g." , "/b/c/g."); m.put("/b/c/.g" , "/b/c/.g"); m.put("/b/c/g.." , "/b/c/g.."); m.put("/b/c/..g" , "/b/c/..g"); m.put("/b/c/./../g" , "/b/g"); m.put("/b/c/./g/." , "/b/c/g/"); m.put("/b/c/g/./h" , "/b/c/g/h"); m.put("/b/c/g/../h" , "/b/c/h"); m.put("/b/c/g;x=1/./y" , "/b/c/g;x=1/y"); m.put("/b/c/g;x=1/../y" , "/b/c/y"); m.put("/b/c/g?y/./x" , "/b/c/g?y/./x"); m.put("/b/c/g?y/../x" , "/b/c/g?y/../x"); m.put("/b/c/g#s/./x" , "/b/c/g#s/./x"); m.put("/b/c/g#s/../x" , "/b/c/g#s/../x"); for (Map.Entry<String, String> e : m.entrySet()) { s = urlRoot + e.getKey(); t = urlRoot + e.getValue(); assertEquals(t, n(s).removeDotSegments().toString()); } }
@Override public List<SnowflakeIdentifier> listIcebergTables(SnowflakeIdentifier scope) { StringBuilder baseQuery = new StringBuilder("SHOW ICEBERG TABLES"); String[] queryParams = null; switch (scope.type()) { case ROOT: // account-level listing baseQuery.append(" IN ACCOUNT"); break; case DATABASE: // database-level listing baseQuery.append(" IN DATABASE IDENTIFIER(?)"); queryParams = new String[] {scope.toIdentifierString()}; break; case SCHEMA: // schema-level listing baseQuery.append(" IN SCHEMA IDENTIFIER(?)"); queryParams = new String[] {scope.toIdentifierString()}; break; default: throw new IllegalArgumentException( String.format("Unsupported scope type for listIcebergTables: %s", scope)); } final String finalQuery = baseQuery.toString(); final String[] finalQueryParams = queryParams; List<SnowflakeIdentifier> tables; try { tables = connectionPool.run( conn -> queryHarness.query(conn, finalQuery, TABLE_RESULT_SET_HANDLER, finalQueryParams)); } catch (SQLException e) { throw snowflakeExceptionToIcebergException( scope, e, String.format("Failed to list tables for scope '%s'", scope)); } catch (InterruptedException e) { throw new UncheckedInterruptedException( e, "Interrupted while listing tables for scope '%s'", scope); } tables.forEach( table -> Preconditions.checkState( table.type() == SnowflakeIdentifier.Type.TABLE, "Expected TABLE, got identifier '%s' for scope '%s'", table, scope)); return tables; }
// A SQLException without an error code must surface as UncheckedSQLException with the
// scope in the message and the original exception as the cause.
@SuppressWarnings("unchecked") @Test public void testListIcebergTablesSQLExceptionWithoutErrorCode() throws SQLException, InterruptedException { Exception injectedException = new SQLException("Fake SQL exception"); when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException); assertThatExceptionOfType(UncheckedSQLException.class) .isThrownBy(() -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofDatabase("DB_1"))) .withMessageContaining("Failed to list tables for scope 'DATABASE: 'DB_1''") .withCause(injectedException); }
/** Stops the game loop by clearing the running flag. */
public void stop() { LOGGER.info("Stop game."); isRunning = false; }
// stop() must clear the running flag.
@Test void testStop() { world.stop(); assertFalse(world.isRunning); }
/**
 * Parses the given JSON document into a {@code ConfigResponse}.
 *
 * @param json the JSON text to parse
 * @return the parsed response
 */
public static ConfigResponse fromJson(String json) {
  return JsonUtil.parse(json, node -> ConfigResponseParser.fromJson(node));
}
// Unknown JSON fields must be ignored, yielding a response with empty defaults/overrides.
@Test public void unknownFields() { ConfigResponse actual = ConfigResponseParser.fromJson("{\"x\": \"val\", \"y\": \"val2\"}"); ConfigResponse expected = ConfigResponse.builder().build(); // ConfigResponse doesn't implement hashCode/equals assertThat(actual.defaults()).isEqualTo(expected.defaults()).isEmpty(); assertThat(actual.overrides()).isEqualTo(expected.overrides()).isEmpty(); }
/** Closes all resources registered with the internal Closer; synchronized to serialize closes. */
@Override public synchronized void close() throws IOException { mCloser.close(); }
// Creating then immediately closing a file must leave an empty, non-directory file.
@Test public void createClose() throws IOException, AlluxioException { AlluxioURI ufsPath = getUfsPath(); mFileSystem.createFile(ufsPath).close(); assertFalse(mFileSystem.getStatus(ufsPath).isFolder()); assertEquals(0L, mFileSystem.getStatus(ufsPath).getLength()); }
/** Collector indexing elements by {@code keyFunction}; values are the elements themselves. */
public static <K, E> Collector<E, ImmutableListMultimap.Builder<K, E>, ImmutableListMultimap<K, E>> index(Function<? super E, K> keyFunction) { return index(keyFunction, Function.identity()); }
// index(key, value) must group each element's mapped value under its mapped key.
@Test public void index_with_valueFunction_returns_ListMultimap() { ListMultimap<Integer, String> multimap = LIST.stream().collect(index(MyObj::getId, MyObj::getText)); assertThat(multimap.size()).isEqualTo(3); Map<Integer, Collection<String>> map = multimap.asMap(); assertThat(map.get(1)).containsOnly("A"); assertThat(map.get(2)).containsOnly("B"); assertThat(map.get(3)).containsOnly("C"); }
/**
 * OIDC authorization endpoint: validates the redirect URI, runs the authorization request,
 * renders the IdP-selection form in the negotiated locale, and sets the session cookie.
 */
@GET @Produces(MediaType.TEXT_HTML) public Response auth( @QueryParam("scope") String scope, @QueryParam("state") String state, @QueryParam("response_type") String responseType, @QueryParam("client_id") String clientId, @QueryParam("redirect_uri") String redirectUri, @QueryParam("nonce") String nonce, @HeaderParam("Accept-Language") @DefaultValue("de-DE") String acceptLanguage) { var uri = mustParse(redirectUri); var res = authService.auth( new AuthorizationRequest(scope, state, responseType, clientId, uri, nonce)); var locale = getNegotiatedLocale(acceptLanguage); var form = pages.selectIdpForm(res.identityProviders(), locale); return Response.ok(form, MediaType.TEXT_HTML_TYPE) .cookie(createSessionCookie(res.sessionId())) .build(); }
// All query parameters must be forwarded unchanged into the AuthorizationRequest.
@Test void auth_success_passParams() { var sessionId = IdGenerator.generateID(); var authService = mock(AuthService.class); when(authService.auth(any())).thenReturn(new AuthorizationResponse(List.of(), sessionId)); var sut = new AuthEndpoint(authService); var scope = "openid"; var state = UUID.randomUUID().toString(); var nonce = UUID.randomUUID().toString(); var responseType = "code"; var clientId = "myapp"; var language = "de-DE"; // when try (var res = sut.auth(scope, state, responseType, clientId, REDIRECT_URI, nonce, language)) { // then var captor = ArgumentCaptor.forClass(AuthorizationRequest.class); verify(authService).auth(captor.capture()); var req = captor.getValue(); assertEquals(REDIRECT_URI, req.redirectUri().toString()); assertEquals(scope, req.scope()); assertEquals(state, req.state()); assertEquals(responseType, req.responseType()); assertEquals(clientId, req.clientId()); assertEquals(nonce, req.nonce()); } }
/** Creates a retrying proxy for {@code iface} using a non-failover provider over the given implementation. */
public static <T> Object create(Class<T> iface, T implementation, RetryPolicy retryPolicy) { return RetryProxy.create(iface, new DefaultFailoverProxyProvider<T>(iface, implementation), retryPolicy); }
// isRpcInvocation must be true for a retry proxy, true for a ProtocolTranslator wrapping one
// (and must consult the underlying proxy exactly once), and false for a plain object.
@Test public void testRpcInvocation() throws Exception { // For a proxy method should return true final UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create(UnreliableInterface.class, unreliableImpl, RETRY_FOREVER); assertTrue(RetryInvocationHandler.isRpcInvocation(unreliable)); final AtomicInteger count = new AtomicInteger(); // Embed the proxy in ProtocolTranslator ProtocolTranslator xlator = new ProtocolTranslator() { @Override public Object getUnderlyingProxyObject() { count.getAndIncrement(); return unreliable; } }; // For a proxy wrapped in ProtocolTranslator method should return true assertTrue(RetryInvocationHandler.isRpcInvocation(xlator)); // Ensure underlying proxy was looked at assertEquals(1, count.get()); // For non-proxy the method must return false assertFalse(RetryInvocationHandler.isRpcInvocation(new Object())); }
/**
 * Pre-update validation for CREATE MASK RULE: duplicate-name checking is skipped when
 * IF NOT EXISTS is present; algorithm validation always runs.
 */
@Override public void checkBeforeUpdate(final CreateMaskRuleStatement sqlStatement) { ifNotExists = sqlStatement.isIfNotExists(); if (!ifNotExists) { checkDuplicatedRuleNames(sqlStatement); } checkAlgorithms(sqlStatement); }
// An unknown mask algorithm type must raise ServiceProviderNotFoundException.
@Test void assertCheckSQLStatementWithInvalidAlgorithm() { assertThrows(ServiceProviderNotFoundException.class, () -> executor.checkBeforeUpdate(createSQLStatement(false, "INVALID_TYPE"))); }
/**
 * Fails if the subject's value is contained in the given iterable.
 *
 * @param iterable the values the subject must not equal; must itself be non-null
 */
public void isNotIn(@Nullable Iterable<?> iterable) {
  checkNotNull(iterable);
  // Guava's contains handles a null subject via null-safe equality.
  final boolean found = Iterables.contains(iterable, actual);
  if (found) {
    failWithActual("expected not to be any of", iterable);
  }
}
// A value absent from the iterable must pass, even for a one-shot iterable.
@Test public void isNotIn() { assertThat("x").isNotIn(oneShotIterable("a", "b", "c")); }
/**
 * Builds a name → evaluation-function map for the given predictor terms.
 *
 * <p>Unnamed terms get a synthetic name from the shared {@code predictorsArity} counter, which
 * is reset to 0 on every call — this method is therefore NOT safe for concurrent use.
 */
static Map<String, SerializableFunction<Map<String, Object>, Double>> getPredictorTermsMap(final List<PredictorTerm> predictorTerms) { predictorsArity.set(0); return predictorTerms.stream() .map(predictorTerm -> { int arity = predictorsArity.addAndGet(1); String variableName = predictorTerm.getName() != null ?predictorTerm.getName() : "predictorTermFunction" + arity; return new AbstractMap.SimpleEntry<>(variableName, getPredictorTermSerializableFunction(predictorTerm)); }) .collect(Collectors.toMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue)); }
// Every named predictor term must appear as a key in the resulting map, one entry per term.
@Test void getPredictorTermsMap() { final List<PredictorTerm> predictorTerms = IntStream.range(0, 3).mapToObj(index -> { String predictorName = "predictorName-" + index; double coefficient = 1.23 * index; String fieldRef = "fieldRef-" + index; return PMMLModelTestUtils.getPredictorTerm(predictorName, coefficient, Collections.singletonList(fieldRef)); }).collect(Collectors.toList()); Map<String, SerializableFunction<Map<String, Object>, Double>> retrieved = KiePMMLRegressionTableFactory.getPredictorTermsMap(predictorTerms); assertThat(retrieved).hasSameSizeAs(predictorTerms); IntStream.range(0, predictorTerms.size()).forEach(index -> { PredictorTerm predictorTerm = predictorTerms.get(index); assertThat(retrieved).containsKey(predictorTerm.getName()); }); }
@Override protected void processOptions(LinkedList<String> args) throws IOException { CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, OPTION_PATHONLY, OPTION_DIRECTORY, OPTION_HUMAN, OPTION_HIDENONPRINTABLE, OPTION_RECURSIVE, OPTION_REVERSE, OPTION_MTIME, OPTION_SIZE, OPTION_ATIME, OPTION_ECPOLICY); cf.parse(args); pathOnly = cf.getOpt(OPTION_PATHONLY); dirRecurse = !cf.getOpt(OPTION_DIRECTORY); setRecursive(cf.getOpt(OPTION_RECURSIVE) && dirRecurse); humanReadable = cf.getOpt(OPTION_HUMAN); hideNonPrintable = cf.getOpt(OPTION_HIDENONPRINTABLE); orderReverse = cf.getOpt(OPTION_REVERSE); orderTime = cf.getOpt(OPTION_MTIME); orderSize = !orderTime && cf.getOpt(OPTION_SIZE); useAtime = cf.getOpt(OPTION_ATIME); displayECPolicy = cf.getOpt(OPTION_ECPOLICY); if (args.isEmpty()) args.add(Path.CUR_DIR); initialiseOrderComparator(); }
// ls -e must fail with UnsupportedOperationException when the filesystem does not
// support erasure-coding policies.
@Test(expected = UnsupportedOperationException.class) public void processPathDirDisplayECPolicyWhenUnsupported() throws IOException { TestFile testFile = new TestFile("testDirectory", "testFile"); TestFile testDir = new TestFile("", "testDirectory"); testDir.setIsDir(true); testDir.addContents(testFile); LinkedList<PathData> pathData = new LinkedList<PathData>(); pathData.add(testDir.getPathData()); Ls ls = new Ls(); LinkedList<String> options = new LinkedList<String>(); options.add("-e"); ls.processOptions(options); ls.processArguments(pathData); }
/** Delegates to the admin implementation to query consume time spans for a topic/group. */
@Override public List<QueueTimeSpan> queryConsumeTimeSpan(final String topic, final String group) throws InterruptedException, MQBrokerException, RemotingException, MQClientException { return this.defaultMQAdminExtImpl.queryConsumeTimeSpan(topic, group); }
// With no consumption data the query must return an empty list.
@Test public void testQueryConsumeTimeSpan() throws InterruptedException, RemotingException, MQClientException, MQBrokerException { List<QueueTimeSpan> result = defaultMQAdminExt.queryConsumeTimeSpan("unit-test", "default-broker-group"); assertThat(result.size()).isEqualTo(0); }
/** Evaluates the metrics expression against the file's column statistics via the visitor. */
public boolean eval(ContentFile<?> file) { // TODO: detect the case where a column is missing from the file using file's max field id. return new MetricsEvalVisitor().eval(file); }
// notStartsWith must be conservative: files whose lower/upper bound ranges could contain
// non-matching values (or that have no stats) must all be read.
@Test public void testStringNotStartsWith() { boolean shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "a"), true).eval(FILE); assertThat(shouldRead).as("Should read: no stats").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "a"), true).eval(FILE_2); assertThat(shouldRead).as("Should read: range matches").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "aa"), true).eval(FILE_2); assertThat(shouldRead).as("Should read: range matches").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "aaa"), true).eval(FILE_2); assertThat(shouldRead).as("Should read: range matches").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "1s"), true).eval(FILE_3); assertThat(shouldRead).as("Should read: range matches").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "1str1x"), true) .eval(FILE_3); assertThat(shouldRead).as("Should read: range matches").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "ff"), true).eval(FILE_4); assertThat(shouldRead).as("Should read: range matches").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "aB"), true).eval(FILE_2); assertThat(shouldRead).as("Should read: range matches").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "dWX"), true).eval(FILE_2); assertThat(shouldRead).as("Should read: range matches").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "5"), true).eval(FILE_3); assertThat(shouldRead).as("Should read: range matches").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", "3str3x"), true) .eval(FILE_3); assertThat(shouldRead).as("Should read: range matches").isTrue(); String aboveMax = UnicodeUtil.truncateStringMax(Literal.of("イロハニホヘト"), 
4).value().toString(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, notStartsWith("required", aboveMax), true) .eval(FILE_4); assertThat(shouldRead).as("Should read: range matches").isTrue(); }
/** Static factory wrapping the given value in a CacheableOptional. */
public static <T extends Serializable> CacheableOptional<T> of(final T value) { return new CacheableOptional<>(value); }
// Two CacheableOptionals are equal iff their wrapped values are equal.
@Test public void equalsIsAppropriate() { assertThat(CacheableOptional.of("my-test"), is(CacheableOptional.of("my-test"))); assertThat(CacheableOptional.of("my-test"), is(not(CacheableOptional.of("not-my-test")))); }
/** Creates a generic schema from the schema info; delegates with the second flag set to true. */
public static GenericSchemaImpl of(SchemaInfo schemaInfo) { return of(schemaInfo, true); }
// A generic schema built from an Avro schema's info must round-trip encode/decode records.
@Test public void testGenericAvroSchema() { Schema<Foo> encodeSchema = Schema.AVRO(Foo.class); GenericSchema decodeSchema = GenericSchemaImpl.of(encodeSchema.getSchemaInfo()); testEncodeAndDecodeGenericRecord(encodeSchema, decodeSchema); }
/**
 * Returns a validated ACL containing only the ACCESS-scope entries of {@code existingAcl}.
 * Relies on the input being sorted (access entries before default entries) so iteration can
 * stop at the first DEFAULT entry.
 *
 * @throws AclException if the filtered ACL fails validation
 */
public static List<AclEntry> filterDefaultAclEntries( List<AclEntry> existingAcl) throws AclException { ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES); for (AclEntry existingEntry: existingAcl) { if (existingEntry.getScope() == DEFAULT) { // Default entries sort after access entries, so we can exit early. break; } aclBuilder.add(existingEntry); } return buildAndValidateAcl(aclBuilder); }
// Filtering must drop every DEFAULT-scope entry and keep all ACCESS-scope entries intact.
@Test public void testFilterDefaultAclEntries() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, USER, "bruce", READ_WRITE)) .add(aclEntry(ACCESS, GROUP, READ_EXECUTE)) .add(aclEntry(ACCESS, GROUP, "sales", READ_EXECUTE)) .add(aclEntry(ACCESS, MASK, ALL)) .add(aclEntry(ACCESS, OTHER, NONE)) .add(aclEntry(DEFAULT, USER, ALL)) .add(aclEntry(DEFAULT, USER, "bruce", READ_WRITE)) .add(aclEntry(DEFAULT, GROUP, READ)) .add(aclEntry(DEFAULT, GROUP, "sales", READ_EXECUTE)) .add(aclEntry(DEFAULT, MASK, READ_WRITE)) .add(aclEntry(DEFAULT, OTHER, READ_EXECUTE)) .build(); List<AclEntry> expected = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, USER, "bruce", READ_WRITE)) .add(aclEntry(ACCESS, GROUP, READ_EXECUTE)) .add(aclEntry(ACCESS, GROUP, "sales", READ_EXECUTE)) .add(aclEntry(ACCESS, MASK, ALL)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); assertEquals(expected, filterDefaultAclEntries(existing)); }
/** Asserts that the multimap contains the given key, by checking its key set. */
public final void containsKey(@Nullable Object key) { check("keySet()").that(checkNotNull(actual).keySet()).contains(key); }
// Keys with equal toString() but different types (Integer 1 vs Long 1L vs String "1")
// must produce the detailed "same toString" failure message.
@Test public void containsKey_failsWithSameToString() { expectFailureWhenTestingThat( ImmutableMultimap.of(1L, "value1a", 1L, "value1b", 2L, "value2", "1", "value3")) .containsKey(1); assertFailureKeys( "value of", "expected to contain", "an instance of", "but did not", "though it did contain", "full contents", "multimap was"); assertFailureValue("value of", "multimap.keySet()"); assertFailureValue("expected to contain", "1"); }
/** Returns the decimal column's scale. */
public int getScale() { return scale; }
// NOTE(review): the test name mentions default *precision*, but the assertion checks
// getScale() == 20 while precision is explicitly set to 30 — presumably 20 is the default
// scale; confirm the name/intent against DecimalColumnDef.Builder.
@Test public void default_precision_is_20() { DecimalColumnDef def = new DecimalColumnDef.Builder() .setColumnName("issues") .setPrecision(30) .setIsNullable(true) .build(); assertThat(def.getScale()).isEqualTo(20); }
/**
 * Writes every remaining byte of {@code buffer} to {@code outputStream}, copying through a
 * fixed-size scratch array so arbitrarily large (possibly direct) buffers can be drained
 * without a single huge allocation. The buffer's position is advanced to its limit.
 *
 * @throws IOException if the underlying stream write fails
 */
public static void writeFully(OutputStream outputStream, ByteBuffer buffer) throws IOException {
  if (!buffer.hasRemaining()) {
    return;
  }
  final byte[] scratch = new byte[WRITE_CHUNK_SIZE];
  while (buffer.hasRemaining()) {
    final int len = Math.min(scratch.length, buffer.remaining());
    buffer.get(scratch, 0, len);
    outputStream.write(scratch, 0, len);
  }
}
@Test
public void testWriteFully() throws Exception {
    // Large input (~230 KB) so the copy presumably spans many write chunks — TODO confirm chunk size.
    byte[] input = Strings.repeat("Welcome to Warsaw!\n", 12345).getBytes(StandardCharsets.UTF_8);
    InMemoryOutputFile outputFile = new InMemoryOutputFile();
    try (PositionOutputStream outputStream = outputFile.create()) {
        IOUtil.writeFully(outputStream, ByteBuffer.wrap(input.clone()));
    }
    // Round-trip: everything written must come back byte-for-byte.
    assertThat(outputFile.toByteArray()).isEqualTo(input);
}
/**
 * Coerces the given expressions to a common SQL type, with no pre-resolved
 * lambda type mappings.
 *
 * @param expressions the expressions to coerce
 * @param typeManager used to resolve each expression's type
 * @return the coercion result (common type plus rewritten expressions)
 */
static Result coerceUserList(
    final Collection<Expression> expressions,
    final ExpressionTypeManager typeManager
) {
  // Delegate to the full overload with an empty mapping context.
  return coerceUserList(expressions, typeManager, Collections.emptyMap());
}
@Test public void shouldCoerceToBigInts() { // Given: final ImmutableList<Expression> expressions = ImmutableList.of( new IntegerLiteral(10), new LongLiteral(1234567890), new StringLiteral("\t -100 \t"), BIGINT_EXPRESSION, INT_EXPRESSION ); // When: final Result result = CoercionUtil.coerceUserList(expressions, typeManager); // Then: assertThat(result.commonType(), is(Optional.of(SqlTypes.BIGINT))); assertThat(result.expressions(), is(ImmutableList.of( new LongLiteral(10), new LongLiteral(1234567890), new LongLiteral(-100), BIGINT_EXPRESSION, cast(INT_EXPRESSION, SqlTypes.BIGINT) ))); }
/** Sets the id of the SCM configuration this material refers to. */
public void setScmId(String scmId) {
    this.scmId = scmId;
}
@Test public void shouldAddErrorWhenAssociatedSCMPluginIsMissing() { PipelineConfigSaveValidationContext configSaveValidationContext = mock(PipelineConfigSaveValidationContext.class); when(configSaveValidationContext.findScmById(anyString())).thenReturn(mock(SCM.class)); SCM scmConfig = mock(SCM.class); when(scmConfig.doesPluginExist()).thenReturn(false); PluggableSCMMaterialConfig pluggableSCMMaterialConfig = new PluggableSCMMaterialConfig(null, scmConfig, "usr/home", null, false); pluggableSCMMaterialConfig.setScmId("scm-id"); pluggableSCMMaterialConfig.validateTree(configSaveValidationContext); assertThat(pluggableSCMMaterialConfig.errors().getAll().size(), is(1)); assertThat(pluggableSCMMaterialConfig.errors().on(PluggableSCMMaterialConfig.SCM_ID), is("Could not find plugin for scm-id: [scm-id].")); }
/**
 * Registers {@code listener} for notifications about {@code url}.
 *
 * <p>Both arguments are mandatory; listeners for the same URL accumulate in a
 * concurrent set, so duplicate registration is a no-op.
 *
 * @throws IllegalArgumentException if {@code url} or {@code listener} is null
 */
@Override
public void subscribe(URL url, NotifyListener listener) {
    if (url == null) {
        throw new IllegalArgumentException("subscribe url == null");
    }
    if (listener == null) {
        throw new IllegalArgumentException("subscribe listener == null");
    }
    if (logger.isInfoEnabled()) {
        logger.info("Subscribe: " + url);
    }
    // Create the listener set for this URL on first use, then register.
    subscribed.computeIfAbsent(url, key -> new ConcurrentHashSet<>()).add(listener);
}
@Test
void testSubscribeIfListenerNull() {
    // A null listener must be rejected with IllegalArgumentException before
    // any registration happens.
    Assertions.assertThrows(IllegalArgumentException.class, () -> {
        final AtomicReference<Boolean> notified = new AtomicReference<Boolean>(false);
        NotifyListener listener = urls -> notified.set(Boolean.TRUE);
        URL url = new ServiceConfigURL("dubbo", "192.168.0.1", 2200);
        abstractRegistry.subscribe(url, null);
        Assertions.fail("listener url == null");
    });
}
/**
 * Finds the first configuration property whose key name equals {@code key}.
 *
 * @param key the configuration key name to look up
 * @return the matching property, or {@code null} when none matches
 */
public ConfigurationProperty getProperty(final String key) {
    return stream()
            .filter(property -> property.getConfigurationKey().getName().equals(key))
            .findFirst()
            .orElse(null);
}
@Test
void shouldGetConfigPropertyForGivenKey() {
    ConfigurationProperty property1 = new ConfigurationProperty(new ConfigurationKey("key1"), new ConfigurationValue("value1"), null, null);
    ConfigurationProperty property2 = new ConfigurationProperty(new ConfigurationKey("key2"), new ConfigurationValue("value2"), null, null);
    Configuration config = new Configuration(property1, property2);
    // Lookup by key name must return the matching property instance.
    assertThat(config.getProperty("key2")).isEqualTo(property2);
}
/**
 * FEEL time() built-in: parses a time literal into the most specific temporal type.
 *
 * @param val the FEEL time string; must not be null
 * @return an OffsetTime when an offset is present, a LocalTime when no zone
 *     information exists, a ZoneTime when a named zone is present, or an
 *     error result on null input / parse failure
 */
public FEELFnResult<TemporalAccessor> invoke(@ParameterName("from") String val) {
    if ( val == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null"));
    }
    try {
        TemporalAccessor parsed = FEEL_TIME.parse(val);
        if (parsed.query(TemporalQueries.offset()) != null) {
            // it is an offset-zoned time, so I can know for certain an OffsetTime
            OffsetTime asOffSetTime = parsed.query(OffsetTime::from);
            return FEELFnResult.ofResult(asOffSetTime);
        } else if (parsed.query(TemporalQueries.zone()) == null) {
            // if it does not contain any zone information at all, then I know for certain is a local time.
            LocalTime asLocalTime = parsed.query(LocalTime::from);
            return FEELFnResult.ofResult(asLocalTime);
        } else if (parsed.query(TemporalQueries.zone()) != null) {
            // Named time zone: preserve the zone and whether seconds were written
            // in the literal (affects string formatting of the result).
            boolean hasSeconds = timeStringWithSeconds(val);
            LocalTime asLocalTime = parsed.query(LocalTime::from);
            ZoneId zoneId = parsed.query(TemporalQueries.zone());
            ZoneTime zoneTime = ZoneTime.of(asLocalTime, zoneId, hasSeconds);
            return FEELFnResult.ofResult(zoneTime);
        }
        // Unreachable in practice (branches above are exhaustive), kept as a safe fallback.
        return FEELFnResult.ofResult(parsed);
    } catch (DateTimeException e) {
        return manageDateTimeException(e, val);
    }
}
@Test
void invokeTimeUnitsParamsNoOffsetWithNanoseconds() {
    // 15.154 seconds must split into 15 s and 154,000,000 ns in the LocalTime.
    FunctionTestUtil.assertResult(timeFunction.invoke(10, 43, BigDecimal.valueOf(15.154), null),
            LocalTime.of(10, 43, 15, 154000000));
}
/**
 * Applies the given monitoring-info updates to the metrics container of
 * {@code stepName}, then flushes that step's metrics via the single-arg overload.
 */
public void updateMetrics(String stepName, List<MonitoringInfo> monitoringInfos) {
    getMetricsContainer(stepName).update(monitoringInfos);
    updateMetrics(stepName);
}
@Test
void testCounterMonitoringInfoUpdate() {
    MonitoringInfo userMonitoringInfo =
        new SimpleMonitoringInfoBuilder()
            .setUrn(MonitoringInfoConstants.Urns.USER_SUM_INT64)
            .setLabel(MonitoringInfoConstants.Labels.NAMESPACE, DEFAULT_NAMESPACE)
            .setLabel(MonitoringInfoConstants.Labels.NAME, "myCounter")
            .setLabel(MonitoringInfoConstants.Labels.PTRANSFORM, "anyPTransform")
            .setInt64SumValue(111)
            .build();
    // The counter must not exist before the update...
    assertThat(metricGroup.get("myCounter")).isNull();
    container.updateMetrics("step", ImmutableList.of(userMonitoringInfo));
    // ...and must carry the summed value afterwards.
    Counter userCounter = (Counter) metricGroup.get("myCounter");
    assertThat(userCounter.getCount()).isEqualTo(111L);
}
/**
 * Returns {@code text} with Lucene query syntax characters escaped, or
 * {@code null} when {@code text} is null.
 */
public static String escapeLuceneQuery(final CharSequence text) {
    if (text == null) {
        return null;
    }
    // Worst case every character needs a backslash, so presize to 2x the input.
    final StringBuilder sb = new StringBuilder(text.length() * 2);
    appendEscapedLuceneQuery(sb, text);
    return sb.toString();
}
@Test
public void testEscapeLuceneQuery_null() {
    // Null input must pass through as null rather than throwing.
    CharSequence text = null;
    String expResult = null;
    String result = LuceneUtils.escapeLuceneQuery(text);
    assertEquals(expResult, result);
}
/**
 * Ensures the catalog-level Hive metastore URI agrees with the one in the
 * active Spark session; either side being unset skips the check.
 */
private void validateHmsUri(String catalogHmsUri) {
    // No catalog-level URI configured: nothing to validate.
    if (catalogHmsUri == null) {
        return;
    }
    Configuration conf = SparkSession.active().sessionState().newHadoopConf();
    String envHmsUri = conf.get(HiveConf.ConfVars.METASTOREURIS.varname, null);
    // No session-level URI either: nothing to compare against.
    if (envHmsUri == null) {
        return;
    }
    // Both set: they must agree, otherwise operations could target different metastores.
    Preconditions.checkArgument(
        catalogHmsUri.equals(envHmsUri),
        "Inconsistent Hive metastore URIs: %s (Spark session) != %s (spark_catalog)",
        envHmsUri,
        catalogHmsUri);
}
@Test public void testValidateHmsUri() { // HMS uris match Assert.assertTrue( spark .sessionState() .catalogManager() .v2SessionCatalog() .defaultNamespace()[0] .equals("default")); // HMS uris doesn't match spark.sessionState().catalogManager().reset(); String catalogHmsUri = "RandomString"; spark.conf().set(envHmsUriKey, hmsUri); spark.conf().set(catalogHmsUriKey, catalogHmsUri); IllegalArgumentException exception = Assert.assertThrows( IllegalArgumentException.class, () -> spark.sessionState().catalogManager().v2SessionCatalog()); String errorMessage = String.format( "Inconsistent Hive metastore URIs: %s (Spark session) != %s (spark_catalog)", hmsUri, catalogHmsUri); Assert.assertEquals(errorMessage, exception.getMessage()); // no env HMS uri, only catalog HMS uri spark.sessionState().catalogManager().reset(); spark.conf().set(catalogHmsUriKey, hmsUri); spark.conf().unset(envHmsUriKey); Assert.assertTrue( spark .sessionState() .catalogManager() .v2SessionCatalog() .defaultNamespace()[0] .equals("default")); // no catalog HMS uri, only env HMS uri spark.sessionState().catalogManager().reset(); spark.conf().set(envHmsUriKey, hmsUri); spark.conf().unset(catalogHmsUriKey); Assert.assertTrue( spark .sessionState() .catalogManager() .v2SessionCatalog() .defaultNamespace()[0] .equals("default")); }
/**
 * Converts a string column value to the Java object matching its JDBC type.
 *
 * <p>Empty strings map to {@code null} except for character/text columns.
 * Zero dates ({@code 0000-00-00...}) map to {@code null}. On any conversion
 * failure the raw string value is returned and the error is logged.
 *
 * @param tableName  table name, used only for error logging
 * @param columnName column name, used only for error logging
 * @param value      the raw string value; may be null
 * @param sqlType    the {@link java.sql.Types} constant of the column
 * @param mysqlType  the MySQL type name, used to refine BIGINT/text handling
 * @return the converted value, {@code null}, or the raw string on failure
 */
public static Object typeConvert(String tableName ,String columnName, String value, int sqlType, String mysqlType) {
    if (value == null
        || (value.equals("") && !(isText(mysqlType) || sqlType == Types.CHAR || sqlType == Types.VARCHAR || sqlType == Types.LONGVARCHAR))) {
        return null;
    }

    try {
        Object res;
        switch (sqlType) {
            case Types.INTEGER:
                res = Integer.parseInt(value);
                break;
            case Types.SMALLINT:
                res = Short.parseShort(value);
                break;
            case Types.BIT:
            case Types.TINYINT:
                res = Byte.parseByte(value);
                break;
            case Types.BIGINT:
                // Unsigned bigint can exceed Long.MAX_VALUE, so use BigInteger.
                if (mysqlType.startsWith("bigint") && mysqlType.endsWith("unsigned")) {
                    res = new BigInteger(value);
                } else {
                    res = Long.parseLong(value);
                }
                break;
            // case Types.BIT:
            case Types.BOOLEAN:
                // Any value other than "0" is treated as true.
                res = !"0".equals(value);
                break;
            case Types.DOUBLE:
            case Types.FLOAT:
                res = Double.parseDouble(value);
                break;
            case Types.REAL:
                res = Float.parseFloat(value);
                break;
            case Types.DECIMAL:
            case Types.NUMERIC:
                res = new BigDecimal(value);
                break;
            case Types.BINARY:
            case Types.VARBINARY:
            case Types.LONGVARBINARY:
            case Types.BLOB:
                // ISO-8859-1 maps each char 1:1 to a byte, recovering raw binary.
                res = value.getBytes("ISO-8859-1");
                break;
            case Types.DATE:
                // MySQL "zero date" sentinel becomes null.
                if (!value.startsWith("0000-00-00")) {
                    java.util.Date date = Util.parseDate(value);
                    if (date != null) {
                        res = new Date(date.getTime());
                    } else {
                        res = null;
                    }
                } else {
                    res = null;
                }
                break;
            case Types.TIME: {
                java.util.Date date = Util.parseDate(value);
                if (date != null) {
                    res = new Time(date.getTime());
                } else {
                    res = null;
                }
                break;
            }
            case Types.TIMESTAMP:
                // MySQL "zero timestamp" sentinel becomes null.
                if (!value.startsWith("0000-00-00")) {
                    java.util.Date date = Util.parseDate(value);
                    if (date != null) {
                        res = new Timestamp(date.getTime());
                    } else {
                        res = null;
                    }
                } else {
                    res = null;
                }
                break;
            case Types.CLOB:
            default:
                res = value;
                break;
        }
        return res;
    } catch (Exception e) {
        // Best-effort: log and fall back to the raw string instead of failing the row.
        logger.error("table: {} column: {}, failed convert type {} to {}", tableName, columnName, value, sqlType);
        return value;
    }
}
@Test
public void typeConvertInputNotNullNotNullNotNullPositiveNotNullOutputFalse() {
    // Arrange
    final String tableName = "?????????";
    final String columnName = "?";
    final String value = "0";
    final int sqlType = 16; // java.sql.Types.BOOLEAN
    final String mysqlType = "bigint\u046bunsigned";
    // Act
    final Object actual = JdbcTypeUtil.typeConvert(tableName, columnName, value, sqlType, mysqlType);
    // Assert result: "0" converts to boolean false
    Assert.assertFalse((boolean)actual);
}
/**
 * Rewrites in-site links for theme preview: asset links get the theme preview
 * path prefix, other links get a preview-theme query parameter when the theme
 * is not the active one.
 */
@Override
protected String processLink(IExpressionContext context, String link) {
    // Null links and links pointing outside this site are left untouched.
    if (link == null || !linkInSite(externalUrlSupplier.get(), link)) {
        return link;
    }
    // Blank (but non-null, in-site) links are normalized to the site root.
    if (StringUtils.isBlank(link)) {
        link = "/";
    }
    if (isAssetsRequest(link)) {
        return PathUtils.combinePath(THEME_PREVIEW_PREFIX, theme.getName(), link);
    }
    // not assets link
    if (theme.isActive()) {
        // The active theme needs no preview marker.
        return link;
    }
    return UriComponentsBuilder.fromUriString(link)
            .queryParam(ThemeContext.THEME_PREVIEW_PARAM_NAME, theme.getName())
            .build().toString();
}
@Test
void processNullLink() {
    ThemeLinkBuilder themeLinkBuilder = new ThemeLinkBuilder(getTheme(false), externalUrlSupplier);
    // null link must pass through untouched
    String link = null;
    String processed = themeLinkBuilder.processLink(null, link);
    assertThat(processed).isEqualTo(null);
    // empty link is normalized to "/" and, for an inactive theme, tagged with
    // the preview-theme parameter
    link = "";
    processed = themeLinkBuilder.processLink(null, link);
    assertThat(processed).isEqualTo("/?preview-theme=test-theme");
}
/** Shuts down the underlying config retriever; intended for container teardown. */
public void shutdownConfigRetriever() {
    retriever.shutdown();
}
@Disabled("because logAndDie is impossible(?) to verify programmatically")
@Test
void manually_verify_what_happens_when_first_graph_contains_component_that_throws_exception_in_ctor() {
    // NOTE(review): this test fails on both paths (it calls fail() in the try
    // AND in the catch); it is kept only for manual experimentation, which is
    // why it is @Disabled.
    writeBootstrapConfigs("thrower", ComponentThrowingExceptionInConstructor.class);
    Container container = newContainer(dirConfigSource);
    try {
        getNewComponentGraph(container);
        fail("Expected to log and die.");
    } catch (Throwable t) {
        fail("Expected to log and die");
    }
    container.shutdownConfigRetriever();
}
/** Reads the current value from the backing gauge source. */
@Override
public long read() {
    return gaugeSource.read();
}
@Test
public void whenNotVisitedWithCachedValueReadsDefault() {
    DynamicMetricsProvider concreteProvider = (descriptor, context) ->
            context.collect(descriptor.withPrefix("foo"), "longField", INFO, COUNT, 42);
    metricsRegistry.registerDynamicMetricsProvider(concreteProvider);
    LongGaugeImpl longGauge = metricsRegistry.newLongGauge("foo.longField");
    // needed to collect dynamic metrics and update the gauge created from them
    metricsRegistry.collect(mock(MetricsCollector.class));
    assertEquals(42, longGauge.read());
    // clears the cached metric source
    metricsRegistry.deregisterDynamicMetricsProvider(concreteProvider);
    metricsRegistry.collect(mock(MetricsCollector.class));
    // with no provider left, the gauge must fall back to its default value
    assertEquals(LongGaugeImpl.DEFAULT_VALUE, longGauge.read());
}
/** @return all mail templates persisted in the database. */
@Override
public List<MailTemplateDO> getMailTemplateList() {
    return mailTemplateMapper.selectList();
}
@Test
public void testGetMailTemplateList() {
    // mock data: insert two templates
    MailTemplateDO dbMailTemplate01 = randomPojo(MailTemplateDO.class);
    mailTemplateMapper.insert(dbMailTemplate01);
    MailTemplateDO dbMailTemplate02 = randomPojo(MailTemplateDO.class);
    mailTemplateMapper.insert(dbMailTemplate02);
    // call
    List<MailTemplateDO> list = mailTemplateService.getMailTemplateList();
    // assert: both rows come back, in insertion order
    assertEquals(2, list.size());
    assertEquals(dbMailTemplate01, list.get(0));
    assertEquals(dbMailTemplate02, list.get(1));
}
/**
 * Creates a stream execution environment from the options, staging the
 * configured files (or none) and using the configured Flink conf directory.
 */
@VisibleForTesting
static StreamExecutionEnvironment createStreamExecutionEnvironment(FlinkPipelineOptions options) {
    return createStreamExecutionEnvironment(
        options,
        MoreObjects.firstNonNull(options.getFilesToStage(), Collections.emptyList()),
        options.getFlinkConfDir());
}
@Test
public void useDefaultParallelismFromContextStreaming() {
    FlinkPipelineOptions options = getDefaultPipelineOptions();
    options.setRunner(TestFlinkRunner.class);
    StreamExecutionEnvironment sev =
        FlinkExecutionEnvironments.createStreamExecutionEnvironment(options);
    // Without an explicit setting, both the options and the environment must
    // pick up Flink's default local parallelism.
    assertThat(sev, instanceOf(LocalStreamEnvironment.class));
    assertThat(options.getParallelism(), is(LocalStreamEnvironment.getDefaultLocalParallelism()));
    assertThat(sev.getParallelism(), is(LocalStreamEnvironment.getDefaultLocalParallelism()));
}
/** @return the type of this keyboard extension. */
@KeyboardExtension.KeyboardExtensionType
public int getExtensionType() {
    return mExtensionType;
}
@Test
public void testGetCurrentKeyboardExtensionExtensionDefault() throws Exception {
    KeyboardExtension extension =
        AnyApplication.getKeyboardExtensionFactory(getApplicationContext()).getEnabledAddOn();
    Assert.assertNotNull(extension);
    // The default enabled add-on is the built-in numbers/symbols extension.
    Assert.assertEquals("6f1ecea0-dee2-11e0-9572-0800200c9a66", extension.getId());
    Assert.assertEquals(KeyboardExtension.TYPE_EXTENSION, extension.getExtensionType());
    Assert.assertEquals(R.xml.ext_kbd_ext_keyboard_numbers_symbols, extension.getKeyboardResId());
}
@Override public Cursor<byte[]> scan(RedisClusterNode node, ScanOptions options) { return new ScanCursor<byte[]>(0, options) { private RedisClient client = getEntry(node); @Override protected ScanIteration<byte[]> doScan(long cursorId, ScanOptions options) { if (isQueueing() || isPipelined()) { throw new UnsupportedOperationException("'SSCAN' cannot be called in pipeline / transaction mode."); } if (client == null) { return null; } List<Object> args = new ArrayList<Object>(); if (cursorId == 101010101010101010L) { cursorId = 0; } args.add(Long.toUnsignedString(cursorId)); if (options.getPattern() != null) { args.add("MATCH"); args.add(options.getPattern()); } if (options.getCount() != null) { args.add("COUNT"); args.add(options.getCount()); } RFuture<ListScanResult<byte[]>> f = executorService.readAsync(client, ByteArrayCodec.INSTANCE, RedisCommands.SCAN, args.toArray()); ListScanResult<byte[]> res = syncFuture(f); String pos = res.getPos(); client = res.getRedisClient(); if ("0".equals(pos)) { client = null; } return new ScanIteration<byte[]>(Long.parseUnsignedLong(pos), res.getValues()); } }.open(); }
@Test
public void testScan() {
    // Populate 1000 keys so the scan spans multiple cursor pages.
    for (int i = 0; i < 1000; i++) {
        connection.set(("" + i).getBytes(StandardCharsets.UTF_8), ("" + i).getBytes(StandardCharsets.UTF_8));
    }
    Cursor<byte[]> b = connection.scan(ScanOptions.scanOptions().build());
    int counter = 0;
    while (b.hasNext()) {
        b.next();
        counter++;
    }
    // The cursor must visit every key exactly once across pages.
    assertThat(counter).isEqualTo(1000);
}
/**
 * Produces a string whose {@code hashCode()} equals {@code port}, delegating
 * to the StringBuilder-based overload with seed character 'z'.
 */
public static String unhash(int port) {
    return unhash(new StringBuilder(), port, 'z').toString();
}
@Test
public void testUnhash() {
    // theoretically up to 65535 but let's be greedy and waste 10 ms
    for (int port = 0; port < 100_000; ++port) {
        String hash = Utils.unhash(port);
        assertThat(hash, notNullValue());
        // Round-trip property: the produced string hashes back to the port.
        assertThat(hash.hashCode(), is(port));
    }
}
/**
 * Builds a JMX {@link ObjectName} of the form {@code domain:name=<metric key>}.
 * Names that parse as patterns or are malformed are quoted; if even the quoted
 * form is malformed, the failure is logged and rethrown as a RuntimeException.
 */
@Override
public ObjectName createName(String type, String domain, MetricName metricName) {
    String name = metricName.getKey();
    try {
        ObjectName objectName = new ObjectName(domain, "name", name);
        if (objectName.isPattern()) {
            // A pattern ObjectName cannot identify a single MBean; quote the value.
            objectName = new ObjectName(domain, "name", ObjectName.quote(name));
        }
        return objectName;
    } catch (MalformedObjectNameException e) {
        try {
            // Retry with the value quoted, which escapes special characters.
            return new ObjectName(domain, "name", ObjectName.quote(name));
        } catch (MalformedObjectNameException e1) {
            LOGGER.warn("Unable to register {} {}", type, name, e1);
            throw new RuntimeException(e1);
        }
    }
}
@Test
public void createsObjectNameWithDomainInInput() {
    DefaultObjectNameFactory f = new DefaultObjectNameFactory();
    ObjectName on = f.createName("type", "com.domain", MetricName.build("something.with.dots"));
    // The supplied domain must be preserved on the resulting ObjectName.
    assertThat(on.getDomain()).isEqualTo("com.domain");
}
/**
 * Applies the supported GSUB features, in {@code FEATURES_IN_ORDER}, to the
 * given glyph ids and returns the repositioned result as an unmodifiable list.
 */
@Override
public List<Integer> applyTransforms(List<Integer> originalGlyphIds) {
    List<Integer> intermediateGlyphsFromGsub = originalGlyphIds;
    for (String feature : FEATURES_IN_ORDER) {
        // Features absent from this font's GSUB data are skipped, not errors.
        if (!gsubData.isFeatureSupported(feature)) {
            LOG.debug("the feature {} was not found", feature);
            continue;
        }
        LOG.debug("applying the feature {}", feature);
        ScriptFeature scriptFeature = gsubData.getFeature(feature);
        intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature, intermediateGlyphsFromGsub);
    }
    return Collections.unmodifiableList(repositionGlyphs(intermediateGlyphsFromGsub));
}
@Test
void testApplyTransforms_la_e_la_e() {
    // given: glyph ids expected after GSUB substitution of the Bengali word
    List<Integer> glyphsAfterGsub = Arrays.asList(67, 108, 369, 101, 94);
    // when
    List<Integer> result = gsubWorkerForBengali.applyTransforms(getGlyphIds("কল্লোল"));
    // then
    assertEquals(glyphsAfterGsub, result);
}
/**
 * Enables or disables storage of the {@code _source} field for this index.
 *
 * @return this builder, for chaining
 */
public T setEnableSource(boolean enableSource) {
    attributes.put("_source", ImmutableSortedMap.of(ENABLED, enableSource));
    return castThis();
}
@Test
@UseDataProvider("indexWithAndWithoutRelations")
public void index_without_source(Index index) {
    NewIndex newIndex = new SimplestNewIndex(IndexType.main(index, "foo"), defaultSettingsConfiguration);
    newIndex.setEnableSource(false);
    // Disabling source storage must materialize as _source.enabled = false.
    assertThat(getAttributeAsMap(newIndex, "_source")).containsExactly(entry("enabled", false));
}
/**
 * Atomically sets the value to {@code update} if the current value equals
 * {@code expect}; blocks on the async variant.
 */
@Override
public boolean compareAndSet(long expect, long update) {
    return get(compareAndSetAsync(expect, update));
}
@Test
public void testCompareAndSet() {
    RAtomicLong al = redisson.getAtomicLong("test");
    // CAS with a wrong expected value must fail and leave the value untouched.
    Assertions.assertFalse(al.compareAndSet(-1, 2));
    Assertions.assertEquals(0, al.get());
    // CAS with the correct expected value (initial 0) must succeed.
    Assertions.assertTrue(al.compareAndSet(0, 2));
    Assertions.assertEquals(2, al.get());
}
/**
 * Finds the lowest base port from which {@code numPorts} consecutive free
 * ports are available, scanning from BASE_PORT; returns 0 when no such run
 * exists within the allowed range.
 */
int nextAvailableBaseport(int numPorts) {
    int range = 0;
    int port = BASE_PORT;
    for (; port < BASE_PORT + MAX_PORTS && (range < numPorts); port++) {
        if (!isFree(port)) {
            // A taken port breaks the run; start counting again.
            range = 0;
            continue;
        }
        range++;
    }
    // On success, port ended one past the last port of the run, so the run
    // starts at port - range.
    return range == numPorts ? port - range : 0;
}
@Test
void next_available_baseport_is_BASE_PORT_when_no_ports_have_been_reserved() {
    HostPorts host = new HostPorts("myhostname");
    // With nothing reserved, the very first base port must be available.
    assertThat(host.nextAvailableBaseport(1), is(HostPorts.BASE_PORT));
}
/** Creates a child container whose parent is this container. */
public SpringComponentContainer createChild() {
    return new SpringComponentContainer(this);
}
@Test
public void createChild_method_should_spawn_a_child_container(){
    SpringComponentContainer parent = new SpringComponentContainer();
    SpringComponentContainer child = parent.createChild();
    // The child is a distinct container wired to its parent...
    assertThat(child).isNotEqualTo(parent);
    assertThat(child.parent).isEqualTo(parent);
    // ...and the parent tracks it in its children collection.
    assertThat(parent.children).contains(child);
}
/** @return the list of string values for this schedule parameter. */
public List<String> getStringValue() {
    return stringValue;
}
@Test
public void getStringValue() {
    JobScheduleParam jobScheduleParam = mock( JobScheduleParam.class );
    when( jobScheduleParam.getStringValue() ).thenCallRealMethod();
    List<String> stringValue = new ArrayList<>();
    stringValue.add( "hitachi" );
    // Inject the backing field directly since the mock bypasses constructors.
    ReflectionTestUtils.setField( jobScheduleParam, "stringValue", stringValue );
    Assert.assertEquals( stringValue, jobScheduleParam.getStringValue() );
}
/**
 * Matcher that succeeds when the examined value is JSON containing the given
 * JSON path; the path appears in the matcher's description.
 */
public static Matcher<? super Object> hasJsonPath(String jsonPath) {
    return describedAs("has json path %0", isJson(withJsonPath(jsonPath)), jsonPath);
}
@Test
public void shouldNotMatchInvalidJsonWithPathAndValue() {
    // Invalid JSON, non-JSON objects, and null must all fail the path matcher.
    assertThat(INVALID_JSON, not(hasJsonPath("$.path", anything())));
    assertThat(new Object(), not(hasJsonPath("$.path", anything())));
    assertThat(null, not(hasJsonPath("$.message", anything())));
}
/**
 * @deprecated old ValueTransformerSupplier API; adapts the supplier to the
 *     with-key variant and delegates with no explicit name.
 */
@Override
@Deprecated
public <VR> KStream<K, VR> flatTransformValues(
        final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, Iterable<VR>> valueTransformerSupplier,
        final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    return doFlatTransformValues(
        toValueTransformerWithKeySupplier(valueTransformerSupplier),
        NamedInternal.empty(),
        stateStoreNames);
}
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowNullValueTransformerSupplierOnFlatTransformValuesWithNamed() {
    // A null supplier must be rejected before any topology is built.
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.flatTransformValues(
            (org.apache.kafka.streams.kstream.ValueTransformerSupplier<Object, Iterable<Object>>) null,
            Named.as("flatValueTransformer")));
    assertThat(exception.getMessage(), equalTo("valueTransformerSupplier can't be null"));
}
/** Records the number of records returned by a fetch into the sensor. */
void recordRecordsFetched(int records) {
    recordsFetched.record(records);
}
@Test
public void testRecordsFetched() {
    shareFetchMetricsManager.recordRecordsFetched(7);
    // Advance past the metrics time window so both samples contribute.
    time.sleep(metrics.config().timeWindowMs() + 1);
    shareFetchMetricsManager.recordRecordsFetched(9);
    // max of {7, 9} is 9; avg is 8
    assertEquals(9, (double) getMetric(shareFetchMetricsRegistry.recordsPerRequestMax).metricValue());
    assertEquals(8, (double) getMetric(shareFetchMetricsRegistry.recordsPerRequestAvg).metricValue(), EPSILON);
}
@SuppressWarnings("unchecked") public SchemaKTable<?> aggregate( final List<ColumnName> nonAggregateColumns, final List<FunctionCall> aggregations, final Optional<WindowExpression> windowExpression, final FormatInfo valueFormat, final Stacker contextStacker ) { final ExecutionStep<? extends KTableHolder<?>> step; final KeyFormat keyFormat; if (windowExpression.isPresent()) { keyFormat = getKeyFormat(windowExpression.get()); step = ExecutionStepFactory.streamWindowedAggregate( contextStacker, sourceStep, InternalFormats.of(keyFormat, valueFormat), nonAggregateColumns, aggregations, windowExpression.get().getKsqlWindowExpression() ); } else { keyFormat = SerdeFeaturesFactory.sanitizeKeyFormat( this.keyFormat, toSqlTypes(schema.key()), false ); step = ExecutionStepFactory.streamAggregate( contextStacker, sourceStep, InternalFormats.of(keyFormat, valueFormat), nonAggregateColumns, aggregations ); } return new SchemaKTable( step, resolveSchema(step), keyFormat, ksqlConfig, functionRegistry ); }
@Test
public void shouldReturnKTableWithOutputSchema() {
    // When: aggregate without a window expression
    final SchemaKTable result = schemaGroupedStream.aggregate(
        NON_AGGREGATE_COLUMNS,
        ImmutableList.of(AGG),
        Optional.empty(),
        valueFormat.getFormatInfo(),
        queryContext
    );
    // Then: the aggregation step resolves to the expected output schema
    assertThat(result.getSchema(), is(OUT_SCHEMA));
}
/** Creates an etcd-backed {@link MetadataStore} for the given URL and config. */
@Override
public MetadataStore create(String metadataURL, MetadataStoreConfig metadataStoreConfig,
        boolean enableSessionWatcher) throws MetadataStoreException {
    return new EtcdMetadataStore(metadataURL, metadataStoreConfig, enableSessionWatcher);
}
@Test
public void testCluster() throws Exception {
    // Spin up a 3-node etcd cluster without TLS.
    @Cleanup
    EtcdCluster etcdCluster = EtcdClusterExtension.builder().withClusterName("test-cluster").withNodes(3)
            .withSsl(false).build().cluster();
    etcdCluster.start();
    EtcdConfig etcdConfig = EtcdConfig.builder().useTls(false)
            .tlsProvider(null)
            .authority("etcd0")
            .build();
    Path etcdConfigPath = Files.createTempFile("etcd_config_cluster", ".yml");
    new ObjectMapper(new YAMLFactory()).writeValue(etcdConfigPath.toFile(), etcdConfig);
    // Multi-endpoint URL of the form etcd:<ep1>,<ep2>,<ep3>
    String metadataURL =
            "etcd:" + etcdCluster.clientEndpoints().stream().map(URI::toString).collect(Collectors.joining(","));
    @Cleanup
    MetadataStore store = MetadataStoreFactory.create(metadataURL,
            MetadataStoreConfig.builder().configFilePath(etcdConfigPath.toString()).build());
    // A value written through the store must be visible via exists().
    store.put("/test", "value".getBytes(StandardCharsets.UTF_8), Optional.empty()).join();
    assertTrue(store.exists("/test").join());
}
/**
 * Validates a {@link BugPattern} annotation: it must be present, its name must
 * contain no whitespace, and its link must be consistent with its linkType
 * (CUSTOM requires a link; AUTOGENERATED/NONE forbid one).
 *
 * @throws ValidationException if any constraint is violated
 */
public static void validate(BugPattern pattern) throws ValidationException {
    if (pattern == null) {
        throw new ValidationException("No @BugPattern provided");
    }

    // name must not contain spaces
    if (CharMatcher.whitespace().matchesAnyOf(pattern.name())) {
        throw new ValidationException("Name must not contain whitespace: " + pattern.name());
    }

    // linkType must be consistent with link element.
    switch (pattern.linkType()) {
        case CUSTOM:
            if (pattern.link().isEmpty()) {
                throw new ValidationException("Expected a custom link but none was provided");
            }
            break;
        case AUTOGENERATED:
        case NONE:
            if (!pattern.link().isEmpty()) {
                throw new ValidationException("Expected no custom link but found: " + pattern.link());
            }
            break;
    }
}
@Test
public void linkTypeNoneAndNoLink() throws Exception {
    @BugPattern(
        name = "LinkTypeNoneAndNoLink",
        summary = "linkType none and no link",
        explanation = "linkType none and no link",
        severity = SeverityLevel.ERROR,
        linkType = LinkType.NONE)
    final class BugPatternTestClass {}

    BugPattern annotation = BugPatternTestClass.class.getAnnotation(BugPattern.class);
    // NONE with an empty link is the valid combination, so validate() must not throw.
    BugPatternValidator.validate(annotation);
}
/**
 * Builds a {@link JavaClasses} collection from the given classes, keyed by
 * fully qualified class name.
 */
static JavaClasses of(Iterable<JavaClass> classes) {
    Map<String, JavaClass> mapping = new HashMap<>();
    for (JavaClass clazz : classes) {
        mapping.put(clazz.getName(), clazz);
    }
    // Derive the root (default) package from any element; fall back to an
    // empty package when the iterable is empty.
    JavaPackage defaultPackage = !Iterables.isEmpty(classes)
            ? getRoot(classes.iterator().next().getPackage())
            : JavaPackage.from(classes);
    return new JavaClasses(defaultPackage, mapping);
}
@Test
public void javaClasses_of_iterable() {
    ImmutableSet<JavaClass> iterable =
        ImmutableSet.of(importClassWithContext(JavaClassesTest.class), importClassWithContext(JavaClass.class));
    JavaClasses classes = JavaClasses.of(iterable);
    // Iterating the built collection must yield exactly the input classes.
    assertThat(ImmutableSet.copyOf(classes)).isEqualTo(iterable);
}
/** Delegates the copy to the lazily resolved file object. */
@Override
public void copyFrom( FileObject file, FileSelector selector ) throws FileSystemException {
    requireResolvedFileObject().copyFrom( file, selector );
}
@Test
public void testDelegatesCopyFrom() throws FileSystemException {
    FileObject fromFileObject = mock( FileObject.class );
    FileSelector fileSelector = mock( FileSelector.class );
    fileObject.copyFrom( fromFileObject, fileSelector );
    // The call must be forwarded exactly once to the resolved file object.
    verify( resolvedFileObject, times( 1 ) ).copyFrom( fromFileObject, fileSelector );
}
/**
 * Creates a stage-level health scope; the scope key is the pipeline/stage
 * pair, so distinct stages never collide.
 */
public static HealthStateScope forStage(String pipelineName, String stageName) {
    String scopeKey = pipelineName + "/" + stageName;
    return new HealthStateScope(ScopeType.STAGE, scopeKey);
}
@Test
public void shouldHaveUniqueScopeForStages() {
    HealthStateScope scope1 = HealthStateScope.forStage("blahPipeline", "blahStage");
    HealthStateScope scope2 = HealthStateScope.forStage("blahPipeline", "blahStage");
    HealthStateScope scope25 = HealthStateScope.forStage("blahPipeline", "blahOtherStage");
    HealthStateScope scope3 = HealthStateScope.forStage("blahOtherPipeline", "blahOtherStage");
    // Same pipeline/stage pair yields equal scopes...
    assertThat(scope1, is(scope2));
    // ...while differing stage or pipeline names yield distinct scopes.
    assertThat(scope1, not(scope25));
    assertThat(scope1, not(scope3));
}
/** Sets the protocol to use and returns this builder for chaining. */
public ReferenceBuilder<T> protocol(String protocol) {
    this.protocol = protocol;
    return getThis();
}
@Test
void protocol() {
    ReferenceBuilder builder = new ReferenceBuilder();
    builder.protocol("protocol");
    // The value set on the builder must be carried into the built reference.
    Assertions.assertEquals("protocol", builder.build().getProtocol());
}
/**
 * @return true when this filter's event includes the given event AND the
 *     filter applies to the stage's pipeline/stage names.
 */
public boolean matchStage(StageConfigIdentifier stageIdentifier, StageEvent event) {
    return this.event.include(event)
            && appliesTo(stageIdentifier.getPipelineName(), stageIdentifier.getStageName());
}
@Test
void shouldMatchBrokenStage() {
    NotificationFilter filter = new NotificationFilter("cruise", "dev", StageEvent.Breaks, false);
    // An exact pipeline/stage/event match must be accepted.
    assertThat(filter.matchStage(new StageConfigIdentifier("cruise", "dev"), StageEvent.Breaks)).isTrue();
}
/** @return the number of parameters of this distribution, always 2. */
@Override
public int length() {
    return 2;
}
@Test
public void testLength() {
    System.out.println("length");
    LogNormalDistribution instance = new LogNormalDistribution(1.0, 1.0);
    // Sampling must not affect the reported parameter count.
    instance.rand();
    assertEquals(2, instance.length());
}
/**
 * Runs the spec checks appropriate for the cluster mode and returns any
 * warning conditions found.
 *
 * @param useKRaft true for KRaft-mode clusters, false for ZooKeeper-based ones
 */
List<Condition> run(boolean useKRaft) {
    List<Condition> warnings = new ArrayList<>();

    // Checks common to both modes
    checkKafkaReplicationConfig(warnings);
    checkKafkaBrokersStorage(warnings);

    if (useKRaft) {
        // Additional checks done for KRaft clusters
        checkKRaftControllerStorage(warnings);
        checkKRaftControllerCount(warnings);
        checkKafkaMetadataVersion(warnings);
        checkInterBrokerProtocolVersionInKRaft(warnings);
        checkLogMessageFormatVersionInKRaft(warnings);
    } else {
        // Additional checks done for ZooKeeper-based clusters
        checkKafkaLogMessageFormatVersion(warnings);
        checkKafkaInterBrokerProtocolVersion(warnings);
        checkKRaftMetadataStorageConfiguredForZooBasedCLuster(warnings);
    }

    return warnings;
}
@Test public void testMetadataVersionIsOlderThanKafkaVersionWithLongVersion() { Kafka kafka = new KafkaBuilder(KAFKA) .editSpec() .editKafka() .withVersion(KafkaVersionTestUtils.LATEST_KAFKA_VERSION) .withMetadataVersion(KafkaVersionTestUtils.PREVIOUS_METADATA_VERSION + "-IV0") .endKafka() .endSpec() .build(); KafkaSpecChecker checker = generateChecker(kafka, List.of(CONTROLLERS, POOL_A), new KafkaVersionChange(VERSIONS.defaultVersion(), VERSIONS.defaultVersion(), null, null, KafkaVersionTestUtils.PREVIOUS_METADATA_VERSION)); List<Condition> warnings = checker.run(true); assertThat(warnings, hasSize(1)); assertThat(warnings.get(0).getReason(), is("KafkaMetadataVersion")); assertThat(warnings.get(0).getMessage(), is("Metadata version is older than the Kafka version used by the cluster, which suggests that an upgrade is incomplete.")); }
@Override public DescriptiveUrl toDownloadUrl(final Path file, final Sharee sharee, final Object options, final PasswordCallback callback) throws BackgroundException { final Host bookmark = session.getHost(); final StringBuilder request = new StringBuilder(String.format("https://%s%s/apps/files_sharing/api/v1/shares?path=%s&shareType=%d&shareWith=%s", bookmark.getHostname(), new NextcloudHomeFeature(bookmark).find(NextcloudHomeFeature.Context.ocs).getAbsolute(), URIEncoder.encode(PathRelativizer.relativize(NextcloudHomeFeature.Context.files.home(bookmark).find().getAbsolute(), file.getAbsolute())), Sharee.world.equals(sharee) ? SHARE_TYPE_PUBLIC_LINK : SHARE_TYPE_USER, Sharee.world.equals(sharee) ? StringUtils.EMPTY : sharee.getIdentifier() )); final Credentials password = callback.prompt(bookmark, LocaleFactory.localizedString("Passphrase", "Cryptomator"), MessageFormat.format(LocaleFactory.localizedString("Create a passphrase required to access {0}", "Credentials"), file.getName()), new LoginOptions().anonymous(true).keychain(false).icon(bookmark.getProtocol().disk())); if(password.isPasswordAuthentication()) { request.append(String.format("&password=%s", URIEncoder.encode(password.getPassword()))); } final HttpPost resource = new HttpPost(request.toString()); resource.setHeader("OCS-APIRequest", "true"); resource.setHeader(HttpHeaders.ACCEPT, ContentType.APPLICATION_XML.getMimeType()); try { return session.getClient().execute(resource, new OcsDownloadShareResponseHandler()); } catch(HttpResponseException e) { throw new DefaultHttpResponseExceptionMappingService().map(e); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map(e); } }
@Test public void testToDownloadUrlPassword() throws Exception { final Path home = new NextcloudHomeFeature(session.getHost()).find(); final Path file = new DAVTouchFeature(new NextcloudWriteFeature(session)).touch(new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); final DescriptiveUrl url = new NextcloudShareFeature(session).toDownloadUrl(file, Share.Sharee.world, null, new DisabledPasswordCallback() { @Override public Credentials prompt(final Host bookmark, final String title, final String reason, final LoginOptions options) { return new Credentials(null, new AlphanumericRandomStringService(10).random()); } }); assertNotSame(DescriptiveUrl.EMPTY, url); new DAVDeleteFeature(session).delete(Collections.singletonList(file), new DisabledPasswordCallback(), new Delete.DisabledCallback()); }
/**
 * Aggregates per-partition date column statistics into a single column-level statistics object.
 *
 * Two regimes:
 * 1. Stats exist for every requested partition (or there are fewer than two inputs): merge
 *    low/high/nulls/NDV directly, using merged NDV bitvector estimators when all inputs carry
 *    compatible estimators, otherwise a density-function or tuner-based NDV estimate.
 * 2. Some partitions are missing stats: extrapolate, first collapsing adjacent partitions with
 *    mergeable bitvectors into pseudo-partitions.
 *
 * @param colStatsWithSourceInfo per-partition stats with their source partition names
 * @param partNames all partition names the caller requested stats for
 * @param areAllPartsFound whether stats were found for all requested partitions
 * @throws MetaException on invalid input statistics
 */
@Override
public ColumnStatisticsObj aggregate(List<ColStatsObjWithSourceInfo> colStatsWithSourceInfo,
    List<String> partNames, boolean areAllPartsFound) throws MetaException {
  checkStatisticsList(colStatsWithSourceInfo);
  ColumnStatisticsObj statsObj = null;
  String colType;
  String colName = null;
  // check if all the ColumnStatisticsObjs contain stats and all the ndv are
  // bitvectors
  boolean doAllPartitionContainStats = partNames.size() == colStatsWithSourceInfo.size();
  NumDistinctValueEstimator ndvEstimator = null;
  boolean areAllNDVEstimatorsMergeable = true;
  // First pass: pick up column name/type from the first entry and decide whether every
  // partition carries an NDV estimator compatible with the others.
  for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
    ColumnStatisticsObj cso = csp.getColStatsObj();
    if (statsObj == null) {
      colName = cso.getColName();
      colType = cso.getColType();
      statsObj = ColumnStatsAggregatorFactory.newColumnStaticsObj(colName, colType,
          cso.getStatsData().getSetField());
      LOG.trace("doAllPartitionContainStats for column: {} is: {}", colName, doAllPartitionContainStats);
    }
    DateColumnStatsDataInspector columnStatsData = dateInspectorFromStats(cso);
    // check if we can merge NDV estimators
    if (columnStatsData.getNdvEstimator() == null) {
      areAllNDVEstimatorsMergeable = false;
      break;
    } else {
      NumDistinctValueEstimator estimator = columnStatsData.getNdvEstimator();
      if (ndvEstimator == null) {
        ndvEstimator = estimator;
      } else {
        if (!ndvEstimator.canMerge(estimator)) {
          areAllNDVEstimatorsMergeable = false;
          break;
        }
      }
    }
  }
  if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
    // Start from an empty estimator of the same shape; the per-partition estimators are
    // merged into it below.
    ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
  }
  LOG.debug("all of the bit vectors can merge for {} is {}", colName, areAllNDVEstimatorsMergeable);
  ColumnStatisticsData columnStatisticsData = initColumnStatisticsData();
  if (doAllPartitionContainStats || colStatsWithSourceInfo.size() < 2) {
    // Regime 1: direct merge of all available partition stats.
    DateColumnStatsDataInspector aggregateData = null;
    // lowerBound = max per-partition NDV (values could fully overlap);
    // higherBound = sum of per-partition NDVs (values could be fully disjoint).
    long lowerBound = 0;
    long higherBound = 0;
    double densityAvgSum = 0.0;
    DateColumnStatsMerger merger = new DateColumnStatsMerger();
    for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
      ColumnStatisticsObj cso = csp.getColStatsObj();
      DateColumnStatsDataInspector newData = dateInspectorFromStats(cso);
      lowerBound = Math.max(lowerBound, newData.getNumDVs());
      higherBound += newData.getNumDVs();
      if (newData.isSetLowValue() && newData.isSetHighValue()) {
        densityAvgSum += ((double) diff(newData.getHighValue(), newData.getLowValue())) / newData.getNumDVs();
      }
      if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
        ndvEstimator.mergeEstimators(newData.getNdvEstimator());
      }
      if (aggregateData == null) {
        aggregateData = newData.deepCopy();
      } else {
        aggregateData.setLowValue(merger.mergeLowValue(
            merger.getLowValue(aggregateData), merger.getLowValue(newData)));
        aggregateData.setHighValue(merger.mergeHighValue(
            merger.getHighValue(aggregateData), merger.getHighValue(newData)));
        aggregateData.setNumNulls(merger.mergeNumNulls(aggregateData.getNumNulls(), newData.getNumNulls()));
        aggregateData.setNumDVs(merger.mergeNumDVs(aggregateData.getNumDVs(), newData.getNumDVs()));
      }
    }
    if (areAllNDVEstimatorsMergeable && ndvEstimator != null) {
      // if all the ColumnStatisticsObjs contain bitvectors, we do not need to
      // use uniform distribution assumption because we can merge bitvectors
      // to get a good estimation.
      aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
    } else {
      long estimation;
      if (useDensityFunctionForNDVEstimation && aggregateData != null
          && aggregateData.isSetLowValue() && aggregateData.isSetHighValue()) {
        // We have estimation, lowerbound and higherbound. We use estimation
        // if it is between lowerbound and higherbound.
        double densityAvg = densityAvgSum / partNames.size();
        estimation = (long) (diff(aggregateData.getHighValue(), aggregateData.getLowValue()) / densityAvg);
        if (estimation < lowerBound) {
          estimation = lowerBound;
        } else if (estimation > higherBound) {
          estimation = higherBound;
        }
      } else {
        // Interpolate between the two bounds using the configured tuner.
        estimation = (long) (lowerBound + (higherBound - lowerBound) * ndvTuner);
      }
      aggregateData.setNumDVs(estimation);
    }
    columnStatisticsData.setDateStats(aggregateData);
  } else {
    // TODO: bail out if missing stats are over a certain threshold
    // we need extrapolation
    LOG.debug("start extrapolation for {}", colName);
    Map<String, Integer> indexMap = new HashMap<>();
    for (int index = 0; index < partNames.size(); index++) {
      indexMap.put(partNames.get(index), index);
    }
    Map<String, Double> adjustedIndexMap = new HashMap<>();
    Map<String, ColumnStatisticsData> adjustedStatsMap = new HashMap<>();
    // while we scan the css, we also get the densityAvg, lowerbound and
    // higherbound when useDensityFunctionForNDVEstimation is true.
    double densityAvgSum = 0.0;
    if (!areAllNDVEstimatorsMergeable) {
      // if not every partition uses bitvector for ndv, we just fall back to
      // the traditional extrapolation methods.
      for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
        ColumnStatisticsObj cso = csp.getColStatsObj();
        String partName = csp.getPartName();
        DateColumnStatsData newData = cso.getStatsData().getDateStats();
        if (useDensityFunctionForNDVEstimation && newData.isSetLowValue() && newData.isSetHighValue()) {
          densityAvgSum += ((double) diff(newData.getHighValue(), newData.getLowValue())) / newData.getNumDVs();
        }
        adjustedIndexMap.put(partName, (double) indexMap.get(partName));
        adjustedStatsMap.put(partName, cso.getStatsData());
      }
    } else {
      // we first merge all the adjacent bitvectors that we could merge and
      // derive new partition names and index.
      StringBuilder pseudoPartName = new StringBuilder();
      double pseudoIndexSum = 0;
      int length = 0;
      int curIndex = -1;
      DateColumnStatsDataInspector aggregateData = null;
      for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
        ColumnStatisticsObj cso = csp.getColStatsObj();
        String partName = csp.getPartName();
        DateColumnStatsDataInspector newData = dateInspectorFromStats(cso);
        // newData.isSetBitVectors() should be true for sure because we
        // already checked it before.
        if (indexMap.get(partName) != curIndex) {
          // There is bitvector, but it is not adjacent to the previous ones.
          if (length > 0) {
            // we have to set ndv
            adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
            aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
            ColumnStatisticsData csd = new ColumnStatisticsData();
            csd.setDateStats(aggregateData);
            adjustedStatsMap.put(pseudoPartName.toString(), csd);
            if (useDensityFunctionForNDVEstimation) {
              densityAvgSum += ((double) diff(aggregateData.getHighValue(), aggregateData.getLowValue())) / aggregateData.getNumDVs();
            }
            // reset everything
            pseudoPartName = new StringBuilder();
            pseudoIndexSum = 0;
            length = 0;
            ndvEstimator = NumDistinctValueEstimatorFactory.getEmptyNumDistinctValueEstimator(ndvEstimator);
          }
          aggregateData = null;
        }
        curIndex = indexMap.get(partName);
        pseudoPartName.append(partName);
        pseudoIndexSum += curIndex;
        length++;
        curIndex++;
        if (aggregateData == null) {
          aggregateData = newData.deepCopy();
        } else {
          aggregateData.setLowValue(min(aggregateData.getLowValue(), newData.getLowValue()));
          aggregateData.setHighValue(max(aggregateData.getHighValue(), newData.getHighValue()));
          aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
        }
        ndvEstimator.mergeEstimators(newData.getNdvEstimator());
      }
      // Flush the trailing pseudo-partition, if any.
      if (length > 0) {
        // we have to set ndv
        adjustedIndexMap.put(pseudoPartName.toString(), pseudoIndexSum / length);
        aggregateData.setNumDVs(ndvEstimator.estimateNumDistinctValues());
        ColumnStatisticsData csd = new ColumnStatisticsData();
        csd.setDateStats(aggregateData);
        adjustedStatsMap.put(pseudoPartName.toString(), csd);
        if (useDensityFunctionForNDVEstimation) {
          densityAvgSum += ((double) diff(aggregateData.getHighValue(), aggregateData.getLowValue())) / aggregateData.getNumDVs();
        }
      }
    }
    extrapolate(columnStatisticsData, partNames.size(), colStatsWithSourceInfo.size(),
        adjustedIndexMap, adjustedStatsMap, densityAvgSum / adjustedStatsMap.size());
  }
  LOG.debug(
      "Ndv estimation for {} is {}. # of partitions requested: {}. # of partitions found: {}",
      colName, columnStatisticsData.getDateStats().getNumDVs(), partNames.size(),
      colStatsWithSourceInfo.size());
  KllHistogramEstimator mergedKllHistogramEstimator = mergeHistograms(colStatsWithSourceInfo);
  if (mergedKllHistogramEstimator != null) {
    columnStatisticsData.getDateStats().setHistogram(mergedKllHistogramEstimator.serialize());
  }
  statsObj.setStatsData(columnStatisticsData);
  return statsObj;
}
@Test
public void testAggregateMultiStatsWhenOnlySomeAvailable() throws MetaException {
  List<String> partitions = Arrays.asList("part1", "part2", "part3", "part4");

  long[] values1 = { DATE_1.getDaysSinceEpoch(), DATE_2.getDaysSinceEpoch(), DATE_3.getDaysSinceEpoch() };
  ColumnStatisticsData data1 = new ColStatsBuilder<>(Date.class).numNulls(1).numDVs(3)
      .low(DATE_1).high(DATE_3).hll(values1).kll(values1).build();

  long[] values3 = { DATE_7.getDaysSinceEpoch() };
  ColumnStatisticsData data3 = new ColStatsBuilder<>(Date.class).numNulls(3).numDVs(1).low(DATE_7).high(DATE_7)
      .hll(DATE_7.getDaysSinceEpoch()).kll(values3).build();

  long[] values4 = { DATE_3.getDaysSinceEpoch(), DATE_4.getDaysSinceEpoch(), DATE_5.getDaysSinceEpoch() };
  ColumnStatisticsData data4 = new ColStatsBuilder<>(Date.class).numNulls(2).numDVs(3)
      .low(DATE_3).high(DATE_5).hll(values4).kll(values4).build();

  // "part2" deliberately has no statistics, forcing the aggregator down its extrapolation path.
  List<ColStatsObjWithSourceInfo> statsList = Arrays.asList(
      createStatsWithInfo(data1, TABLE, COL, partitions.get(0)),
      createStatsWithInfo(data3, TABLE, COL, partitions.get(2)),
      createStatsWithInfo(data4, TABLE, COL, partitions.get(3)));

  DateColumnStatsAggregator aggregator = new DateColumnStatsAggregator();
  // areAllPartsFound=false: only 3 of the 4 requested partitions have stats.
  ColumnStatisticsObj computedStatsObj = aggregator.aggregate(statsList, partitions, false);

  // hll in case of missing stats is left as null, only numDVs is updated.
  // The extrapolated range reaches DATE_9, beyond the largest observed value (DATE_7).
  ColumnStatisticsData expectedStats = new ColStatsBuilder<>(Date.class).numNulls(8).numDVs(4)
      .low(DATE_1).high(DATE_9).kll(Longs.concat(values1, values3, values4)).build();

  assertEqualStatistics(expectedStats, computedStatsObj.getStatsData());
}
@Override
public void open(Map<String, Object> config, SourceContext sourceContext) throws Exception {
    this.config = config;
    this.sourceContext = sourceContext;
    final String tenant = sourceContext.getTenant();
    final String namespace = sourceContext.getNamespace();
    final String name = sourceContext.getSourceName();
    this.intermediateTopicName =
        SourceConfigUtils.computeBatchSourceIntermediateTopicName(tenant, namespace, name).toString();
    // A single dedicated thread drives discovery for this batch source
    final String threadName = String.format("%s-batch-source-discovery",
        FunctionCommon.getFullyQualifiedName(tenant, namespace, name));
    this.discoveryThread = Executors.newSingleThreadExecutor(new DefaultThreadFactory(threadName));
    this.getBatchSourceConfigs(config);
    this.initializeBatchSource();
    this.start();
}
@Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "Bad config passed to TestTriggerer")
public void testWithoutRightTriggererConfig() throws Exception {
    // A triggerer config lacking the keys TestTriggerer expects must be rejected on open
    Map<String, Object> triggererConfig = new HashMap<>();
    triggererConfig.put("something", "else");
    testBatchConfig.setDiscoveryTriggererConfig(triggererConfig);
    config.put(BatchSourceConfig.BATCHSOURCE_CONFIG_KEY, new Gson().toJson(testBatchConfig));
    batchSourceExecutor.open(config, context);
}
@CheckForNull
@Override
public Set<Path> branchChangedFiles(String targetBranchName, Path rootBaseDir) {
    // Reuse the movement-aware lookup and strip each changed file down to its absolute path.
    // A null intermediate result (e.g. branch cannot be resolved) is propagated unchanged.
    return Optional.ofNullable(branchChangedFilesWithFileMovementDetection(targetBranchName, rootBaseDir))
        .map(changedFiles -> extractAbsoluteFilePaths(changedFiles))
        .orElse(null);
}
@Test
public void branchChangedFiles_should_return_null_when_branch_nonexistent() {
    // A branch that cannot be resolved yields null (not an empty set), signalling "unknown"
    assertThat(newScmProvider().branchChangedFiles("nonexistent", worktree)).isNull();
}
boolean isUpstreamDefinite() {
    // Lazily computed and cached in a Boolean field: the root token is trivially definite;
    // otherwise every ancestor must itself be a definite token.
    if (upstreamDefinite == null) {
        if (isRoot()) {
            upstreamDefinite = true;
        } else {
            upstreamDefinite = prev.isTokenDefinite() && prev.isUpstreamDefinite();
        }
    }
    return upstreamDefinite;
}
@Test
public void is_upstream_definite_in_complex_case() {
    // All concrete tokens upstream -> definite
    assertThat(makePathReturningTail(makePPT("foo"), makePPT("bar"), makePPT("baz")).isUpstreamDefinite()).isTrue();
    // Wildcard is the tail itself; only tokens BEFORE the tail count as upstream
    assertThat(makePathReturningTail(makePPT("foo"), new WildcardPathToken()).isUpstreamDefinite()).isTrue();
    // Wildcard upstream of the tail -> not definite
    assertThat(makePathReturningTail(new WildcardPathToken(), makePPT("bar"), makePPT("baz")).isUpstreamDefinite()).isFalse();
}
public List<QueuedCommand> getRestoreCommands(final Duration duration) {
    // A corrupted command topic must not be replayed; the operator restores from backup instead.
    if (commandTopicBackup.commandTopicCorruption()) {
        log.warn("Corruption detected. "
            + "Use backup to restore command topic.");
        return Collections.emptyList();
    }
    return getAllCommandsInCommandTopic(
        commandConsumer,
        commandTopicPartition,
        Optional.of(commandTopicBackup),
        duration
    );
}
@Test
public void shouldGetRestoreCommandsCorrectly() {
  // Given:
  // Two successive polls deliver all three records; endOffsets/position stubbing lets the
  // consumer loop detect when the topic has been fully drained.
  when(commandConsumer.poll(any(Duration.class)))
      .thenReturn(someConsumerRecords(
          record1,
          record2))
      .thenReturn(someConsumerRecords(
          record3));
  when(commandConsumer.endOffsets(any())).thenReturn(ImmutableMap.of(TOPIC_PARTITION, 3L));
  when(commandConsumer.position(TOPIC_PARTITION)).thenReturn(0L, 2L, 3L);

  // When:
  final List<QueuedCommand> queuedCommandList = commandTopic
      .getRestoreCommands(Duration.ofMillis(1));

  // Then:
  // Restore must start from the beginning of the single command topic partition
  verify(commandConsumer).seekToBeginning(topicPartitionsCaptor.capture());

  assertThat(topicPartitionsCaptor.getValue(),
      equalTo(Collections.singletonList(new TopicPartition(COMMAND_TOPIC_NAME, 0))));
  // Commands come back in offset order with their original offsets preserved
  assertThat(queuedCommandList, equalTo(ImmutableList.of(
      new QueuedCommand(commandId1, command1, Optional.empty(), 0L),
      new QueuedCommand(commandId2, command2, Optional.empty(), 1L),
      new QueuedCommand(commandId3, command3, Optional.empty(), 2L))));
}
public static Duration parse(final String text) {
    try {
        // Expected shape: "<number> <unit>", e.g. "10 SECONDS"
        final String[] tokens = text.split("\\s");
        if (tokens.length != 2) {
            throw new IllegalArgumentException("Expected 2 tokens, got: " + tokens.length);
        }
        final long size = parseNumeric(tokens[0]);
        final String unit = tokens[1];
        return buildDuration(size, unit);
    } catch (final Exception e) {
        // Wrap every failure with the offending input for a actionable message
        throw new IllegalArgumentException("Invalid duration: '" + text + "'. " + e.getMessage(), e);
    }
}
@Test
public void shouldSupportNanos() {
    // The "NANOSECONDS" unit token yields a nanosecond-resolution Duration
    assertThat(DurationParser.parse("27 NANOSECONDS"), is(Duration.ofNanos(27)));
}
boolean isWriteEnclosureForFieldName( ValueMetaInterface v, String fieldName ) {
    // Enclose the header name when the value type itself is always enclosed, or when the
    // enclosure fix is disabled and the name contains a separator/enclosure sequence.
    // NOTE(review): fieldName.getBytes() uses the platform default charset — confirm this
    // matches the encoding used for the binary separator/enclosure bytes.
    if ( isWriteEnclosed( v ) ) {
        return true;
    }
    return isEnclosureFixDisabledAndContainsSeparatorOrEnclosure( fieldName.getBytes() );
}
@Test
public void testWriteEnclosureForFieldName() {
    // Minimal step data: single-byte separator/enclosure and an in-memory writer
    TextFileOutputData data = new TextFileOutputData();
    data.binarySeparator = new byte[1];
    data.binaryEnclosure = new byte[1];
    data.writer = new ByteArrayOutputStream();
    TextFileOutputMeta meta = getTextFileOutputMeta();
    stepMockHelper.stepMeta.setStepMetaInterface( meta );
    TextFileOutput textFileOutput = getTextFileOutput(data, meta);
    ValueMetaBase valueMetaInterface = getValueMetaInterface();
    // A plain field name with no separator/enclosure characters needs no enclosing
    assertFalse(textFileOutput.isWriteEnclosureForFieldName(valueMetaInterface, "fieldName"));
}
@Override
public void prepare(ExecutorDetails exec) {
    // Remember the executor this instance will operate on; no other setup is required.
    this.exec = exec;
}
/**
 * Sizes each rack so one rack alone cannot host the whole topology (4/5 of the needed
 * component slots per rack), then verifies the scheduler fills one rack and spills the
 * remainder onto a second rack — exactly 2 racks used out of 3.
 */
@Test
public void testFillUpRackAndSpilloverToNextRack() {
    INimbus iNimbus = new INimbusTest();
    double compPcore = 100;
    double compOnHeap = 775;
    double compOffHeap = 25;
    int topo1NumSpouts = 1;
    int topo1NumBolts = 5;
    int topo1SpoutParallelism = 100;
    int topo1BoltParallelism = 200;
    final int numRacks = 3;
    final int numSupersPerRack = 10;
    final int numPortsPerSuper = 6;
    final int numZonesPerHost = 1;
    final double numaResourceMultiplier = 1.0;
    int rackStartNum = 0;
    int supStartNum = 0;
    // Rack capacity covers only 4/5 of the topology's executors, forcing a spillover
    long compPerRack = (topo1NumSpouts * topo1SpoutParallelism
        + topo1NumBolts * topo1BoltParallelism) * 4/5; // not enough for topo1
    long compPerSuper = compPerRack / numSupersPerRack;
    double cpuPerSuper = compPcore * compPerSuper;
    double memPerSuper = (compOnHeap + compOffHeap) * compPerSuper;
    double topo1MaxHeapSize = memPerSuper;
    final String topoName1 = "topology1";

    Map<String, SupervisorDetails> supMap = genSupervisorsWithRacksAndNuma(
        numRacks, numSupersPerRack, numZonesPerHost, numPortsPerSuper, rackStartNum, supStartNum,
        cpuPerSuper, memPerSuper, Collections.emptyMap(), numaResourceMultiplier);
    TestDNSToSwitchMapping testDNSToSwitchMapping = new TestDNSToSwitchMapping(supMap.values());

    Config config = new Config();
    config.putAll(createGrasClusterConfig(compPcore, compOnHeap, compOffHeap, null, null));
    config.put(Config.TOPOLOGY_SCHEDULER_STRATEGY, GenericResourceAwareStrategy.class.getName());

    IScheduler scheduler = new ResourceAwareScheduler();
    scheduler.prepare(config, new StormMetricsRegistry());

    TopologyDetails td1 = genTopology(topoName1, config, topo1NumSpouts, topo1NumBolts,
        topo1SpoutParallelism, topo1BoltParallelism, 0, 0, "user", topo1MaxHeapSize);

    //Schedule the topo1 topology and ensure it fits on 2 racks
    Topologies topologies = new Topologies(td1);
    Cluster cluster = new Cluster(iNimbus, new ResourceMetrics(new StormMetricsRegistry()),
        supMap, new HashMap<>(), topologies, config);
    cluster.setNetworkTopography(testDNSToSwitchMapping.getRackToHosts());
    scheduler.schedule(topologies, cluster);
    Set<String> assignedRacks = cluster.getAssignedRacks(td1.getId());
    assertEquals(2, assignedRacks.size(), "Racks for topology=" + td1.getId() + " is " + assignedRacks);
}
/**
 * Applies this extractor to the given message: checks the run condition against the source
 * field, runs the extraction, writes result fields, optionally cuts the matched text out of
 * the source field, and finally runs the configured converters. Timers/counters record
 * condition evaluation and execution for metrics.
 */
public void runExtractor(Message msg) {
    try(final Timer.Context ignored = completeTimer.time()) {
        final String field;
        try (final Timer.Context ignored2 = conditionTimer.time()) {
            // We can only work on Strings.
            if (!(msg.getField(sourceField) instanceof String)) {
                conditionMissesCounter.inc();
                return;
            }
            field = (String) msg.getField(sourceField);
            // Decide if to extract at all.
            if (conditionType.equals(ConditionType.STRING)) {
                if (field.contains(conditionValue)) {
                    conditionHitsCounter.inc();
                } else {
                    conditionMissesCounter.inc();
                    return;
                }
            } else if (conditionType.equals(ConditionType.REGEX)) {
                if (regexConditionPattern.matcher(field).find()) {
                    conditionHitsCounter.inc();
                } else {
                    conditionMissesCounter.inc();
                    return;
                }
            }
        }
        try (final Timer.Context ignored2 = executionTimer.time()) {
            Result[] results;
            try {
                results = run(field);
            } catch (ExtractorException e) {
                // Record the failure on the message instead of propagating — other extractors
                // should still get a chance to run.
                final String error = "Could not apply extractor <" + getTitle() + " (" + getId() + ")>";
                msg.addProcessingError(new Message.ProcessingError(
                        ProcessingFailureCause.ExtractorException, error, ExceptionUtils.getRootCauseMessage(e)));
                return;
            }
            // Nothing extracted, or at least one null value: leave the message untouched.
            if (results == null || results.length == 0
                    || Arrays.stream(results).anyMatch(result -> result.getValue() == null)) {
                return;
            } else if (results.length == 1 && results[0].target == null) {
                // results[0].target is null if this extractor cannot produce multiple fields use targetField in that case
                msg.addField(targetField, results[0].getValue());
            } else {
                for (final Result result : results) {
                    msg.addField(result.getTarget(), result.getValue());
                }
            }
            // Remove original from message?
            if (cursorStrategy.equals(CursorStrategy.CUT) && !targetField.equals(sourceField)
                    && !Message.RESERVED_FIELDS.contains(sourceField) && results[0].beginIndex != -1) {
                final StringBuilder sb = new StringBuilder(field);
                // Delete matches back-to-front so earlier indices stay valid.
                final List<Result> reverseList = Arrays.stream(results)
                        .sorted(Comparator.<Result>comparingInt(result -> result.endIndex).reversed())
                        .collect(Collectors.toList());
                // remove all from reverse so that the indices still match
                for (final Result result : reverseList) {
                    sb.delete(result.getBeginIndex(), result.getEndIndex());
                }
                final String builtString = sb.toString();
                // Placeholder keeps the field present even when everything was cut away.
                final String finalResult = builtString.trim().isEmpty() ? "fullyCutByExtractor" : builtString;
                msg.removeField(sourceField);
                // TODO don't add an empty field back, or rather don't add fullyCutByExtractor
                msg.addField(sourceField, finalResult);
            }
            runConverters(msg);
        }
    }
}
@Test public void testCursorStrategyCutWithAllTextCut() throws Exception { final TestExtractor extractor = new TestExtractor.Builder() .cursorStrategy(CUT) .sourceField("msg") .callback(new Callable<Result[]>() { @Override public Result[] call() throws Exception { return new Result[]{ new Result("the hello", 0, 9) }; } }) .build(); final Message msg = createMessage("message"); msg.addField("msg", "the hello"); extractor.runExtractor(msg); // If all data is cut from the source field, the "fullyCutByExtractor" string gets inserted. assertThat(msg.getField("msg")).isEqualTo("fullyCutByExtractor"); }
/**
 * Validates the server's permessage-deflate handshake response against what this client
 * offered. Returns the negotiated extension, or null when the extension name does not match
 * or any parameter is unknown, out of range, or was not allowed by this handshaker's
 * configuration.
 */
@Override
public WebSocketClientExtension handshakeExtension(WebSocketExtensionData extensionData) {
    if (!PERMESSAGE_DEFLATE_EXTENSION.equals(extensionData.name())) {
        return null;
    }

    boolean succeed = true;
    // Window sizes default to the maximum when the server omits them
    int clientWindowSize = MAX_WINDOW_SIZE;
    int serverWindowSize = MAX_WINDOW_SIZE;
    boolean serverNoContext = false;
    boolean clientNoContext = false;

    Iterator<Entry<String, String>> parametersIterator =
            extensionData.parameters().entrySet().iterator();
    // Stop at the first invalid parameter (succeed flag short-circuits the loop)
    while (succeed && parametersIterator.hasNext()) {
        Entry<String, String> parameter = parametersIterator.next();

        if (CLIENT_MAX_WINDOW.equalsIgnoreCase(parameter.getKey())) {
            // allowed client_window_size_bits
            if (allowClientWindowSize) {
                clientWindowSize = Integer.parseInt(parameter.getValue());
                if (clientWindowSize > MAX_WINDOW_SIZE || clientWindowSize < MIN_WINDOW_SIZE) {
                    succeed = false;
                }
            } else {
                succeed = false;
            }
        } else if (SERVER_MAX_WINDOW.equalsIgnoreCase(parameter.getKey())) {
            // acknowledged server_window_size_bits
            serverWindowSize = Integer.parseInt(parameter.getValue());
            if (serverWindowSize > MAX_WINDOW_SIZE || serverWindowSize < MIN_WINDOW_SIZE) {
                succeed = false;
            }
        } else if (CLIENT_NO_CONTEXT.equalsIgnoreCase(parameter.getKey())) {
            // allowed client_no_context_takeover
            if (allowClientNoContext) {
                clientNoContext = true;
            } else {
                succeed = false;
            }
        } else if (SERVER_NO_CONTEXT.equalsIgnoreCase(parameter.getKey())) {
            // acknowledged server_no_context_takeover
            serverNoContext = true;
        } else {
            // unknown parameter
            succeed = false;
        }
    }

    // The server must honour what we demanded: no-context takeover if requested, and a
    // window no larger than the requested server window size.
    if ((requestedServerNoContext && !serverNoContext) || requestedServerWindowSize < serverWindowSize) {
        succeed = false;
    }

    if (succeed) {
        return new PermessageDeflateExtension(serverNoContext, serverWindowSize,
                clientNoContext, clientWindowSize, extensionFilterProvider);
    } else {
        return null;
    }
}
@Test
public void testCustomHandshake() {
    WebSocketClientExtension extension;
    Map<String, String> parameters;

    // Handshaker configured with compression level 6, server no-context required,
    // requested server window 10, client window/no-context allowed.
    PerMessageDeflateClientExtensionHandshaker handshaker =
            new PerMessageDeflateClientExtensionHandshaker(6, true, 10, true, true);

    // Case 1: server echoes every parameter within the allowed bounds -> accepted
    parameters = new HashMap<String, String>();
    parameters.put(CLIENT_MAX_WINDOW, "12");
    parameters.put(SERVER_MAX_WINDOW, "8");
    parameters.put(CLIENT_NO_CONTEXT, null);
    parameters.put(SERVER_NO_CONTEXT, null);

    extension = handshaker.handshakeExtension(
            new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, parameters));

    assertNotNull(extension);
    assertEquals(RSV1, extension.rsv());
    assertTrue(extension.newExtensionDecoder() instanceof PerMessageDeflateDecoder);
    assertTrue(extension.newExtensionEncoder() instanceof PerMessageDeflateEncoder);

    // Case 2: server replies with only server-side parameters matching the request -> accepted
    parameters = new HashMap<String, String>();
    parameters.put(SERVER_MAX_WINDOW, "10");
    parameters.put(SERVER_NO_CONTEXT, null);

    extension = handshaker.handshakeExtension(
            new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, parameters));

    assertNotNull(extension);
    assertEquals(RSV1, extension.rsv());
    assertTrue(extension.newExtensionDecoder() instanceof PerMessageDeflateDecoder);
    assertTrue(extension.newExtensionEncoder() instanceof PerMessageDeflateEncoder);

    // Case 3: empty response omits the required server_no_context_takeover -> rejected
    parameters = new HashMap<String, String>();

    extension = handshaker.handshakeExtension(
            new WebSocketExtensionData(PERMESSAGE_DEFLATE_EXTENSION, parameters));

    assertNull(extension);
}
Plugin create(Options.Plugin plugin) {
    // Instantiate the plugin described by the option; checked failures surface
    // as Cucumber's runtime exception.
    try {
        return instantiate(plugin.pluginString(), plugin.pluginClass(), plugin.argument());
    } catch (IOException e) {
        throw new CucumberException(e);
    } catch (URISyntaxException e) {
        throw new CucumberException(e);
    }
}
@Test
void plugin_does_not_buffer_its_output() {
    // Swap System.out for an in-memory stream; restore it in finally no matter what.
    PrintStream previousSystemOut = System.out;
    OutputStream mockSystemOut = new ByteArrayOutputStream();
    try {
        System.setOut(new PrintStream(mockSystemOut));

        // Need to create a new plugin factory here since we need it to pick
        // up the new value of System.out
        fc = new PluginFactory();
        PluginOption option = parse("progress");
        ProgressFormatter plugin = (ProgressFormatter) fc.create(option);
        EventBus bus = new TimeServiceEventBus(new ClockStub(ZERO), UUID::randomUUID);
        plugin.setEventPublisher(bus);
        Result result = new Result(Status.PASSED, ZERO, null);
        TestStepFinished event = new TestStepFinished(bus.getInstant(), mock(TestCase.class), mock(PickleStepTestStep.class), result);
        bus.send(event);

        // Output must appear immediately after the event, i.e. without waiting for a flush/close
        assertThat(mockSystemOut.toString(), is(not(equalTo("")))); 
    } finally {
        System.setOut(previousSystemOut);
    }
}
@Override
public DevOpsProjectCreationContext create(AlmSettingDto almSettingDto, DevOpsProjectDescriptor devOpsProjectDescriptor) {
    // A valid access token is the precondition for querying the DevOps platform
    return createDevOpsProject(almSettingDto, devOpsProjectDescriptor, getAccessToken(almSettingDto));
}
@Test
void create_whenRepoFound_createsDevOpsProject() {
    // Fixture chain: ALM setting -> valid PAT for it -> a GitHub repository resolvable with both
    AlmSettingDto almSettingDto = mockAlmSettingDto();
    AlmPatDto almPatDto = mockValidAccessToken(almSettingDto);
    Repository repository = mockGitHubRepository(almPatDto, almSettingDto);

    DevOpsProjectCreationContext devOpsProjectCreationContext = githubDevOpsProjectService.create(almSettingDto, DEV_OPS_PROJECT_DESCRIPTOR);

    // The context mirrors the repository's attributes; the platform identifier is the full name
    assertThat(devOpsProjectCreationContext.name()).isEqualTo(repository.getName());
    assertThat(devOpsProjectCreationContext.fullName()).isEqualTo(repository.getFullName());
    assertThat(devOpsProjectCreationContext.devOpsPlatformIdentifier()).isEqualTo(repository.getFullName());
    assertThat(devOpsProjectCreationContext.isPublic()).isTrue();
    assertThat(devOpsProjectCreationContext.defaultBranchName()).isEqualTo(repository.getDefaultBranch());
}
@Override
public float floatValue() {
    // The value is already held as a float; return it as-is.
    return value;
}
/**
 * Values larger than Float.MAX_VALUE (in either sign, in scientific or plain notation)
 * must be clamped to +/-Float.MAX_VALUE when read back as a float.
 */
@Test
void testVeryLargeValues() throws IOException {
    double largeValue = Float.MAX_VALUE * 10d;
    assertEquals(1, Double.compare(largeValue, Float.MAX_VALUE),
            "Test must be performed with a value larger than Float.MAX_VALUE.");

    // scientific notation, e.g. "3.4028234663852886E39"
    String asString = String.valueOf(largeValue);
    COSFloat cosFloat = new COSFloat(asString);
    assertEquals(Float.MAX_VALUE, cosFloat.floatValue());

    // same magnitude in plain notation (all digits spelled out, no exponent)
    asString = new BigDecimal(asString).toPlainString();
    cosFloat = new COSFloat(asString);
    assertEquals(Float.MAX_VALUE, cosFloat.floatValue());

    largeValue *= -1;

    // negative scientific notation, e.g. "-3.4028234663852886E39"
    asString = String.valueOf(largeValue);
    cosFloat = new COSFloat(asString);
    assertEquals(-Float.MAX_VALUE, cosFloat.floatValue());

    // negative plain notation
    asString = new BigDecimal(asString).toPlainString();
    cosFloat = new COSFloat(asString);
    assertEquals(-Float.MAX_VALUE, cosFloat.floatValue());
}
/**
 * URL-encodes the given string using the {@code application/x-www-form-urlencoded} scheme
 * with UTF-8 (e.g. space becomes {@code +}, {@code %} becomes {@code %25}).
 *
 * @param toEncode the string to encode; must not be null
 * @return the encoded string
 * @throws IllegalArgumentException if {@code toEncode} is null
 */
public static String encodeString(String toEncode) {
    // Plain java.lang check instead of Guava Preconditions: same exception type and
    // message, one less third-party call in a pure-stdlib utility.
    if (toEncode == null) {
        throw new IllegalArgumentException("Invalid string to encode: null");
    }
    try {
        return URLEncoder.encode(toEncode, StandardCharsets.UTF_8.name());
    } catch (UnsupportedEncodingException e) {
        // Practically unreachable: UTF-8 support is mandatory on every JVM.
        throw new UncheckedIOException(
            String.format("Failed to URL encode '%s': UTF-8 encoding is not supported", toEncode), e);
    }
}
@Test @SuppressWarnings("checkstyle:AvoidEscapedUnicodeCharacters") public void testOAuth2URLEncoding() { // from OAuth2, RFC 6749 Appendix B. String utf8 = "\u0020\u0025\u0026\u002B\u00A3\u20AC"; String expected = "+%25%26%2B%C2%A3%E2%82%AC"; assertThat(RESTUtil.encodeString(utf8)).isEqualTo(expected); }
/**
 * Spring bean producing the {@link TimerRegistry}: creates the registry from the configured
 * properties, attaches event consumers, then pre-initializes the configured timer instances.
 * The {@link MeterRegistry} is optional and may be null when no Micrometer registry is wired.
 */
@Bean
public TimerRegistry timerRegistry(
    TimerConfigurationProperties timerConfigurationProperties,
    EventConsumerRegistry<TimerEvent> timerEventConsumerRegistry,
    RegistryEventConsumer<Timer> timerRegistryEventConsumer,
    @Qualifier("compositeTimerCustomizer") CompositeCustomizer<TimerConfigCustomizer> compositeTimerCustomizer,
    @Autowired(required = false) MeterRegistry registry
) {
    // Order matters: consumers must be registered before instances are initialized
    // so initialization events are observed.
    TimerRegistry timerRegistry =
        createTimerRegistry(timerConfigurationProperties, timerRegistryEventConsumer, compositeTimerCustomizer, registry);
    registerEventConsumer(timerRegistry, timerEventConsumerRegistry, timerConfigurationProperties);
    initTimerRegistry(timerRegistry, timerConfigurationProperties, compositeTimerCustomizer);
    return timerRegistry;
}
@Test
public void shouldConfigureInstancesUsingPredefinedDefaultConfig() {
    // backend1 overrides the metric names; backend2 overrides only the failure tag resolver
    InstanceProperties instanceProperties1 = new InstanceProperties()
        .setMetricNames("resilience4j.timer.operations1");
    InstanceProperties instanceProperties2 = new InstanceProperties()
        .setOnFailureTagResolver(FixedOnFailureTagResolver.class);
    TimerConfigurationProperties configurationProperties = new TimerConfigurationProperties();
    configurationProperties.getInstances().put("backend1", instanceProperties1);
    configurationProperties.getInstances().put("backend2", instanceProperties2);

    TimerConfiguration configuration = new TimerConfiguration();
    TimerRegistry registry = configuration.timerRegistry(
        configurationProperties, new DefaultEventConsumerRegistry<>(),
        new CompositeRegistryEventConsumer<>(emptyList()), new CompositeCustomizer<>(emptyList()), new SimpleMeterRegistry()
    );

    // Only the two declared instances are pre-created
    assertThat(registry.getAllTimers().count()).isEqualTo(2);

    // backend1: custom metric names, default tag resolver (simple exception class name)
    Timer timer1 = registry.timer("backend1");
    assertThat(timer1).isNotNull();
    assertThat(timer1.getTimerConfig().getMetricNames()).isEqualTo("resilience4j.timer.operations1");
    assertThat(timer1.getTimerConfig().getOnFailureTagResolver().apply(new RuntimeException())).isEqualTo("RuntimeException");

    // backend2: default metric names, custom tag resolver
    Timer timer2 = registry.timer("backend2");
    assertThat(timer2).isNotNull();
    assertThat(timer2.getTimerConfig().getMetricNames()).isEqualTo("resilience4j.timer.calls");
    assertThat(timer2.getTimerConfig().getOnFailureTagResolver()).isInstanceOf(FixedOnFailureTagResolver.class);

    // backend3 is undeclared: created on demand with all defaults
    Timer timer3 = registry.timer("backend3");
    assertThat(timer3).isNotNull();
    assertThat(timer3.getTimerConfig().getMetricNames()).isEqualTo("resilience4j.timer.calls");
    assertThat(timer3.getTimerConfig().getOnFailureTagResolver().apply(new RuntimeException())).isEqualTo("RuntimeException");
}
public void setContract(@Nullable Produce contract) {
    // Update the in-memory contract (null clears it), persist it, then
    // re-evaluate any state that depends on the current contract.
    this.contract = contract;
    setStoredContract(contract);
    handleContractState();
}
/**
 * With one patch holding harvestable onions and another holding harvestable cabbage,
 * a cabbage contract is already satisfiable and must report COMPLETED.
 */
@Test
public void cabbageContractOnionHarvestableAndCabbageHarvestable() {
    final long unixNow = Instant.now().getEpochSecond();

    // Get the two allotment patches
    final FarmingPatch patch1 = farmingGuildPatches.get(Varbits.FARMING_4773);
    final FarmingPatch patch2 = farmingGuildPatches.get(Varbits.FARMING_4774);
    assertNotNull(patch1);
    assertNotNull(patch2);

    // Specify the two allotment patches
    when(farmingTracker.predictPatch(patch1))
        .thenReturn(new PatchPrediction(Produce.ONION, CropState.HARVESTABLE, unixNow, 3, 3));
    when(farmingTracker.predictPatch(patch2))
        .thenReturn(new PatchPrediction(Produce.CABBAGE, CropState.HARVESTABLE, unixNow, 3, 3));

    farmingContractManager.setContract(Produce.CABBAGE);
    assertEquals(SummaryState.COMPLETED, farmingContractManager.getSummary());
}
public void onFailedPartitionRequest() {
    // Delegate to the gate so it can verify the producer's state before treating
    // the failed request as a hard error.
    inputGate.triggerPartitionStateCheck(partitionId, channelInfo);
}
// Regression test: onFailedPartitionRequest() must not contend on SingleInputGate#requestLock.
// A task thread is parked inside createPartitionRequestClient(...) (a section that holds the
// lock); a simulated network thread then calls onFailedPartitionRequest() and only releases
// the parked thread afterwards. If the network thread blocked on the lock, the await below
// would time out and the flag would be set.
@Test
void testOnFailedPartitionRequestDoesNotBlockNetworkThreads() throws Exception {
    final long testBlockedWaitTimeoutMillis = 30_000L;
    // Producer is always reported RUNNING so any triggered state check completes immediately.
    final PartitionProducerStateChecker partitionProducerStateChecker =
            (jobId, intermediateDataSetId, resultPartitionId) ->
                    CompletableFuture.completedFuture(ExecutionState.RUNNING);
    final NettyShuffleEnvironment shuffleEnvironment = new NettyShuffleEnvironmentBuilder().build();
    final Task task =
            new TestTaskBuilder(shuffleEnvironment)
                    .setPartitionProducerStateChecker(partitionProducerStateChecker)
                    .build(EXECUTOR_EXTENSION.getExecutor());
    final SingleInputGate inputGate =
            new SingleInputGateBuilder().setPartitionProducerStateProvider(task).build();
    TestTaskBuilder.setTaskState(task, ExecutionState.RUNNING);
    final OneShotLatch ready = new OneShotLatch();
    final OneShotLatch blocker = new OneShotLatch();
    final AtomicBoolean timedOutOrInterrupted = new AtomicBoolean(false);
    // Connection manager that stalls inside createPartitionRequestClient(...) while the
    // request lock is held, until the network thread trips the blocker latch.
    final ConnectionManager blockingConnectionManager =
            new TestingConnectionManager() {
                @Override
                public PartitionRequestClient createPartitionRequestClient(
                        ConnectionID connectionId) {
                    ready.trigger();
                    try {
                        // We block here, in a section that holds the
                        // SingleInputGate#requestLock
                        blocker.await(testBlockedWaitTimeoutMillis, TimeUnit.MILLISECONDS);
                    } catch (InterruptedException | TimeoutException e) {
                        timedOutOrInterrupted.set(true);
                    }
                    return new TestingPartitionRequestClient();
                }
            };
    final RemoteInputChannel remoteInputChannel =
            InputChannelBuilder.newBuilder()
                    .setConnectionManager(blockingConnectionManager)
                    .buildRemoteChannel(inputGate);
    inputGate.setInputChannels(remoteInputChannel);
    final Thread simulatedNetworkThread =
            new Thread(
                    () -> {
                        try {
                            ready.await();
                            // We want to make sure that our simulated network thread does not
                            // block on
                            // SingleInputGate#requestLock as well through this call.
                            remoteInputChannel.onFailedPartitionRequest();
                            // Will only give free the blocker if we did not block ourselves.
                            blocker.trigger();
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                        }
                    });
    simulatedNetworkThread.start();
    // The entry point to that will lead us into
    // blockingConnectionManager#createPartitionRequestClient(...).
    inputGate.requestPartitions();
    simulatedNetworkThread.join();
    assertThat(timedOutOrInterrupted)
            .withFailMessage(
                    "Test ended by timeout or interruption - this indicates that the network thread was blocked.")
            .isFalse();
}
/**
 * Filters the given metadata in place; implementations mutate the passed object
 * directly (e.g. removing or rewriting fields).
 *
 * @param metadata the metadata to filter
 * @throws TikaException if filtering fails
 */
public abstract void filter(Metadata metadata) throws TikaException;
/**
 * Loads the exclude-filter config and verifies that only the non-excluded
 * field ("content") survives filtering.
 */
@Test
public void testConfigExcludeFilter() throws Exception {
    TikaConfig config = getConfig("TIKA-3137-exclude.xml");

    // Populate each field with its own name as the value.
    Metadata metadata = new Metadata();
    for (String field : new String[] {"title", "author", "content"}) {
        metadata.set(field, field);
    }

    config.getMetadataFilter().filter(metadata);

    // "title" and "author" are excluded; only "content" remains.
    assertEquals(1, metadata.size());
    assertEquals("content", metadata.get("content"));
}
/**
 * Parses a single CSV cell into the Java value matching the Beam schema field's type.
 *
 * @param cell raw cell text
 * @param field target schema field; supplies the type and the name used in error messages
 * @return the parsed value
 * @throws IllegalArgumentException if the cell cannot be parsed as the field's type
 * @throws UnsupportedOperationException if the field type has no built-in parser
 */
static Object parseCell(String cell, Schema.Field field) {
    Schema.FieldType fieldType = field.getType();
    try {
        switch (fieldType.getTypeName()) {
            case STRING:
                return cell;
            case INT16:
                return Short.parseShort(cell);
            case INT32:
                return Integer.parseInt(cell);
            case INT64:
                return Long.parseLong(cell);
            case BOOLEAN:
                return Boolean.parseBoolean(cell);
            case BYTE:
                return Byte.parseByte(cell);
            case DECIMAL:
                return new BigDecimal(cell);
            case DOUBLE:
                return Double.parseDouble(cell);
            case FLOAT:
                return Float.parseFloat(cell);
            case DATETIME:
                return Instant.parse(cell);
            default:
                throw new UnsupportedOperationException(
                    "Unsupported type: " + fieldType + ", consider using withCustomRecordParsing");
        }
    } catch (IllegalArgumentException | java.time.format.DateTimeParseException e) {
        // NOTE(review): if Instant above is java.time.Instant, parse failures raise
        // DateTimeParseException, which is NOT an IllegalArgumentException and previously
        // escaped without the field-name context below. The extra catch arm is harmless if
        // Instant is Joda (which throws IllegalArgumentException) — confirm which is imported.
        throw new IllegalArgumentException(
            e.getMessage() + " field " + field.getName() + " was received -- type mismatch");
    }
}
/**
 * {@link Long#parseLong} rejects surrounding whitespace, so parsing " 12 " as an INT64
 * cell must throw with a message naming both the offending input and the target field.
 *
 * <p>Cleanup: the original built an unused {@code Long} expected value and stored the
 * cell in a raw-typed {@code DefaultMapEntry} that was never asserted against; a plain
 * String expresses the same intent without the dead code.
 */
@Test
public void givenLongWithSurroundingSpaces_throws() {
    String paddedCell = " 12 ";
    Schema schema =
        Schema.builder()
            .addInt16Field("a_short")
            .addInt32Field("an_integer")
            .addInt64Field("a_long")
            .build();

    IllegalArgumentException e =
        assertThrows(
            IllegalArgumentException.class,
            () -> CsvIOParseHelpers.parseCell(paddedCell, schema.getField("a_long")));

    assertEquals(
        "For input string: \""
            + paddedCell
            + "\" field "
            + schema.getField("a_long").getName()
            + " was received -- type mismatch",
        e.getMessage());
}
/**
 * Renames {@code from} to {@code to}. Glue has no atomic rename, so this copies the old
 * Glue table entry (owner, type, parameters, storage descriptor — all still pointing at
 * the existing Iceberg metadata) into a new entry under the destination name, then drops
 * the old entry. If the drop fails, the freshly created destination entry is deleted so
 * the old table remains usable.
 *
 * @throws NoSuchNamespaceException if the destination namespace does not exist
 * @throws NoSuchTableException if the source table does not exist in Glue
 */
@Override
public void renameTable(TableIdentifier from, TableIdentifier to) {
    // check new namespace exists
    if (!namespaceExists(to.namespace())) {
        throw new NoSuchNamespaceException(
            "Cannot rename %s to %s because namespace %s does not exist", from, to, to.namespace());
    }
    // keep metadata
    Table fromTable;
    String fromTableDbName =
        IcebergToGlueConverter.getDatabaseName(from, awsProperties.glueCatalogSkipNameValidation());
    String fromTableName =
        IcebergToGlueConverter.getTableName(from, awsProperties.glueCatalogSkipNameValidation());
    String toTableDbName =
        IcebergToGlueConverter.getDatabaseName(to, awsProperties.glueCatalogSkipNameValidation());
    String toTableName =
        IcebergToGlueConverter.getTableName(to, awsProperties.glueCatalogSkipNameValidation());
    try {
        GetTableResponse response =
            glue.getTable(
                GetTableRequest.builder()
                    .catalogId(awsProperties.glueCatalogId())
                    .databaseName(fromTableDbName)
                    .name(fromTableName)
                    .build());
        fromTable = response.table();
    } catch (EntityNotFoundException e) {
        throw new NoSuchTableException(
            e, "Cannot rename %s because the table does not exist in Glue", from);
    }
    // use the same Glue info to create the new table, pointing to the old metadata
    TableInput.Builder tableInputBuilder =
        TableInput.builder()
            .owner(fromTable.owner())
            .tableType(fromTable.tableType())
            .parameters(fromTable.parameters())
            .storageDescriptor(fromTable.storageDescriptor());
    glue.createTable(
        CreateTableRequest.builder()
            .catalogId(awsProperties.glueCatalogId())
            .databaseName(toTableDbName)
            .tableInput(tableInputBuilder.name(toTableName).build())
            .build());
    LOG.info("created rename destination table {}", to);
    try {
        dropTable(from, false);
    } catch (Exception e) {
        // rollback, delete renamed table
        LOG.error(
            "Fail to drop old table {} after renaming to {}, rollback to use the old table",
            from,
            to,
            e);
        glue.deleteTable(
            DeleteTableRequest.builder()
                .catalogId(awsProperties.glueCatalogId())
                .databaseName(toTableDbName)
                .name(toTableName)
                .build());
        throw e;
    }
    LOG.info("Successfully renamed table from {} to {}", from, to);
}
// NOTE(review): despite its name, this test invokes glueCatalog.dropTable(...), not
// renameTable(...) — confirm whether it was meant to call renameTable with a destination
// identifier. As written it verifies that exactly one DeleteTableRequest reaches Glue
// (the counter goes 1 -> 0).
@Test
public void testRenameTable() {
    AtomicInteger counter = new AtomicInteger(1);
    // Mark the mocked Glue table as an Iceberg table so the catalog accepts it.
    Map<String, String> properties = Maps.newHashMap();
    properties.put(
        BaseMetastoreTableOperations.TABLE_TYPE_PROP,
        BaseMetastoreTableOperations.ICEBERG_TABLE_TYPE_VALUE);
    Mockito.doReturn(
            GetTableResponse.builder()
                .table(
                    Table.builder().databaseName("db1").name("t1").parameters(properties).build())
                .build())
        .when(glue)
        .getTable(Mockito.any(GetTableRequest.class));
    Mockito.doReturn(GetTablesResponse.builder().build())
        .when(glue)
        .getTables(Mockito.any(GetTablesRequest.class));
    Mockito.doReturn(
            GetDatabaseResponse.builder().database(Database.builder().name("db1").build()).build())
        .when(glue)
        .getDatabase(Mockito.any(GetDatabaseRequest.class));
    // Each delete call decrements the counter; the test expects exactly one call.
    Mockito.doAnswer(
            new Answer() {
                @Override
                public Object answer(InvocationOnMock invocation) throws Throwable {
                    counter.decrementAndGet();
                    return DeleteTableResponse.builder().build();
                }
            })
        .when(glue)
        .deleteTable(Mockito.any(DeleteTableRequest.class));
    glueCatalog.dropTable(TableIdentifier.of("db1", "t1"));
    assertThat(counter.get()).isEqualTo(0);
}
/**
 * Returns the circuit breaker registered under {@code name}, creating it with the
 * registry's default configuration if it does not exist yet.
 */
@Override
public CircuitBreaker circuitBreaker(String name) {
    return circuitBreaker(name, getDefaultConfig());
}
/**
 * A circuit breaker created with a named, previously-registered configuration
 * must pick up that configuration's settings.
 */
@Test
public void testCreateCircuitBreakerWithConfigName() {
    CircuitBreakerRegistry registry = CircuitBreakerRegistry.ofDefaults();
    CircuitBreakerConfig customConfig = CircuitBreakerConfig.custom()
        .slidingWindowSize(5)
        .build();
    registry.addConfiguration("testConfig", customConfig);

    CircuitBreaker circuitBreaker = registry.circuitBreaker("circuitBreaker", "testConfig");

    assertThat(circuitBreaker).isNotNull();
    assertThat(circuitBreaker.getCircuitBreakerConfig().getSlidingWindowSize()).isEqualTo(5);
}
/**
 * Formats the approximate duration between {@code then} and the reference time
 * as a human-readable relative-time string.
 *
 * @param then the date to format; {@code null} is treated as "now"
 * @return the formatted relative-time string
 */
public String format(Date then) {
    Date reference = (then == null) ? now() : then;
    return format(approximateDuration(reference));
}
// 1 hour 45 minutes in the past should round up and be reported as "2 hours ago".
@Test
public void testWithinTwoHoursRounding() throws Exception {
    PrettyTime t = new PrettyTime(now);
    Assert.assertEquals("2 hours ago", t.format(now.minusHours(1).minusMinutes(45)));
}
/**
 * Builds the checksum query for the given table and columns: a row count followed by
 * the checksum expressions contributed by each column's category-specific validator,
 * optionally restricted by a partition predicate.
 */
public Query generateChecksumQuery(QualifiedName tableName, List<Column> columns, Optional<Expression> partitionPredicate) {
    ImmutableList.Builder<SelectItem> items = ImmutableList.builder();
    // The row count always comes first.
    items.add(new SingleColumn(new FunctionCall(QualifiedName.of("count"), ImmutableList.of())));
    columns.forEach(column ->
            items.addAll(columnValidators.get(column.getCategory()).get().generateChecksumColumns(column)));
    return simpleQuery(
            new Select(false, items.build()),
            new Table(tableName),
            partitionPredicate,
            Optional.empty());
}
// Verifies the full checksum query the string-as-double validator generates for a varchar
// column, a varchar array, and a varchar->varchar map: per-column checksums plus sum /
// NaN / +-infinity / null counters over TRY_CAST-to-double projections. Lines are compared
// order-insensitively (both sides are split and sorted) so select-item order is irrelevant.
@Test
public void testValidateStringAsDoubleChecksumQuery() {
    Query checksumQuery = stringAsDoubleValidator.generateChecksumQuery(
        QualifiedName.of("test:di"),
        ImmutableList.of(
            VARCHAR_COLUMN,
            VARCHAR_ARRAY_COLUMN,
            MAP_VARCHAR_VARCHAR_COLUMN),
        Optional.empty());
    Statement expectedChecksumQuery = sqlParser.createStatement(
        "SELECT\n" +
            "  \"count\"(*)\n" +
            ", \"checksum\"(\"varchar\") \"varchar$checksum\"\n" +
            ", \"sum\"(TRY_CAST(\"varchar\" AS double)) FILTER (WHERE \"is_finite\"(TRY_CAST(\"varchar\" AS double))) \"varchar_as_double$sum\"\n" +
            ", \"count\"(TRY_CAST(\"varchar\" AS double)) FILTER (WHERE \"is_nan\"(TRY_CAST(\"varchar\" AS double))) \"varchar_as_double$nan_count\"\n" +
            ", \"count\"(TRY_CAST(\"varchar\" AS double)) FILTER (WHERE (TRY_CAST(\"varchar\" AS double) = \"infinity\"())) \"varchar_as_double$pos_inf_count\"\n" +
            ", \"count\"(TRY_CAST(\"varchar\" AS double)) FILTER (WHERE (TRY_CAST(\"varchar\" AS double) = -\"infinity\"())) \"varchar_as_double$neg_inf_count\"\n" +
            ", \"count_if\"(\"varchar\" is null) \"varchar$null_count\"\n" +
            ", \"count_If\"(TRY_CAST(\"varchar\" AS double) is null) \"varchar_as_double$null_count\"\n" +
            ", \"checksum\"(\"array_sort\"(\"varchar_array\")) \"varchar_array$checksum\"\n" +
            ", \"sum\"(\"array_sum\"(\"filter\"(\"transform\"(\"varchar_array\", (x) -> TRY_CAST(x AS double)), (x) -> \"is_finite\"(x)))) \"varchar_array_as_double$sum\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"varchar_array\", (x) -> TRY_CAST(x AS double)), (x) -> \"is_nan\"(x)))) \"varchar_array_as_double$nan_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"varchar_array\", (x) -> TRY_CAST(x AS double)), (x) -> (x = +\"infinity\"())))) \"varchar_array_as_double$pos_inf_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"varchar_array\", (x) -> TRY_CAST(x AS double)), (x) -> (x = -\"infinity\"())))) \"varchar_array_as_double$neg_inf_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"varchar_array\", (x) -> x is null))) \"varchar_array$null_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"varchar_array\", (x) -> TRY_CAST(x AS double)), (x) -> x is null))) \"varchar_array_as_double$null_count\"\n" +
            ", \"checksum\"(\"cardinality\"(\"varchar_array\")) \"varchar_array$cardinality_checksum\"\n" +
            ", COALESCE(\"sum\"(\"cardinality\"(\"varchar_array\")), 0) \"varchar_array$cardinality_sum\"\n" +
            ", \"checksum\"(\"map_varchar_varchar\") \"map_varchar_varchar$checksum\"\n" +
            ", \"checksum\"(\"array_sort\"(\"map_keys\"(\"map_varchar_varchar\"))) \"map_varchar_varchar$keys_checksum\"\n" +
            ", \"checksum\"(\"array_sort\"(\"map_values\"(\"map_varchar_varchar\"))) \"map_varchar_varchar$values_checksum\"\n" +
            ", \"sum\"(\"array_sum\"(\"filter\"(\"transform\"(\"map_keys\"(\"map_varchar_varchar\"), (x) -> TRY_CAST(x AS double)), (x) -> \"is_finite\"(x)))) \"map_varchar_varchar_key_array_as_double$sum\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"map_keys\"(\"map_varchar_varchar\"), (x) -> TRY_CAST(x AS double)), (x) -> \"is_nan\"(x)))) \"map_varchar_varchar_key_array_as_double$nan_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"map_keys\"(\"map_varchar_varchar\"), (x) -> TRY_CAST(x AS double)), (x) -> (x = +\"infinity\"())))) \"map_varchar_varchar_key_array_as_double$pos_inf_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"map_keys\"(\"map_varchar_varchar\"), (x) -> TRY_CAST(x AS double)), (x) -> (x = -\"infinity\"())))) \"map_varchar_varchar_key_array_as_double$neg_inf_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"map_keys\"(\"map_varchar_varchar\"), (x) -> x is null))) \"map_varchar_varchar_key_array$null_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"map_keys\"(\"map_varchar_varchar\"), (x) -> TRY_CAST(x AS double)), (x) -> x is null))) \"map_varchar_varchar_key_array_as_double$null_count\"\n" +
            ", \"sum\"(\"array_sum\"(\"filter\"(\"transform\"(\"map_values\"(\"map_varchar_varchar\"), (x) -> TRY_CAST(x AS double)), (x) -> \"is_finite\"(x)))) \"map_varchar_varchar_value_array_as_double$sum\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"map_values\"(\"map_varchar_varchar\"), (x) -> TRY_CAST(x AS double)), (x) -> \"is_nan\"(x)))) \"map_varchar_varchar_value_array_as_double$nan_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"map_values\"(\"map_varchar_varchar\"), (x) -> TRY_CAST(x AS double)), (x) -> (x = +\"infinity\"())))) \"map_varchar_varchar_value_array_as_double$pos_inf_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"map_values\"(\"map_varchar_varchar\"), (x) -> TRY_CAST(x AS double)), (x) -> (x = -\"infinity\"())))) \"map_varchar_varchar_value_array_as_double$neg_inf_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"map_values\"(\"map_varchar_varchar\"), (x) -> x is null))) \"map_varchar_varchar_value_array$null_count\"\n" +
            ", \"sum\"(\"cardinality\"(\"filter\"(\"transform\"(\"map_values\"(\"map_varchar_varchar\"), (x) -> TRY_CAST(x AS double)), (x) -> x is null))) \"map_varchar_varchar_value_array_as_double$null_count\"\n" +
            ", \"checksum\"(\"cardinality\"(\"map_varchar_varchar\")) \"map_varchar_varchar$cardinality_checksum\"\n" +
            ", COALESCE(\"sum\"(\"cardinality\"(\"map_varchar_varchar\")), 0) \"map_varchar_varchar$cardinality_sum\"\n" +
            "FROM\n" +
            "  test:di\n",
        PARSING_OPTIONS);
    // Compare formatted SQL line-by-line, ignoring line order.
    String[] arrayExpected = formatSql(expectedChecksumQuery, Optional.empty()).split("\n");
    String[] arrayActual = formatSql(checksumQuery, Optional.empty()).split("\n");
    Arrays.sort(arrayExpected);
    Arrays.sort(arrayActual);
    assertEquals(arrayActual, arrayExpected);
}
static void setHttp2Authority(String authority, Http2Headers out) { // The authority MUST NOT include the deprecated "userinfo" subcomponent if (authority != null) { if (authority.isEmpty()) { out.authority(EMPTY_STRING); } else { int start = authority.indexOf('@') + 1; int length = authority.length() - start; if (length == 0) { throw new IllegalArgumentException("authority: " + authority); } out.authority(new AsciiString(authority, start, length)); } } }
/**
 * "info@" leaves an empty host after the userinfo is stripped, which must be rejected.
 */
@Test
public void setHttp2AuthorityWithEmptyAuthority() {
    assertThrows(IllegalArgumentException.class,
            () -> HttpConversionUtil.setHttp2Authority("info@", new DefaultHttp2Headers()));
}
@Override public Map<String, String> getProperties() { Map<String, String> properties = super.getProperties(); // For materialized view, add into session variables into properties. if (super.getTableProperty() != null && super.getTableProperty().getProperties() != null) { for (Map.Entry<String, String> entry : super.getTableProperty().getProperties().entrySet()) { if (entry.getKey().startsWith(PropertyAnalyzer.PROPERTIES_MATERIALIZED_VIEW_SESSION_PREFIX)) { String varKey = entry.getKey().substring( PropertyAnalyzer.PROPERTIES_MATERIALIZED_VIEW_SESSION_PREFIX.length()); properties.put(varKey, entry.getValue()); } } } return properties; }
// Creating a materialized view whose defining query carries a SET_VAR hint must propagate
// the hinted session variable (query_timeout = 500) into the generated refresh task's
// properties and keep the hint text in the task definition.
@Test
public void testMaterializedViewWithHint() throws Exception {
    starRocksAssert.withDatabase("test").useDatabase("test")
        .withTable("CREATE TABLE test.tbl1\n" +
            "(\n" +
            " k1 date,\n" +
            " k2 int,\n" +
            " v1 int sum\n" +
            ")\n" +
            "PARTITION BY RANGE(k1)\n" +
            "(\n" +
            " PARTITION p1 values [('2022-02-01'),('2022-02-16')),\n" +
            " PARTITION p2 values [('2022-02-16'),('2022-03-01'))\n" +
            ")\n" +
            "DISTRIBUTED BY HASH(k2) BUCKETS 3\n" +
            "PROPERTIES('replication_num' = '1');")
        .withMaterializedView("create materialized view mv_with_hint\n" +
            "PARTITION BY k1\n" +
            "distributed by hash(k2) buckets 3\n" +
            "refresh async\n" +
            "as select /*+ SET_VAR(query_timeout = 500) */ k1, k2, sum(v1) as total from tbl1 group by k1, k2;");
    Database testDb = GlobalStateMgr.getCurrentState().getDb("test");
    MaterializedView mv = ((MaterializedView) testDb.getTable("mv_with_hint"));
    // The refresh task is registered under "mv-<mvId>".
    String mvTaskName = "mv-" + mv.getId();
    Task task = connectContext.getGlobalStateMgr().getTaskManager().getTask(mvTaskName);
    Assert.assertNotNull(task);
    // The hinted session variable must appear in the task properties...
    Map<String, String> taskProperties = task.getProperties();
    Assert.assertTrue(taskProperties.containsKey("query_timeout"));
    Assert.assertEquals("500", taskProperties.get("query_timeout"));
    Assert.assertEquals(Constants.TaskType.EVENT_TRIGGERED, task.getType());
    // ...and the hint text must survive in the task definition.
    Assert.assertTrue(task.getDefinition(), task.getDefinition().contains("query_timeout = 500"));
}
/**
 * Splits {@code text} into tokens, joining each "prefix word" (as classified by
 * {@link #isPrefixWord}) with the word that follows it (e.g. a prefix word plus the
 * next word form one token, and consecutive prefix words chain together). Characters
 * other than letters, digits, whitespace, hyphen, apostrophe and dot are treated as
 * separators.
 *
 * <p>Fix: the original mutated a captured {@code StringBuilder} inside
 * {@code Stream.flatMap}, a stateful side-effecting lambda that only works because the
 * stream happens to be sequential and ordered. Rewritten as a plain loop with identical
 * branch logic, removing the fragility without changing behavior.
 *
 * @param text input text; {@code null} or empty yields an empty list
 * @return the mutable list of tokens
 */
@Override
public List<String> tokenise(String text) {
    if (text == null || text.isEmpty()) {
        return new ArrayList<>();
    }
    // Normalize: non-word punctuation becomes a space, then split on whitespace runs.
    text = text.replaceAll("[^\\p{L}\\p{N}\\s\\-'.]", " ").trim();
    String[] parts = text.split("\\s+");

    List<String> tokens = new ArrayList<>(parts.length);
    // Holds a prefix word (or chain of prefix words) awaiting the word that follows it.
    StringBuilder pending = new StringBuilder();
    for (String part : parts) {
        if (isPrefixWord(part) && !pending.isEmpty()) {
            // Another prefix word: extend the pending token.
            pending.append(" ").append(part);
        } else if (!pending.isEmpty()) {
            // Ordinary word closes the pending prefix token.
            tokens.add(pending.append(" ").append(part).toString());
            pending.setLength(0);
        } else if (isPrefixWord(part)) {
            // First prefix word: start buffering until its follower arrives.
            pending.append(part);
        } else {
            tokens.add(part);
        }
    }
    if (!pending.isEmpty()) {
        // Trailing prefix word(s) with no follower still form a token.
        tokens.add(pending.toString());
    }
    return tokens;
}
@Description("Tokenise, when text only has one word, then return one word") @Test void tokenise_WhenTextOnlyHasOneWord_ThenReturnOneToken() { // When var result = textTokeniser.tokenise("Glasgow"); // Then assertThat(result).isNotEmpty().hasSize(1).contains("Glasgow"); }
/**
 * Deserializes the raw JSON payload into a {@code ConfigData<RuleData>} via Gson.
 * The anonymous {@code TypeToken} preserves the generic type through erasure.
 */
@Override
protected ConfigData<RuleData> fromJson(final JsonObject data) {
    return GsonUtils.getGson().fromJson(data, new TypeToken<ConfigData<RuleData>>() {
    }.getType());
}
// Round-trips a ConfigData<RuleData> through Gson (object -> JSON -> JsonObject) and
// verifies that fromJson reconstructs an equal object, i.e. the TypeToken keeps the
// generic payload type intact.
@Test
public void testFromJson() {
    ConfigData<RuleData> ruleDataConfigData = new ConfigData<>();
    RuleData ruleData = new RuleData();
    ruleDataConfigData.setData(Collections.singletonList(ruleData));
    JsonObject jsonObject = GsonUtils.getGson().fromJson(GsonUtils.getGson().toJson(ruleDataConfigData), JsonObject.class);
    assertThat(mockRuleDataRefresh.fromJson(jsonObject), is(ruleDataConfigData));
}
@Override public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc, boolean addFieldName, boolean addCr ) { String retval = ""; String fieldname = v.getName(); int length = v.getLength(); int precision = v.getPrecision(); if ( addFieldName ) { retval += fieldname + " "; } int type = v.getType(); switch ( type ) { case ValueMetaInterface.TYPE_TIMESTAMP: case ValueMetaInterface.TYPE_DATE: retval += "TIMESTAMP"; break; case ValueMetaInterface.TYPE_BOOLEAN: retval += "CHAR(1)"; break; case ValueMetaInterface.TYPE_NUMBER: case ValueMetaInterface.TYPE_INTEGER: case ValueMetaInterface.TYPE_BIGNUMBER: if ( fieldname.equalsIgnoreCase( tk ) || // Technical key fieldname.equalsIgnoreCase( pk ) // Primary key ) { if ( useAutoinc ) { retval += "GENERATED ALWAYS AS IDENTITY START WITH 1 INCREMENT BY 1"; } else { retval += "BIGINT PRIMARY KEY NOT NULL"; } } else { if ( precision == 0 ) { // integer numbers if ( length > 9 ) { retval += "BIGINT"; } else { if ( length == -1 || length > 4 ) { // If the length is undefined or greater than 4, use a standard INTEGER retval += "INTEGER"; } else { if ( length > 2 ) { retval += "SMALLINT"; } else { retval += "INTEGER1"; } } } } else { retval += "FLOAT8"; } } break; case ValueMetaInterface.TYPE_STRING: // Maybe use some default DB String length in case length<=0 if ( length > 0 ) { if ( length > 32000 ) { retval += "VARCHAR(32000)"; } else { retval += "VARCHAR(" + length + ")"; } } else { retval += "VARCHAR(9999)"; } break; default: retval += " UNKNOWN"; break; } if ( addCr ) { retval += Const.CR; } return retval; }
// Exhaustively exercises VectorWiseDatabaseMeta.getFieldDefinition across date/boolean,
// key-column (auto-increment and not), integer length buckets, float, string length
// buckets, and unknown types, including the addFieldName/addCr flags.
@Test
public void testGetFieldDefinition() {
    VectorWiseDatabaseMeta nativeMeta;
    nativeMeta = new VectorWiseDatabaseMeta();
    nativeMeta.setAccessType( DatabaseMeta.TYPE_ACCESS_NATIVE );
    // Date/timestamp and boolean mappings; first call also checks the field-name prefix.
    assertEquals( "FOO TIMESTAMP",
        nativeMeta.getFieldDefinition( new ValueMetaDate( "FOO" ), "", "", false, true, false ) );
    assertEquals( "TIMESTAMP",
        nativeMeta.getFieldDefinition( new ValueMetaTimestamp( "FOO" ), "", "", false, false, false ) );
    assertEquals( "CHAR(1)",
        nativeMeta.getFieldDefinition( new ValueMetaBoolean( "FOO" ), "", "", false, false, false ) );
    // Key columns: auto-increment identity vs. plain BIGINT primary key, matched by
    // either the technical key or the primary key name.
    assertEquals( "GENERATED ALWAYS AS IDENTITY START WITH 1 INCREMENT BY 1",
        nativeMeta.getFieldDefinition( new ValueMetaNumber( "FOO", 10, 0 ), "FOO", "", true, false, false ) );
    assertEquals( "GENERATED ALWAYS AS IDENTITY START WITH 1 INCREMENT BY 1",
        nativeMeta.getFieldDefinition( new ValueMetaInteger( "FOO", 10, 0 ), "", "FOO", true, false, false ) );
    assertEquals( "BIGINT PRIMARY KEY NOT NULL",
        nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", 8, 0 ), "FOO", "", false, false, false ) );
    assertEquals( "BIGINT PRIMARY KEY NOT NULL",
        nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", 8, 0 ), "", "FOO", false, false, false ) );
    // Integer tests
    assertEquals( "BIGINT",
        nativeMeta.getFieldDefinition( new ValueMetaInteger( "FOO", 10, 0 ), "", "", false, false, false ) );
    assertEquals( "INTEGER",
        nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", 6, 0 ), "", "", false, false, false ) );
    assertEquals( "SMALLINT",
        nativeMeta.getFieldDefinition( new ValueMetaNumber( "FOO", 3, 0 ), "", "", false, false, false ) );
    assertEquals( "INTEGER1",
        nativeMeta.getFieldDefinition( new ValueMetaInteger( "FOO", 2, 0 ), "", "", false, false, false ) );
    // Non-zero precision yields a float column.
    assertEquals( "FLOAT8",
        nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", 6, 3 ), "", "", false, false, false ) );
    // String Types
    assertEquals( "VARCHAR(15)",
        nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 15, 0 ), "", "", false, false, false ) );
    assertEquals( "VARCHAR(32000)",
        nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 32000, 0 ), "", "", false, false, false ) );
    // Lengths past the 32000 cap are clamped.
    assertEquals( "VARCHAR(32000)",
        nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 32050, 0 ), "", "", false, false, false ) );
    // Undefined, zero or negative lengths fall back to the 9999 default.
    assertEquals( "VARCHAR(9999)",
        nativeMeta.getFieldDefinition( new ValueMetaString( "FOO" ), "", "", true, false, false ) );
    assertEquals( "VARCHAR(9999)",
        nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 0, 0 ), "", "", false, false, false ) );
    assertEquals( "VARCHAR(9999)",
        nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", -34, 0 ), "", "", false, false, false ) );
    // Unknown
    assertEquals( " UNKNOWN",
        nativeMeta.getFieldDefinition( new ValueMetaInternetAddress( "FOO" ), "", "", false, false, false ) );
    // addCr appends the platform line separator.
    assertEquals( " UNKNOWN" + System.getProperty( "line.separator" ),
        nativeMeta.getFieldDefinition( new ValueMetaInternetAddress( "FOO" ), "", "", false, false, true ) );
}
/**
 * Exposes JVM runtime attributes as gauges: the runtime name, a formatted vendor/VM
 * description, and the JVM uptime. Returns an unmodifiable view of the map.
 */
@Override
public Map<String, Metric> getMetrics() {
    final Map<String, Metric> gauges = new HashMap<>();

    // Vendor string combines VM vendor, name, version and spec version.
    final Gauge<String> vendor = () -> String.format(Locale.US, "%s %s %s (%s)",
            runtime.getVmVendor(),
            runtime.getVmName(),
            runtime.getVmVersion(),
            runtime.getSpecVersion());

    gauges.put("name", (Gauge<String>) runtime::getName);
    gauges.put("vendor", vendor);
    gauges.put("uptime", (Gauge<Long>) runtime::getUptime);

    return Collections.unmodifiableMap(gauges);
}
// A default-constructed gauge set must pick up the platform runtime bean on its own:
// the "uptime" gauge then reports a positive JVM uptime.
@Test
public void autoDiscoversTheRuntimeBean() {
    final Gauge<Long> gauge = (Gauge<Long>) new JvmAttributeGaugeSet().getMetrics().get("uptime");
    assertThat(gauge.getValue()).isPositive();
}
/**
 * Returns a new {@link TreeSet} containing exactly those {@code points} whose
 * timestamps fall inside {@code subsetWindow}.
 *
 * <p>Rather than scanning the whole set, the search is seeded with the point nearest
 * the middle of the window and walks outward in both directions, stopping once it
 * steps past a window boundary.
 *
 * @param subsetWindow the time window to keep points from
 * @param points       a navigable set of points (presumably ordered by time — confirm
 *                     against Point's comparator)
 * @return a fresh TreeSet of the qualifying points; empty if none qualify
 */
public static <T> TreeSet<Point<T>> subset(TimeWindow subsetWindow, NavigableSet<Point<T>> points) {
    checkNotNull(subsetWindow);
    checkNotNull(points);

    // if the input collection is empty the output collection will be empty too
    if (points.isEmpty()) {
        return newTreeSet();
    }

    // Synthetic probe point at the window midpoint; the (0,0) lat/long is a placeholder.
    Point<T> midPoint = Point.<T>builder()
        .time(subsetWindow.instantWithin(.5))
        .latLong(0.0, 0.0)
        .build();

    /*
     * Find exactly one point in the actual Track, ideally this point will be in the middle of
     * the time window
     */
    Point<T> aPointInTrack = points.floor(midPoint);
    if (aPointInTrack == null) {
        aPointInTrack = points.ceiling(midPoint);
    }

    TreeSet<Point<T>> outputSubset = newTreeSet();

    //given a starting point....go up until you hit startTime.
    NavigableSet<Point<T>> headset = points.headSet(aPointInTrack, true);
    Iterator<Point<T>> iter = headset.descendingIterator();
    while (iter.hasNext()) {
        Point<T> pt = iter.next();
        if (subsetWindow.contains(pt.time())) {
            outputSubset.add(pt);
        }
        // Once we step earlier than the window start there is nothing left to collect.
        if (pt.time().isBefore(subsetWindow.start())) {
            break;
        }
    }

    //given a starting point....go down until you hit endTime.
    NavigableSet<Point<T>> tailSet = points.tailSet(aPointInTrack, true);
    iter = tailSet.iterator();
    while (iter.hasNext()) {
        Point<T> pt = iter.next();
        if (subsetWindow.contains(pt.time())) {
            outputSubset.add(pt);
        }
        // Once we step later than the window end there is nothing left to collect.
        if (pt.time().isAfter(subsetWindow.end())) {
            break;
        }
    }

    return outputSubset;
}
// A window ([EPOCH, EPOCH+100s]) that does not overlap the track's time span must
// yield an empty subset rather than null or an exception.
@Test
public void subset_returnsEmptyCollectionWhenNothingQualifies() {
    Track<NopHit> t1 = createTrackFromFile(getResourceFile("Track1.txt"));
    TimeWindow windowThatDoesNotOverlapWithTrack = TimeWindow.of(EPOCH, EPOCH.plusSeconds(100));
    TreeSet<Point<NopHit>> subset = subset(windowThatDoesNotOverlapWithTrack, t1.points());
    assertThat(subset, hasSize(0));
}