Dataset schema: two string columns per record.
focal_method: string (length 13 – 60.9k characters)
test_case: string (length 25 – 109k characters)
// Instantiates the runner-side windowed-value coder for the PCollection with the given id.
// The unchecked cast relies on instantiateRunnerWireCoder returning a WindowedValueCoder.
public static <T> WindowedValue.WindowedValueCoder<T> instantiateWindowedCoder(
    String collectionId, RunnerApi.Components components) {
  PipelineNode.PCollectionNode collectionNode =
      PipelineNode.pCollection(collectionId, components.getPcollectionsOrThrow(collectionId));
  try {
    return (WindowedValue.WindowedValueCoder<T>)
        WireCoders.<T>instantiateRunnerWireCoder(collectionNode, components);
  } catch (IOException e) {
    // Include the collection id so the failing PCollection is identifiable from the exception.
    throw new RuntimeException(
        "Could not instantiate windowed coder for PCollection " + collectionId, e);
  }
}
// Registers a PCollection with a known KV coder, then verifies that the windowed coder
// instantiated from the portable components wraps that same value coder.
@Test public void testInstantiateWindowedCoder() throws IOException { Coder<KV<Long, String>> expectedValueCoder = KvCoder.of(VarLongCoder.of(), StringUtf8Coder.of()); SdkComponents components = SdkComponents.create(); components.registerEnvironment(Environments.createDockerEnvironment("java")); String collectionId = components.registerPCollection( PCollection.createPrimitiveOutputInternal( Pipeline.create(), WindowingStrategy.globalDefault(), PCollection.IsBounded.BOUNDED, expectedValueCoder) .setName("name")); assertEquals( expectedValueCoder, WindowUtils.instantiateWindowedCoder(collectionId, components.toComponents()) .getValueCoder()); }
/**
 * Looks up the directory entry for the given path relative to the working directory.
 * Delegates to the 4-arg overload and converts a null result into an exception.
 *
 * @throws NoSuchFileException if an intermediate component of the path does not exist
 *     or is not a directory
 */
public DirectoryEntry lookUp( File workingDirectory, JimfsPath path, Set<? super LinkOption> options) throws IOException {
  checkNotNull(path);
  checkNotNull(options);
  DirectoryEntry result = lookUp(workingDirectory, path, options, 0);
  if (result == null) {
    // an intermediate file in the path did not exist or was not a directory
    throw new NoSuchFileException(path.toString());
  }
  return result;
}
// Verifies that relative lookups normalize "." components anywhere in the path
// (leading, interior, and trailing) without changing the resolved entry.
@Test public void testLookup_relative_withDotsInPath() throws IOException { assertExists(lookup("."), "/", "work"); assertExists(lookup("././."), "/", "work"); assertExists(lookup("./one/./././two/three"), "two", "three"); assertExists(lookup("./one/./././two/././three"), "two", "three"); assertExists(lookup("./one/./././two/three/././."), "two", "three"); }
// Updates the max-AM-share gauges (memory MB and vcores) from the given resource,
// and forwards custom resource values when custom resource metrics are configured.
public void setMaxAMShare(Resource resource) { maxAMShareMB.set(resource.getMemorySize()); maxAMShareVCores.set(resource.getVirtualCores()); if (customResources != null) { customResources.setMaxAMShare(resource); } }
// Sets a max AM share that includes a custom resource, then a share without it, and
// verifies the gauges track each update; the second update is expected to report 0
// for the custom resource since the new Resource omits it.
@Test public void testSetMaxAMShare() { FSQueueMetrics metrics = setupMetrics(RESOURCE_NAME); Resource res = Resource.newInstance(2048L, 4, ImmutableMap.of(RESOURCE_NAME, 20L)); metrics.setMaxAMShare(res); assertEquals(getErrorMessage("maxAMShareMB"), 2048L, metrics.getMaxAMShareMB()); assertEquals(getErrorMessage("maxAMShareVcores"), 4L, metrics.getMaxAMShareVCores()); assertEquals(getErrorMessage("maxAMShareMB"), 2048L, metrics.getMaxAMShare().getMemorySize()); assertEquals(getErrorMessage("maxAMShareVcores"), 4L, metrics.getMaxAMShare().getVirtualCores()); assertEquals(getErrorMessage( "maxAMShare for resource: " + RESOURCE_NAME), 20L, metrics.getMaxAMShare().getResourceValue(RESOURCE_NAME)); res = Resource.newInstance(2049L, 5); metrics.setMaxAMShare(res); assertEquals(getErrorMessage("maxAMShareMB"), 2049L, metrics.getMaxAMShareMB()); assertEquals(getErrorMessage("maxAMShareVcores"), 5L, metrics.getMaxAMShareVCores()); assertEquals(getErrorMessage("maxAMShareMB"), 2049L, metrics.getMaxAMShare().getMemorySize()); assertEquals(getErrorMessage("maxAMShareVcores"), 5L, metrics.getMaxAMShare().getVirtualCores()); assertEquals(getErrorMessage( "maxAMShare for resource: " + RESOURCE_NAME), 0, metrics.getMaxAMShare().getResourceValue(RESOURCE_NAME)); }
// Clones the template method body, locates the TARGETVALUE variable declarator, and
// rewrites its builder chain so the generated initializer uses the given field's name
// (argument 0) and a freshly built TargetValue instance (argument 2).
// Throws KiePMMLException when the template is missing the expected body/variable/initializer.
static MethodCallExpr getKiePMMLTargetValueVariableInitializer(final TargetValue targetValueField) { final MethodDeclaration methodDeclaration = TARGETVALUE_TEMPLATE.getMethodsByName(GETKIEPMMLTARGETVALUE).get(0).clone(); final BlockStmt targetValueBody = methodDeclaration.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration))); final VariableDeclarator variableDeclarator = getVariableDeclarator(targetValueBody, TARGETVALUE).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, TARGETVALUE, targetValueBody))); variableDeclarator.setName(targetValueField.getName()); final ObjectCreationExpr targetValueFieldInstance = TargetValueFactory.getTargetValueVariableInitializer(targetValueField); final MethodCallExpr toReturn = variableDeclarator.getInitializer() .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, TARGETVALUE, targetValueBody))) .asMethodCallExpr(); final MethodCallExpr builder = getChainedMethodCallExprFrom("builder", toReturn); final StringLiteralExpr nameExpr = new StringLiteralExpr(targetValueField.getName()); builder.setArgument(0, nameExpr); builder.setArgument(2, targetValueFieldInstance); return toReturn; }
// Builds the initializer for a random TargetValue, compares the generated AST against a
// file-based expected expression, and checks the generated code compiles with its imports.
@Test void getKiePMMLTargetValueVariableInitializer() throws IOException { TargetValue targetValue = convertToKieTargetValue(getRandomTargetValue()); MethodCallExpr retrieved = KiePMMLTargetValueFactory.getKiePMMLTargetValueVariableInitializer(targetValue); String text = getFileContent(TEST_01_SOURCE); Expression expected = JavaParserUtils.parseExpression(String.format(text, targetValue.getName(), targetValue.getValue(), targetValue.getDisplayValue(), targetValue.getPriorProbability(), targetValue.getDefaultValue())); assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue(); List<Class<?>> imports = Arrays.asList(Arrays.class, Collections.class, KiePMMLTargetValue.class, TargetValue.class); commonValidateCompilationWithImports(retrieved, imports); }
// Convenience overload: parses a resource configuration string with no cap on values
// (delegates with Long.MAX_VALUE as the maximum).
public static ConfigurableResource parseResourceConfigValue(String value) throws AllocationConfigurationException { return parseResourceConfigValue(value, Long.MAX_VALUE); }
// Parsing a new-style resource string with a negative vcores value must fail;
// expectNegativeValueOfResource presumably arms the expected-exception check — confirm in test class.
@Test public void testParseNewStyleResourceWithCustomResourceVcoresNegative() throws Exception { expectNegativeValueOfResource("vcores"); parseResourceConfigValue("vcores=-2,memory-mb=-5120,test1=4"); }
// Removes from an existing ACL every entry matched by the (validated) removal spec.
// Tracks per-scope mask entries separately: masks named in the spec are marked dirty,
// unmatched masks are kept aside so calculateMasks can recompute/retain them as needed.
// Copies default entries when required and validates the rebuilt ACL before returning it.
public static List<AclEntry> filterAclEntriesByAclSpec( List<AclEntry> existingAcl, List<AclEntry> inAclSpec) throws AclException { ValidatedAclSpec aclSpec = new ValidatedAclSpec(inAclSpec); ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES); EnumMap<AclEntryScope, AclEntry> providedMask = Maps.newEnumMap(AclEntryScope.class); EnumSet<AclEntryScope> maskDirty = EnumSet.noneOf(AclEntryScope.class); EnumSet<AclEntryScope> scopeDirty = EnumSet.noneOf(AclEntryScope.class); for (AclEntry existingEntry: existingAcl) { if (aclSpec.containsKey(existingEntry)) { scopeDirty.add(existingEntry.getScope()); if (existingEntry.getType() == MASK) { maskDirty.add(existingEntry.getScope()); } } else { if (existingEntry.getType() == MASK) { providedMask.put(existingEntry.getScope(), existingEntry); } else { aclBuilder.add(existingEntry); } } } copyDefaultsIfNeeded(aclBuilder); calculateMasks(aclBuilder, providedMask, maskDirty, scopeDirty); return buildAndValidateAcl(aclBuilder); }
// An oversized removal spec must be rejected with AclException regardless of the existing ACL.
@Test(expected=AclException.class) public void testFilterAclEntriesByAclSpecInputTooLarge() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); filterAclEntriesByAclSpec(existing, ACL_SPEC_TOO_LARGE); }
/**
 * Computes input splits for the job: lists input files, derives a goal split size from
 * total bytes / numSplits, and carves each splittable file into block-aligned splits
 * with host locality info. Non-splittable files become a single split; zero-length
 * files get an empty-host split. Directories either fail the job or are ignored,
 * depending on the recursive/ignore-subdirs settings.
 */
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
  StopWatch sw = new StopWatch().start();
  FileStatus[] stats = listStatus(job);

  // Save the number of input files for metrics/loadgen
  job.setLong(NUM_INPUT_FILES, stats.length);
  long totalSize = 0;

  // compute total size
  boolean ignoreDirs = !job.getBoolean(INPUT_DIR_RECURSIVE, false)
      && job.getBoolean(INPUT_DIR_NONRECURSIVE_IGNORE_SUBDIRS, false);
  List<FileStatus> files = new ArrayList<>(stats.length);
  for (FileStatus file: stats) {
    // check we have valid files
    if (file.isDirectory()) {
      if (!ignoreDirs) {
        throw new IOException("Not a file: "+ file.getPath());
      }
    } else {
      files.add(file);
      totalSize += file.getLen();
    }
  }

  // Guard against division by zero when numSplits == 0.
  long goalSize = totalSize / (numSplits == 0 ? 1 : numSplits);
  long minSize = Math.max(job.getLong(org.apache.hadoop.mapreduce.lib.input.
    FileInputFormat.SPLIT_MINSIZE, 1), minSplitSize);

  // generate splits
  ArrayList<FileSplit> splits = new ArrayList<FileSplit>(numSplits);
  NetworkTopology clusterMap = new NetworkTopology();
  for (FileStatus file: files) {
    Path path = file.getPath();
    long length = file.getLen();
    if (length != 0) {
      FileSystem fs = path.getFileSystem(job);
      BlockLocation[] blkLocations;
      // Reuse block locations when the listing already provided them.
      if (file instanceof LocatedFileStatus) {
        blkLocations = ((LocatedFileStatus) file).getBlockLocations();
      } else {
        blkLocations = fs.getFileBlockLocations(file, 0, length);
      }
      if (isSplitable(fs, path)) {
        long blockSize = file.getBlockSize();
        long splitSize = computeSplitSize(goalSize, minSize, blockSize);

        long bytesRemaining = length;
        // SPLIT_SLOP allows the final split to be slightly larger than splitSize
        // rather than creating a tiny trailing split.
        while (((double) bytesRemaining)/splitSize > SPLIT_SLOP) {
          String[][] splitHosts = getSplitHostsAndCachedHosts(blkLocations,
              length-bytesRemaining, splitSize, clusterMap);
          splits.add(makeSplit(path, length-bytesRemaining, splitSize,
              splitHosts[0], splitHosts[1]));
          bytesRemaining -= splitSize;
        }

        if (bytesRemaining != 0) {
          String[][] splitHosts = getSplitHostsAndCachedHosts(blkLocations, length
              - bytesRemaining, bytesRemaining, clusterMap);
          splits.add(makeSplit(path, length - bytesRemaining, bytesRemaining,
              splitHosts[0], splitHosts[1]));
        }
      } else {
        if (LOG.isDebugEnabled()) {
          // Log only if the file is big enough to be splitted
          if (length > Math.min(file.getBlockSize(), minSize)) {
            LOG.debug("File is not splittable so no parallelization "
                + "is possible: " + file.getPath());
          }
        }
        String[][] splitHosts = getSplitHostsAndCachedHosts(blkLocations,0,length,clusterMap);
        splits.add(makeSplit(path, 0, length, splitHosts[0], splitHosts[1]));
      }
    } else {
      //Create empty hosts array for zero length files
      splits.add(makeSplit(path, 0, length, new String[0]));
    }
  }
  sw.stop();
  if (LOG.isDebugEnabled()) {
    LOG.debug("Total # of splits generated by getSplits: " + splits.size()
        + ", TimeTaken: " + sw.now(TimeUnit.MILLISECONDS));
  }
  return splits.toArray(new FileSplit[splits.size()]);
}
// Verifies that split computation uses a single listLocatedStatus call on the mock
// filesystem (rather than separate listStatus + getFileBlockLocations calls) and
// produces the expected two splits.
@Test public void testListLocatedStatus() throws Exception { Configuration conf = getConfiguration(); conf.setBoolean("fs.test.impl.disable.cache", false); conf.setInt(FileInputFormat.LIST_STATUS_NUM_THREADS, numThreads); conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR, "test:///a1/a2"); MockFileSystem mockFs = (MockFileSystem) new Path("test:///").getFileSystem(conf); Assert.assertEquals("listLocatedStatus already called", 0, mockFs.numListLocatedStatusCalls); JobConf job = new JobConf(conf); TextInputFormat fileInputFormat = new TextInputFormat(); fileInputFormat.configure(job); InputSplit[] splits = fileInputFormat.getSplits(job, 1); Assert.assertEquals("Input splits are not correct", 2, splits.length); Assert.assertEquals("listLocatedStatuss calls", 1, mockFs.numListLocatedStatusCalls); FileSystem.closeAll(); }
// Resolves compatibility against a previous snapshot: anything that is not an
// AvroSerializerSnapshot is incompatible; otherwise compare the old schema to ours.
@Override public TypeSerializerSchemaCompatibility<T> resolveSchemaCompatibility( TypeSerializerSnapshot<T> oldSerializerSnapshot) { if (!(oldSerializerSnapshot instanceof AvroSerializerSnapshot)) { return TypeSerializerSchemaCompatibility.incompatible(); } AvroSerializerSnapshot<?> oldAvroSerializerSnapshot = (AvroSerializerSnapshot<?>) oldSerializerSnapshot; return resolveSchemaCompatibility(oldAvroSerializerSnapshot.schema, schema); }
// Adding a required field to the writer schema must make the serializers incompatible.
@Test void addingARequiredMakesSerializersIncompatible() { assertThat( AvroSerializerSnapshot.resolveSchemaCompatibility( FIRST_REQUIRED_LAST_OPTIONAL, BOTH_REQUIRED)) .is(isIncompatible()); }
// Convenience overload: resolves the inter-node listener URL using the class-level LOGGER.
public URL getInterNodeListener( final Function<URL, Integer> portResolver ) { return getInterNodeListener(portResolver, LOGGER); }
// A configured advertised listener with a trailing slash should be sanitized to the
// bare URL, and the resolved listener should be logged exactly once.
@Test
public void shouldSanitizeInterNodeListenerWithTrailingSlash() {
  // Given:
  final URL expected = url("https://example.com:12345");
  final URL configured = url("https://example.com:12345/");
  final KsqlRestConfig config = new KsqlRestConfig(ImmutableMap.<String, Object>builder()
      .putAll(MIN_VALID_CONFIGS)
      .put(ADVERTISED_LISTENER_CONFIG, configured.toString())
      .build()
  );
  // When:
  final URL actual = config.getInterNodeListener(portResolver, logger);
  // Then:
  assertThat(actual, is(expected));
  verifyLogsInterNodeListener(expected, QUOTED_INTER_NODE_LISTENER_CONFIG);
  verifyNoMoreInteractions(logger);
}
// Returns the cached Rule for the given uuid after lazy initialization;
// fails fast (IllegalArgumentException via checkArgument) when the uuid is unknown.
@Override public Rule getByUuid(String uuid) { ensureInitialized(); Rule rule = rulesByUuid.get(uuid); checkArgument(rule != null, "Can not find rule for uuid %s. This rule does not exist in DB", uuid); return rule; }
// Happy path: a uuid present in the DB fixture resolves to the expected rule.
@Test public void getByUuid_returns_Rule_if_it_exists_in_DB() { Rule rule = underTest.getByUuid(AB_RULE.getUuid()); assertIsABRule(rule); }
// Fixed value: this implementation reports no connection limit information (always 0).
@Override public int getMaxConnections() { return 0; }
// Confirms the metadata reports the fixed max-connections value of 0.
@Test void assertGetMaxConnections() { assertThat(metaData.getMaxConnections(), is(0)); }
// Reads parameterValueLength bytes from the payload and decodes them as a float4 array.
// The second decoder argument flags text format when the first byte is not '{'.
@Override public Object read(final PostgreSQLPacketPayload payload, final int parameterValueLength) { byte[] bytes = new byte[parameterValueLength]; payload.getByteBuf().readBytes(bytes); return ARRAY_PARAMETER_DECODER.decodeFloat4Array(bytes, '{' != bytes[0]); }
// Writes a text-format float array ("{...}") preceded by a length int (consumed before the
// read), then asserts the decoded float[] values and that the buffer was fully consumed.
@Test void assertRead() { String parameterValue = "{\"11.1\",\"12.1\"}"; int expectedLength = 4 + parameterValue.length(); ByteBuf byteBuf = ByteBufTestUtils.createByteBuf(expectedLength); byteBuf.writeInt(parameterValue.length()); byteBuf.writeCharSequence(parameterValue, StandardCharsets.ISO_8859_1); byteBuf.readInt(); PostgreSQLPacketPayload payload = new PostgreSQLPacketPayload(byteBuf, StandardCharsets.UTF_8); Object actual = newInstance().read(payload, parameterValue.length()); assertThat(actual, is(new float[]{11.1F, 12.1F})); assertThat(byteBuf.readerIndex(), is(expectedLength)); }
// Delegates to the wrapped consumer, then runs tracing instrumentation on the
// received message before returning it.
@Override public Message receive() { Message message = delegate.receive(); handleReceive(message); return message; }
// Receiving a message with injected B3 headers and an (empty) baggage property must
// leave the baggage property on the message and record a consumer span.
@Test void receive_retains_baggage_properties() throws Exception { ActiveMQTextMessage message = new ActiveMQTextMessage(); B3Propagation.B3_STRING.injector(SETTER).inject(parent, message); message.setStringProperty(BAGGAGE_FIELD_KEY, ""); receive(message); assertThat(message.getProperties()) .containsEntry(BAGGAGE_FIELD_KEY, ""); testSpanHandler.takeRemoteSpan(CONSUMER); }
// Renders a Collection as "<SimpleName>{<size> entries}" and a Map as "Map{<size> entries}";
// any other object (including null) falls through to Objects.toString.
public static String summarizeCollection(Object obj) {
  if (obj instanceof Collection) {
    Collection<?> collection = (Collection<?>) obj;
    return collection.getClass().getSimpleName() + "{" + collection.size() + " entries}";
  }
  if (obj instanceof Map) {
    Map<?, ?> map = (Map<?, ?>) obj;
    return "Map{" + map.size() + " entries}";
  }
  return Objects.toString(obj);
}
// Covers all branches: a Set (Collection), a Map, an arbitrary object, and null.
@Test public void summarizeCollection() { Set<Integer> set = Sets.newHashSet(1, 2); assertEquals("HashSet{2 entries}", CommonUtils.summarizeCollection(set)); Map<Integer, Long> map = ImmutableMap.of(0, 3L, 1, 1L); assertEquals("Map{2 entries}", CommonUtils.summarizeCollection(map)); TestClassA a = new TestClassA(); assertEquals(a.toString(), CommonUtils.summarizeCollection(a)); assertEquals("null", CommonUtils.summarizeCollection(null)); }
// Returns the (memoized) DoFnSignature for the given DoFn class, parsing it on first use.
public static <FnT extends DoFn<?, ?>> DoFnSignature getSignature(Class<FnT> fn) { return signatureCache.computeIfAbsent(fn, DoFnSignatures::parseSignature); }
// A @ProcessElement method taking only ProcessContext should yield exactly one
// extra parameter, recognized as a ProcessContextParameter.
@Test public void testBasicDoFnProcessContext() throws Exception { DoFnSignature sig = DoFnSignatures.getSignature( new DoFn<String, String>() { @ProcessElement public void process(ProcessContext c) {} }.getClass()); assertThat(sig.processElement().extraParameters().size(), equalTo(1)); assertThat( sig.processElement().extraParameters().get(0), instanceOf(ProcessContextParameter.class)); }
// Accessor for the template data map. NOTE(review): returns the internal map directly;
// callers can mutate it — confirm whether that is intended.
public Map<String, Object> getTemplateData() { return templateData; }
// Renders a .docx template with the editor's data and checks that template tags in the
// output were replaced with the expected values.
@Test public void givenTemplateFile_whenTemplateDataRendered_ThenTemplateTagsReplacedWithDataInOutput() throws IOException { inStream = WordDocumentFromTemplate.class.getClassLoader() .getResourceAsStream("template.docx"); XWPFTemplate template = XWPFTemplate.compile(inStream, builder.build()) .render(docEditor.getTemplateData()); template.writeAndClose(new FileOutputStream(new File(outputPath.toString()))); assertTrue(searchWordInOutPut("John")); assertTrue(searchWordInOutPut("Florida,USA")); assertTrue(searchWordInOutPut("This is sample plugin custom-plugin")); }
// Runs the feature: delegates to a custom processor when present; otherwise runs the
// before-feature hook, processes all scenarios unless the hook aborted, and then runs
// afterFeature(). NOTE(review): afterFeature() runs even on an aborted hook —
// presumably intentional teardown; confirm.
@Override public void run() { if (processor != null) { processor.execute(); } else { if (!beforeHook()) { logger.info("before-feature hook returned [false], aborting: {}", this); } else { scenarios.forEachRemaining(this::processScenario); } afterFeature(); } }
// Smoke test: running extract.feature must complete without failures.
@Test void testExtract() { run("extract.feature"); }
// Constructs a ligature mapping; note the ligature string is stored in the `liga` field.
public Ligature(String successor, String ligature) { this.successor = successor; this.liga = ligature; }
// Verifies constructor arguments are exposed via getSuccessor/getLigature
// (accessors not visible in this snippet — presumably defined on the class).
@Test void testLigature() { Ligature ligature = new Ligature("successor", "ligature"); assertEquals("successor", ligature.getSuccessor()); assertEquals("ligature", ligature.getLigature()); }
/**
 * Extracts entries from a zip file into toDir. When prefix is non-null, only entries
 * under that prefix are extracted, with the prefix stripped from the output path.
 * Security: each entry's canonical destination is checked against the destination
 * root's canonical path; entries resolving outside it (path traversal) are skipped
 * and logged rather than extracted.
 */
public static void extractZipFile(ZipFile zipFile, File toDir, String prefix) throws IOException {
  ensureDirectory(toDir);
  final String base = toDir.getCanonicalPath();
  Enumeration<? extends ZipEntry> entries = zipFile.entries();
  while (entries.hasMoreElements()) {
    ZipEntry entry = entries.nextElement();
    if (!entry.isDirectory()) {
      if (prefix != null && !entry.getName().startsWith(prefix)) {
        //No need to extract it, it is not what we are looking for.
        continue;
      }
      String entryName;
      if (prefix != null) {
        entryName = entry.getName().substring(prefix.length());
        LOG.debug("Extracting {} shortened to {} into {}", entry.getName(), entryName, toDir);
      } else {
        entryName = entry.getName();
      }
      File file = new File(toDir, entryName);
      String found = file.getCanonicalPath();
      if (!found.startsWith(base)) {
        // Path-traversal guard: refuse entries escaping the extraction root.
        LOG.error("Invalid location {} is outside of {}", found, base);
        continue;
      }
      try (InputStream in = zipFile.getInputStream(entry)) {
        ensureDirectory(file.getParentFile());
        try (OutputStream out = new FileOutputStream(file)) {
          IOUtils.copy(in, out);
        }
      }
    }
  }
}
// Extracting a crafted jar containing "resources/../evil.txt" must write good.txt
// inside the destination but never materialize evil.txt inside or outside it.
@Test public void testExtractZipFileDisallowsPathTraversalWhenUsingPrefix() throws Exception { try (TmpPath path = new TmpPath()) { Path testRoot = Paths.get(path.getPath()); Path destParent = testRoot.resolve("outer"); Path extractionDest = destParent.resolve("resources"); Files.createDirectories(extractionDest); /* * Contains resources/good.txt and resources/../evil.txt. Evil.txt should not be extracted as it would end * up outside the extraction dest. */ try (ZipFile zip = new ZipFile(Paths.get("src/test/resources/evil-path-traversal-resources.jar").toFile())) { ServerUtils.extractZipFile(zip, extractionDest.toFile(), "resources"); } assertThat(Files.exists(extractionDest.resolve("good.txt")), is(true)); assertThat(Files.exists(extractionDest.resolve("evil.txt")), is(false)); assertThat(Files.exists(destParent.resolve("evil.txt")), is(false)); } }
// Parses a query string into an ordered name->value map. Per-parameter semantics:
// "a=1" -> "1"; "a=" (equals present, empty value) -> ""; bare "a" (no equals) -> null.
public static Map<String, String> initQueryParams(final String query) { final Map<String, String> queryParams = new LinkedHashMap<>(); if (StringUtils.hasLength(query)) { final Matcher matcher = PATTERN.matcher(query); while (matcher.find()) { String name = decodeQueryParam(matcher.group(1)); String eq = matcher.group(2); String value = matcher.group(3); value = StringUtils.hasLength(value) ? decodeQueryParam(value) : (StringUtils.hasLength(eq) ? "" : null); queryParams.put(name, value); } } return queryParams; }
// Exercises value, empty-value, and valueless parameter forms, plus the empty query string.
@Test public void testInitQueryParams() { Map<String, String> params = HttpParamConverter.initQueryParams("a=1&b=2&c=&d"); assertThat(params, allOf(IsMapContaining.hasEntry("a", "1"), IsMapContaining.hasEntry("b", "2"), IsMapContaining.hasEntry("c", ""), IsMapContaining.hasEntry("d", null))); params = HttpParamConverter.initQueryParams(""); assertEquals(0, params.size()); }
// Creates a native-SQL query spec backed by DefaultRecord rows; the final flag is false here
// (its meaning is defined by NativeQuerySpecImpl — not visible in this snippet).
@Override public NativeQuerySpec<Record> select(String sql, Object... args) { return new NativeQuerySpecImpl<>(this, sql, args, DefaultRecord::new, false); }
// Inserts a row and runs a paged "select distinct name" native query, expecting at
// least the single distinct value to be emitted before completion.
@Test public void testDistinct() { DefaultQueryHelper helper = new DefaultQueryHelper(database); database.dml() .insert("s_test") .value("id", "distinct-test") .value("name", "testDistinct") .value("testName", "distinct") .value("age", 33) .execute() .sync(); helper.select("select distinct name from s_test ", 0) .fetchPaged(0, 10) .doOnNext(v -> System.out.println(JSON.toJSONString(v, SerializerFeature.PrettyFormat))) .as(StepVerifier::create) .expectNextCount(1) .verifyComplete(); }
// Pure delegation to the wrapped plugin manager.
@Override public boolean isNotDevelopment() { return original.isNotDevelopment(); }
// The wrapper must report the same development-mode flag as the underlying manager.
@Test public void isNotDevelopment() { assertEquals(pluginManager.isNotDevelopment(), wrappedPluginManager.isNotDevelopment()); }
// Map.merge-style operation for an open-addressing (linear probing) object->int map with
// a reserved missingValue sentinel. Probes from the key's hash until the key or an empty
// slot (missingValue) is found; inserts `value` when absent, otherwise applies the
// remapping function. A result equal to missingValue removes the entry, after which
// compactChain re-packs the probe chain so later lookups stay correct.
// Rejects null keys/functions and a value equal to missingValue (it is the sentinel).
public int merge(final K key, final int value, final IntIntFunction remappingFunction) { requireNonNull(key); requireNonNull(remappingFunction); final int missingValue = this.missingValue; if (missingValue == value) { throw new IllegalArgumentException("cannot accept missingValue"); } final K[] keys = this.keys; final int[] values = this.values; @DoNotSub final int mask = values.length - 1; @DoNotSub int index = Hashing.hash(key, mask); int oldValue; while (missingValue != (oldValue = values[index])) { if (Objects.equals(keys[index], key)) { break; } index = ++index & mask; } final int newValue = missingValue == oldValue ? value : remappingFunction.apply(oldValue, value); if (missingValue != newValue) { keys[index] = key; values[index] = newValue; if (++size > resizeThreshold) { increaseCapacity(); } } else { keys[index] = null; values[index] = missingValue; --size; compactChain(index); } return newValue; }
// merge must reject a null key with NullPointerException before touching the table.
@Test void mergeThrowsNullPointerExceptionIfKeyIsNull() { assertThrowsExactly(NullPointerException.class, () -> objectToIntMap.merge(null, 42, (v1, v2) -> 7)); }
// Determines the connector type from a config map. Returns UNKNOWN for a null config,
// a missing connector.class entry, or when the connector cannot be instantiated
// (the ConnectException is logged, not propagated).
public ConnectorType connectorType(Map<String, String> connConfig) { if (connConfig == null) { return ConnectorType.UNKNOWN; } String connClass = connConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connClass == null) { return ConnectorType.UNKNOWN; } try { return ConnectorType.from(getConnector(connClass).getClass()); } catch (ConnectException e) { log.warn("Unable to retrieve connector type", e); return ConnectorType.UNKNOWN; } }
// A null config map must map to ConnectorType.UNKNOWN.
@Test public void testGetConnectorTypeWithNullConfig() { AbstractHerder herder = testHerder(); assertEquals(ConnectorType.UNKNOWN, herder.connectorType(null)); }
// UDF entry point: serializes any input value to its JSON string representation.
@Udf public <T> String toJsonString(@UdfParameter final T input) { return toJson(input); }
// An integer input must serialize to its bare JSON number form.
@Test
public void shouldSerializeInt() {
  // When:
  final String result = udf.toJsonString(123);
  // Then:
  assertEquals("123", result);
}
// Single-argument form: rounds n with zero decimal places (delegates to the 2-arg overload).
public FEELFnResult<BigDecimal> invoke(@ParameterName( "n" ) BigDecimal n) { return invoke(n, BigDecimal.ZERO); }
// Half-down rounding: 10.24 -> 10 with default scale, 10.2 with one decimal place.
@Test void invokeRoundingDown() { FunctionTestUtil.assertResult(roundHalfDownFunction.invoke(BigDecimal.valueOf(10.24)), BigDecimal.valueOf(10)); FunctionTestUtil.assertResult(roundHalfDownFunction.invoke(BigDecimal.valueOf(10.24), BigDecimal.ONE), BigDecimal.valueOf(10.2)); }
/**
 * Parses a timestamp string into a ZonedDateTime. Starts from a default date-time in the
 * parsed zone (or the supplied zoneId if the text carries none) and overlays every
 * ChronoField the parse produced. Throws KsqlException for fields the resolved value
 * cannot carry, and for a Feb-29 day-of-year when no year field is available.
 */
@VisibleForTesting
ZonedDateTime parseZoned(final String text, final ZoneId zoneId) {
  final TemporalAccessor parsed = formatter.parse(text);
  final ZoneId parsedZone = parsed.query(TemporalQueries.zone());
  ZonedDateTime resolved = DEFAULT_ZONED_DATE_TIME.apply(
      ObjectUtils.defaultIfNull(parsedZone, zoneId));
  for (final TemporalField override : ChronoField.values()) {
    if (parsed.isSupported(override)) {
      if (!resolved.isSupported(override)) {
        throw new KsqlException(
            "Unsupported temporal field in timestamp: " + text + " (" + override + ")");
      }
      final long value = parsed.getLong(override);
      if (override == ChronoField.DAY_OF_YEAR && value == LEAP_DAY_OF_THE_YEAR) {
        if (!parsed.isSupported(ChronoField.YEAR)) {
          throw new KsqlException("Leap day cannot be parsed without supplying the year field");
        }
        // eagerly override year, to avoid mismatch with epoch year, which is not a leap year
        resolved = resolved.withYear(parsed.get(ChronoField.YEAR));
      }
      resolved = resolved.with(override, value);
    }
  }
  return resolved;
}
// A full local timestamp with a nanosecond field must resolve to the expected instant.
@Test
public void shouldParseFullLocalDateWithNanoSeconds() {
  // Given
  final String format = "yyyy-MM-dd HH:mm:ss:nnnnnnnnn";
  // Note that there is an issue when resolving nanoseconds that occur below the
  // micro-second granularity. Since this is a private API (the only one exposed
  // converts it to millis) we can safely ignore it.
  final String timestamp = "1605-11-05 10:10:10:001000000";
  // When
  final ZonedDateTime ts = new StringToTimestampParser(format).parseZoned(timestamp, ZID);
  // Then
  assertThat(ts, is(sameInstant(FIFTH_OF_NOVEMBER
      .withHour(10)
      .withMinute(10)
      .withSecond(10)
      .withNano(1_000_000))));
}
/**
 * Deserializes JSON bytes into the target type: reads the payload (via the schema-registry
 * aware path or plain Jackson), coerces the tree to the configured schema, and casts to the
 * target type. Null input returns null. Any failure is rethrown as SerializationException
 * with topic context; JsonParseException locations are cleared first so raw payload data is
 * not leaked into logs.
 */
@Override
public T deserialize(final String topic, final byte[] bytes) {
  try {
    if (bytes == null) {
      return null;
    }
    // don't use the JsonSchemaConverter to read this data because
    // we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS,
    // which is not currently available in the standard converters
    final JsonNode value = isJsonSchema
        ? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class)
        : MAPPER.readTree(bytes);
    final Object coerced = enforceFieldType(
        "$",
        new JsonValueContext(value, schema)
    );
    if (LOG.isTraceEnabled()) {
      LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced);
    }
    return SerdeUtils.castToTargetType(coerced, targetType);
  } catch (final Exception e) {
    // Clear location in order to avoid logging data, for security reasons
    if (e instanceof JsonParseException) {
      ((JsonParseException) e).clearLocation();
    }
    throw new SerializationException(
        "Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e);
  }
}
// Deserializing a string array against an INT32 array schema must fail with a
// SerializationException whose cause explains the coercion failure.
@Test
public void shouldThrowIfCanNotCoerceArrayElement() {
  // Given:
  final KsqlJsonDeserializer<List> deserializer = givenDeserializerForSchema(
      SchemaBuilder
          .array(Schema.OPTIONAL_INT32_SCHEMA)
          .build(),
      List.class
  );
  final List<String> expected = ImmutableList.of("not", "numbers");
  final byte[] bytes = serializeJson(expected);
  // When:
  final Exception e = assertThrows(
      SerializationException.class,
      () -> deserializer.deserialize(SOME_TOPIC, bytes)
  );
  // Then:
  assertThat(e.getCause(), (hasMessage(startsWith(
      "Can't coerce string to type. targetType: INTEGER"))));
}
// Returns a copy of this format whose optional decimal groups are `repetitions` groups
// of `decimals` digits each; all other formatting settings are carried over unchanged.
public MonetaryFormat repeatOptionalDecimals(int decimals, int repetitions) { checkArgument(repetitions >= 0, () -> "repetitions cannot be negative: " + repetitions); List<Integer> decimalGroups = new ArrayList<>(repetitions); for (int i = 0; i < repetitions; i++) decimalGroups.add(decimals); return new MonetaryFormat(negativeSign, positiveSign, zeroDigit, decimalMark, minDecimals, decimalGroups, shift, roundingMode, codes, codeSeparator, codePrefixed); }
// Checks formatting across group counts: extra optional groups render small values,
// fewer/zero groups truncate them to the minimum decimals.
@Test public void repeatOptionalDecimals() { assertEquals("0.00000001", formatRepeat(SATOSHI, 2, 4)); assertEquals("0.00000010", formatRepeat(SATOSHI.multiply(10), 2, 4)); assertEquals("0.01", formatRepeat(CENT, 2, 4)); assertEquals("0.10", formatRepeat(CENT.multiply(10), 2, 4)); assertEquals("0", formatRepeat(SATOSHI, 2, 2)); assertEquals("0", formatRepeat(SATOSHI.multiply(10), 2, 2)); assertEquals("0.01", formatRepeat(CENT, 2, 2)); assertEquals("0.10", formatRepeat(CENT.multiply(10), 2, 2)); assertEquals("0", formatRepeat(CENT, 2, 0)); assertEquals("0", formatRepeat(CENT.multiply(10), 2, 0)); }
/**
 * Deletes API error logs older than exceedDay days, in batches of deleteLimit rows,
 * and returns the total number of rows deleted. The loop is bounded by Short.MAX_VALUE
 * iterations as a safety cap.
 */
@Override
@SuppressWarnings("DuplicatedCode")
public Integer cleanErrorLog(Integer exceedDay, Integer deleteLimit) {
    int count = 0;
    LocalDateTime expireDate = LocalDateTime.now().minusDays(exceedDay);
    // Delete in batches until no rows match the expiry condition.
    for (int i = 0; i < Short.MAX_VALUE; i++) {
        int deleteCount = apiErrorLogMapper.deleteByCreateTimeLt(expireDate, deleteLimit);
        count += deleteCount;
        // Fewer rows than the batch limit were deleted, so there is nothing left.
        if (deleteCount < deleteLimit) {
            break;
        }
    }
    return count;
}
// Inserts one expired (3 days old) and one fresh (1 day old) error log, cleans with a
// 2-day threshold, and verifies only the fresh row survives.
// NOTE(review): method name says "JobLog" but it exercises cleanErrorLog — confirm naming.
@Test
public void testCleanJobLog() {
    // mock data
    ApiErrorLogDO log01 = randomPojo(ApiErrorLogDO.class, o -> o.setCreateTime(addTime(Duration.ofDays(-3))));
    apiErrorLogMapper.insert(log01);
    ApiErrorLogDO log02 = randomPojo(ApiErrorLogDO.class, o -> o.setCreateTime(addTime(Duration.ofDays(-1))));
    apiErrorLogMapper.insert(log02);
    // prepare parameters
    Integer exceedDay = 2;
    Integer deleteLimit = 1;
    // invoke
    Integer count = apiErrorLogService.cleanErrorLog(exceedDay, deleteLimit);
    // assert
    assertEquals(1, count);
    List<ApiErrorLogDO> logs = apiErrorLogMapper.selectList();
    assertEquals(1, logs.size());
    assertEquals(log02, logs.get(0));
}
// Emits the locking clause only when this dialect/version supports SKIP LOCKED.
@Override
public String selectForUpdateSkipLocked() {
    if (supportsSelectForUpdateSkipLocked) {
        return " FOR UPDATE SKIP LOCKED";
    }
    return "";
}
// A non-MariaDB product string must yield an empty locking clause.
@Test void otherDBDoesNotSupportSelectForUpdateSkipLocked() { assertThat(new MariaDbDialect("MySQL", "10.6").selectForUpdateSkipLocked()).isEmpty(); }
// Reads the remote address from the current gRPC context; null when no value is set.
public static InetAddress getRemoteAddress() { return REMOTE_ADDRESS_CONTEXT_KEY.get(); }
// Missing remote address -> INTERNAL status; a mocked address must round-trip
// through the request attributes as its string form.
@Test void getRemoteAddress() { when(clientConnectionManager.getRemoteAddress(any())) .thenReturn(Optional.empty()); GrpcTestUtils.assertStatusException(Status.INTERNAL, this::getRequestAttributes); final String remoteAddressString = "6.7.8.9"; when(clientConnectionManager.getRemoteAddress(any())) .thenReturn(Optional.of(InetAddresses.forString(remoteAddressString))); assertEquals(remoteAddressString, getRequestAttributes().getRemoteAddress()); }
/**
 * Persists one undo-log record for the given global transaction (xid) and branch.
 * The serialized undo image is streamed as a BLOB parameter; the statement is closed
 * via try-with-resources.
 *
 * @throws SQLException on any failure; non-SQL exceptions are wrapped
 */
private void insertUndoLog(String xid, long branchId, String rollbackCtx, byte[] undoLogContent,
                           State state, Connection conn) throws SQLException {
    try (PreparedStatement pst = conn.prepareStatement(INSERT_UNDO_LOG_SQL)) {
        pst.setLong(1, branchId);
        pst.setString(2, xid);
        pst.setString(3, rollbackCtx);
        pst.setObject(4, new ByteArrayInputStream(undoLogContent));
        pst.setInt(5, state.getValue());
        pst.executeUpdate();
    } catch (SQLException e) {
        // Database failures propagate unchanged.
        throw e;
    } catch (Exception e) {
        // Wrap anything else so the declared contract (SQLException) holds,
        // without reassigning the catch parameter and casting.
        throw new SQLException(e);
    }
}
// Smoke-tests the public undo-log entry points (global-finished insert, normal insert,
// time-based delete); each must complete without throwing.
@Test public void testInsertUndoLog() throws SQLException { Assertions.assertDoesNotThrow(() -> undoLogManager.insertUndoLogWithGlobalFinished("xid", 1L, new JacksonUndoLogParser(), dataSource.getConnection())); Assertions.assertDoesNotThrow(() -> undoLogManager.insertUndoLogWithNormal("xid", 1L, "", new byte[]{}, dataSource.getConnection())); Assertions.assertDoesNotThrow(() -> undoLogManager.deleteUndoLogByLogCreated(new Date(), 3000, connectionProxy)); }
// Returns a fresh seekable channel view over this Redis-backed binary stream.
@Override public SeekableByteChannel getChannel() { return new RedissonByteChannel(); }
// Truncate semantics: shrinking cuts the stream to the new size, truncating to a size
// larger than current is a no-op, and truncating to 0 empties the channel.
@Test public void testChannelTruncate() throws IOException { RBinaryStream stream = redisson.getBinaryStream("test"); SeekableByteChannel c = stream.getChannel(); c.write(ByteBuffer.wrap(new byte[]{1, 2, 3, 4, 5, 6, 7})); assertThat(c.size()).isEqualTo(7); c.truncate(3); c.position(0); c.truncate(10); ByteBuffer b = ByteBuffer.allocate(3); c.read(b); byte[] bb = new byte[3]; b.flip(); b.get(bb); assertThat(c.size()).isEqualTo(3); assertThat(bb).isEqualTo(new byte[]{1, 2, 3}); c.truncate(0); assertThat(c.size()).isEqualTo(0); }
/**
 * Splits the known equality sets into three groups relative to {@code variableScope}:
 * equalities entirely inside the scope, entirely in its complement, and equalities
 * that straddle the boundary (connecting the two sides).
 *
 * @param variableScope predicate selecting which variables are "in scope"
 * @return the three partitions of equality conjuncts
 */
public EqualityPartition generateEqualitiesPartitionedBy(Predicate<VariableReferenceExpression> variableScope) {
    ImmutableSet.Builder<RowExpression> scopeEqualities = ImmutableSet.builder();
    ImmutableSet.Builder<RowExpression> scopeComplementEqualities = ImmutableSet.builder();
    ImmutableSet.Builder<RowExpression> scopeStraddlingEqualities = ImmutableSet.builder();
    for (Collection<RowExpression> equalitySet : equalitySets.asMap().values()) {
        Set<RowExpression> scopeExpressions = new LinkedHashSet<>();
        Set<RowExpression> scopeComplementExpressions = new LinkedHashSet<>();
        Set<RowExpression> scopeStraddlingExpressions = new LinkedHashSet<>();
        // Try to push each non-derived expression into one side of the scope;
        // an expression rewritable into neither side straddles the boundary.
        for (RowExpression expression : filter(equalitySet, not(derivedExpressions::contains))) {
            RowExpression scopeRewritten = rewriteExpression(expression, variableScope, false);
            if (scopeRewritten != null) {
                scopeExpressions.add(scopeRewritten);
            }
            RowExpression scopeComplementRewritten = rewriteExpression(expression, not(variableScope), false);
            if (scopeComplementRewritten != null) {
                scopeComplementExpressions.add(scopeComplementRewritten);
            }
            if (scopeRewritten == null && scopeComplementRewritten == null) {
                scopeStraddlingExpressions.add(expression);
            }
        }
        // Compile the equality expressions on each side of the scope by equating
        // everything to a canonical representative (only meaningful with >= 2 members).
        RowExpression matchingCanonical = getCanonical(scopeExpressions);
        if (scopeExpressions.size() >= 2) {
            for (RowExpression expression : filter(scopeExpressions, not(equalTo(matchingCanonical)))) {
                scopeEqualities.add(buildEqualsExpression(functionAndTypeManager, matchingCanonical, expression));
            }
        }
        RowExpression complementCanonical = getCanonical(scopeComplementExpressions);
        if (scopeComplementExpressions.size() >= 2) {
            for (RowExpression expression : filter(scopeComplementExpressions, not(equalTo(complementCanonical)))) {
                scopeComplementEqualities.add(buildEqualsExpression(functionAndTypeManager, complementCanonical, expression));
            }
        }
        // Compile the scope-straddling equality expressions: connect the two sides'
        // canonicals (if any) and the straddling expressions through one canonical.
        List<RowExpression> connectingExpressions = new ArrayList<>();
        connectingExpressions.add(matchingCanonical);
        connectingExpressions.add(complementCanonical);
        connectingExpressions.addAll(scopeStraddlingExpressions);
        connectingExpressions = ImmutableList.copyOf(filter(connectingExpressions, Predicates.notNull()));
        RowExpression connectingCanonical = getCanonical(connectingExpressions);
        if (connectingCanonical != null) {
            for (RowExpression expression : filter(connectingExpressions, not(equalTo(connectingCanonical)))) {
                scopeStraddlingEqualities.add(buildEqualsExpression(functionAndTypeManager, connectingCanonical, expression));
            }
        }
    }
    return new EqualityPartition(scopeEqualities.build(), scopeComplementEqualities.build(), scopeStraddlingEqualities.build());
}
@Test(dataProvider = "testRowExpressions")
public void testExpressionsThatMayReturnNullOnNonNullInput(RowExpression candidate) {
    // Given b = x and a = <candidate>, only b = x may straddle the scope {b}:
    // the candidate may return null on non-null input, so a's equality must not be inferred across.
    EqualityInference.Builder builder = new EqualityInference.Builder(METADATA);
    builder.extractInferenceCandidates(equals(variable("b"), variable("x")));
    builder.extractInferenceCandidates(equals(variable("a"), candidate));
    EqualityInference inference = builder.build();
    List<RowExpression> equalities = inference.generateEqualitiesPartitionedBy(matchesVariables("b")).getScopeStraddlingEqualities();
    assertEquals(equalities.size(), 1);
    // Accept either operand order for the single surviving equality.
    assertTrue(equalities.get(0).equals(equals(variable("x"), variable("b"))) || equalities.get(0).equals(equals(variable("b"), variable("x"))));
}
/**
 * Returns the cached, typed value for the given property key.
 * Unchecked cast: the caller is responsible for requesting the type that matches the key.
 * NOTE(review): throws NullPointerException if the key was never cached — confirm the
 * cache is pre-populated for every key of E.
 */
@SuppressWarnings("unchecked")
public final <T> T getValue(final E key) {
    return (T) cache.get(key).getValue();
}
@Test
void assertGetValue() {
    // Build a fixture from a fresh Properties object and verify every typed accessor.
    Properties sourceProps = createProperties();
    TypedPropertiesFixture fixture = new TypedPropertiesFixture(sourceProps);
    assertTrue((Boolean) fixture.getValue(TypedPropertyKeyFixture.BOOLEAN_VALUE));
    assertTrue((Boolean) fixture.getValue(TypedPropertyKeyFixture.BOOLEAN_OBJECT_VALUE));
    assertThat(fixture.getValue(TypedPropertyKeyFixture.INT_VALUE), is(100));
    assertThat(fixture.getValue(TypedPropertyKeyFixture.INT_OBJECT_VALUE), is(100));
    assertThat(fixture.getValue(TypedPropertyKeyFixture.LONG_VALUE), is(10000L));
    assertThat(fixture.getValue(TypedPropertyKeyFixture.LONG_OBJECT_VALUE), is(10000L));
    assertThat(fixture.getValue(TypedPropertyKeyFixture.STRING_VALUE), is("new_value"));
    // The fixture must expose the very Properties instance it was constructed with.
    assertThat(fixture.getProps(), is(sourceProps));
}
/**
 * Applies the visitor to every visitable predicate in the array, copying lazily:
 * the original array is returned untouched unless at least one predicate is transformed.
 */
public static Predicate[] acceptVisitor(Predicate[] predicates, Visitor visitor, IndexRegistry indexes) {
    Predicate[] result = predicates;
    for (int index = 0; index < predicates.length; index++) {
        Predicate original = predicates[index];
        if (!(original instanceof VisitablePredicate visitable)) {
            // Non-visitable predicates pass through unchanged.
            continue;
        }
        Predicate replacement = visitable.accept(visitor, indexes);
        if (replacement == original) {
            continue;
        }
        if (result == predicates) {
            // First transformation: copy-on-write so the input array is never mutated.
            result = createCopy(result);
        }
        result[index] = replacement;
    }
    return result;
}
@Test
public void acceptVisitor_whenThereIsNonVisitablePredicateAndNewArraysIsCreated_thenJustCopyTheNonVisitablePredicate() {
    Visitor mockVisitor = mock(Visitor.class);
    // Mix: plain predicates at [0] and [2], a visitable one at [1] that transforms.
    Predicate[] predicates = new Predicate[3];
    Predicate p1 = mock(Predicate.class);
    predicates[0] = p1;
    Predicate transformed = mock(Predicate.class);
    Predicate p2 = createMockVisitablePredicate(transformed);
    predicates[1] = p2;
    Predicate p3 = mock(Predicate.class);
    predicates[2] = p3;
    Predicate[] result = VisitorUtils.acceptVisitor(predicates, mockVisitor, mockIndexes);
    // A transformation forces a copy; non-visitable entries must be carried over as-is.
    assertThat(result).isNotSameAs(predicates);
    assertThat(result).hasSize(3);
    assertThat(result).containsExactlyInAnyOrder(p1, transformed, p3);
}
/**
 * Returns the given URI with a trailing slash appended to its path if missing.
 * A null input yields null; a null path is treated as "/". URIs whose path already
 * ends in "/" are returned unchanged.
 */
@Nullable
public static URI uriWithTrailingSlash(@Nullable final URI uri) {
    if (uri == null) {
        return null;
    }
    final String originalPath = firstNonNull(uri.getPath(), "/");
    if (originalPath.endsWith("/")) {
        return uri;
    }
    try {
        // Rebuild component-by-component so query and fragment survive the path change.
        return new URI(
            uri.getScheme(),
            uri.getUserInfo(),
            uri.getHost(),
            uri.getPort(),
            originalPath + "/",
            uri.getQuery(),
            uri.getFragment());
    } catch (URISyntaxException e) {
        throw new RuntimeException("Could not parse URI.", e);
    }
}
@Test
public void uriWithTrailingSlashReturnsURIWithTrailingSlashIfTrailingSlashIsMissing() throws URISyntaxException {
    // A URI whose path lacks the trailing slash must come back with exactly one appended.
    final URI input = URI.create("http://example.com/api");
    final URI expected = URI.create("http://example.com/api/");
    assertEquals(expected, Tools.uriWithTrailingSlash(input));
}
/**
 * Fetches the runtime info of a consumer instance from the broker at {@code address}.
 *
 * @param address       broker address to query
 * @param requestHeader identifies the consumer group/client to inspect
 * @param timeoutMillis remoting invoke timeout
 * @return future completed with the decoded info on SUCCESS, or completed exceptionally
 *         with an MQClientException (non-success response) or the underlying transport/decode error
 */
@Override
public CompletableFuture<ConsumerRunningInfo> getConsumerRunningInfo(String address, GetConsumerRunningInfoRequestHeader requestHeader, long timeoutMillis) {
    CompletableFuture<ConsumerRunningInfo> future = new CompletableFuture<>();
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.GET_CONSUMER_RUNNING_INFO, requestHeader);
    remotingClient.invoke(address, request, timeoutMillis).whenComplete((response, throwable) -> {
        if (throwable != null) {
            // BUG FIX: with thenAccept, a failed invoke left the returned future
            // forever incomplete; propagate the transport failure to the caller.
            future.completeExceptionally(throwable);
            return;
        }
        if (response.getCode() == ResponseCode.SUCCESS) {
            try {
                ConsumerRunningInfo info = ConsumerRunningInfo.decode(response.getBody(), ConsumerRunningInfo.class);
                future.complete(info);
            } catch (Exception e) {
                // A decode failure must surface instead of being swallowed by the callback.
                future.completeExceptionally(e);
            }
        } else {
            log.warn("getConsumerRunningInfo getResponseCommand failed, {} {}", response.getCode(), response.getRemark());
            future.completeExceptionally(new MQClientException(response.getCode(), response.getRemark()));
        }
    });
    return future;
}
@Test
public void assertGetConsumerRunningInfoWithError() {
    // Stub the remoting layer to answer with a non-success response code.
    setResponseError();
    GetConsumerRunningInfoRequestHeader requestHeader = mock(GetConsumerRunningInfoRequestHeader.class);
    CompletableFuture<ConsumerRunningInfo> actual = mqClientAdminImpl.getConsumerRunningInfo(defaultBrokerAddr, requestHeader, defaultTimeout);
    // The future must complete exceptionally, carrying an MQClientException with the broker's code/remark.
    Throwable thrown = assertThrows(ExecutionException.class, actual::get);
    assertTrue(thrown.getCause() instanceof MQClientException);
    MQClientException mqException = (MQClientException) thrown.getCause();
    assertEquals(ResponseCode.SYSTEM_ERROR, mqException.getResponseCode());
    assertTrue(mqException.getMessage().contains("CODE: 1  DESC: null") || mqException.getMessage().contains("CODE: 1 DESC: null"));
}
/**
 * Registers the "api/monitoring" web service controller and delegates
 * action registration to each configured MonitoringWsAction.
 */
@Override
public void define(Context context) {
    NewController monitoringController = context.createController("api/monitoring")
        .setDescription("Monitoring")
        .setSince("9.3");
    for (MonitoringWsAction monitoringAction : actions) {
        monitoringAction.define(monitoringController);
    }
    // Controllers must be finalized once all actions are attached.
    monitoringController.done();
}
@Test
public void define_controller() {
    WebService.Context context = new WebService.Context();
    underTest.define(context);
    // The controller must exist under the expected path with its metadata and single action.
    WebService.Controller controller = context.controller("api/monitoring");
    assertThat(controller).isNotNull();
    assertThat(controller.description()).isNotEmpty();
    assertThat(controller.since()).isEqualTo("9.3");
    assertThat(controller.actions()).hasSize(1);
}
/**
 * Maps an ElasticsearchException to the most specific SearchTypeError:
 * an aborted-search error, a result-window-limit error found anywhere in a
 * bounded prefix of the cause chain, or a generic SearchTypeError otherwise.
 */
public static SearchTypeError parse(Query query, String searchTypeId, ElasticsearchException ex) {
    if (isSearchTypeAbortedError(ex)) {
        return new SearchTypeAbortedError(query, searchTypeId, ex);
    }
    // Walk the cause chain, bounded to avoid pathological (or cyclic) chains.
    Throwable cause = ex;
    for (int depth = 0; cause != null && depth < MAX_DEPTH_OF_EXCEPTION_CAUSE_ANALYSIS; depth++) {
        final Integer resultWindowLimit = parseResultLimit(cause);
        if (resultWindowLimit != null) {
            return new ResultWindowLimitError(query, searchTypeId, resultWindowLimit);
        }
        cause = cause.getCause();
    }
    return new SearchTypeError(query, searchTypeId, ex);
}
@Test
void returnsResultWindowLimitErrorIfPresentInTheExceptionsCauseChain() {
    // The result-window message is buried two levels deep in the cause chain;
    // the parser must still find it and extract the limit (42).
    final ElasticsearchException elasticsearchException = new ElasticsearchException(
        "Something is wrong!",
        new IllegalStateException(
            "Run for your lives!!!",
            new ElasticsearchException("Result window is too large, [from + size] must be less than or equal to: [42]")
        ));
    final SearchTypeError error = SearchTypeErrorParser.parse(query, "searchTypeId", elasticsearchException);
    assertThat(error).isInstanceOf(ResultWindowLimitError.class);
    assertThat((ResultWindowLimitError) error)
        .satisfies(e -> assertEquals(42, e.getResultWindowLimit()))
        .satisfies(e -> assertEquals("searchTypeId", e.searchTypeId()))
        .satisfies(e -> assertEquals("test_query", e.queryId()));
}
/**
 * Reads a zero-terminated UTF-8 string starting at the current position,
 * scanning buffer-by-buffer through the chain until a 0x00 byte is found.
 *
 * @return the decoded string (terminator excluded)
 * @throws BufferUnderflowException if the chain is exhausted before a zero byte appears
 * @throws IOException propagated from the decode step
 */
public String getUtf8CString() throws IOException {
    ArrayList<ByteBuffer> bufferList = null;
    boolean foundZeroByte = false;
    int numBytes = 0;
    // Scan one backing array segment per iteration; stop once the terminator is seen.
    while (foundZeroByte == false && advanceBufferIfCurrentBufferHasNoRemaining()) {
        int position = _currentBuffer.position();
        byte[] array = _currentBuffer.array();
        int arrayOffset = _currentBuffer.arrayOffset();
        int limit = _currentBuffer.limit();
        // Absolute bounds of the readable region within the backing array.
        int arrayLimit = arrayOffset + limit;
        int arrayStart = arrayOffset + position;
        int arrayIndex = arrayStart;
        while (arrayIndex < arrayLimit && array[arrayIndex] != ZERO_BYTE) {
            arrayIndex++;
        }
        // Terminator found iff the inner scan stopped before the segment end.
        foundZeroByte = (arrayIndex < arrayLimit);
        int bytesInCurrentBuffer = arrayIndex - arrayStart;
        numBytes += bytesInCurrentBuffer;
        // Accumulate segments unless the whole string fits in this single segment
        // (i.e. terminator found and no bytes were carried over from earlier buffers).
        if (foundZeroByte == false || numBytes != bytesInCurrentBuffer) {
            bufferList = accummulateByteBuffers(bufferList, bytesInCurrentBuffer);
        }
    }
    if (foundZeroByte == false) {
        throw new BufferUnderflowException();
    }
    return bufferToUtf8CString(numBytes, bufferList);
}
@Test
public void testGetUTF8CString() throws Exception {
    // For every sample string and every internal buffer size, a C-string round-trips
    // whether read raw, via getUtf8CString(), or via the length-hinted variant.
    for (Map.Entry<String, String> entry : _strings.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        for (int bufferSize : _bufferSizes) {
            // out.println("testing " + key + " with buffer size " + bufferSize);
            byte[] bytesFromString = value.getBytes(Data.UTF_8_CHARSET);
            // Append the C-string NUL terminator.
            int bytes = bytesFromString.length + 1;
            byte[] bytesInBuffer = new byte[bytes];
            System.arraycopy(bytesFromString, 0, bytesInBuffer, 0, bytes - 1);
            bytesInBuffer[bytes - 1] = 0;
            // must use package privet constructor break bytes into bufferSize byte buffers internally
            BufferChain bufferChain = new BufferChain(BufferChain.DEFAULT_ORDER, bytesInBuffer, bufferSize);
            bufferChain.rewind();
            // Raw read must reproduce the exact bytes, terminator included.
            byte[] bytesFromBufferChain = new byte[bytes];
            bufferChain.get(bytesFromBufferChain, 0, bytes);
            assertEquals(bytesFromBufferChain, bytesInBuffer);
            bufferChain.rewind();
            String stringWithoutLength = bufferChain.getUtf8CString();
            assertEquals(stringWithoutLength, value);
            bufferChain.rewind();
            String stringGetWithLength = bufferChain.getUtf8CString(bytes);
            assertEquals(stringGetWithLength, value);
        }
    }
}
/**
 * Looks up an account by its username hash, recording the lookup latency
 * in {@code getByUsernameHashTimer} regardless of success or failure.
 */
public CompletableFuture<Optional<Account>> getByUsernameHash(final byte[] usernameHash) {
    final Timer.Sample timerSample = Timer.start();
    return accounts.getByUsernameHash(usernameHash)
        .whenComplete((result, throwable) -> timerSample.stop(getByUsernameHashTimer));
}
@Test
void testGetAccountByUsernameHash() {
    UUID uuid = UUID.randomUUID();
    Account account = AccountsHelper.generateTestAccount("+14152222222", uuid, UUID.randomUUID(), new ArrayList<>(), new byte[UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH]);
    account.setUsernameHash(USERNAME_HASH_1);
    when(accounts.getByUsernameHash(USERNAME_HASH_1))
        .thenReturn(CompletableFuture.completedFuture(Optional.of(account)));
    Optional<Account> retrieved = accountsManager.getByUsernameHash(USERNAME_HASH_1).join();
    // The manager must pass the stored account straight through (same instance, no copy)
    // and delegate exactly once to the accounts store.
    assertTrue(retrieved.isPresent());
    assertSame(retrieved.get(), account);
    verify(accounts).getByUsernameHash(USERNAME_HASH_1);
    verifyNoMoreInteractions(accounts);
}
/**
 * Returns a description of this topology's structure.
 * Synchronized to serialize against the other synchronized mutators of this topology.
 */
public synchronized TopologyDescription describe() {
    return internalTopologyBuilder.describe();
}
@Test
public void tableZeroArgCountShouldPreserveTopologyStructure() {
    // table().groupBy().count() with no explicit stores must still produce the
    // canonical two-subtopology shape with a repartition topic in between.
    final StreamsBuilder builder = new StreamsBuilder();
    builder.table("input-topic")
        .groupBy((key, value) -> null)
        .count();
    final Topology topology = builder.build();
    final TopologyDescription describe = topology.describe();
    assertEquals(
        "Topologies:\n" +
            "   Sub-topology: 0\n" +
            "    Source: KSTREAM-SOURCE-0000000001 (topics: [input-topic])\n" +
            "      --> KTABLE-SOURCE-0000000002\n" +
            "    Processor: KTABLE-SOURCE-0000000002 (stores: [input-topic-STATE-STORE-0000000000])\n" +
            "      --> KTABLE-SELECT-0000000003\n" +
            "      <-- KSTREAM-SOURCE-0000000001\n" +
            "    Processor: KTABLE-SELECT-0000000003 (stores: [])\n" +
            "      --> KSTREAM-SINK-0000000005\n" +
            "      <-- KTABLE-SOURCE-0000000002\n" +
            "    Sink: KSTREAM-SINK-0000000005 (topic: KTABLE-AGGREGATE-STATE-STORE-0000000004-repartition)\n" +
            "      <-- KTABLE-SELECT-0000000003\n" +
            "\n" +
            "  Sub-topology: 1\n" +
            "    Source: KSTREAM-SOURCE-0000000006 (topics: [KTABLE-AGGREGATE-STATE-STORE-0000000004-repartition])\n" +
            "      --> KTABLE-AGGREGATE-0000000007\n" +
            "    Processor: KTABLE-AGGREGATE-0000000007 (stores: [KTABLE-AGGREGATE-STATE-STORE-0000000004])\n" +
            "      --> none\n" +
            "      <-- KSTREAM-SOURCE-0000000006\n" +
            "\n",
        describe.toString()
    );
    topology.internalTopologyBuilder.setStreamsConfig(streamsConfig);
    final ProcessorTopology processorTopology =
        topology.internalTopologyBuilder.setApplicationId("test").buildTopology();
    // one for ktable, and one for count operation
    assertThat(processorTopology.stateStores().size(), is(2));
    // ktable store is rocksDB (default)
    assertThat(processorTopology.stateStores().get(0).persistent(), is(true));
    // count store is rocksDB (default)
    assertThat(processorTopology.stateStores().get(1).persistent(), is(true));
}
/**
 * Returns the storage builder class used to (de)serialize this metric function
 * to and from its storage representation.
 */
@Override
public Class<? extends StorageBuilder> builder() {
    return SumPerMinStorageBuilder.class;
}
@Test
public void testBuilder() throws IllegalAccessException, InstantiationException {
    // Feed one sample into the function, then round-trip it entity -> storage map -> entity.
    long time = 1597113447737L;
    function.accept(MeterEntity.newService("sum_sync_time", Layer.GENERAL), time);
    function.calculate();
    StorageBuilder<SumPerMinFunction> storageBuilder = function.builder().newInstance();
    final HashMapConverter.ToStorage toStorage = new HashMapConverter.ToStorage();
    storageBuilder.entity2Storage(function, toStorage);
    final Map<String, Object> map = toStorage.obtain();
    // NOTE(review): this put is a no-op (re-inserts the value it just read) — confirm intent.
    map.put(SumPerMinFunction.VALUE, map.get(SumPerMinFunction.VALUE));
    SumPerMinFunction function2 = storageBuilder.storage2Entity(new HashMapConverter.ToEntity(map));
    // The reconstructed entity must carry the same aggregated value.
    assertThat(function2.getValue()).isEqualTo(function.getValue());
}
/**
 * Parses a bounding box from the comma-separated form "lat1,lon1,lat2,lon2".
 * The two corner points may be given in either order; {@code BBox.fromPoints}
 * normalizes them into min/max form. Whitespace around each number is tolerated.
 *
 * @param objectAsString four comma-separated decimal numbers
 * @return the normalized bounding box
 * @throws IllegalArgumentException if there are not exactly 4 parts
 * @throws NumberFormatException    if any part is not a valid double
 */
public static BBox parseTwoPoints(String objectAsString) {
    String[] parts = objectAsString.split(",");
    if (parts.length != 4)
        throw new IllegalArgumentException("BBox should have 4 parts but was " + objectAsString);
    // Named lat1/lon1/... rather than min/max: the input points need not be ordered,
    // fromPoints performs the min/max normalization.
    double lat1 = Double.parseDouble(parts[0].trim());
    double lon1 = Double.parseDouble(parts[1].trim());
    double lat2 = Double.parseDouble(parts[2].trim());
    double lon2 = Double.parseDouble(parts[3].trim());
    return BBox.fromPoints(lat1, lon1, lat2, lon2);
}
@Test
public void testParseTwoPoints() {
    assertEquals(new BBox(2, 4, 1, 3), BBox.parseTwoPoints("1,2,3,4"));
    // stable parsing, i.e. if first point is in north or south it does not matter:
    assertEquals(new BBox(2, 4, 1, 3), BBox.parseTwoPoints("3,2,1,4"));
}
/**
 * Executes one redo pass: skipped entirely while the gRPC connection is down,
 * otherwise re-applies instance registrations first and subscriptions second.
 * Any exception is caught and logged so a presumably scheduled task is not killed.
 */
@Override
public void run() {
    if (!redoService.isConnected()) {
        LogUtils.NAMING_LOGGER.warn("Grpc Connection is disconnect, skip current redo task");
        return;
    }
    try {
        redoForInstances();
        redoForSubscribes();
    } catch (Exception e) {
        LogUtils.NAMING_LOGGER.warn("Redo task run with unexpected exception: ", e);
    }
}
@Test
void testRunRedoWithDisconnection() {
    // While disconnected, the task must short-circuit: no redo data is ever queried.
    when(redoService.isConnected()).thenReturn(false);
    redoTask.run();
    verify(redoService, never()).findInstanceRedoData();
    verify(redoService, never()).findSubscriberRedoData();
}
/**
 * Aggregates using default serdes and an unnamed state store by delegating to the
 * {@link Materialized} overload with a serde-less materialization.
 */
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer) {
    return aggregate(initializer, Materialized.with(null, null));
}
@Test
public void shouldNotHaveNullNamedOnAggregate() {
    // A null Named argument must be rejected eagerly with an NPE.
    assertThrows(NullPointerException.class, () -> windowedCogroupedStream.aggregate(MockInitializer.STRING_INIT, null, Materialized.as("test")));
}
/**
 * Switches this double-array assertion into iterable mode using exact (bitwise)
 * equality as the element correspondence.
 */
public DoubleArrayAsIterable usingExactEquality() {
    return new DoubleArrayAsIterable(EXACT_EQUALITY_CORRESPONDENCE, iterableSubject());
}
@Test
public void usingExactEquality_containsNoneOf_primitiveDoubleArray_failure() {
    // 2.2 appears in both arrays, so containsNoneOf must fail and the failure
    // message must name the offending element and the full contents.
    expectFailureWhenTestingThat(array(1.1, 2.2, 3.3))
        .usingExactEquality()
        .containsNoneOf(array(99.99, 2.2));
    assertFailureKeys(
        "value of",
        "expected not to contain any of",
        "testing whether",
        "but contained",
        "corresponding to",
        "---",
        "full contents");
    assertFailureValue("expected not to contain any of", "[99.99, 2.2]");
    assertFailureValue("but contained", "[2.2]");
    assertFailureValue("corresponding to", "2.2");
}
/**
 * Returns the queue's current availability future.
 * NOTE(review): this exposes the live internal future (not a defensive copy) —
 * callers must not complete or cancel it themselves.
 */
public CompletableFuture<Void> getAvailabilityFuture() {
    return currentFuture;
}
@Test
public void testUnavailableWhenEmpty() {
    // A freshly created, empty queue must report an incomplete availability future.
    final FutureCompletingBlockingQueue<Object> emptyQueue = new FutureCompletingBlockingQueue<>();
    assertThat(emptyQueue.getAvailabilityFuture().isDone()).isFalse();
}
/**
 * Returns true iff some member both passes the configured selector and equals {@code o}.
 * Note: a null {@code o} triggers an NPE on the first selected member, matching the
 * original equals-on-argument ordering.
 */
@Override
public boolean contains(Object o) {
    for (M candidate : members) {
        if (!selector.select(candidate)) {
            // Members filtered out by the selector are invisible to this view.
            continue;
        }
        if (o.equals(candidate)) {
            return true;
        }
    }
    return false;
}
@Test
public void testDoesNotContainThisMemberWhenLiteMembersSelectedAndNoLocalMember() {
    // With the LITE + NON_LOCAL selector conjunction, the local member must be
    // filtered out of the view even if it is present in the backing collection.
    Collection<MemberImpl> collection = new MemberSelectingCollection<>(members, and(LITE_MEMBER_SELECTOR, NON_LOCAL_MEMBER_SELECTOR));
    assertFalse(collection.contains(thisMember));
}
/**
 * Encodes a Codahale metric as JSON of the shape {"<type>": {...fields...}}.
 * Counter -> count; Gauge -> value; Meter/Timer -> count plus rate statistics;
 * Histogram -> count plus snapshot statistics. Timer durations are converted
 * from nanoseconds to milliseconds.
 *
 * @param metric  metric to encode; must not be null
 * @param context codec context supplying the JSON mapper
 * @return the encoded object node (empty for unrecognized metric types)
 */
@Override
public ObjectNode encode(Metric metric, CodecContext context) {
    checkNotNull(metric, "Metric cannot be null");
    ObjectNode objectNode = context.mapper().createObjectNode();
    ObjectNode dataNode = context.mapper().createObjectNode();
    if (metric instanceof Counter) {
        dataNode.put(COUNTER, ((Counter) metric).getCount());
        objectNode.set(COUNTER, dataNode);
    } else if (metric instanceof Gauge) {
        // BUG FIX: the gauge value was previously written onto the top-level node while
        // an empty dataNode was attached under GAUGE; write the value into dataNode so
        // gauges follow the same {"gauge": {...}} shape as every other metric type.
        dataNode.put(VALUE, ((Gauge) metric).getValue().toString());
        objectNode.set(GAUGE, dataNode);
    } else if (metric instanceof Meter) {
        dataNode.put(COUNTER, ((Meter) metric).getCount());
        dataNode.put(MEAN_RATE, ((Meter) metric).getMeanRate());
        dataNode.put(ONE_MIN_RATE, ((Meter) metric).getOneMinuteRate());
        dataNode.put(FIVE_MIN_RATE, ((Meter) metric).getFiveMinuteRate());
        dataNode.put(FIFT_MIN_RATE, ((Meter) metric).getFifteenMinuteRate());
        objectNode.set(METER, dataNode);
    } else if (metric instanceof Histogram) {
        dataNode.put(COUNTER, ((Histogram) metric).getCount());
        dataNode.put(MEAN, ((Histogram) metric).getSnapshot().getMean());
        dataNode.put(MIN, ((Histogram) metric).getSnapshot().getMin());
        dataNode.put(MAX, ((Histogram) metric).getSnapshot().getMax());
        dataNode.put(STDDEV, ((Histogram) metric).getSnapshot().getStdDev());
        objectNode.set(HISTOGRAM, dataNode);
    } else if (metric instanceof Timer) {
        dataNode.put(COUNTER, ((Timer) metric).getCount());
        dataNode.put(MEAN_RATE, ((Timer) metric).getMeanRate());
        dataNode.put(ONE_MIN_RATE, ((Timer) metric).getOneMinuteRate());
        dataNode.put(FIVE_MIN_RATE, ((Timer) metric).getFiveMinuteRate());
        dataNode.put(FIFT_MIN_RATE, ((Timer) metric).getFifteenMinuteRate());
        // Timer snapshot values are in nanoseconds; report them in milliseconds.
        dataNode.put(MEAN, nanoToMs(((Timer) metric).getSnapshot().getMean()));
        dataNode.put(MIN, nanoToMs(((Timer) metric).getSnapshot().getMin()));
        dataNode.put(MAX, nanoToMs(((Timer) metric).getSnapshot().getMax()));
        dataNode.put(STDDEV, nanoToMs(((Timer) metric).getSnapshot().getStdDev()));
        objectNode.set(TIMER, dataNode);
    }
    return objectNode;
}
@Test
public void testMetricEncode() {
    // One metric of each basic type, each given a single sample so counts are non-zero.
    Counter counter = new Counter();
    Meter meter = new Meter();
    Timer timer = new Timer();
    counter.inc();
    meter.mark();
    timer.update(1, TimeUnit.MILLISECONDS);
    // Each encoding must nest its payload under the metric-type key.
    ObjectNode counterJson = metricCodec.encode(counter, context);
    assertThat(counterJson.get("counter"), matchesMetric(counter));
    ObjectNode meterJson = metricCodec.encode(meter, context);
    assertThat(meterJson.get("meter"), matchesMetric(meter));
    ObjectNode timerJson = metricCodec.encode(timer, context);
    assertThat(timerJson.get("timer"), matchesMetric(timer));
}
/**
 * Validates a service definition and resolves it into its final form:
 * checks user/service/component naming constraints, expands components whose
 * artifact is an external SERVICE, propagates global configuration/artifact/resource
 * defaults onto components, validates dependencies and placement, sorts components
 * by dependency order, and defaults the lifetime when unset.
 *
 * Mutates {@code service} in place.
 *
 * @throws IOException              on filesystem access failures
 * @throws IllegalArgumentException on any validation failure
 */
@VisibleForTesting
public static void validateAndResolveService(Service service, SliderFileSystem fs,
    org.apache.hadoop.conf.Configuration conf) throws IOException {
    boolean dnsEnabled = conf.getBoolean(RegistryConstants.KEY_DNS_ENABLED,
        RegistryConstants.DEFAULT_DNS_ENABLED);
    if (dnsEnabled) {
        // With registry DNS, the user name becomes part of an FQDN label and must fit.
        if (RegistryUtils.currentUser().length() > RegistryConstants.MAX_FQDN_LABEL_LENGTH) {
            throw new IllegalArgumentException(
                RestApiErrorMessages.ERROR_USER_NAME_INVALID);
        }
        userNamePattern.validate(RegistryUtils.currentUser());
    }
    if (StringUtils.isEmpty(service.getName())) {
        throw new IllegalArgumentException(
            RestApiErrorMessages.ERROR_APPLICATION_NAME_INVALID);
    }
    if (StringUtils.isEmpty(service.getVersion())) {
        throw new IllegalArgumentException(String.format(
            RestApiErrorMessages.ERROR_APPLICATION_VERSION_INVALID,
            service.getName()));
    }
    validateNameFormat(service.getName(), conf);

    // If the service has no components, throw error
    if (!hasComponent(service)) {
        throw new IllegalArgumentException(
            "No component specified for " + service.getName());
    }

    if (UserGroupInformation.isSecurityEnabled()) {
        validateKerberosPrincipal(service.getKerberosPrincipal());
    }

    // Validate the Docker client config.
    try {
        validateDockerClientConfiguration(service, conf);
    } catch (IOException e) {
        throw new IllegalArgumentException(e);
    }

    // Validate there are no component name collisions (collisions are not
    // currently supported) and add any components from external services
    Configuration globalConf = service.getConfiguration();
    Set<String> componentNames = new HashSet<>();
    List<Component> componentsToRemove = new ArrayList<>();
    List<Component> componentsToAdd = new ArrayList<>();
    for (Component comp : service.getComponents()) {
        // Reserve room in the FQDN label for a numeric instance suffix.
        int maxCompLength = RegistryConstants.MAX_FQDN_LABEL_LENGTH;
        maxCompLength = maxCompLength - Long.toString(Long.MAX_VALUE).length();
        if (dnsEnabled && comp.getName().length() > maxCompLength) {
            throw new IllegalArgumentException(String.format(RestApiErrorMessages
                .ERROR_COMPONENT_NAME_INVALID, maxCompLength, comp.getName()));
        }
        if (service.getName().equals(comp.getName())) {
            throw new IllegalArgumentException(String.format(RestApiErrorMessages
                .ERROR_COMPONENT_NAME_CONFLICTS_WITH_SERVICE_NAME, comp.getName(),
                service.getName()));
        }
        if (componentNames.contains(comp.getName())) {
            throw new IllegalArgumentException("Component name collision: " +
                comp.getName());
        }
        // If artifact is of type SERVICE (which cannot be filled from global),
        // read external service and add its components to this service
        if (comp.getArtifact() != null && comp.getArtifact().getType() ==
            Artifact.TypeEnum.SERVICE) {
            if (StringUtils.isEmpty(comp.getArtifact().getId())) {
                throw new IllegalArgumentException(
                    RestApiErrorMessages.ERROR_ARTIFACT_ID_INVALID);
            }
            LOG.info("Marking {} for removal", comp.getName());
            componentsToRemove.add(comp);
            List<Component> externalComponents = getComponents(fs,
                comp.getArtifact().getId());
            for (Component c : externalComponents) {
                Component override = service.getComponent(c.getName());
                if (override != null && override.getArtifact() == null) {
                    // allow properties from external components to be overridden /
                    // augmented by properties in this component, except for artifact
                    // which must be read from external component
                    override.mergeFrom(c);
                    LOG.info("Merging external component {} from external {}", c
                        .getName(), comp.getName());
                } else {
                    if (componentNames.contains(c.getName())) {
                        throw new IllegalArgumentException("Component name collision: " +
                            c.getName());
                    }
                    componentNames.add(c.getName());
                    componentsToAdd.add(c);
                    LOG.info("Adding component {} from external {}", c.getName(),
                        comp.getName());
                }
            }
        } else {
            // otherwise handle as a normal component
            componentNames.add(comp.getName());
            // configuration: fold global config into the component
            comp.getConfiguration().mergeFrom(globalConf);
        }
    }
    service.getComponents().removeAll(componentsToRemove);
    service.getComponents().addAll(componentsToAdd);

    // Validate components and let global values take effect if component level
    // values are not provided
    Artifact globalArtifact = service.getArtifact();
    Resource globalResource = service.getResource();
    for (Component comp : service.getComponents()) {
        // fill in global artifact unless it is type SERVICE
        if (comp.getArtifact() == null && service.getArtifact() != null
            && service.getArtifact().getType() != Artifact.TypeEnum
            .SERVICE) {
            comp.setArtifact(globalArtifact);
        }
        // fill in global resource
        if (comp.getResource() == null) {
            comp.setResource(globalResource);
        }
        // validate dependency existence
        if (comp.getDependencies() != null) {
            for (String dependency : comp.getDependencies()) {
                if (!componentNames.contains(dependency)) {
                    throw new IllegalArgumentException(String.format(
                        RestApiErrorMessages.ERROR_DEPENDENCY_INVALID, dependency,
                        comp.getName()));
                }
            }
        }
        validateComponent(comp, fs.getFileSystem(), conf);
    }
    validatePlacementPolicy(service.getComponents(), componentNames);

    // validate dependency tree
    sortByDependencies(service.getComponents());

    // Service lifetime if not specified, is set to unlimited lifetime
    if (service.getLifetime() == null) {
        service.setLifetime(RestApiConstants.DEFAULT_UNLIMITED_LIFETIME);
    }
}
@Test
public void testComponentNameSameAsServiceName() throws IOException {
    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
    Service app = new Service();
    app.setName("test");
    app.setVersion("v1");
    app.addComponent(createValidComponent("test"));

    //component name same as service name
    try {
        ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED);
        Assert.fail(EXCEPTION_PREFIX + "component name matches service name");
    } catch (IllegalArgumentException e) {
        // Validation must reject the collision with the exact expected message.
        assertEquals("Component name test must not be same as service name test",
            e.getMessage());
    }
}
/**
 * Deletes the file (or empty directory) if it exists.
 * Any IOException is rethrown unchecked via LangUtil so callers need not declare it.
 */
public static void deleteIfExists(final File file) {
    try {
        Files.deleteIfExists(file.toPath());
    } catch (final IOException ex) {
        LangUtil.rethrowUnchecked(ex);
    }
}
@Test
void deleteIfExistsErrorHandlerFailsOnNonEmptyDirectory() throws IOException {
    final ErrorHandler errorHandler = mock(ErrorHandler.class);
    // A non-empty directory cannot be deleted; the failure must be routed to the handler.
    final Path dir = tempDir.resolve("dir");
    Files.createDirectory(dir);
    Files.createFile(dir.resolve("file.txt"));
    IoUtil.deleteIfExists(dir.toFile(), errorHandler);
    verify(errorHandler).onError(isA(DirectoryNotEmptyException.class));
}
/**
 * Returns the current fetch position for the partition, or null when the
 * partition is not assigned.
 */
public synchronized FetchPosition positionOrNull(TopicPartition tp) {
    final TopicPartitionState state = assignedStateOrNull(tp);
    if (state == null) {
        return null;
    }
    // Use the state already looked up instead of resolving the partition a second
    // time via assignedState(tp), as the original did.
    return state.position;
}
@Test
public void testPositionOrNull() {
    state.assignFromUser(Collections.singleton(tp0));
    final TopicPartition unassignedPartition = new TopicPartition("unassigned", 0);
    state.seek(tp0, 5);
    // Assigned partition reports its seeked offset; unassigned yields null, not an exception.
    assertEquals(5, state.positionOrNull(tp0).offset);
    assertNull(state.positionOrNull(unassignedPartition));
}
/**
 * Opens a download stream for the file from S3.
 * Incomplete multipart placeholders (upload type) yield an empty stream. Supports
 * resumed (range) downloads via the transfer status and versioned reads via the
 * file's version id. The returned stream releases the underlying HTTP connection
 * when closed.
 *
 * @param file     remote file to read
 * @param status   transfer status carrying append/range and length information
 * @param callback connection prompt (unused here)
 * @throws BackgroundException mapped from S3 service or transport failures
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    try {
        if(file.getType().contains(Path.Type.upload)) {
            // In-progress multipart upload placeholder: nothing to download yet.
            return new NullInputStream(0L);
        }
        final HttpRange range = HttpRange.withStatus(status);
        final RequestEntityRestStorageService client = session.getClient();
        final Map<String, Object> requestHeaders = new HashMap<>();
        final Map<String, String> requestParameters = new HashMap<>();
        if(file.attributes().getVersionId() != null) {
            // Read a specific object version rather than the latest.
            requestParameters.put("versionId", file.attributes().getVersionId());
        }
        if(status.isAppend()) {
            // Resume: request only the remaining byte range (open-ended if length unknown).
            final String header;
            if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) {
                header = String.format("bytes=%d-", range.getStart());
            }
            else {
                header = String.format("bytes=%d-%d", range.getStart(), range.getEnd());
            }
            if(log.isDebugEnabled()) {
                log.debug(String.format("Add range header %s for file %s", header, file));
            }
            requestHeaders.put(HttpHeaders.RANGE, header);
        }
        final Path bucket = containerService.getContainer(file);
        // 200 for full reads, 206 for ranged reads are both acceptable.
        final HttpResponse response = client.performRestGet(bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(file),
            requestParameters, requestHeaders, new int[]{HttpStatus.SC_PARTIAL_CONTENT, HttpStatus.SC_OK});
        // Wrapper releases the HTTP connection when the stream is closed.
        return new HttpMethodReleaseInputStream(response, status);
    }
    catch(ServiceException e) {
        throw new S3ExceptionMappingService().map("Download {0} failed", e, file);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map("Download {0} failed", e, file);
    }
}
@Test
public void testReadCloseReleaseEntity() throws Exception {
    // Upload a random file, open a read stream and close it immediately:
    // no bytes must have been consumed (the entity is released, not drained).
    final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path file = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final int length = 2048;
    final byte[] content = RandomUtils.nextBytes(length);
    final TransferStatus status = new TransferStatus().withLength(content.length);
    status.setChecksum(new SHA256ChecksumCompute().compute(new ByteArrayInputStream(content), status));
    final OutputStream out = new S3WriteFeature(session, new S3AccessControlListFeature(session)).write(file, status, new DisabledConnectionCallback());
    new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out);
    final CountingInputStream in = new CountingInputStream(new S3ReadFeature(session).read(file, status, new DisabledConnectionCallback()));
    in.close();
    assertEquals(0L, in.getByteCount(), 0L);
    // Clean up the uploaded fixture.
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
@Override public void appendOrOverwriteRegion(int subpartition, T newRegion) throws IOException { // This method will only be called when we want to eliminate a region. We can't let the // region be reloaded into the cache, otherwise it will lead to an infinite loop. long oldRegionOffset = findRegion(subpartition, newRegion.getFirstBufferIndex(), false); if (oldRegionOffset != -1) { // if region is already exists in file, overwrite it. writeRegionToOffset(oldRegionOffset, newRegion); } else { // otherwise, append region to region group. appendRegion(subpartition, newRegion); } }
@Test
void testAppendOrOverwriteRegion() throws Exception {
    CompletableFuture<Void> cachedRegionFuture = new CompletableFuture<>();
    try (FileDataIndexSpilledRegionManager<TestingFileDataIndexRegion> spilledRegionManager =
        createSpilledRegionManager((ignore1, ignore2) -> cachedRegionFuture.complete(null))) {
        TestingFileDataIndexRegion region = createSingleTestRegion(0, 0L, 1);
        // append region to index file.
        spilledRegionManager.appendOrOverwriteRegion(0, region);
        // Appending must not pull the region back into the cache.
        assertThat(cachedRegionFuture).isNotCompleted();
        FileChannel indexFileChannel = FileChannel.open(indexFilePath, StandardOpenOption.READ);
        TestingFileDataIndexRegion readRegion = readRegionFromFile(indexFileChannel, 0L);
        assertRegionEquals(readRegion, region);
        // new region must have the same size of old region.
        TestingFileDataIndexRegion newRegion = createSingleTestRegion(0, 10L, 1);
        // overwrite old region.
        spilledRegionManager.appendOrOverwriteRegion(0, newRegion);
        // appendOrOverwriteRegion will not trigger cache load.
        assertThat(cachedRegionFuture).isNotCompleted();
        TestingFileDataIndexRegion readNewRegion = readRegionFromFile(indexFileChannel, 0L);
        assertRegionEquals(readNewRegion, newRegion);
    }
}
/**
 * Creates the thread-level sensor tracking the average and maximum number of
 * records processed within an iteration.
 *
 * @param threadId id of the stream thread the sensor belongs to
 * @param streamsMetrics metrics registry used to create the sensor
 * @return the configured sensor (INFO recording level)
 */
public static Sensor processRecordsSensor(final String threadId, final StreamsMetricsImpl streamsMetrics) {
    final Sensor sensor = streamsMetrics.threadLevelSensor(threadId, PROCESS + RECORDS_SUFFIX, RecordingLevel.INFO);
    final Map<String, String> tagMap = streamsMetrics.threadLevelTagMap(threadId);
    addAvgAndMaxToSensor(
        sensor,
        THREAD_LEVEL_GROUP,
        tagMap,
        PROCESS + RECORDS_SUFFIX,
        PROCESS_AVG_RECORDS_DESCRIPTION,
        PROCESS_MAX_RECORDS_DESCRIPTION
    );
    return sensor;
}
// Verifies processRecordsSensor wires the thread-level sensor with the
// expected operation name, tags and avg/max metric descriptions, using a
// static mock to intercept the addAvgAndMaxToSensor call.
@Test
public void shouldGetProcessRecordsSensor() {
    final String operation = "process-records";
    final String avgDescription = "The average number of records processed within an iteration";
    final String maxDescription = "The maximum number of records processed within an iteration";
    when(streamsMetrics.threadLevelSensor(THREAD_ID, operation, RecordingLevel.INFO)).thenReturn(expectedSensor);
    when(streamsMetrics.threadLevelTagMap(THREAD_ID)).thenReturn(tagMap);
    try (final MockedStatic<StreamsMetricsImpl> streamsMetricsStaticMock = mockStatic(StreamsMetricsImpl.class)) {
        final Sensor sensor = ThreadMetrics.processRecordsSensor(THREAD_ID, streamsMetrics);
        streamsMetricsStaticMock.verify(
            () -> StreamsMetricsImpl.addAvgAndMaxToSensor(
                expectedSensor,
                THREAD_LEVEL_GROUP,
                tagMap,
                operation,
                avgDescription,
                maxDescription
            )
        );
        assertThat(sensor, is(expectedSensor));
    }
}
/**
 * Calls the remote payment service and returns its processing result.
 * Communication failures are logged and reported as {@code false} rather
 * than propagated.
 *
 * NOTE(review): the endpoint URL and port are hardcoded — presumably fine for
 * a demo, but consider externalizing to configuration.
 *
 * @return the payment service's result, or false if the service is unreachable
 */
Boolean processPayment() {
    try {
        ResponseEntity<Boolean> paymentProcessResult = restTemplateBuilder
            .build()
            .postForEntity("http://localhost:30301/payment/process", "processing payment", Boolean.class);
        LOGGER.info("Payment processing result: {}", paymentProcessResult.getBody());
        return paymentProcessResult.getBody();
    } catch (ResourceAccessException | HttpClientErrorException e) {
        // Treat connectivity and 4xx client errors as a failed payment.
        LOGGER.error("Error communicating with payment service: {}", e.getMessage());
        return false;
    }
}
// Happy-path test: payment service answers true, so processPayment returns true.
// NOTE(review): this stubs `restTemplate` directly — it only takes effect if the
// test fixture makes `restTemplateBuilder.build()` return that mock; verify setup.
@Test
void testProcessPayment() {
    // Arrange
    when(restTemplate.postForEntity(eq("http://localhost:30301/payment/process"), anyString(), eq(Boolean.class)))
        .thenReturn(ResponseEntity.ok(true));
    // Act
    Boolean result = orderService.processPayment();
    // Assert
    assertEquals(true, result);
}
/**
 * Opens a read stream for the given file on the SMB share. When the transfer
 * status indicates a resumed (append) download, the stream is advanced to the
 * resume offset before being returned.
 *
 * @param file file to read
 * @param status transfer status; offset is honored when append is set
 * @param callback connection prompt callback (unused here)
 * @return stream positioned at the requested offset
 * @throws BackgroundException mapped from SMB or transport failures
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    final SMBSession.DiskShareWrapper share = session.openShare(file);
    try {
        final File entry = share.get().openFile(new SMBPathContainerService(session).getKey(file),
                Collections.singleton(AccessMask.FILE_READ_DATA),
                Collections.singleton(FileAttributes.FILE_ATTRIBUTE_NORMAL),
                Collections.singleton(SMB2ShareAccess.FILE_SHARE_READ),
                SMB2CreateDisposition.FILE_OPEN,
                Collections.singleton(SMB2CreateOptions.FILE_NON_DIRECTORY_FILE));
        final InputStream stream = entry.getInputStream();
        if(status.isAppend()) {
            // Bug fix: InputStream#skip may skip fewer bytes than requested.
            // Loop until the resume offset is actually reached instead of
            // assuming a single call suffices, otherwise a resumed download
            // could silently read from the wrong position.
            long remaining = status.getOffset();
            while(remaining > 0L) {
                final long skipped = stream.skip(remaining);
                if(skipped <= 0L) {
                    throw new IOException(String.format("Unable to skip to offset %d reading %s", status.getOffset(), file.getName()));
                }
                remaining -= skipped;
            }
        }
        return new SMBInputStream(file, stream, entry);
    }
    catch(SMBRuntimeException e) {
        throw new SMBExceptionMappingService().map("Download {0} failed", e, file);
    }
    catch(IOException e) {
        throw new SMBTransportExceptionMappingService().map("Download {0} failed", e, file);
    }
    finally {
        // Release the share reference in all cases; the returned SMBInputStream
        // retains the opened file entry it needs.
        session.releaseShare(share);
    }
}
// Stress test: 100 concurrent tasks each create a folder and file, write 274
// random bytes and read them back, verifying content round-trips under
// concurrent SMB share access. Any task failure surfaces via Future.get().
@Test
public void testReadWriteConcurrency() throws Exception {
    final Path home = new DefaultHomeFinderService(session).find();
    final ExecutorService executor = Executors.newFixedThreadPool(50);
    final List<Future<Object>> results = new ArrayList<>();
    for(int i = 0; i < 100; i++) {
        final Future<Object> submitted = executor.submit(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                final TransferStatus status = new TransferStatus();
                final int length = 274;
                final byte[] content = RandomUtils.nextBytes(length);
                status.setLength(content.length);
                final Path folder = new SMBDirectoryFeature(session).mkdir(
                    new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
                final Path test = new SMBTouchFeature(session).touch(
                    new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
                final Write writer = new SMBWriteFeature(session);
                status.setChecksum(writer.checksum(test, status).compute(new ByteArrayInputStream(content), status));
                final OutputStream out = writer.write(test, status.exists(true), new DisabledConnectionCallback());
                assertNotNull(out);
                new StreamCopier(status, status).transfer(new ByteArrayInputStream(content), out);
                final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length);
                final InputStream in = new SMBReadFeature(session).read(test, new TransferStatus().withLength(content.length), new DisabledConnectionCallback());
                new StreamCopier(status, status).transfer(in, buffer);
                // Read-back must match what was written
                assertArrayEquals(content, buffer.toByteArray());
                return null;
            }
        });
        results.add(submitted);
    }
    for(Future<Object> result : results) {
        result.get();
    }
    executor.shutdown();
}
/**
 * Sets the server attribute for the provider being built.
 *
 * @param server server address or name
 * @return this builder for chaining
 */
public ProviderBuilder server(String server) {
    this.server = server;
    return getThis();
}
// Verifies the builder's server(...) setter is carried through to the built config.
@Test
void server() {
    ProviderBuilder builder = ProviderBuilder.newBuilder();
    builder.server("server");
    Assertions.assertEquals("server", builder.build().getServer());
}
/**
 * Deserializes a JSON string into a {@link ModelLocalUriId}.
 *
 * @param localUriString JSON representation of the id
 * @return the parsed ModelLocalUriId
 * @throws JsonProcessingException if the string is not valid JSON for the type
 */
public static ModelLocalUriId getModelLocalUriIdObject(String localUriString) throws JsonProcessingException {
    return objectMapper.readValue(localUriString, ModelLocalUriId.class);
}
// Smoke test: a well-formed JSON id string deserializes to a non-null object.
@Test
void getModelLocalUriIdObject() throws JsonProcessingException {
    String localUriIdString = "{\"model\":\"foo\",\"basePath\":\"/this/is/modelLocalUriId\",\"fullPath\":\"/foo/this/is/modelLocalUriId\"}";
    ModelLocalUriId retrieved = JSONUtils.getModelLocalUriIdObject(localUriIdString);
    assertThat(retrieved).isNotNull();
}
/**
 * Collects all remaining split segments into a list.
 *
 * @param trim whether each segment should be trimmed via {@code StrUtil.trim}
 * @return list of segments, trimmed if requested
 */
public List<String> toList(boolean trim) {
    if (trim) {
        return toList(StrUtil::trim);
    }
    return toList((str) -> str);
}
// Splitting on ',' with trim=true and ignoreEmpty=true must drop blank and
// empty segments and trim whitespace from the rest.
@Test
public void splitByCharTrimTest(){
    String str1 = "a, ,,efedsfs, ddf,";
    SplitIter splitIter = new SplitIter(str1,
        new CharFinder(',', false),
        Integer.MAX_VALUE,
        true
    );
    final List<String> strings = splitIter.toList(true);
    assertEquals(3, strings.size());
    assertEquals("a", strings.get(0));
    assertEquals("efedsfs", strings.get(1));
    assertEquals("ddf", strings.get(2));
}
/**
 * Decides whether the task with the given status should be restarted.
 * Tasks are only eligible when task restarts were requested at all; when
 * restricted to failed tasks, only FAILED state qualifies.
 *
 * @param status current status of the task
 * @return true if the task should be restarted
 */
public boolean shouldRestartTask(TaskStatus status) {
    if (!includeTasks) {
        return false;
    }
    if (!onlyFailed) {
        return true;
    }
    return status.state() == AbstractStatus.State.FAILED;
}
// With onlyFailed=false and includeTasks=true, tasks in any state are restartable.
@Test
public void restartAnyStatusTasks() {
    RestartRequest restartRequest = new RestartRequest(CONNECTOR_NAME, false, true);
    assertTrue(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.FAILED)));
    assertTrue(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.RUNNING)));
    assertTrue(restartRequest.shouldRestartTask(createTaskStatus(AbstractStatus.State.PAUSED)));
}
/**
 * Loads all keys from the backing store via SQL. Returns an empty iterable
 * when key loading is disabled by configuration. The returned iterable's
 * iterator closes the underlying SQL result when iteration completes, per
 * the MapLoader#loadAllKeys contract for Closeable iterators.
 *
 * @return iterable over all key column values, possibly empty
 */
@Override
public Iterable<K> loadAllKeys() {
    // If loadAllKeys property is disabled, don't load anything
    if (!genericMapStoreProperties.loadAllKeys) {
        return Collections.emptyList();
    }
    awaitSuccessfulInit();
    String sql = queries.loadAllKeys();
    SqlResult keysResult = sqlService.execute(sql);
    // The contract for loadAllKeys says that if iterator implements Closable
    // then it will be closed when the iteration is over
    return () -> new MappingClosingIterator<>(
        keysResult.iterator(),
        (SqlRow row) -> row.getObject(genericMapStoreProperties.idColumn),
        keysResult::close
    );
}
// When LOAD_ALL_KEYS_PROPERTY is false, loadAllKeys yields no keys even
// though data exists. (Despite the test name, the contract is an empty
// iterable rather than null — the assertion checks emptiness.)
@Test
public void givenFalse_whenLoadAllKeys_thenReturnNull() {
    ObjectSpec spec = objectProvider.createObject(mapName, true);
    objectProvider.insertItems(spec, 1);
    Properties properties = new Properties();
    properties.setProperty(DATA_CONNECTION_REF_PROPERTY, TEST_DATABASE_REF);
    properties.setProperty(ID_COLUMN_PROPERTY, "person-id");
    properties.setProperty(LOAD_ALL_KEYS_PROPERTY, "false");
    mapLoader = createMapLoader(properties, hz);
    List<Integer> ids = newArrayList(mapLoader.loadAllKeys());
    assertThat(ids).isEmpty();
}
/**
 * Records a single eviction with a default weight of 1.
 *
 * @deprecated scheduled for removal in version 3.0; use the weighted
 *             {@code recordEviction(weight)} overload instead.
 */
@Deprecated
@SuppressWarnings("deprecation")
public void recordEviction() {
    // This method is scheduled for removal in version 3.0 in favor of recordEviction(weight)
    recordEviction(1);
}
// For every removal cause, a weighted eviction must increment the
// cause-specific eviction histogram exactly once.
@Test
public void evictionWithCause() {
    // With JUnit 5, this would be better done with @ParameterizedTest + @EnumSource
    for (RemovalCause cause : RemovalCause.values()) {
        stats.recordEviction(3, cause);
        assertThat(registry.histogram(PREFIX + ".evictions." + cause.name()).getCount()).isEqualTo(1);
    }
}
/**
 * Registers an index spec after validation, rejecting duplicates by name.
 * Uses putIfAbsent so the existence check and insertion are a single atomic
 * operation on the backing map.
 *
 * @param indexSpec spec to register
 * @throws IllegalArgumentException if a spec with the same name already exists
 */
@Override
public void add(IndexSpec indexSpec) {
    checkIndexSpec(indexSpec);
    var indexName = indexSpec.getName();
    var existingSpec = indexSpecs.putIfAbsent(indexName, indexSpec);
    if (existingSpec != null) {
        throw new IllegalArgumentException(
            "IndexSpec with name " + indexName + " already exists");
    }
}
// Adding the primary-key index spec makes it discoverable by name.
@Test
void add() {
    var specs = new DefaultIndexSpecs();
    specs.add(primaryKeyIndexSpec(FakeExtension.class));
    assertThat(specs.contains(PrimaryKeySpecUtils.PRIMARY_INDEX_NAME)).isTrue();
}
/**
 * Resolves a Java type to a ParamType using the varargs-specific
 * Java-to-argument-type mapping.
 *
 * @param type the reflected Java type of the vararg parameter
 * @return the corresponding ParamType
 */
public static ParamType getVarArgsSchemaFromType(final Type type) {
    return getSchemaFromType(type, VARARGS_JAVA_TO_ARG_TYPE);
}
// A BiFunction<Long, Integer, Boolean> vararg parameter must resolve to a
// LambdaType with matching input and return ParamTypes.
@Test
public void shouldGetBiFunctionVariadic() throws NoSuchMethodException {
    final Type type = getClass().getDeclaredMethod("biFunctionType", BiFunction.class)
        .getGenericParameterTypes()[0];
    final ParamType schema = UdfUtil.getVarArgsSchemaFromType(type);
    assertThat(schema, instanceOf(LambdaType.class));
    assertThat(((LambdaType) schema).inputTypes(), equalTo(ImmutableList.of(ParamTypes.LONG, ParamTypes.INTEGER)));
    assertThat(((LambdaType) schema).returnType(), equalTo(ParamTypes.BOOLEAN));
}
@Override public <KEY> URIMappingResult<KEY> mapUris(List<URIKeyPair<KEY>> requestUriKeyPairs) throws ServiceUnavailableException { if (requestUriKeyPairs == null || requestUriKeyPairs.isEmpty()) { return new URIMappingResult<>(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); } // API assumes that all requests will be made to the same service, just use the first request to get the service name and act as sample uri URI sampleURI = requestUriKeyPairs.get(0).getRequestUri(); String serviceName = LoadBalancerUtil.getServiceNameFromUri(sampleURI); // To achieve scatter-gather, we require the following information PartitionAccessor accessor = _partitionInfoProvider.getPartitionAccessor(serviceName); Map<Integer, Ring<URI>> rings = _hashRingProvider.getRings(sampleURI); HashFunction<Request> hashFunction = _hashRingProvider.getRequestHashFunction(serviceName); Map<Integer, Set<KEY>> unmapped = new HashMap<>(); // Pass One Map<Integer, List<URIKeyPair<KEY>>> requestsByPartition = distributeToPartitions(requestUriKeyPairs, accessor, unmapped); // Pass Two Map<URI, Integer> hostToParitionId = new HashMap<>(); Map<URI, Set<KEY>> hostToKeySet = distributeToHosts(requestsByPartition, rings, hashFunction, hostToParitionId, unmapped); return new URIMappingResult<>(hostToKeySet, unmapped, hostToParitionId); }
// Verifies sticky routing with a single partition: two identical mapUris runs
// produce identical results, a random sample of requests resolves to the same
// host the mapper chose, and all hosts report the same (single) partition.
@Test
public void testMapUrisStickyRoutingOnly() throws ServiceUnavailableException, PartitionAccessException {
    int partitionCount = 1;
    int requestPerPartition = 1000;
    int totalHostCount = 100;
    HashRingProvider ringProvider = createStaticHashRingProvider(totalHostCount, partitionCount, getHashFunction(true));
    PartitionInfoProvider infoProvider = createRangeBasedPartitionInfoProvider(partitionCount);
    URIMapper mapper = new RingBasedUriMapper(ringProvider, infoProvider);
    List<URIKeyPair<Integer>> requests = testUtil.generateRequests(partitionCount, requestPerPartition);
    URIMappingResult<Integer> results1 = mapper.mapUris(requests);
    URIMappingResult<Integer> results2 = mapper.mapUris(requests);
    // Sticky routing between two runs
    Assert.assertEquals(results1.getMappedKeys(), results2.getMappedKeys());
    Assert.assertEquals(results1.getUnmappedKeys(), results2.getUnmappedKeys());
    Map<URI, Set<Integer>> mapping = results1.getMappedKeys();
    // Testing universal stickiness, take out 50 requests randomly and make sure they would be resolved to the same host as does URIMapper
    Collections.shuffle(requests);
    HashFunction<Request> hashFunction = ringProvider.getRequestHashFunction(TEST_SERVICE);
    for (int i = 0; i < 50; i++) {
        URIKeyPair<Integer> request = requests.get(i);
        int partitionId = infoProvider.getPartitionAccessor(TEST_SERVICE).getPartitionId(request.getRequestUri());
        Ring<URI> ring = ringProvider.getRings(request.getRequestUri()).get(partitionId);
        URI uri = ring.get(hashFunction.hash(new URIRequest(request.getRequestUri())));
        Assert.assertTrue(mapping.keySet().contains(uri));
    }
    // Only one partition
    Assert.assertEquals(1, new HashSet<>(results1.getHostPartitionInfo().values()).size());
    Assert.assertEquals(1, new HashSet<>(results2.getHostPartitionInfo().values()).size());
}
/**
 * Inserts a date/time stamp into a filename, before the extension.
 * Behavior: with specifyFormat and a non-empty datetimeFormat, that single
 * format is appended; otherwise a '_'-prefixed date and/or time part is
 * appended when requested (null patterns for date/time are silently skipped).
 * Environment variables in the filename are substituted first.
 *
 * @param filename filename, possibly containing variables; may be empty/null
 * @param addDate whether to append a date part (ignored when specifyFormat applies)
 * @param datePattern SimpleDateFormat pattern for the date part
 * @param addTime whether to append a time part (ignored when specifyFormat applies)
 * @param timePattern SimpleDateFormat pattern for the time part
 * @param specifyFormat whether to use the combined datetimeFormat instead
 * @param datetimeFormat combined SimpleDateFormat pattern
 * @return the stamped filename, or null when the input is empty
 */
protected String addDatetimeToFilename( String filename, boolean addDate, String datePattern, boolean addTime,
    String timePattern, boolean specifyFormat, String datetimeFormat ) {
    if ( Utils.isEmpty( filename ) ) {
        return null;
    }
    // Replace possible environment variables...
    String realfilename = environmentSubstitute( filename );
    String filenameNoExtension = FilenameUtils.removeExtension( realfilename );
    String extension = FilenameUtils.getExtension( realfilename );
    // If an extension exists, add the corresponding dot before
    if ( !StringUtil.isEmpty( extension ) ) {
        extension = '.' + extension;
    }
    // One formatter is reused with different patterns; all parts use the same "now".
    final SimpleDateFormat sdf = new SimpleDateFormat();
    Date now = new Date();
    if ( specifyFormat && !Utils.isEmpty( datetimeFormat ) ) {
        sdf.applyPattern( datetimeFormat );
        String dt = sdf.format( now );
        filenameNoExtension += dt;
    } else {
        if ( addDate && null != datePattern ) {
            sdf.applyPattern( datePattern );
            String d = sdf.format( now );
            filenameNoExtension += '_' + d;
        }
        if ( addTime && null != timePattern ) {
            sdf.applyPattern( timePattern );
            String t = sdf.format( now );
            filenameNoExtension += '_' + t;
        }
    }
    return filenameNoExtension + extension;
}
@Test public void testAddDatetimeToFilename_ZipWithDotsInFolderWithoutDots() { JobEntryBase jobEntryBase = new JobEntryBase(); String fullFilename; String filename = "/folder_without_dots/zip.with.dots.in.folder.without.dots"; String regexFilename = regexDotEscape( filename ); // add nothing fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, false, null, false, null, false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + REGEX_EXTENSION, fullFilename ) ); // add date fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, "yyyyMMdd", false, null, false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + DATE_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, null, false, null, false, null ); assertNotNull( filename ); assertEquals( filename + EXTENSION, fullFilename ); // add time fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, false, null, true, "HHmmssSSS", false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + TIME_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, false, null, true, null, false, null ); assertNotNull( filename ); assertEquals( filename + EXTENSION, fullFilename ); // add date and time fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, "yyyyMMdd", true, "HHmmssSSS", false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + DATE_PATTERN + TIME_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, null, true, "HHmmssSSS", false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + TIME_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( 
filename + EXTENSION, true, "yyyyMMdd", true, null, false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + DATE_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, true, null, true, null, false, null ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + REGEX_EXTENSION, fullFilename ) ); // add datetime fullFilename = jobEntryBase .addDatetimeToFilename( filename + EXTENSION, false, null, false, null, true, "(yyyyMMdd_HHmmssSSS)" ); assertNotNull( fullFilename ); assertTrue( Pattern.matches( regexFilename + DATE_TIME_PATTERN + REGEX_EXTENSION, fullFilename ) ); fullFilename = jobEntryBase.addDatetimeToFilename( filename + EXTENSION, false, null, false, null, true, null ); assertNotNull( fullFilename ); assertEquals( filename + EXTENSION, fullFilename ); }
/**
 * Reads exactly {@code length} bytes from the stream into {@code bytes}
 * starting at {@code offset}.
 *
 * @throws EOFException if the stream ends before {@code length} bytes are read
 * @throws IOException if the underlying read fails
 */
public static void readFully(InputStream stream, byte[] bytes, int offset, int length)
    throws IOException {
    final int bytesRead = readRemaining(stream, bytes, offset, length);
    final int missing = length - bytesRead;
    if (missing > 0) {
        throw new EOFException(
            "Reached the end of stream with " + missing + " bytes left to read");
    }
}
// readFully with a 5-byte buffer must fill it from the stream head and
// advance the stream position by exactly 5.
@Test
public void testReadFully() throws Exception {
    byte[] buffer = new byte[5];
    MockInputStream stream = new MockInputStream();
    IOUtil.readFully(stream, buffer, 0, buffer.length);
    assertThat(buffer)
        .as("Byte array contents should match")
        .isEqualTo(Arrays.copyOfRange(MockInputStream.TEST_ARRAY, 0, 5));
    assertThat(stream.getPos()).as("Stream position should reflect bytes read").isEqualTo(5);
}
/**
 * Updates a job's status and synchronizes the change to the Quartz scheduler
 * (resume for NORMAL, pause for STOP). Runs in a transaction rolled back on
 * any exception.
 *
 * @param id job id
 * @param status target status; must be NORMAL or STOP
 * @throws SchedulerException if the Quartz resume/pause fails
 */
@Override
@Transactional(rollbackFor = Exception.class)
public void updateJobStatus(Long id, Integer status) throws SchedulerException {
    // Validate the target status: only NORMAL and STOP are permitted here
    if (!containsAny(status, JobStatusEnum.NORMAL.getStatus(), JobStatusEnum.STOP.getStatus())) {
        throw exception(JOB_CHANGE_STATUS_INVALID);
    }
    // Validate that the job exists
    JobDO job = validateJobExists(id);
    // Reject no-op updates to the same status
    if (job.getStatus().equals(status)) {
        throw exception(JOB_CHANGE_STATUS_EQUALS);
    }
    // Persist the new job status
    JobDO updateObj = JobDO.builder().id(id).status(status).build();
    jobMapper.updateById(updateObj);
    // Propagate the status change to Quartz
    if (JobStatusEnum.NORMAL.getStatus().equals(status)) {
        // Resume the job
        schedulerManager.resumeJob(job.getHandlerName());
    } else {
        // Pause the job
        schedulerManager.pauseJob(job.getHandlerName());
    }
}
// Passing a status outside {NORMAL, STOP} must raise JOB_CHANGE_STATUS_INVALID.
@Test
public void testUpdateJobStatus_changeStatusInvalid() {
    // Invoke and assert the expected service exception
    assertServiceException(() -> jobService.updateJobStatus(1L, JobStatusEnum.INIT.getStatus()),
        JOB_CHANGE_STATUS_INVALID);
}
/**
 * Strips every leading occurrence of the given character from the source.
 *
 * @param source string to trim; may be null
 * @param c character to remove from the start
 * @return the trimmed string, or null when source is null
 */
public static String trimStart( final String source, char c ) {
    if ( source == null ) {
        return null;
    }
    final int length = source.length();
    int start = 0;
    for ( ; start < length; start++ ) {
        if ( source.charAt( start ) != c ) {
            break;
        }
    }
    return source.substring( start );
}
// Multiple leading separators must all be stripped, leaving the rest intact.
@Test
public void testTrimStart_Many() {
    assertEquals( "file/path/", StringUtil.trimStart( "////file/path/", '/' ) );
}
/**
 * Returns the fixed set of system schema names for this database dialect.
 *
 * @return the shared SYSTEM_SCHEMAS constant collection
 */
@Override
public Collection<String> getSystemSchemas() {
    return SYSTEM_SCHEMAS;
}
// The system schema set must contain exactly the three well-known schemas.
@Test
void assertGetSystemSchemas() {
    assertThat(systemDatabase.getSystemSchemas(), is(new HashSet<>(Arrays.asList("information_schema", "pg_catalog", "shardingsphere"))));
}
/**
 * Returns whether this singleton set contains every element of the given set.
 * True only when the other set is empty, or contains exactly this set's
 * single value.
 *
 * @param set set whose membership is checked against this singleton
 * @return true if all of {@code set}'s elements are in this set
 */
@Override
public boolean containsAll(IntSet set) {
    int size = set.size();
    if (size == 0) {
        return true;
    }
    return size == 1 && set.contains(value);
}
// Two equal singletons contain each other; a RangeSet(4) covering {0..3}
// contains the singleton {3}, but not vice versa.
@Test
public void testContainsAll() throws Exception {
    IntSet sis = new SingletonIntSet(3);
    IntSet sis2 = new SingletonIntSet(3);
    assertTrue(sis.containsAll(sis2));
    assertTrue(sis2.containsAll(sis));
    IntSet sis3 = new RangeSet(4);
    assertTrue(sis3.containsAll(sis));
    assertFalse(sis.containsAll(sis3));
}
/**
 * Publishes the message to this publisher's configured topic destination.
 *
 * @param message message to publish
 * @throws JMSException if the underlying publisher fails
 */
@Override
public void publish(Message message) throws JMSException {
    getTopicPublisher().publish((Topic) getDestination(), message);
}
// Smoke test: a TopicConnection obtained from a PooledConnectionFactory can
// create a session, a publisher, and publish a message to an in-VM broker.
@Test(timeout = 60000)
public void testPooledConnectionFactory() throws Exception {
    ActiveMQTopic topic = new ActiveMQTopic("test");
    pcf = new PooledConnectionFactory();
    pcf.setConnectionFactory(new ActiveMQConnectionFactory(
        "vm://test?broker.persistent=false&broker.useJmx=false"));
    connection = (TopicConnection) pcf.createConnection();
    TopicSession session = connection.createTopicSession(false, Session.AUTO_ACKNOWLEDGE);
    TopicPublisher publisher = session.createPublisher(topic);
    publisher.publish(session.createMessage());
}
/**
 * Seckill scenario five: relies on an atomic database decrement
 * (update ... set num = num - 1) to guard stock.
 *
 * @param dto validated mock seckill request
 * @return generic OK result; the actual outcome is logged asynchronously
 */
@Operation(summary = "秒杀场景五(数据库原子性更新update set num = num -1)")
@PostMapping("/procedure")
public Result doWithProcedure(@RequestBody @Valid SeckillWebMockRequestDTO dto) {
    processSeckill(dto, ATOMIC_UPDATE);
    return Result.ok();
    // Logging happens once the MQ listener finishes processing, not here.
}
// Controller returns OK immediately; the seckill service is not invoked
// synchronously (times(0)) — presumably handled via MQ. TODO confirm intent.
@Test
void doWithProcedure() {
    SeckillWebMockRequestDTO requestDTO = new SeckillWebMockRequestDTO();
    requestDTO.setSeckillId(1L);
    requestDTO.setRequestCount(1);
    SeckillMockRequestDTO any = new SeckillMockRequestDTO();
    any.setSeckillId(1L);
    Result response = seckillMockController.doWithProcedure(requestDTO);
    verify(seckillService, times(0)).execute(any(SeckillMockRequestDTO.class), anyInt());
    assertEquals(0, response.getCode());
}
/**
 * Handles the form POST that registers an application to monitor. Requires
 * HTTP auth; on failure, Java clients (User-Agent starting with "Java") get
 * an HTTP 412 error while browsers get an inline error message page.
 * The request locale is bound for i18n for the duration of the call.
 */
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
    if (!httpAuth.isAllowed(req, resp)) {
        return;
    }
    // POST of the form adding an application to monitor
    I18N.bindLocale(req.getLocale());
    try {
        addCollectorApplication(req, resp);
    } catch (final Exception e) {
        LOGGER.warn(e.toString(), e);
        final String userAgent = req.getHeader("User-Agent");
        if (userAgent != null && userAgent.startsWith("Java")) {
            // Programmatic client: report the failure as a status code
            resp.sendError(HttpServletResponse.SC_PRECONDITION_FAILED, e.toString());
        } else {
            // Browser client: render the error message in the UI
            final CollectorController collectorController = new CollectorController(
                    collectorServer);
            final String application = collectorController.getApplication(req, resp);
            collectorController.writeMessage(req, resp, application, e.toString());
        }
    } finally {
        I18N.unbindLocale();
    }
}
// Exercises doPost with missing/invalid/duplicate/odd URLs and with and
// without an application name, covering both auth outcomes.
@Test
public void testDoPost() throws ServletException, IOException {
    final List<String> nullUrl = Collections.singletonList(null);
    doPost(null, nullUrl, false);
    doPost(null, nullUrl, true);
    doPost(TEST, nullUrl, true);
    doPost(TEST, List.of("http://localhost:8090/test", "http://localhost:8090/test"), true);
    doPost(TEST, List.of("https://localhost:8090/test", "http://localhost:8090/test"), true);
    doPost(TEST, List.of("ftp://localhost:8090/test"), true);
    doPost(TEST, List.of("http://une url,pas une url"), true);
}
/**
 * Static factory for a new, empty {@code Inner} transform instance.
 *
 * @param <T> element type the transform operates on
 * @return a fresh Inner instance
 */
public static <T> Inner<T> create() {
    return new Inner<>();
}
// Renames both a top-level field and a nested row field (plus fields inside
// the nested row) and verifies the output schema and the row values.
@Test
@Category(NeedsRunner.class)
public void renameTopLevelAndNestedFields() {
    Schema nestedSchema = Schema.builder().addStringField("field1").addInt32Field("field2").build();
    Schema schema = Schema.builder().addStringField("field1").addRowField("nested", nestedSchema).build();
    PCollection<Row> renamed =
        pipeline
            .apply(
                Create.of(
                        Row.withSchema(schema)
                            .addValues(
                                "one", Row.withSchema(nestedSchema).addValues("one", 1).build())
                            .build(),
                        Row.withSchema(schema)
                            .addValues(
                                "two", Row.withSchema(nestedSchema).addValues("two", 1).build())
                            .build())
                    .withRowSchema(schema))
            .apply(
                RenameFields.<Row>create()
                    .rename("field1", "top1")
                    .rename("nested", "newnested")
                    .rename("nested.field1", "new1")
                    .rename("nested.field2", "new2"));
    Schema expectedNestedSchema =
        Schema.builder().addStringField("new1").addInt32Field("new2").build();
    Schema expectedSchema =
        Schema.builder()
            .addStringField("top1")
            .addRowField("newnested", expectedNestedSchema)
            .build();
    assertEquals(expectedSchema, renamed.getSchema());
    List<Row> expectedRows =
        ImmutableList.of(
            Row.withSchema(expectedSchema)
                .addValues("one", Row.withSchema(expectedNestedSchema).addValues("one", 1).build())
                .build(),
            Row.withSchema(expectedSchema)
                .addValues("two", Row.withSchema(expectedNestedSchema).addValues("two", 1).build())
                .build());
    PAssert.that(renamed).containsInAnyOrder(expectedRows);
    pipeline.run();
}
/**
 * Applies logging configuration from pipeline options: sets the root logger
 * level, applies per-logger overrides, and optionally redirects
 * System.out/err through the logging handler. Deprecated
 * DataflowWorkerLoggingOptions take precedence over SdkHarnessOptions for
 * backward compatibility, with a warning when they are used.
 *
 * @param options the (deprecated) worker logging options, also viewed as SdkHarnessOptions
 * @throws RuntimeException if called before initialize()
 */
public static synchronized void configure(DataflowWorkerLoggingOptions options) {
    if (!initialized) {
        throw new RuntimeException("configure() called before initialize()");
    }
    // For compatibility reason, we do not call SdkHarnessOptions.getConfiguredLoggerFromOptions
    // to config the logging for legacy worker, instead replicate the config steps used for
    // DataflowWorkerLoggingOptions for default log level and log level overrides.
    SdkHarnessOptions harnessOptions = options.as(SdkHarnessOptions.class);
    boolean usedDeprecated = false;
    // default value for both DefaultSdkHarnessLogLevel and DefaultWorkerLogLevel are INFO
    Level overrideLevel = getJulLevel(harnessOptions.getDefaultSdkHarnessLogLevel());
    if (options.getDefaultWorkerLogLevel() != null && options.getDefaultWorkerLogLevel() != INFO) {
        // Deprecated option explicitly set to a non-default: it wins.
        overrideLevel = getJulLevel(options.getDefaultWorkerLogLevel());
        usedDeprecated = true;
    }
    LogManager.getLogManager().getLogger(ROOT_LOGGER_NAME).setLevel(overrideLevel);
    // Per-logger overrides: deprecated worker overrides take precedence over
    // SDK harness overrides; configured loggers are retained to avoid GC.
    if (options.getWorkerLogLevelOverrides() != null) {
        for (Map.Entry<String, DataflowWorkerLoggingOptions.Level> loggerOverride :
            options.getWorkerLogLevelOverrides().entrySet()) {
            Logger logger = Logger.getLogger(loggerOverride.getKey());
            logger.setLevel(getJulLevel(loggerOverride.getValue()));
            configuredLoggers.add(logger);
        }
        usedDeprecated = true;
    } else if (harnessOptions.getSdkHarnessLogLevelOverrides() != null) {
        for (Map.Entry<String, SdkHarnessOptions.LogLevel> loggerOverride :
            harnessOptions.getSdkHarnessLogLevelOverrides().entrySet()) {
            Logger logger = Logger.getLogger(loggerOverride.getKey());
            logger.setLevel(getJulLevel(loggerOverride.getValue()));
            configuredLoggers.add(logger);
        }
    }
    // If the options specify a level for messages logged to System.out/err, we need to reconfigure
    // the corresponding stream adapter.
    if (options.getWorkerSystemOutMessageLevel() != null) {
        System.out.close();
        System.setOut(
            JulHandlerPrintStreamAdapterFactory.create(
                loggingHandler,
                SYSTEM_OUT_LOG_NAME,
                getJulLevel(options.getWorkerSystemOutMessageLevel()),
                Charset.defaultCharset()));
    }
    if (options.getWorkerSystemErrMessageLevel() != null) {
        System.err.close();
        System.setErr(
            JulHandlerPrintStreamAdapterFactory.create(
                loggingHandler,
                SYSTEM_ERR_LOG_NAME,
                getJulLevel(options.getWorkerSystemErrMessageLevel()),
                Charset.defaultCharset()));
    }
    if (usedDeprecated) {
        LOG.warn(
            "Deprecated DataflowWorkerLoggingOptions are used for log level settings."
                + "Consider using options defined in SdkHarnessOptions for forward compatibility.");
    }
}
// When only SdkHarnessOptions set the default level (WARN), configure() must
// apply it to the root logger without any deprecated-options override.
@Test
public void testWithSdkHarnessConfigurationOverride() {
    SdkHarnessOptions options = PipelineOptionsFactory.as(SdkHarnessOptions.class);
    options.setDefaultSdkHarnessLogLevel(SdkHarnessOptions.LogLevel.WARN);
    DataflowWorkerLoggingInitializer.configure(options.as(DataflowWorkerLoggingOptions.class));
    Logger rootLogger = LogManager.getLogManager().getLogger("");
    assertEquals(1, rootLogger.getHandlers().length);
    assertEquals(Level.WARNING, rootLogger.getLevel());
    assertIsDataflowWorkerLoggingHandler(rootLogger.getHandlers()[0], Level.ALL);
}
/**
 * Builds a human-readable reason why the given HL7 payload is invalid,
 * delegating to the length-aware overload for non-null payloads.
 *
 * @param hl7Bytes raw HL7 payload; may be null
 * @return the failure description
 */
public String generateInvalidPayloadExceptionMessage(final byte[] hl7Bytes) {
    if (hl7Bytes == null) {
        return "HL7 payload is null";
    }
    return generateInvalidPayloadExceptionMessage(hl7Bytes, hl7Bytes.length);
}
// A payload whose first segment is MSA (not MSH) must be reported as invalid.
// Bug fix: the original called payloadStream.write(basePayload.length), which
// writes a single int-truncated byte rather than appending the payload bytes;
// the intent is clearly write(basePayload).
@Test
public void testGenerateInvalidPayloadExceptionMessageWithInvalidStartingSegment() throws Exception {
    byte[] invalidStartingSegment = "MSA|AA|00001|\r".getBytes();
    byte[] basePayload = TEST_MESSAGE.getBytes();
    ByteArrayOutputStream payloadStream = new ByteArrayOutputStream(invalidStartingSegment.length + basePayload.length);
    payloadStream.write(invalidStartingSegment);
    payloadStream.write(basePayload);
    assertEquals("The first segment of the HL7 payload {MSA} is not an MSH segment",
        hl7util.generateInvalidPayloadExceptionMessage(payloadStream.toByteArray()));
}
/**
 * Returns the Oracle-connector-specific embedded Debezium configuration
 * held by this endpoint.
 */
@Override
public OracleConnectorEmbeddedDebeziumConfiguration getConfiguration() {
    return configuration;
}
// Creating the endpoint with both URI query options and a parameter map must
// merge them into the connector configuration, with the map values winning
// for duplicated keys (e.g. offsetStorageFileName, topicPrefix).
@Test
void testIfConnectorEndpointCreatedWithConfig() throws Exception {
    final Map<String, Object> params = new HashMap<>();
    params.put("offsetStorageFileName", "/offset_test_file");
    params.put("databaseHostname", "localhost");
    params.put("databaseUser", "dbz");
    params.put("databasePassword", "pwd");
    params.put("topicPrefix", "test");
    params.put("databaseServerId", 1234);
    params.put("schemaHistoryInternalFileFilename", "/db_history_file_test");
    final String remaining = "test_name";
    final String uri = "debezium?name=test_name&offsetStorageFileName=/test&"
        + "topicPrefix=localhost&databaseServerId=1234&databaseUser=dbz&databasePassword=pwd&"
        + "databaseServerName=test&schemaHistoryInternalFileFilename=/test";
    try (final DebeziumComponent debeziumComponent = new DebeziumOracleComponent(new DefaultCamelContext())) {
        debeziumComponent.start();
        final DebeziumEndpoint debeziumEndpoint = debeziumComponent.createEndpoint(uri, remaining, params);
        assertNotNull(debeziumEndpoint);
        // test for config
        final OracleConnectorEmbeddedDebeziumConfiguration configuration =
            (OracleConnectorEmbeddedDebeziumConfiguration) debeziumEndpoint.getConfiguration();
        assertEquals("test_name", configuration.getName());
        assertEquals("/offset_test_file", configuration.getOffsetStorageFileName());
        assertEquals("localhost", configuration.getDatabaseHostname());
        assertEquals("dbz", configuration.getDatabaseUser());
        assertEquals("pwd", configuration.getDatabasePassword());
        assertEquals("test", configuration.getTopicPrefix());
        assertEquals("/db_history_file_test", configuration.getSchemaHistoryInternalFileFilename());
    }
}
/**
 * Pulls API documentation (Swagger) from the latest-updated instance of each
 * registered cluster. No-op when document loading is disabled or when no
 * service instances are registered. Synchronized to prevent concurrent pulls.
 */
@Override
public synchronized void loadApiDocument() {
    if (!isEnabledLoad()) {
        return;
    }
    List<UpstreamInstance> serviceList = this.getAllClusterLastUpdateInstanceList();
    if (CollectionUtils.isEmpty(serviceList)) {
        LOG.info("load api document No service registered.");
        return;
    }
    final Set<UpstreamInstance> currentServices = new HashSet<>(serviceList);
    LOG.info("load api document, serviceList={}", JsonUtils.toJson(currentServices));
    pullSwaggerDocService.pullApiDocument(currentServices);
}
// End-to-end mock test: with doc loading enabled via dict, one selector with
// one discovery upstream, loadApiDocument must delegate to the swagger pull
// service with the resolved upstream instances.
@Test
public void testLoadApiDocument() {
    // Enable document loading via the dict flag
    ShenyuDictVO shenyuInitData = new ShenyuDictVO();
    shenyuInitData.setDictValue("true");
    when(shenyuDictService.findByDictCodeName(any(), any())).thenReturn(shenyuInitData);
    List<PluginDO> pluginDOList = new ArrayList<>();
    PluginDO pluginDO = new PluginDO();
    pluginDO.setId("1");
    pluginDO.setName("test");
    pluginDOList.add(pluginDO);
    CommonPager<SelectorVO> commonPager = new CommonPager<>();
    List<SelectorVO> list = new ArrayList<>();
    String dateString = "2023-05-06 03:48:48";
    SimpleDateFormat inputFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    SimpleDateFormat outputFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
    Date date = null;
    try {
        date = inputFormat.parse(dateString);
    } catch (ParseException e) {
        throw new RuntimeException(e);
    }
    String formattedDateString = outputFormat.format(date);
    SelectorVO selectorVO = new SelectorVO(
        "1", "1", "test", 1, "testMatchMode", 1, "testType", 1, true, true, true, true,
        "[{\"weight\":1}]", new ArrayList<>(), formattedDateString, formattedDateString, SYS_DEFAULT_NAMESPACE_ID
    );
    list.add(selectorVO);
    commonPager.setDataList(list);
    commonPager.setPage(new PageParameter(1, 1));
    List<CommonUpstream> upstreamList = new ArrayList<>();
    upstreamList.add(new CommonUpstream("testProtocol", "testUpstreamHost", "testUrl", true, 1000L));
    DiscoveryUpstreamData discoveryUpstreamData = new DiscoveryUpstreamData();
    discoveryUpstreamData.setUrl("127.0.0.1:8080");
    discoveryUpstreamData.setProps("{}");
    discoveryUpstreamData.setDiscoveryHandlerId("1");
    discoveryUpstreamData.setStatus(0);
    when(selectorService.listByPage(any())).thenReturn(commonPager);
    when(pluginMapper.selectByNames(any())).thenReturn(pluginDOList);
    when(discoveryUpstreamService.findBySelectorId(any())).thenReturn(Collections.singletonList(discoveryUpstreamData));
    loadServiceDocEntry.loadApiDocument();
    // The swagger pull service must receive the resolved instances
    verify(pullSwaggerDocService).pullApiDocument((Set<UpstreamInstance>) any());
}
/**
 * Aggregates the cogrouped session windows with default (unspecified)
 * key/value serdes by delegating to the Materialized overload.
 *
 * @param initializer provides the initial aggregate value per session
 * @param sessionMerger merges aggregates when sessions are merged
 * @return table of windowed aggregation results
 */
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer,
    final Merger<? super K, V> sessionMerger) {
    return aggregate(initializer, sessionMerger, Materialized.with(null, null));
}
// Passing a null Named argument to aggregate must fail fast with NPE.
@Test
public void shouldNotHaveNullMaterializedOnAggregate() {
    assertThrows(NullPointerException.class, () -> windowedCogroupedStream.aggregate(MockInitializer.STRING_INIT,
        sessionMerger, (Named) null));
}
/**
 * Converts raw PNG bytes into a PDF image XObject, or returns null when the
 * PNG uses features the converter cannot handle (callers fall back to a
 * generic decoder in that case).
 *
 * @param doc target PDF document
 * @param imageData complete PNG file bytes
 * @return the converted image, or null if this PNG cannot be converted
 * @throws IOException on malformed input or write failure
 */
static PDImageXObject convertPNGImage(PDDocument doc, byte[] imageData) throws IOException {
    PNGConverterState state = parsePNGChunks(imageData);
    if (!checkConverterState(state)) {
        // There is something wrong, we can't convert this PNG
        return null;
    }
    return convertPng(doc, state);
}
// An indexed PNG with a rendering-intent chunk must convert to a PDIndexed
// colorspace whose base is the sRGB ICC profile, with a Perceptual intent.
@Test
void testImageConversionIntentIndexed() throws IOException {
    checkImageConvert("929316.png");
    try (PDDocument doc = new PDDocument()) {
        InputStream in = PNGConverterTest.class.getResourceAsStream("929316.png");
        byte[] imageBytes = in.readAllBytes();
        PDImageXObject pdImageXObject = PNGConverter.convertPNGImage(doc, imageBytes);
        assertEquals(COSName.PERCEPTUAL, pdImageXObject.getCOSObject().getItem(COSName.INTENT));
        // Check that this image gets an indexed colorspace with sRGB ICC based colorspace
        PDIndexed indexedColorspace = (PDIndexed) pdImageXObject.getColorSpace();
        PDICCBased iccColorspace = (PDICCBased) indexedColorspace.getBaseColorSpace();
        // validity of ICC CS is tested in checkImageConvert
        // should be an sRGB profile. Or at least, the data that is in ColorSpace.CS_sRGB and
        // that was assigned in PNGConvert.
        // (PDICCBased.is_sRGB() fails in openjdk on that data, maybe it is not a "real" sRGB)
        ICC_Profile rgbProfile = ICC_Profile.getInstance(ColorSpace.CS_sRGB);
        byte[] sRGB_bytes = rgbProfile.getData();
        assertArrayEquals(sRGB_bytes, iccColorspace.getPDStream().toByteArray());
    }
}
// Accessor for the EC signing key held by this key store.
public ECKey signingKey() {
    return signingKey;
}
// The signing key must be flagged for signature use and carry a private part.
@Test
void signingKey() throws JOSEException {
    var sut = new KeyStore();
    var key = sut.signingKey();
    assertEquals(KeyUse.SIGNATURE, key.getKeyUse());
    assertNotNull(key.toECPrivateKey());
}
/**
 * Deprecated scatter entry point kept for compatibility; forwards directly to
 * the default scatter implementation.
 */
@Deprecated
@Override
public <K, T> List<RequestInfo> scatterRequest(Request<T> request, RequestContext requestContext, Map<URI, Set<K>> mappedKeys) {
    return defaultScatterRequestImpl(request, requestContext, mappedKeys);
}
// Scattering an unsupported method (BATCH_CREATE) must be rejected.
@Test(expectedExceptions = {IllegalArgumentException.class})
public void testUnsupportedRequestScatter() {
    Request<?> request = mock(Request.class);
    when(request.getMethod()).thenReturn(ResourceMethod.BATCH_CREATE);
    _sgStrategy.scatterRequest(request, new RequestContext(),
            new URIMappingResult<Long>(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()));
}
/**
 * Creates a retrying proxy for a single fixed implementation.
 * Wraps the implementation in a non-failover provider so both factory
 * overloads share the provider-based code path.
 */
public static <T> Object create(Class<T> iface, T implementation, RetryPolicy retryPolicy) {
    DefaultFailoverProxyProvider<T> provider = new DefaultFailoverProxyProvider<T>(iface, implementation);
    return RetryProxy.create(iface, provider, retryPolicy);
}
// A SaslException must not be retried: the policy is consulted once and
// the resulting decision is FAIL.
@Test
public void testNoRetryOnSaslError() throws Exception {
    RetryPolicy policy = mock(RetryPolicy.class);
    RetryPolicy realPolicy = RetryPolicies.failoverOnNetworkException(5);
    setupMockPolicy(policy, realPolicy);
    UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create(
            UnreliableInterface.class, unreliableImpl, policy);
    try {
        unreliable.failsWithSASLExceptionTenTimes();
        fail("Should fail");
    } catch (SaslException e) {
        // expected
        verify(policy, times(1)).shouldRetry(any(Exception.class), anyInt(), anyInt(), anyBoolean());
        assertEquals(RetryDecision.FAIL, caughtRetryAction.action);
    }
}
/**
 * Default write transform: each String element is serialized as its UTF-8
 * bytes followed by a single newline character.
 */
public static Write<String> write() {
    return new AutoValue_MongoDbGridFSIO_Write.Builder<String>()
            .setConnectionConfiguration(ConnectionConfiguration.create())
            .setWriteFn(
                    (output, outStream) -> {
                        outStream.write(output.getBytes(StandardCharsets.UTF_8));
                        outStream.write('\n');
                    })
            .build();
}
// Round-trips two GridFS writes through a Beam pipeline: the default String
// writer (newline-delimited UTF-8) and a custom one-byte-per-element writer,
// then reads both files back directly via the Mongo driver and verifies content.
@Test
public void testWriteMessage() throws Exception {
    ArrayList<String> data = new ArrayList<>(100);
    ArrayList<Integer> intData = new ArrayList<>(100);
    for (int i = 0; i < 1000; i++) {
        data.add("Message " + i);
    }
    for (int i = 0; i < 100; i++) {
        intData.add(i);
    }
    // Default writer with an explicit 100-byte chunk size.
    pipeline
        .apply("String", Create.of(data))
        .apply(
            "StringInternal",
            MongoDbGridFSIO.write()
                .withUri("mongodb://localhost:" + port)
                .withDatabase(DATABASE)
                .withChunkSize(100L)
                .withBucket("WriteTest")
                .withFilename("WriteTestData"));
    // Custom writer emitting exactly one byte per Integer element.
    pipeline
        .apply("WithWriteFn", Create.of(intData))
        .apply(
            "WithWriteFnInternal",
            MongoDbGridFSIO.<Integer>write(
                    (output, outStream) -> {
                        // one byte per output
                        outStream.write(output.byteValue());
                    })
                .withUri("mongodb://localhost:" + port)
                .withDatabase(DATABASE)
                .withBucket("WriteTest")
                .withFilename("WriteTestIntData"));
    pipeline.run();
    MongoClient client = null;
    try {
        StringBuilder results = new StringBuilder();
        client = new MongoClient("localhost", port);
        DB database = client.getDB(DATABASE);
        GridFS gridfs = new GridFS(database, "WriteTest");
        List<GridFSDBFile> files = gridfs.find("WriteTestData");
        assertTrue(files.size() > 0);
        for (GridFSDBFile file : files) {
            // Chunk size configured above must survive the write.
            assertEquals(100, file.getChunkSize());
            int l = (int) file.getLength();
            try (InputStream ins = file.getInputStream()) {
                DataInputStream dis = new DataInputStream(ins);
                byte[] b = new byte[l];
                dis.readFully(b);
                results.append(new String(b, StandardCharsets.UTF_8));
            }
        }
        String dataString = results.toString();
        // Every message must appear somewhere across the (possibly sharded) files.
        for (int x = 0; x < 1000; x++) {
            assertTrue(dataString.contains("Message " + x));
        }
        files = gridfs.find("WriteTestIntData");
        boolean[] intResults = new boolean[100];
        for (GridFSDBFile file : files) {
            int l = (int) file.getLength();
            try (InputStream ins = file.getInputStream()) {
                DataInputStream dis = new DataInputStream(ins);
                byte[] b = new byte[l];
                dis.readFully(b);
                // Each byte is one of the original 0..99 values.
                for (byte aB : b) {
                    intResults[aB] = true;
                }
            }
        }
        for (int x = 0; x < 100; x++) {
            assertTrue("Did not get a result for " + x, intResults[x]);
        }
    } finally {
        if (client != null) {
            client.close();
        }
    }
}
/**
 * Tells whether upsert generation is implemented for the given dialect.
 * Only these five dialects are currently supported.
 */
static boolean isDialectSupported(SqlDialect dialect) {
    if (dialect instanceof H2SqlDialect) {
        return true;
    }
    if (dialect instanceof MssqlSqlDialect) {
        return true;
    }
    if (dialect instanceof MysqlSqlDialect) {
        return true;
    }
    if (dialect instanceof OracleSqlDialect) {
        return true;
    }
    return dialect instanceof PostgresqlSqlDialect;
}
// Sybase is not among the supported upsert dialects.
@Test
public void testUpsertDialectNotSupported() {
    boolean result = SupportedDatabases.isDialectSupported(sybaseSqlDialect);
    assertFalse(result);
}
/**
 * Computes the quality-profile measure for every component by crawling the
 * component tree bottom-up with a path-aware aggregation visitor.
 */
@Override
public void execute(ComputationStep.Context context) {
    Metric metric = metricRepository.getByKey(CoreMetrics.QUALITY_PROFILES_KEY);
    QProfileAggregationComponentVisitor visitor = new QProfileAggregationComponentVisitor(metric);
    new PathAwareCrawler<>(visitor).visit(treeRootHolder.getRoot());
}
// The project root must receive a QUALITY_PROFILES measure serializing the
// per-language profiles registered in the analysis metadata.
@Test
public void add_quality_profile_measure_on_project() {
    treeRootHolder.setRoot(MULTI_MODULE_PROJECT);
    QualityProfile qpJava = createQProfile(QP_NAME_1, LANGUAGE_KEY_1);
    QualityProfile qpPhp = createQProfile(QP_NAME_2, LANGUAGE_KEY_2);
    analysisMetadataHolder.setQProfilesByLanguage(ImmutableMap.of(LANGUAGE_KEY_1, qpJava, LANGUAGE_KEY_2, qpPhp));
    underTest.execute(new TestComputationStepContext());
    assertThat(measureRepository.getAddedRawMeasures(PROJECT_REF).get(QUALITY_PROFILES_KEY))
            .extracting("data").isEqualTo(toJson(qpJava, qpPhp));
}
/**
 * Checks whether the input is a link-local IPv6 address carrying a zone index,
 * e.g. {@code fe80::1%lo0}.
 *
 * @param input candidate address string
 * @return true when the address starts with the link-local prefix (FE80 constant),
 *         contains a '%' zone separator in a valid position, and the part before
 *         the separator is a valid standard or compressed IPv6 address
 */
public static boolean isLinkLocalIPv6WithZoneIndex(String input) {
    // Must begin with the link-local prefix (case-insensitive).
    if (input.length() <= FIVE || !input.substring(ZERO, FIVE).equalsIgnoreCase(FE80)) {
        return false;
    }
    // The zone separator must appear after the prefix and must not be the last character.
    final int zoneSeparator = input.lastIndexOf(PERCENT);
    if (zoneSeparator <= ZERO || zoneSeparator >= input.length() - 1) {
        return false;
    }
    final String addressPart = input.substring(ZERO, zoneSeparator);
    return isIPv6StdAddress(addressPart) || isIPv6HexCompressedAddress(addressPart);
}
// A zone-indexed link-local address passes; a global address without a zone fails.
@Test
void isLinkLocalIPv6WithZoneIndex() {
    assertTrue(InetAddressValidator.isLinkLocalIPv6WithZoneIndex("fe80::1%lo0"));
    assertFalse(InetAddressValidator.isLinkLocalIPv6WithZoneIndex("2000:0000:0000:0000:0001:2345:6789:abcd"));
}
/**
 * Sets the private-key file used for public-key authentication.
 * Changing the key invalidates any previously passed validation.
 */
public void setIdentity(final Local file) {
    // Reset the validation flag first; it no longer applies to the new key.
    this.passed = false;
    this.identity = file;
}
// Public-key authentication is only reported when the identity file exists on disk.
@Test
public void testSetIdentity() throws Exception {
    Credentials c = new Credentials();
    c.setIdentity(new Local("~/.ssh/unknown.rsa"));
    assertFalse(c.isPublicKeyAuthentication());
    final Local t = new Local(PreferencesFactory.get().getProperty("tmp.dir"), "id_rsa");
    LocalTouchFactory.get().touch(t);
    c.setIdentity(t);
    assertTrue(c.isPublicKeyAuthentication());
    t.delete();
}
public static String encodeParams(String urlWithParams, Charset charset) { if (StrUtil.isBlank(urlWithParams)) { return StrUtil.EMPTY; } String urlPart = null; // url部分,不包括问号 String paramPart; // 参数部分 final int pathEndPos = urlWithParams.indexOf('?'); if (pathEndPos > -1) { // url + 参数 urlPart = StrUtil.subPre(urlWithParams, pathEndPos); paramPart = StrUtil.subSuf(urlWithParams, pathEndPos + 1); if (StrUtil.isBlank(paramPart)) { // 无参数,返回url return urlPart; } } else if (false == StrUtil.contains(urlWithParams, '=')) { // 无参数的URL return urlWithParams; } else { // 无URL的参数 paramPart = urlWithParams; } paramPart = normalizeParams(paramPart, charset); return StrUtil.isBlank(urlPart) ? paramPart : urlPart + "?" + paramPart; }
// Exercises encodeParams over its parameter-normalization edge cases.
@Test
public void encodeParamTest() {
    // A lone '?' is removed; a trailing '&' is removed.
    String paramsStr = "?a=b&c=d&";
    String encode = HttpUtil.encodeParams(paramsStr, CharsetUtil.CHARSET_UTF_8);
    assertEquals("a=b&c=d", encode);
    // The URL part is not re-encoded.
    paramsStr = "http://www.abc.dd?a=b&c=d&";
    encode = HttpUtil.encodeParams(paramsStr, CharsetUtil.CHARSET_UTF_8);
    assertEquals("http://www.abc.dd?a=b&c=d", encode);
    // The second '=' in "b=b" is treated as part of the value and is not encoded.
    paramsStr = "a=b=b&c=d&";
    encode = HttpUtil.encodeParams(paramsStr, CharsetUtil.CHARSET_UTF_8);
    assertEquals("a=b=b&c=d", encode);
    // "=d" is treated as an entry with an empty key.
    paramsStr = "a=bbb&c=d&=d";
    encode = HttpUtil.encodeParams(paramsStr, CharsetUtil.CHARSET_UTF_8);
    assertEquals("a=bbb&c=d&=d", encode);
    // "d=" is treated as an entry with an empty value.
    paramsStr = "a=bbb&c=d&d=";
    encode = HttpUtil.encodeParams(paramsStr, CharsetUtil.CHARSET_UTF_8);
    assertEquals("a=bbb&c=d&d=", encode);
    // Consecutive '&' collapse to one (empty conditions are dropped).
    paramsStr = "a=bbb&c=d&&&d=";
    encode = HttpUtil.encodeParams(paramsStr, CharsetUtil.CHARSET_UTF_8);
    assertEquals("a=bbb&c=d&d=", encode);
    // "&d&" is a key-only entry with no value.
    paramsStr = "a=bbb&c=d&d&";
    encode = HttpUtil.encodeParams(paramsStr, CharsetUtil.CHARSET_UTF_8);
    assertEquals("a=bbb&c=d&d=", encode);
    // Non-ASCII keys and values are percent-encoded.
    paramsStr = "a=bbb&c=你好&哈喽&";
    encode = HttpUtil.encodeParams(paramsStr, CharsetUtil.CHARSET_UTF_8);
    assertEquals("a=bbb&c=%E4%BD%A0%E5%A5%BD&%E5%93%88%E5%96%BD=", encode);
    // A bare URL is returned unchanged.
    paramsStr = "https://www.hutool.cn/";
    encode = HttpUtil.encodeParams(paramsStr, CharsetUtil.CHARSET_UTF_8);
    assertEquals(paramsStr, encode);
    // A bare URL with a trailing '?' has the '?' dropped.
    paramsStr = "https://www.hutool.cn/?";
    encode = HttpUtil.encodeParams(paramsStr, CharsetUtil.CHARSET_UTF_8);
    assertEquals("https://www.hutool.cn/", encode);
}
/**
 * Removes every stored pair equal to (item1, item2).
 *
 * @return true if at least one matching pair was removed
 */
public synchronized boolean remove(long item1, long item2) {
    boolean removed = false;
    // Pairs are stored flat: pair i occupies data[2i] (first) and data[2i+1] (second).
    int index = 0;
    for (int i = 0; i < this.size; i++) {
        if (data[index] == item1 && data[index + 1] == item2) {
            removeAtWithoutLock(index);
            removed = true;
            // NOTE(review): if removeAtWithoutLock compacts the array by moving a
            // later pair into this slot, advancing index afterwards would skip
            // that moved pair — confirm removeAtWithoutLock's semantics.
        }
        index = index + 2;
    }
    return removed;
}
// remove() must only match on both halves of the pair and report emptiness correctly.
@Test
public void testRemove() {
    GrowablePriorityLongPairQueue queue = new GrowablePriorityLongPairQueue();
    assertTrue(queue.isEmpty());
    queue.add(1, 1);
    assertFalse(queue.isEmpty());
    // Partial match (second item differs) must not remove anything.
    assertFalse(queue.remove(1, 0));
    assertFalse(queue.isEmpty());
    assertTrue(queue.remove(1, 1));
    assertTrue(queue.isEmpty());
}
/**
 * Builds a reducing aggregate operation over a mutable reference accumulator.
 *
 * @param emptyAccValue     identity value the accumulator starts from
 * @param toAccValueFn      maps an input item to an accumulator value
 * @param combineAccValuesFn associative combiner for two accumulator values
 * @param deductAccValueFn  optional inverse of the combiner; enables sliding-window deduction
 */
@Nonnull
public static <T, A> AggregateOperation1<T, MutableReference<A>, A> reducing(
        @Nonnull A emptyAccValue,
        @Nonnull FunctionEx<? super T, ? extends A> toAccValueFn,
        @Nonnull BinaryOperatorEx<A> combineAccValuesFn,
        @Nullable BinaryOperatorEx<A> deductAccValueFn
) {
    // All pieces must be serializable so the operation can be distributed.
    checkSerializable(emptyAccValue, "emptyAccValue");
    checkSerializable(toAccValueFn, "toAccValueFn");
    checkSerializable(combineAccValuesFn, "combineAccValuesFn");
    checkSerializable(deductAccValueFn, "deductAccValueFn");
    // Effectively-final copy so the lambda below can capture it.
    @SuppressWarnings("UnnecessaryLocalVariable")
    BinaryOperatorEx<A> deductFn = deductAccValueFn;
    return AggregateOperation
            .withCreate(() -> new MutableReference<>(emptyAccValue))
            .andAccumulate((MutableReference<A> a, T t) -> a.set(combineAccValuesFn.apply(a.get(), toAccValueFn.apply(t))))
            .andCombine((a, b) -> a.set(combineAccValuesFn.apply(a.get(), b.get())))
            // Deduction is optional; a null deduct function disables it.
            .andDeduct(deductFn != null ? (a, b) -> a.set(deductFn.apply(a.get(), b.get())) : null)
            .andExportFinish(MutableReference::get);
}
// Validates the reducing operation's accumulate/combine/deduct/finish contract.
@Test
public void when_reducing() {
    validateOp(reducing(0, Integer::intValue, Integer::sum, (x, y) -> x - y),
            MutableReference::get,
            1, 2, 1, 3, 3);
}
/**
 * Serializes the SCRAM-SHA256 authentication request. Field order is part of
 * the wire protocol and must not change.
 */
@Override
protected void write(final PostgreSQLPacketPayload payload) {
    payload.writeInt4(AUTH_REQ_SHA256);
    payload.writeInt4(PASSWORD_STORED_METHOD_SHA256);
    payload.writeBytes(authHexData.getSalt().getBytes());
    payload.writeBytes(authHexData.getNonce().getBytes());
    // Clients older than protocol 3.50 additionally expect the server signature.
    if (version < OpenGaussProtocolVersion.PROTOCOL_350.getVersion()) {
        payload.writeBytes(serverSignature.getBytes());
    }
    // Protocol 3.51 (exactly) appends the server-side iteration count.
    if (OpenGaussProtocolVersion.PROTOCOL_351.getVersion() == version) {
        payload.writeInt4(serverIteration);
    }
}
// For a pre-3.50 protocol version the packet must include the server signature,
// i.e. three writeBytes calls in total (salt, nonce, signature).
@Test
void assertWriteProtocol300Packet() {
    PostgreSQLPacketPayload payload = mock(PostgreSQLPacketPayload.class);
    OpenGaussAuthenticationSCRAMSha256Packet packet =
            new OpenGaussAuthenticationSCRAMSha256Packet(OpenGaussProtocolVersion.PROTOCOL_350.getVersion() - 1, 2048, authHexData, "");
    packet.write(payload);
    verify(payload).writeInt4(10);
    verify(payload).writeInt4(2);
    verify(payload).writeBytes(authHexData.getSalt().getBytes());
    verify(payload).writeBytes(authHexData.getNonce().getBytes());
    verify(payload, times(3)).writeBytes(any());
}
/**
 * Parses a Lucene query string, collecting both the raw tokens seen by the
 * parser and the terms extracted from the resulting query tree.
 *
 * @throws ParseException if the query is not valid Lucene syntax
 */
public ParsedQuery parse(final String query) throws ParseException {
    final TokenCollectingQueryParser parser = new TokenCollectingQueryParser(ParsedTerm.DEFAULT_FIELD, ANALYZER);
    parser.setSplitOnWhitespace(true);
    parser.setAllowLeadingWildcard(allowLeadingWildcard);
    final Query parsed = parser.parse(query);
    final ParsedQuery.Builder builder = ParsedQuery.builder().query(query);
    // Tokens come straight from the parser; terms require a tree walk below.
    builder.tokensBuilder().addAll(parser.getTokens());
    final TermCollectingQueryVisitor visitor = new TermCollectingQueryVisitor(ANALYZER, parser.getTokenLookup());
    parsed.visit(visitor);
    builder.termsBuilder().addAll(visitor.getParsedTerms());
    return builder.build();
}
// Field names referenced under a NOT / _exists_ clause must still be collected.
@Test
void getFieldNamesNot() throws ParseException {
    final ParsedQuery parsedQuery = parser.parse("NOT _exists_ : type");
    assertThat(parsedQuery.allFieldNames()).contains("type");
}
// Thin, test-visible wrapper around the static authenticator call so tests can
// stub the token exchange.
@VisibleForTesting
AzureADToken getTokenUsingJWTAssertion(String clientAssertion) throws IOException {
    return AzureADAuthenticator
            .getTokenUsingJWTAssertion(authEndpoint, clientId, clientAssertion);
}
// An empty workload-identity token file must surface as an IOException
// mentioning "Empty token file" before any token exchange is attempted.
@Test
public void testTokenFetchWithEmptyTokenFile() throws Exception {
    File tokenFile = File.createTempFile("azure-identity-token", "txt");
    AzureADToken azureAdToken = new AzureADToken();
    WorkloadIdentityTokenProvider tokenProvider = Mockito.spy(
            new WorkloadIdentityTokenProvider(AUTHORITY, TENANT_ID, CLIENT_ID, tokenFile.getPath()));
    Mockito.doReturn(azureAdToken)
            .when(tokenProvider).getTokenUsingJWTAssertion(TOKEN);
    IOException ex = intercept(IOException.class, () -> {
        tokenProvider.getToken();
    });
    Assertions.assertThat(ex.getMessage())
            .describedAs("Exception should be thrown when the token file is empty")
            .contains("Empty token file");
}
/**
 * Executes a CREATE TABLE statement: resolves the target name, builds column
 * metadata from the column/LIKE/constraint elements, enforces access control,
 * validates constraint uniqueness, and finally asks the metadata layer to
 * create the table.
 *
 * @throws SemanticException on semantic errors (duplicate columns, unknown types, …)
 * @throws PrestoException   on catalog lookup failures, constraint-name clashes,
 *                           or connector-side create failures
 */
@VisibleForTesting
public ListenableFuture<?> internalExecute(CreateTable statement, Metadata metadata, AccessControl accessControl, Session session, List<Expression> parameters, WarningCollector warningCollector) {
    checkArgument(!statement.getElements().isEmpty(), "no columns for table");
    Map<NodeRef<Parameter>, Expression> parameterLookup = parameterExtractor(statement, parameters);
    QualifiedObjectName tableName = createQualifiedObjectName(session, statement, statement.getName());
    Optional<TableHandle> tableHandle = metadata.getMetadataResolver(session).getTableHandle(tableName);
    if (tableHandle.isPresent()) {
        // Table already exists: IF NOT EXISTS makes this a no-op, otherwise fail.
        if (!statement.isNotExists()) {
            throw new SemanticException(TABLE_ALREADY_EXISTS, statement, "Table '%s' already exists", tableName);
        }
        return immediateFuture(null);
    }
    ConnectorId connectorId = metadata.getCatalogHandle(session, tableName.getCatalogName())
            .orElseThrow(() -> new PrestoException(NOT_FOUND, "Catalog does not exist: " + tableName.getCatalogName()));
    // LinkedHashMap preserves declaration order of columns.
    LinkedHashMap<String, ColumnMetadata> columns = new LinkedHashMap<>();
    Map<String, Object> inheritedProperties = ImmutableMap.of();
    boolean includingProperties = false;
    List<TableConstraint<String>> constraints = new ArrayList<>();
    for (TableElement element : statement.getElements()) {
        if (element instanceof ColumnDefinition) {
            // Plain column definition: resolve the type and validate nullability support.
            ColumnDefinition column = (ColumnDefinition) element;
            String name = column.getName().getValue().toLowerCase(Locale.ENGLISH);
            Type type;
            try {
                type = metadata.getType(parseTypeSignature(column.getType()));
            }
            catch (IllegalArgumentException e) {
                throw new SemanticException(TYPE_MISMATCH, element, "Unknown type '%s' for column '%s'", column.getType(), column.getName());
            }
            if (type.equals(UNKNOWN)) {
                throw new SemanticException(TYPE_MISMATCH, element, "Unknown type '%s' for column '%s'", column.getType(), column.getName());
            }
            if (columns.containsKey(name)) {
                throw new SemanticException(DUPLICATE_COLUMN_NAME, column, "Column name '%s' specified more than once", column.getName());
            }
            if (!column.isNullable() && !metadata.getConnectorCapabilities(session, connectorId).contains(NOT_NULL_COLUMN_CONSTRAINT)) {
                throw new SemanticException(NOT_SUPPORTED, column, "Catalog '%s' does not support non-null column for column name '%s'", connectorId.getCatalogName(), column.getName());
            }
            Map<String, Expression> sqlProperties = mapFromProperties(column.getProperties());
            Map<String, Object> columnProperties = metadata.getColumnPropertyManager().getProperties(
                    connectorId,
                    tableName.getCatalogName(),
                    sqlProperties,
                    session,
                    metadata,
                    parameterLookup);
            columns.put(name, new ColumnMetadata(
                    name,
                    type,
                    column.isNullable(),
                    column.getComment().orElse(null),
                    null,
                    false,
                    columnProperties));
        }
        else if (element instanceof LikeClause) {
            // LIKE clause: copy visible columns (and optionally properties) from an existing table.
            LikeClause likeClause = (LikeClause) element;
            QualifiedObjectName likeTableName = createQualifiedObjectName(session, statement, likeClause.getTableName());
            if (!metadata.getCatalogHandle(session, likeTableName.getCatalogName()).isPresent()) {
                throw new SemanticException(MISSING_CATALOG, statement, "LIKE table catalog '%s' does not exist", likeTableName.getCatalogName());
            }
            if (!tableName.getCatalogName().equals(likeTableName.getCatalogName())) {
                throw new SemanticException(NOT_SUPPORTED, statement, "LIKE table across catalogs is not supported");
            }
            TableHandle likeTable = metadata.getMetadataResolver(session).getTableHandle(likeTableName)
                    .orElseThrow(() -> new SemanticException(MISSING_TABLE, statement, "LIKE table '%s' does not exist", likeTableName));
            TableMetadata likeTableMetadata = metadata.getTableMetadata(session, likeTable);
            Optional<LikeClause.PropertiesOption> propertiesOption = likeClause.getPropertiesOption();
            if (propertiesOption.isPresent() && propertiesOption.get().equals(LikeClause.PropertiesOption.INCLUDING)) {
                // At most one LIKE clause may contribute inherited table properties.
                if (includingProperties) {
                    throw new SemanticException(NOT_SUPPORTED, statement, "Only one LIKE clause can specify INCLUDING PROPERTIES");
                }
                includingProperties = true;
                inheritedProperties = likeTableMetadata.getMetadata().getProperties();
            }
            likeTableMetadata.getColumns().stream()
                    .filter(column -> !column.isHidden())
                    .forEach(column -> {
                        if (columns.containsKey(column.getName().toLowerCase(Locale.ENGLISH))) {
                            throw new SemanticException(DUPLICATE_COLUMN_NAME, element, "Column name '%s' specified more than once", column.getName());
                        }
                        columns.put(column.getName().toLowerCase(Locale.ENGLISH), column);
                    });
        }
        else if (element instanceof ConstraintSpecification) {
            // Table constraint: requires the add-constraints privilege.
            accessControl.checkCanAddConstraints(session.getRequiredTransactionId(), session.getIdentity(), session.getAccessControlContext(), tableName);
            constraints.add(convertToTableConstraint(metadata, session, connectorId, (ConstraintSpecification) element, warningCollector));
        }
        else {
            throw new PrestoException(GENERIC_INTERNAL_ERROR, "Invalid TableElement: " + element.getClass().getName());
        }
    }
    accessControl.checkCanCreateTable(session.getRequiredTransactionId(), session.getIdentity(), session.getAccessControlContext(), tableName);
    // Named constraints must be unique.
    constraints.stream()
            .filter(c -> c.getName().isPresent())
            .collect(Collectors.groupingBy(c -> c.getName().get(), Collectors.counting()))
            .forEach((constraintName, count) -> {
                if (count > 1) {
                    throw new PrestoException(SYNTAX_ERROR, format("Constraint name '%s' specified more than once", constraintName));
                }
            });
    // At most one primary key constraint is allowed.
    if (constraints.stream()
            .filter(PrimaryKeyConstraint.class::isInstance)
            .collect(Collectors.groupingBy(c -> c.getName().orElse(""), Collectors.counting()))
            .size() > 1) {
        throw new PrestoException(SYNTAX_ERROR, "Multiple primary key constraints are not allowed");
    }
    Map<String, Expression> sqlProperties = mapFromProperties(statement.getProperties());
    Map<String, Object> properties = metadata.getTablePropertyManager().getProperties(
            connectorId,
            tableName.getCatalogName(),
            sqlProperties,
            session,
            metadata,
            parameterLookup);
    // Explicit properties win over those inherited via LIKE ... INCLUDING PROPERTIES.
    Map<String, Object> finalProperties = combineProperties(sqlProperties.keySet(), properties, inheritedProperties);
    ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(toSchemaTableName(tableName), ImmutableList.copyOf(columns.values()), finalProperties, statement.getComment(), constraints, Collections.emptyMap());
    try {
        metadata.createTable(session, tableName.getCatalogName(), tableMetadata, statement.isNotExists());
    }
    catch (PrestoException e) {
        // connectors are not required to handle the ignoreExisting flag
        if (!e.getErrorCode().equals(ALREADY_EXISTS.toErrorCode()) || !statement.isNotExists()) {
            throw e;
        }
    }
    return immediateFuture(null);
}
// Creating a table with a PRIMARY KEY constraint on a connector that lacks
// constraint support must fail with a SemanticException.
@Test(expectedExceptions = SemanticException.class, expectedExceptionsMessageRegExp = ".*does not support Primary Key constraints")
public void testCreateWithPrimaryKeyConstraintWithUnsupportedConnector() {
    List<TableElement> inputColumns = ImmutableList.of(
            new ColumnDefinition(identifier("a"), "DATE", true, emptyList(), Optional.empty()),
            new ColumnDefinition(identifier("b"), "VARCHAR", true, emptyList(), Optional.empty()),
            new ColumnDefinition(identifier("c"), "VARBINARY", true, emptyList(), Optional.empty()),
            new ConstraintSpecification(Optional.of("pk"), ImmutableList.of("a"), PRIMARY_KEY, true, true, false));
    CreateTable statement = new CreateTable(QualifiedName.of("test_table"), inputColumns, true, ImmutableList.of(), Optional.empty());
    getFutureValue(new CreateTableTask().internalExecute(statement, metadata, new AllowAllAccessControl(), testSession, emptyList(), warningCollector));
}