focal_method: string column, lengths 13 to 60.9k characters
test_case: string column, lengths 25 to 109k characters
// Builds a thread pool from URL parameters. EagerThreadPoolExecutor with a
// TaskQueue presumably grows threads before queueing — TODO confirm against
// the TaskQueue/EagerThreadPoolExecutor implementations.
@Override
public Executor getExecutor(URL url) {
    // Thread name: URL parameter wins, then a URL attribute, then the default.
    String name = url.getParameter(THREAD_NAME_KEY, (String) url.getAttribute(THREAD_NAME_KEY, DEFAULT_THREAD_NAME));
    int cores = url.getParameter(CORE_THREADS_KEY, DEFAULT_CORE_THREADS);
    int threads = url.getParameter(THREADS_KEY, Integer.MAX_VALUE);
    int queues = url.getParameter(QUEUES_KEY, DEFAULT_QUEUES);
    int alive = url.getParameter(ALIVE_KEY, DEFAULT_ALIVE);
    // init queue and executor
    // Non-positive queue size is coerced to 1.
    TaskQueue<Runnable> taskQueue = new TaskQueue<>(queues <= 0 ? 1 : queues);
    EagerThreadPoolExecutor executor = new EagerThreadPoolExecutor(
            cores,
            threads,
            alive,
            TimeUnit.MILLISECONDS,
            taskQueue,
            new NamedInternalThreadFactory(name, true),
            new AbortPolicyWithReport(name, url));
    // The queue holds a back-reference to the executor it feeds.
    taskQueue.setExecutor(executor);
    return executor;
}
// Verifies that the QUEUES_KEY URL parameter sets the task queue capacity:
// an empty queue's remaining capacity equals its configured size.
@Test
void getExecutor2() {
    URL url = URL.valueOf("dubbo://10.20.130.230:20880/context/path?" + QUEUES_KEY + "=2");
    ThreadPool threadPool = new EagerThreadPool();
    ThreadPoolExecutor executor = (ThreadPoolExecutor) threadPool.getExecutor(url);
    assertThat(executor.getQueue().remainingCapacity(), is(2));
}
// Spring Shell command: renames a Hudi partition by launching a separate Spark
// application (SparkCommand.RENAME_PARTITION) and blocking until it exits.
@ShellMethod(key = "rename partition", value = "Rename partition. Usage: rename partition --oldPartition <oldPartition> --newPartition <newPartition>")
public String renamePartition(
        @ShellOption(value = {"--oldPartition"}, help = "Partition value to be renamed") String oldPartition,
        @ShellOption(value = {"--newPartition"}, help = "New partition value after rename") String newPartition,
        @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", defaultValue = "") String sparkPropertiesPath,
        @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master,
        @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory)
        throws Exception {
    // No explicit properties file: resolve Spark's default from the environment.
    if (StringUtils.isNullOrEmpty(sparkPropertiesPath)) {
        sparkPropertiesPath = Utils.getDefaultPropertiesFile(JavaConverters.mapAsScalaMapConverter(System.getenv()).asScala());
    }
    SparkLauncher sparkLauncher = SparkUtil.initLauncher(sparkPropertiesPath);
    sparkLauncher.addAppArgs(SparkMain.SparkCommand.RENAME_PARTITION.toString(), master, sparkMemory,
            HoodieCLI.basePath, oldPartition, newPartition);
    Process process = sparkLauncher.launch();
    // Stream the child process output so the launch does not block on full pipes.
    InputStreamConsumer.captureOutput(process);
    int exitCode = process.waitFor();
    // Non-zero exit code from the Spark job means the rename failed.
    if (exitCode != 0) {
        return "rename partition failed!";
    }
    return "rename partition succeeded";
}
// End-to-end check of SparkMain.renamePartition: write 20 records across
// partitions, rename the first partition, then verify the old partition is
// empty and the new one holds exactly the migrated records.
@Test
public void testRenamePartition() throws IOException {
    tablePath = tablePath + "/rename_partition_test/";
    HoodieTableMetaClient.withPropertyBuilder()
        .setTableType(HoodieTableType.COPY_ON_WRITE.name())
        .setTableName(tableName())
        .setArchiveLogFolder(HoodieTableConfig.ARCHIVELOG_FOLDER.defaultValue())
        .setPayloadClassName("org.apache.hudi.common.model.HoodieAvroPayload")
        .setTimelineLayoutVersion(TimelineLayoutVersion.VERSION_1)
        .setPartitionFields("partition_path")
        .setRecordKeyFields("_row_key")
        .setKeyGeneratorClassProp(SimpleKeyGenerator.class.getCanonicalName())
        .initTable(HoodieCLI.conf.newInstance(), tablePath);
    HoodieTestDataGenerator dataGen = new HoodieTestDataGenerator();
    HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(tablePath).withSchema(TRIP_EXAMPLE_SCHEMA).build();
    try (SparkRDDWriteClient client = new SparkRDDWriteClient(context(), config)) {
        String newCommitTime = "001";
        int numRecords = 20;
        client.startCommitWithTime(newCommitTime);
        List<HoodieRecord> records = dataGen.generateInserts(newCommitTime, numRecords);
        JavaRDD<HoodieRecord> writeRecords = context().getJavaSparkContext().parallelize(records, 1);
        List<WriteStatus> result = client.upsert(writeRecords, newCommitTime).collect();
        Assertions.assertNoWriteErrors(result);
        SQLContext sqlContext = context().getSqlContext();
        long totalRecs = sqlContext.read().format("hudi").load(tablePath).count();
        assertEquals(totalRecs, 20);
        // Remember how many rows live in the partition we are about to rename.
        long totalRecsInOldPartition = sqlContext.read().format("hudi").load(tablePath)
            .filter(HoodieRecord.PARTITION_PATH_METADATA_FIELD + " == '" + DEFAULT_FIRST_PARTITION_PATH + "'").count();
        // Execute rename partition command
        assertEquals(0, SparkMain.renamePartition(jsc(), tablePath, DEFAULT_FIRST_PARTITION_PATH, "2016/03/18"));
        // there should not be any records in old partition
        totalRecs = sqlContext.read().format("hudi").load(tablePath)
            .filter(HoodieRecord.PARTITION_PATH_METADATA_FIELD + " == '" + DEFAULT_FIRST_PARTITION_PATH + "'").count();
        assertEquals(totalRecs, 0);
        // all records from old partition should have been migrated to new partition
        totalRecs = sqlContext.read().format("hudi").load(tablePath)
            .filter(HoodieRecord.PARTITION_PATH_METADATA_FIELD + " == \"" + "2016/03/18" + "\"").count();
        assertEquals(totalRecs, totalRecsInOldPartition);
    }
}
public boolean test(final IndexRange indexRange, final Set<Stream> validStreams) { // If index range is incomplete, check the prefix against the valid index sets. if (indexRange.streamIds() == null) { return validStreams.stream() .map(Stream::getIndexSet) .anyMatch(indexSet -> indexSet.isManagedIndex(indexRange.indexName())); } final Set<String> validStreamIds = validStreams.stream() .map(Stream::getId) .collect(Collectors.toSet()); // Otherwise check if the index range contains any of the valid stream ids. return !Collections.disjoint(indexRange.streamIds(), validStreamIds); }
// With no stream ids on the range (the "incomplete" branch), the predicate
// must be false when no stream's index set manages the index name, and each
// index set must be consulted exactly once with that name.
@Test
public void currentIndexRangeShouldNotMatchIfNotManaged() {
    when(indexRange.streamIds()).thenReturn(null);
    when(indexRange.indexName()).thenReturn(indexName);
    when(stream1.getIndexSet().isManagedIndex(eq(indexName))).thenReturn(false);
    when(stream2.getIndexSet().isManagedIndex(eq(indexName))).thenReturn(false);
    assertThat(toTest.test(indexRange, Set.of(stream1, stream2))).isFalse();
    final ArgumentCaptor<String> stream1IndexSet = ArgumentCaptor.forClass(String.class);
    verify(stream1.getIndexSet(), times(1)).isManagedIndex(stream1IndexSet.capture());
    assertThat(stream1IndexSet.getAllValues()).containsExactly(indexName);
    final ArgumentCaptor<String> stream2IndexSet = ArgumentCaptor.forClass(String.class);
    verify(stream2.getIndexSet(), times(1)).isManagedIndex(stream2IndexSet.capture());
    assertThat(stream2IndexSet.getAllValues()).containsExactly(indexName);
}
@Override public void open() { super.open(); for (String propertyKey : properties.stringPropertyNames()) { LOGGER.debug("propertyKey: {}", propertyKey); String[] keyValue = propertyKey.split("\\.", 2); if (2 == keyValue.length) { LOGGER.debug("key: {}, value: {}", keyValue[0], keyValue[1]); Properties prefixProperties; if (basePropertiesMap.containsKey(keyValue[0])) { prefixProperties = basePropertiesMap.get(keyValue[0]); } else { prefixProperties = new Properties(); basePropertiesMap.put(keyValue[0].trim(), prefixProperties); } prefixProperties.put(keyValue[1].trim(), getProperty(propertyKey)); } } Set<String> removeKeySet = new HashSet<>(); for (String key : basePropertiesMap.keySet()) { if (!COMMON_KEY.equals(key)) { Properties properties = basePropertiesMap.get(key); if (!properties.containsKey(DRIVER_KEY) || !properties.containsKey(URL_KEY)) { LOGGER.error("{} will be ignored. {}.{} and {}.{} is mandatory.", key, DRIVER_KEY, key, key, URL_KEY); removeKeySet.add(key); } } } for (String key : removeKeySet) { basePropertiesMap.remove(key); } LOGGER.debug("JDBC PropertiesMap: {}", basePropertiesMap); setMaxLineResults(); setMaxRows(); //TODO(zjffdu) Set different sql splitter for different sql dialects. this.sqlSplitter = new SqlSplitter(); }
// Runs a SELECT through the JDBC interpreter twice: first unrestricted
// (both rows returned), then with the "limit" local property set to 1
// (only the first row returned).
@Test
void testSelectQuery() throws IOException, InterpreterException {
    Properties properties = new Properties();
    properties.setProperty("common.max_count", "1000");
    properties.setProperty("common.max_retry", "3");
    properties.setProperty("default.driver", "org.h2.Driver");
    properties.setProperty("default.url", getJdbcConnection());
    properties.setProperty("default.user", "");
    properties.setProperty("default.password", "");
    JDBCInterpreter t = new JDBCInterpreter(properties);
    t.open();
    String sqlQuery = "select * from test_table WHERE ID in ('a', 'b'); ";
    InterpreterResult interpreterResult = t.interpret(sqlQuery, context);
    assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
    assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
    assertEquals("ID\tNAME\na\ta_name\nb\tb_name\n", resultMessages.get(0).getData());
    // Re-run with a per-paragraph row limit of 1.
    context = getInterpreterContext();
    context.getLocalProperties().put("limit", "1");
    interpreterResult = t.interpret(sqlQuery, context);
    resultMessages = context.out.toInterpreterResultMessage();
    assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
    assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
    assertEquals("ID\tNAME\na\ta_name\n", resultMessages.get(0).getData());
}
/**
 * Lists the children of this virtual file.
 *
 * @return the child files; never null. Behavior for non-directories is
 *     implementation-specific — TODO confirm against concrete subclasses.
 * @throws IOException if the listing cannot be performed
 */
public abstract @NonNull VirtualFile[] list() throws IOException;
// Checks VirtualFile.list() via the minimal abstract-base implementation:
// the prepared root must expose exactly its two children "a" and "b".
@Test
public void simpleList_AbstractBase() throws Exception {
    // This test checks the method's behavior in the abstract base class,
    // which has limited behavior.
    prepareFileStructureForIsDescendant(tmp.getRoot());
    File root = tmp.getRoot();
    VirtualFile virtualRoot = new VirtualFileMinimalImplementation(root);
    List<VirtualFile> children = Arrays.asList(virtualRoot.list());
    assertThat(children, hasSize(2));
    assertThat(children, containsInAnyOrder(
        VFMatcher.hasName("a"),
        VFMatcher.hasName("b")
    ));
}
// Validates a table name: it must be non-empty, no longer than
// MAX_TABLE_ID_LENGTH, and must not contain characters matched by
// ILLEGAL_TABLE_CHARS (periods and forward slashes).
static void checkValidTableName(String nameToCheck) {
    final int length = nameToCheck.length();
    if (length < MIN_TABLE_ID_LENGTH) {
        throw new IllegalArgumentException("Table name cannot be empty. ");
    }
    if (length > MAX_TABLE_ID_LENGTH) {
        throw new IllegalArgumentException(
            "Table name " + nameToCheck + " cannot be longer than " + MAX_TABLE_ID_LENGTH + " characters.");
    }
    if (ILLEGAL_TABLE_CHARS.matcher(nameToCheck).find()) {
        throw new IllegalArgumentException(
            "Table name " + nameToCheck + " is not a valid name. Periods and forward slashes are not allowed.");
    }
}
// The empty string is below the minimum length and must be rejected.
@Test
public void testCheckValidTableNameThrowsErrorWhenNameIsTooShort() {
    assertThrows(IllegalArgumentException.class, () -> checkValidTableName(""));
}
// Simple accessor for the Elasticsearch configuration directory.
public File getConfDirectory() {
    return confDirectory;
}
// The ES conf directory must resolve to "<PATH_TEMP>/conf/es".
@Test
public void conf_directory_is_conf_es_subdirectory_of_sq_temp_directory() throws IOException {
    File tempDir = temp.newFolder();
    Props props = new Props(new Properties());
    props.set(PATH_DATA.getKey(), temp.newFolder().getAbsolutePath());
    props.set(PATH_HOME.getKey(), temp.newFolder().getAbsolutePath());
    props.set(PATH_TEMP.getKey(), tempDir.getAbsolutePath());
    props.set(PATH_LOGS.getKey(), temp.newFolder().getAbsolutePath());
    EsInstallation underTest = new EsInstallation(props);
    assertThat(underTest.getConfDirectory()).isEqualTo(new File(tempDir, "conf/es"));
}
// Accepts or rejects a member as a log source depending on the configured
// source type: LEADER accepts only the known leader, FOLLOWER accepts any
// member that is not the known leader (including when no leader is known),
// and ANY accepts everything.
boolean isAcceptable(final long leaderMemberId, final long memberId) {
    final boolean memberIsKnownLeader = NULL_VALUE != leaderMemberId && leaderMemberId == memberId;
    switch (sourceType) {
        case LEADER:
            return memberIsKnownLeader;
        case FOLLOWER:
            // De Morgan of: NULL_VALUE == leaderMemberId || leaderMemberId != memberId
            return !memberIsKnownLeader;
        case ANY:
            return true;
    }
    throw new IllegalStateException("Unknown sourceType=" + sourceType);
}
// LEADER source type: only a matching, non-null leader id is accepted;
// followers and unknown-leader combinations are all rejected.
@Test
void leaderLogSourceTypeShouldOnlyAcceptLeader() {
    final LogSourceValidator logSourceValidator = new LogSourceValidator(ClusterBackup.SourceType.LEADER);
    final long leaderMemberId = 123;
    final long followerMemberId = 456;
    assertTrue(logSourceValidator.isAcceptable(leaderMemberId, leaderMemberId));
    assertFalse(logSourceValidator.isAcceptable(leaderMemberId, followerMemberId));
    assertFalse(logSourceValidator.isAcceptable(NULL_VALUE, NULL_VALUE));
    assertFalse(logSourceValidator.isAcceptable(leaderMemberId, NULL_VALUE));
    assertFalse(logSourceValidator.isAcceptable(NULL_VALUE, followerMemberId));
}
// Delegates ACL validation to the plug-in engine; the resource is expected
// to be a PlainAccessResource (the cast throws ClassCastException otherwise).
@Override
public void validate(AccessResource accessResource) {
    aclPlugEngine.validate((PlainAccessResource) accessResource);
}
// Builds a signed SEND_MESSAGE request, round-trips its header through
// encode/decode, then checks that parsing and ACL validation succeed for
// the authorized resource.
@Test
public void validateTest() {
    SendMessageRequestHeader messageRequestHeader = new SendMessageRequestHeader();
    messageRequestHeader.setTopic("topicB");
    RemotingCommand remotingCommand = RemotingCommand.createRequestCommand(RequestCode.SEND_MESSAGE, messageRequestHeader);
    aclClient.doBeforeRequest("", remotingCommand);
    // Skip the 4-byte length prefix and re-pack the remaining header bytes.
    ByteBuffer buf = remotingCommand.encodeHeader();
    buf.getInt();
    buf = ByteBuffer.allocate(buf.limit() - buf.position()).put(buf);
    buf.position(0);
    try {
        PlainAccessResource accessResource = (PlainAccessResource) plainAccessValidator.parse(RemotingCommand.decode(buf), "123.4.5.6");
        plainAccessValidator.validate(accessResource);
    } catch (RemotingCommandException e) {
        e.printStackTrace();
        // Fix: the failure message named IOException, but the caught
        // exception type is RemotingCommandException.
        Assert.fail("Should not throw RemotingCommandException");
    }
}
// Looks up the named model (failing if absent — presumably requireModel
// throws; TODO confirm) and returns an evaluator for the given function names.
public FunctionEvaluator evaluatorOf(String modelName, String ... names) {
    return requireModel(modelName).evaluatorOf(names);
}
// Evaluates the "secondphase" function of the "macros" model with bound
// arguments and checks the computed value.
@Test
public void testEvaluationDependingFunctionTakingArguments() {
    ModelsEvaluator models = createModels();
    FunctionEvaluator function = models.evaluatorOf("macros", "secondphase");
    function.bind("match", 3);
    function.bind("rankBoost", 5);
    assertEquals(32.0, function.evaluate().asDouble(), delta);
}
// Formats this dependency as "group:artifact:version" with an optional
// ":classifier" suffix when a classifier is present.
public String getShortName() {
    StringBuilder shortName = new StringBuilder()
        .append(getGroupId()).append(':')
        .append(getArtifactId()).append(':')
        .append(getVersion());
    if (getClassifier() != null) {
        shortName.append(':').append(getClassifier());
    }
    return shortName.toString();
}
// Short name with and without a classifier.
@Test
public void testGetShortName() throws Exception {
    assertThat(new DependencyJar("com.group", "artifact", "1.3", null).getShortName())
        .isEqualTo("com.group:artifact:1.3");
    assertThat(new DependencyJar("com.group", "artifact", "1.3", "dll").getShortName())
        .isEqualTo("com.group:artifact:1.3:dll");
}
// Pure delegation to the wrapped store.
@Override
public Position getPosition() {
    return internal.getPosition();
}
// The wrapper must return exactly what the inner store returns.
@Test
public void shouldDelegateGetPosition() {
    final Position position = mock(Position.class);
    when(inner.getPosition()).thenReturn(position);
    assertThat(store.getPosition(), is(position));
}
// Formats an expression with default options: the predicate `s -> false`
// means no identifier is treated as a reserved word needing quoting.
public static String formatExpression(final Expression expression) {
    return formatExpression(expression, FormatOptions.of(s -> false));
}
// A LIKE predicate with an escape character must render the ESCAPE clause.
@Test
public void shouldFormatLikePredicateWithEscape() {
    final LikePredicate predicate = new LikePredicate(new StringLiteral("string"), new StringLiteral("*"), Optional.of('!'));
    assertThat(ExpressionFormatter.formatExpression(predicate), equalTo("('string' LIKE '*' ESCAPE '!')"));
}
/**
 * Dumps the registered logging objects, one tab-separated line per object:
 * container id, channel id, type, name, then the parent's channel id, type
 * and name ("-" when there is no parent).
 *
 * @param includeGeneral when false, objects of type GENERAL are skipped
 * @return the dump as a single string
 */
public String dump( boolean includeGeneral ) {
  StringBuilder out = new StringBuilder( 50000 );
  for ( LoggingObjectInterface o : this.map.values() ) {
    if ( ( includeGeneral ) || ( !o.getObjectType().equals( LoggingObjectType.GENERAL ) ) ) {
      // Hoist the parent lookup: the original called getParent() six times per
      // entry, which is wasteful and could observe a concurrent change between
      // calls.
      LoggingObjectInterface parent = o.getParent();
      out.append( o.getContainerObjectId() );
      out.append( '\t' );
      out.append( o.getLogChannelId() );
      out.append( '\t' );
      out.append( o.getObjectType().name() );
      out.append( '\t' );
      out.append( o.getObjectName() );
      out.append( '\t' );
      out.append( parent != null ? parent.getLogChannelId() : "-" );
      out.append( '\t' );
      out.append( parent != null ? parent.getObjectType().name() : "-" );
      out.append( '\t' );
      out.append( parent != null ? parent.getObjectName() : "-" );
      out.append( '\n' );
    }
  }
  return out.toString();
}
// Registers a fresh logging source and checks it shows up in the dump
// (GENERAL entries excluded via includeGeneral=false).
@Test
public void testDump() {
    LoggingRegistry loggingRegistry = LoggingRegistry.getInstance();
    loggingRegistry.schedulePurgeTimer();
    loggingRegistry.updateFromProperties();
    loggingRegistry.reset();
    LoggingObjectInterface obj = new SimpleLoggingObject( UUID.randomUUID().toString(), LoggingObjectType.JOB, null );
    String id = loggingRegistry.registerLoggingSource( obj );
    String output = loggingRegistry.dump( false );
    assertFalse( output.isEmpty() );
    assertTrue( output.contains( id ) );
}
// Splits a JDBC-backed query into input splits. Three strategies:
// a single split (splitting disabled or <=1 partitions), interval splits on a
// primitive partition column, or even LIMIT/OFFSET-style record-count splits.
@Override
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
    try {
        String partitionColumn = job.get(Constants.JDBC_PARTITION_COLUMN);
        int numPartitions = job.getInt(Constants.JDBC_NUM_PARTITIONS, -1);
        String lowerBound = job.get(Constants.JDBC_LOW_BOUND);
        String upperBound = job.get(Constants.JDBC_UPPER_BOUND);
        InputSplit[] splits;
        if (!job.getBoolean(Constants.JDBC_SPLIT_QUERY, true) || numPartitions <= 1) {
            // We will not split this query if:
            // 1. hive.sql.query.split is set to false (either manually or automatically by calcite
            // 2. numPartitions == 1
            splits = new InputSplit[1];
            splits[0] = new JdbcInputSplit(FileInputFormat.getInputPaths(job)[0]);
            LOGGER.info("Creating 1 input split " + splits[0]);
            return splits;
        }
        dbAccessor = DatabaseAccessorFactory.getAccessor(job);
        Path[] tablePaths = FileInputFormat.getInputPaths(job);
        // We will split this query into n splits
        LOGGER.debug("Creating {} input splits", numPartitions);
        if (partitionColumn != null) {
            // Partition-column mode: split the column's value range into intervals.
            List<String> columnNames = dbAccessor.getColumnNames(job);
            if (!columnNames.contains(partitionColumn)) {
                throw new IOException("Cannot find partitionColumn:" + partitionColumn + " in " + columnNames);
            }
            List<TypeInfo> hiveColumnTypesList = dbAccessor.getColumnTypes(job);
            TypeInfo typeInfo = hiveColumnTypesList.get(columnNames.indexOf(partitionColumn));
            if (!(typeInfo instanceof PrimitiveTypeInfo)) {
                throw new IOException(partitionColumn + " is a complex type, only primitive type can be a partition column");
            }
            // Missing bounds are fetched from the database in one query.
            if (lowerBound == null || upperBound == null) {
                Pair<String, String> boundary = dbAccessor.getBounds(job, partitionColumn, lowerBound == null,
                    upperBound == null);
                if (lowerBound == null) {
                    lowerBound = boundary.getLeft();
                }
                if (upperBound == null) {
                    upperBound = boundary.getRight();
                }
            }
            if (lowerBound == null) {
                throw new IOException("lowerBound of " + partitionColumn + " cannot be null");
            }
            if (upperBound == null) {
                throw new IOException("upperBound of " + partitionColumn + " cannot be null");
            }
            IntervalSplitter intervalSplitter = IntervalSplitterFactory.newIntervalSpitter(typeInfo);
            List<MutablePair<String, String>> intervals = intervalSplitter.getIntervals(lowerBound, upperBound,
                numPartitions, typeInfo);
            if (intervals.size()<=1) {
                LOGGER.debug("Creating 1 input splits");
                splits = new InputSplit[1];
                splits[0] = new JdbcInputSplit(FileInputFormat.getInputPaths(job)[0]);
                return splits;
            }
            // Open-ended first/last intervals so boundary rows are never dropped.
            intervals.get(0).setLeft(null);
            intervals.get(intervals.size()-1).setRight(null);
            splits = new InputSplit[intervals.size()];
            for (int i = 0; i < intervals.size(); i++) {
                splits[i] = new JdbcInputSplit(partitionColumn, intervals.get(i).getLeft(),
                    intervals.get(i).getRight(), tablePaths[0]);
            }
        } else {
            // No partition column: distribute records evenly by count/offset,
            // spreading the remainder over the first few splits.
            int numRecords = dbAccessor.getTotalNumberOfRecords(job);
            if (numRecords < numPartitions) {
                numPartitions = numRecords;
            }
            int numRecordsPerSplit = numRecords / numPartitions;
            int numSplitsWithExtraRecords = numRecords % numPartitions;
            LOGGER.debug("Num records = {}", numRecords);
            splits = new InputSplit[numPartitions];
            int offset = 0;
            for (int i = 0; i < numPartitions; i++) {
                int numRecordsInThisSplit = numRecordsPerSplit;
                if (i < numSplitsWithExtraRecords) {
                    numRecordsInThisSplit++;
                }
                splits[i] = new JdbcInputSplit(numRecordsInThisSplit, offset, tablePaths[0]);
                offset += numRecordsInThisSplit;
            }
        }
        // Release the accessor reference once splits are computed.
        dbAccessor = null;
        LOGGER.info("Num input splits created {}", splits.length);
        for (InputSplit split : splits) {
            LOGGER.info("split:" + split.toString());
        }
        return splits;
    } catch (Exception e) {
        LOGGER.error("Error while splitting input data.", e);
        throw new IOException(e);
    }
}
// Interval split over an int partition column [1,10] into 3 partitions:
// boundary splits must be open-ended (null lower/upper bound).
@Test
public void testIntervalSplit_Long() throws HiveJdbcDatabaseAccessException, IOException {
    JdbcInputFormat f = new JdbcInputFormat();
    when(mockDatabaseAccessor.getColumnNames(any(Configuration.class))).thenReturn(Lists.newArrayList("a"));
    List<TypeInfo> columnTypes = Collections.singletonList(TypeInfoFactory.intTypeInfo);
    when(mockDatabaseAccessor.getColumnTypes(any(Configuration.class))).thenReturn(columnTypes);
    JobConf conf = new JobConf();
    conf.set("mapred.input.dir", "/temp");
    conf.set("hive.sql.partitionColumn", "a");
    conf.set("hive.sql.numPartitions", "3");
    conf.set("hive.sql.lowerBound", "1");
    conf.set("hive.sql.upperBound", "10");
    InputSplit[] splits = f.getSplits(conf, -1);
    assertThat(splits, is(notNullValue()));
    assertThat(splits.length, is(3));
    assertNull(((JdbcInputSplit)splits[0]).getLowerBound());
    assertEquals(((JdbcInputSplit)splits[0]).getUpperBound(), "4");
    assertEquals(((JdbcInputSplit)splits[1]).getLowerBound(), "4");
    assertEquals(((JdbcInputSplit)splits[1]).getUpperBound(), "7");
    assertEquals(((JdbcInputSplit)splits[2]).getLowerBound(), "7");
    assertNull(((JdbcInputSplit)splits[2]).getUpperBound());
}
// Creates a DoFnInvoker for the given DoFn via the ByteBuddy-generated
// invoker factory singleton.
public static <InputT, OutputT> DoFnInvoker<InputT, OutputT> invokerFor(
    DoFn<InputT, OutputT> fn) {
    return ByteBuddyDoFnInvokerFactory.only().newByteBuddyInvoker(fn);
}
// The generated invoker must route invokeSetup/StartBundle/FinishBundle/
// Teardown to the correspondingly annotated DoFn methods with the right
// context arguments.
@Test
public void testDoFnWithStartBundleSetupTeardown() throws Exception {
    when(mockArgumentProvider.pipelineOptions()).thenReturn(mockOptions);
    when(mockArgumentProvider.startBundleContext(any(DoFn.class)))
        .thenReturn(mockStartBundleContext);
    when(mockArgumentProvider.finishBundleContext(any(DoFn.class)))
        .thenReturn(mockFinishBundleContext);
    class MockFn extends DoFn<String, String> {
        @ProcessElement
        public void processElement(ProcessContext c) {}
        @StartBundle
        public void startBundle(StartBundleContext c) {}
        @FinishBundle
        public void finishBundle(FinishBundleContext c) {}
        @Setup
        public void before(PipelineOptions options) {}
        @Teardown
        public void after() {}
    }
    MockFn fn = mock(MockFn.class);
    DoFnInvoker<String, String> invoker = DoFnInvokers.invokerFor(fn);
    invoker.invokeSetup(mockArgumentProvider);
    invoker.invokeStartBundle(mockArgumentProvider);
    invoker.invokeFinishBundle(mockArgumentProvider);
    invoker.invokeTeardown();
    verify(fn).before(mockOptions);
    verify(fn).startBundle(mockStartBundleContext);
    verify(fn).finishBundle(mockFinishBundleContext);
    verify(fn).after();
}
// Clears the processing-pause lock flag (atomic boolean).
public void unlockProcessingPause() {
    processingPauseLocked.set(false);
}
// Pausing with lock=true sets the lock flag; unlock clears it.
@Test
public void testUnlockProcessingPause() throws Exception {
    status.pauseMessageProcessing(true);
    assertTrue(status.processingPauseLocked());
    status.unlockProcessingPause();
    assertFalse(status.processingPauseLocked());
}
// Convenience overload: extracts "optionPrefix"-keyed entries, removing the
// matched entries from the source map (third argument true).
public static Map<String, Object> extractProperties(Map<String, Object> properties, String optionPrefix) {
    return extractProperties(properties, optionPrefix, true);
}
// Extracting "foo." entries strips the prefix from the keys and removes the
// matched entries from the original map.
@Test
public void testExtractProperties() {
    Map<String, Object> params = new LinkedHashMap<>();
    params.put("foo.name", "Camel");
    params.put("foo.age", 5);
    params.put("bar", "yes");
    // extract all "foo." properties
    // and their keys should have the prefix removed
    Map<String, Object> foo = PropertiesHelper.extractProperties(params, "foo.", true);
    assertEquals(2, foo.size());
    assertEquals("Camel", foo.get("name"));
    assertEquals(5, foo.get("age"));
    // the extracted properties should be removed from original
    assertEquals(1, params.size());
    assertEquals("yes", params.get("bar"));
}
// Returns the projection's precomputed output schema.
@Override
public LogicalSchema getSchema() {
    return outputSchema;
}
// Pull query selecting WINDOWSTART, WINDOWEND and the key on a windowed
// source: all three must appear as key columns in the output schema.
@Test
public void shouldBuildPullQueryOutputSchemaSelectKeyAndWindowBounds() {
    // Given:
    when(keyFormat.isWindowed()).thenReturn(true);
    when(source.getSchema()).thenReturn(INPUT_SCHEMA.withPseudoAndKeyColsInValue(true));
    final UnqualifiedColumnReferenceExp windowstartRef = new UnqualifiedColumnReferenceExp(SystemColumns.WINDOWSTART_NAME);
    final UnqualifiedColumnReferenceExp windowendRef = new UnqualifiedColumnReferenceExp(SystemColumns.WINDOWEND_NAME);
    selects = ImmutableList.<SelectItem>builder()
        .add(new SingleColumn(windowstartRef, Optional.of(SystemColumns.WINDOWSTART_NAME)))
        .add((new SingleColumn(windowendRef, Optional.of(SystemColumns.WINDOWEND_NAME))))
        .add((new SingleColumn(K_REF, Optional.of(K)))).build();
    when(analysis.getSelectColumnNames()).thenReturn(
        ImmutableSet.of(SystemColumns.WINDOWSTART_NAME, SystemColumns.WINDOWEND_NAME, K));
    // When:
    final QueryProjectNode projectNode = new QueryProjectNode(
        NODE_ID,
        source,
        selects,
        metaStore,
        ksqlConfig,
        analysis,
        true,
        plannerOptions,
        false
    );
    // Then:
    final LogicalSchema expected = LogicalSchema.builder()
        .keyColumn(SystemColumns.WINDOWSTART_NAME, SqlTypes.BIGINT)
        .keyColumn(SystemColumns.WINDOWEND_NAME, SqlTypes.BIGINT)
        .keyColumn(K, SqlTypes.STRING)
        .build();
    assertThat(expected, is(projectNode.getSchema()));
}
// Convenience overload: un-escapes using the default escape character and
// comma as the escaped character.
public static String unEscapeString(String str) {
    return unEscapeString(str, ESCAPE_CHAR, COMMA);
}
// unEscapeString: passthrough for null/empty/plain strings; unescaped
// special characters must be rejected; properly escaped forms round-trip.
@Test (timeout = 30000)
public void testUnescapeString() throws Exception {
    assertEquals(NULL_STR, StringUtils.unEscapeString(NULL_STR));
    assertEquals(EMPTY_STR, StringUtils.unEscapeString(EMPTY_STR));
    assertEquals(STR_WO_SPECIAL_CHARS, StringUtils.unEscapeString(STR_WO_SPECIAL_CHARS));
    try {
        StringUtils.unEscapeString(STR_WITH_COMMA);
        fail("Should throw IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // expected
    }
    assertEquals(STR_WITH_COMMA, StringUtils.unEscapeString(ESCAPED_STR_WITH_COMMA));
    try {
        StringUtils.unEscapeString(STR_WITH_ESCAPE);
        fail("Should throw IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // expected
    }
    assertEquals(STR_WITH_ESCAPE, StringUtils.unEscapeString(ESCAPED_STR_WITH_ESCAPE));
    try {
        StringUtils.unEscapeString(STR_WITH_BOTH2);
        fail("Should throw IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // expected
    }
    assertEquals(STR_WITH_BOTH2, StringUtils.unEscapeString(ESCAPED_STR_WITH_BOTH2));
}
// Issues "CLUSTER NODES" and decodes the reply into RedisClusterNode objects.
@Override
public Iterable<RedisClusterNode> clusterGetNodes() {
    RedisStrictCommand<List<RedisClusterNode>> cluster = new RedisStrictCommand<List<RedisClusterNode>>("CLUSTER", "NODES",
            new ObjectDecoder(new RedisClusterNodeDecoder(executorService.getServiceManager())));
    // Read from any node (null key) using the plain string codec.
    return read(null, StringCodec.INSTANCE, cluster);
}
// Against a live 6-node cluster: every node exposes its basic fields;
// masters carry slot ranges, replicas carry a master id.
@Test
public void testClusterGetNodes() {
    testInCluster(connection -> {
        Iterable<RedisClusterNode> nodes = connection.clusterGetNodes();
        assertThat(nodes).hasSize(6);
        for (RedisClusterNode redisClusterNode : nodes) {
            assertThat(redisClusterNode.getLinkState()).isNotNull();
            assertThat(redisClusterNode.getFlags()).isNotEmpty();
            assertThat(redisClusterNode.getHost()).isNotNull();
            assertThat(redisClusterNode.getPort()).isNotNull();
            assertThat(redisClusterNode.getId()).isNotNull();
            assertThat(redisClusterNode.getType()).isNotNull();
            if (redisClusterNode.getType() == NodeType.MASTER) {
                assertThat(redisClusterNode.getSlotRange().getSlots()).isNotEmpty();
            } else {
                assertThat(redisClusterNode.getMasterId()).isNotNull();
            }
        }
    });
}
// Preflight check: verifies the datanode lock files in both the config and
// logs target directories.
@Override
public void runCheck() throws PreflightCheckException {
    checkDatanodeLock(directories.getConfigurationTargetDir());
    checkDatanodeLock(directories.getLogsTargetDir());
}
// Existing lock files containing the node's own id must pass the check
// and remain untouched.
@Test
void testValidExistingLock(@TempDir Path dataDir, @TempDir Path logsDir, @TempDir Path configDir) throws IOException {
    final Path logsDirLock = logsDir.resolve(DatanodeDirectoriesLockfileCheck.DATANODE_LOCKFILE);
    final Path configDirLock = configDir.resolve(DatanodeDirectoriesLockfileCheck.DATANODE_LOCKFILE);
    Files.writeString(logsDirLock, VALID_NODE_ID);
    Files.writeString(configDirLock, VALID_NODE_ID);
    final PreflightCheck check = new DatanodeDirectoriesLockfileCheck(VALID_NODE_ID, new DatanodeDirectories(dataDir, logsDir, null, configDir));
    check.runCheck();
    Assertions.assertThat(Files.readString(logsDirLock)).isEqualTo(VALID_NODE_ID);
    Assertions.assertThat(Files.readString(configDirLock)).isEqualTo(VALID_NODE_ID);
}
// REST endpoint: deletes a virtual port from a virtual network device and
// responds 204 No Content.
@DELETE
@Path("{networkId}/devices/{deviceId}/ports/{portNum}")
public Response removeVirtualPort(@PathParam("networkId") long networkId,
                                  @PathParam("deviceId") String deviceId,
                                  @PathParam("portNum") long portNum) {
    NetworkId nid = NetworkId.networkId(networkId);
    vnetAdminService.removeVirtualPort(nid, DeviceId.deviceId(deviceId),
            PortNumber.portNumber(portNum));
    return Response.noContent().build();
}
// DELETE on the virtual-port resource must call the admin service with the
// decoded ids and return 204.
@Test
public void testDeleteVirtualPort() {
    NetworkId networkId = networkId3;
    DeviceId deviceId = devId2;
    PortNumber portNum = portNumber(2);
    mockVnetAdminService.removeVirtualPort(networkId, deviceId, portNum);
    expectLastCall();
    replay(mockVnetAdminService);
    WebTarget wt = target()
        .property(ClientProperties.SUPPRESS_HTTP_COMPLIANCE_VALIDATION, true);
    String reqLocation = "vnets/" + networkId.toString()
        + "/devices/" + deviceId.toString() + "/ports/" + portNum.toLong();
    Response response = wt.path(reqLocation)
        .request(MediaType.APPLICATION_JSON_TYPE)
        .delete();
    assertThat(response.getStatus(), is(HttpURLConnection.HTTP_NO_CONTENT));
    verify(mockVnetAdminService);
}
// Returns an iterable view of the double array that compares elements using
// the given absolute tolerance.
public DoubleArrayAsIterable usingTolerance(double tolerance) {
    return new DoubleArrayAsIterable(tolerance(tolerance), iterableSubject());
}
// With tolerance 0.0, -0.0 in the array must match 0.0 (tolerance comparison,
// unlike exact bit equality, treats them as equal).
@Test
public void usingTolerance_contains_successWithNegativeZero() {
    assertThat(array(1.1, -0.0, 3.3)).usingTolerance(0.0).contains(0.0);
}
// Inserts or replaces a mapping, returning the previous value or null.
// Null keys are stored in a dedicated entry outside the hash table.
@Override
public V put(@Nullable final K key, final V value) {
    if (key == null) {
        if (nullEntry == null) {
            _size += 1;
            nullEntry = new Entry<>(null, value);
            return null;
        }
        // Existing null-key entry: replace the value, return the old one.
        return nullEntry.setValue(value);
    }
    final Entry<K, V>[] table = this.table;
    final int hash = key.hashCode();
    final int index = HashUtil.indexFor(hash, table.length, mask);
    // Scan the bucket's chain for an existing key (identity first, then equals).
    for (Entry<K, V> e = table[index]; e != null; e = e.hashNext) {
        final K entryKey;
        if ((entryKey = e.key) == key || entryKey.equals(key)) {
            return e.setValue(value);
        }
    }
    // Not found: prepend a new entry to the bucket chain.
    final Entry<K, V> e = new Entry<>(key, value);
    e.hashNext = table[index];
    table[index] = e;
    _size += 1;
    // Grow when the size exceeds the capacity threshold.
    if (_size > capacity) {
        rehash(HashUtil.nextCapacity(capacity));
    }
    return null;
}
// The copy constructor must carry over all entries and the size.
@Test
public void testCopy() {
    final HashMap<Integer, String> tested = new HashMap<>();
    tested.put(7, "a");
    tested.put(8, "b");
    HashMap<Integer, String> copy = new HashMap<>(tested);
    Assert.assertEquals("a", copy.get(7));
    Assert.assertEquals("b", copy.get(8));
    Assert.assertEquals(2, copy.size());
}
// Read-only wrapper: delegates to the underlying task.
@Override
public Set<TopicPartition> inputPartitions() {
    return task.inputPartitions();
}
// The read-only wrapper must forward inputPartitions() to the wrapped task.
@Test
public void shouldDelegateInputPartitions() {
    final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task);
    readOnlyTask.inputPartitions();
    verify(task).inputPartitions();
}
// Dispatches an "identify" function call carrying the distinct id as JSON;
// JSON construction failures are logged, not propagated.
public void callIdentify(String distinctId) {
    JSONObject jsonObject = new JSONObject();
    try {
        jsonObject.put("distinctId", distinctId);
        call("identify", jsonObject);
    } catch (JSONException e) {
        SALog.printStackTrace(e);
    }
}
// identify() must notify function listeners with the "identify" function name.
@Test
public void callIdentify() {
    SensorsDataAPI sensorsDataAPI = SAHelper.initSensors(mApplication);
    sensorsDataAPI.addFunctionListener(new SAFunctionListener() {
        @Override
        public void call(String function, JSONObject args) {
            Assert.assertEquals("identify", function);
        }
    });
    sensorsDataAPI.identify("abcded");
}
// Child-first class loading for UDFs: blacklisted names are rejected outright;
// eligible names are tried in this loader before falling back to the parent.
@Override
protected Class<?> loadClass(final String name, final boolean resolve)
    throws ClassNotFoundException {
    if (blacklist.test(name)) {
        throw new ClassNotFoundException("The requested class is not permitted to be used from a "
            + "udf. Class " + name);
    }
    Class<?> clazz = findLoadedClass(name);
    if (clazz == null) {
        try {
            // Child-first: only classes this loader is supposed to own.
            if (shouldLoadFromChild(name)) {
                clazz = findClass(name);
            }
        } catch (final ClassNotFoundException e) {
            logger.trace("Class {} not found in {} using parent classloader", name, path);
        }
    }
    if (clazz == null) {
        // Fall back to the parent; resolution is deferred to below.
        clazz = super.loadClass(name, false);
    }
    if (resolve) {
        resolveClass(clazz);
    }
    return clazz;
}
// A class present only in the parent loader must still resolve through the
// child-first UDF classloader.
@Test
public void shouldLoadClassesFromParentIfNotFoundInChild() throws ClassNotFoundException {
    assertThat(udfClassLoader.loadClass(ONLY_IN_PARENT, true), equalTo(OnlyInParent.class));
}
// Telnet "cd" command: sets (or with "/" and ".." clears) the channel's
// default service, matching the argument against each exporter's interface
// simple name, interface name, URL path, or service key.
@Override
public String execute(CommandContext commandContext, String[] args) {
    Channel channel = commandContext.getRemote();
    if (ArrayUtils.isEmpty(args)) {
        return "Please input service name, eg: \r\ncd XxxService\r\ncd com.xxx.XxxService";
    }
    String message = args[0];
    StringBuilder buf = new StringBuilder();
    if ("/".equals(message) || "..".equals(message)) {
        // Clear the currently selected default service.
        String service = channel.attr(SERVICE_KEY).getAndRemove();
        buf.append("Cancelled default service ").append(service).append('.');
    } else {
        boolean found = false;
        for (Exporter<?> exporter : dubboProtocol.getExporters()) {
            if (message.equals(exporter.getInvoker().getInterface().getSimpleName())
                    || message.equals(exporter.getInvoker().getInterface().getName())
                    || message.equals(exporter.getInvoker().getUrl().getPath())
                    || message.equals(exporter.getInvoker().getUrl().getServiceKey())) {
                found = true;
                break;
            }
        }
        if (found) {
            channel.attr(SERVICE_KEY).set(message);
            buf.append("Used the ")
                .append(message)
                .append(" as default.\r\nYou can cancel default service by command: cd /");
        } else {
            buf.append("No such service ").append(message);
        }
    }
    return buf.toString();
}
@Test
void testChangeServiceKey() {
    // Export a service first, then verify "cd" accepts its service key (group/interface:version).
    ExtensionLoader.getExtensionLoader(Protocol.class)
            .getExtension(DubboProtocol.NAME)
            .export(mockInvoker);
    String result = change.execute(mockCommandContext, new String[] {"g/demo:1.0.0"});
    assertEquals("Used the g/demo:1.0.0 as default.\r\nYou can cancel default service by command: cd /", result);
}
/**
 * Computes the weight of an edge as travel time divided by priority plus a
 * distance-proportional cost, returning +Infinity for inaccessible edges.
 *
 * @param edgeState the edge being evaluated
 * @param reverse   whether the edge is traversed against its storage direction
 * @return the edge weight, or {@code Double.POSITIVE_INFINITY} if the edge is blocked
 */
@Override
public double calcEdgeWeight(EdgeIteratorState edgeState, boolean reverse) {
    double priority = edgeToPriorityMapping.get(edgeState, reverse);
    // Priority 0 marks the edge as forbidden.
    if (priority == 0) return Double.POSITIVE_INFINITY;

    final double distance = edgeState.getDistance();
    double seconds = calcSeconds(distance, edgeState, reverse);
    if (Double.isInfinite(seconds)) return Double.POSITIVE_INFINITY;
    // Unfavored edges (start/stop/via points with heading) get a fixed time penalty.
    if (edgeState.get(EdgeIteratorState.UNFAVORED_EDGE)) seconds += headingPenaltySeconds;
    double distanceCosts = distance * distanceInfluence;
    if (Double.isInfinite(distanceCosts)) return Double.POSITIVE_INFINITY;
    return seconds / priority + distanceCosts;
}
@Test
public void testWeightWrongHeading() {
    // The heading penalty must be added exactly while the virtual edge is unfavored,
    // in either traversal direction, and removed once the unfavored flag is cleared.
    CustomModel customModel = createSpeedCustomModel(avSpeedEnc).setHeadingPenalty(100);
    Weighting weighting = createWeighting(customModel);

    EdgeIteratorState edge = graph.edge(1, 2)
            .set(avSpeedEnc, 10, 10)
            .setDistance(10).setWayGeometry(Helper.createPointList(51, 0, 51, 1));
    VirtualEdgeIteratorState virtEdge = new VirtualEdgeIteratorState(edge.getEdgeKey(), 99, 5, 6,
            edge.getDistance(), edge.getFlags(), edge.getKeyValues(),
            edge.fetchWayGeometry(FetchMode.PILLAR_ONLY), false);
    double time = weighting.calcEdgeWeight(virtEdge, false);
    virtEdge.setUnfavored(true);
    // heading penalty on edge
    assertEquals(time + 100, weighting.calcEdgeWeight(virtEdge, false), 1e-8);
    // only after setting it
    virtEdge.setUnfavored(true);
    assertEquals(time + 100, weighting.calcEdgeWeight(virtEdge, true), 1e-8);
    // but not after releasing it
    virtEdge.setUnfavored(false);
    assertEquals(time, weighting.calcEdgeWeight(virtEdge, true), 1e-8);

    // test default penalty
    virtEdge.setUnfavored(true);
    customModel = createSpeedCustomModel(avSpeedEnc);
    weighting = createWeighting(customModel);
    assertEquals(time + Parameters.Routing.DEFAULT_HEADING_PENALTY, weighting.calcEdgeWeight(virtEdge, false), 1e-8);
}
/**
 * Returns whether the given address is covered by the configured address list.
 * A null address or an uninitialized list is treated as "not contained".
 */
@Override
public boolean isIn(String ipAddress) {
    return ipAddress != null && addressList != null && addressList.includes(ipAddress);
}
@Test
public void testWithMultipleSubnetAndIPs() throws IOException {
    // Mix of exact IPs and /23 and /16 CIDR subnets: membership must respect each entry's range.
    String[] ips = {"10.119.103.112", "10.221.102.0/23", "10.222.0.0/16", "10.113.221.221"};
    createFileWithEntries ("ips.txt", ips);

    IPList ipList = new FileBasedIPList("ips.txt");

    assertTrue ("10.119.103.112 is not in the list", ipList.isIn("10.119.103.112"));
    assertFalse ("10.119.103.113 is in the list", ipList.isIn("10.119.103.113"));

    assertTrue ("10.221.103.121 is not in the list", ipList.isIn("10.221.103.121"));
    assertFalse("10.221.104.0 is in the list", ipList.isIn("10.221.104.0"));

    assertTrue ("10.222.103.121 is not in the list", ipList.isIn("10.222.103.121"));
    assertFalse("10.223.104.0 is in the list", ipList.isIn("10.223.104.0"));

    assertTrue ("10.113.221.221 is not in the list", ipList.isIn("10.113.221.221"));
    assertFalse("10.113.221.222 is in the list", ipList.isIn("10.113.221.222"));
}
/**
 * Returns the first non-null value found for {@code key} across all underlying stores,
 * or {@code null} if no store holds the key.
 *
 * @param key the key to look up; must not be null
 * @throws InvalidStateStoreException if an underlying store has been migrated/closed
 */
@Override
public V get(final K key) {
    Objects.requireNonNull(key);
    final List<ReadOnlyKeyValueStore<K, V>> stores = storeProvider.stores(storeName, storeType);
    for (final ReadOnlyKeyValueStore<K, V> store : stores) {
        try {
            final V result = store.get(key);
            if (result != null) {
                return result;
            }
        } catch (final InvalidStateStoreException e) {
            // Rewrap with a user-actionable message, preserving the original exception
            // as the cause (the original code dropped it, losing the failure context).
            throw new InvalidStateStoreException("State store is not available anymore and may have been migrated to another instance; please re-discover its location from the state metadata.", e);
        }
    }
    // Key absent in every store.
    return null;
}
@Test
public void shouldReturnValueIfExists() {
    // A value put into the underlying store must be visible through the composite read-only view.
    stubOneUnderlying.put("key", "value");
    assertEquals("value", theStore.get("key"));
}
/**
 * Sends a single Splunk event over HTTP by delegating to the batch variant
 * with a one-element list. Synchronized to serialize concurrent senders.
 *
 * @return true if the event was sent successfully
 */
public synchronized boolean sendHttpEvent(SplunkEvent event) {
    return sendHttpEvents(Collections.singletonList(event));
}
@Test
public void testSendHttpEventsShouldThrowErrorWhenHttpClientReturnsErrorCode() throws IOException {
    // A non-2xx HTTP status from the Splunk endpoint must surface as a SplunkResourceManagerException.
    SplunkEvent event = SplunkEvent.newBuilder().withEvent(EVENT).create();
    try (CloseableHttpResponse mockResponse = clientFactory.getHttpClient().execute(any(HttpPost.class))) {
        when(mockResponse.getStatusLine().getStatusCode()).thenReturn(404);
    }
    assertThrows(SplunkResourceManagerException.class, () -> testManager.sendHttpEvent(event));
}
/**
 * Intentional no-op: this dictionary is backed by the device contacts and is read-only,
 * so user-added words are silently discarded rather than persisted.
 */
@Override
protected void addWordToStorage(String word, int frequency) {
    // not going to support addition of contacts!
}
@Test
public void testAddWordToStorageDoesNotHaveEffect() throws Exception {
    // addWordToStorage is a no-op for the contacts dictionary, so the word must not become valid.
    mDictionaryUnderTest.addWordToStorage("aword", 126);
    // Flush any pending background work before asserting.
    TestRxSchedulers.drainAllTasks();
    Assert.assertFalse(mDictionaryUnderTest.isValidWord("aword"));
}
/**
 * Returns hosts whose remaining free resources have gone negative in any dimension
 * (vcpu, memory or disk), i.e. hosts that are overcommitted.
 */
protected List<Node> findOvercommittedHosts() {
    List<Node> overcommitted = new ArrayList<>();
    availableResources.forEach((node, availability) -> {
        var free = availability.nodeResources;
        boolean anyNegative = free.vcpu() < 0 || free.memoryGiB() < 0 || free.diskGb() < 0;
        if (anyNegative) {
            overcommitted.add(node);
        }
    });
    return overcommitted;
}
@Test
public void testOvercommittedHosts() {
    // Every host is created with negative vcpu availability, so all hosts must be reported.
    CapacityCheckerTester tester = new CapacityCheckerTester();
    tester.createNodes(7, 4,
            10, new NodeResources(-1, 10, 100, 1), 10,
            0, new NodeResources(1, 10, 100, 1), 10);
    int overcommittedHosts = tester.capacityChecker.findOvercommittedHosts().size();
    assertEquals(tester.nodeRepository.nodes().list().nodeType(NodeType.host).size(), overcommittedHosts);
}
/**
 * Builds a merged result for DDL statements. Only FETCH statements need real merging:
 * other statements pass through, and a single query result is streamed as-is.
 *
 * @return a transparent, iterator-stream, or fetch-stream merged result
 * @throws SQLException if reading result metadata fails
 */
@Override
public MergedResult merge(final List<QueryResult> queryResults, final SQLStatementContext sqlStatementContext,
                          final ShardingSphereDatabase database, final ConnectionContext connectionContext) throws SQLException {
    if (!(sqlStatementContext instanceof FetchStatementContext)) {
        // Non-FETCH DDL: no merging needed, expose the first result directly.
        return new TransparentMergedResult(queryResults.get(0));
    }
    if (1 == queryResults.size()) {
        return new IteratorStreamMergedResult(queryResults);
    }
    FetchStatementContext fetchStatementContext = (FetchStatementContext) sqlStatementContext;
    // Propagate column label indexes to the underlying cursor's SELECT context before merging.
    Map<String, Integer> columnLabelIndexMap = getColumnLabelIndexMap(queryResults.get(0));
    fetchStatementContext.getCursorStatementContext().getSelectStatementContext().setIndexes(columnLabelIndexMap);
    return new FetchStreamMergedResult(queryResults, fetchStatementContext, getSchema(sqlStatementContext, database), connectionContext);
}
@Test
void assertBuildIteratorStreamMergedResult() throws SQLException {
    // A single query result for a FETCH statement must be wrapped in an iterator-stream merge.
    ShardingDDLResultMerger merger = new ShardingDDLResultMerger();
    assertThat(merger.merge(createSingleQueryResults(), mock(FetchStatementContext.class),
            mock(ShardingSphereDatabase.class), mock(ConnectionContext.class)), instanceOf(IteratorStreamMergedResult.class));
}
/**
 * Persists a locally generated event to the cluster event collection and posts it to
 * the local event bus immediately so this node does not wait for the DB polling cycle.
 *
 * <p>The local node id is pre-added to the consumers list, so the DB cursor will not
 * deliver the event to this node again (avoids double processing, see #11263).
 */
@Subscribe
public void publishClusterEvent(Object event) {
    if (event instanceof DeadEvent) {
        // DeadEvents would loop forever if republished; drop them.
        LOG.debug("Skipping DeadEvent on cluster event bus");
        return;
    }

    final String className = AutoValueUtils.getCanonicalName(event.getClass());
    final ClusterEvent clusterEvent = ClusterEvent.create(nodeId.getNodeId(), className, Collections.singleton(nodeId.getNodeId()), event);

    try {
        // JOURNALED write concern: the event must survive a mongod crash before we proceed.
        final String id = dbCollection.save(clusterEvent, WriteConcern.JOURNALED).getSavedId();
        // We are handling a locally generated event, so we can speed up processing by posting it to the local event
        // bus immediately. Due to having added the local node id to its list of consumers, it will not be picked up
        // by the db cursor again, avoiding double processing of the event. See #11263 for details.
        serverEventBus.post(event);
        LOG.debug("Published cluster event with ID <{}> and type <{}>", id, className);
    } catch (MongoException e) {
        // Best-effort: publication failure is logged, not propagated to the caller.
        LOG.error("Couldn't publish cluster event of type <" + className + ">", e);
    }
}
@Test
public void localNodeIsMarkedAsHavingConsumedEvent() {
    // Publishing locally must record this node in the event's "consumers" list so the
    // DB poller does not deliver it back to us.
    @SuppressWarnings("deprecation")
    DBCollection collection = mongoConnection.getDatabase().getCollection(ClusterEventPeriodical.COLLECTION_NAME);
    SimpleEvent event = new SimpleEvent("test");

    clusterEventPeriodical.publishClusterEvent(event);

    DBObject dbObject = collection.findOne();

    assertThat(((BasicDBList) dbObject.get("consumers")).toArray()).isEqualTo(new String[]{nodeId.getNodeId()});
}
/**
 * Resolves the OpenStack Swift region for a file via its container.
 *
 * <p>If the container carries no region attribute, the region is fetched from the
 * location feature first; otherwise the container's own region attribute is used.
 *
 * @throws BackgroundException if the location lookup fails
 */
public Region lookup(final Path file) throws BackgroundException {
    final Path container = containerService.getContainer(file);
    if(Location.unknown.equals(new SwiftLocationFeature.SwiftRegion(container.attributes().getRegion()))) {
        // No region cached on the container: resolve it remotely, then map to a Region.
        return this.lookup(location.getLocation(container));
    }
    return this.lookup(new SwiftLocationFeature.SwiftRegion(container.attributes().getRegion()));
}
@Test
public void testFindDefaultLocationInBookmark() throws Exception {
    // The region configured in the connection profile (IAD) must be used as the default
    // region when looking up a container that carries no region attribute of its own.
    final ProtocolFactory factory = new ProtocolFactory(new HashSet<>(Collections.singleton(new SwiftProtocol())));
    final Profile profile = new ProfilePlistReader(factory).read(
        this.getClass().getResourceAsStream("/Rackspace US (IAD).cyberduckprofile"));
    final SwiftSession session = new SwiftSession(
        new Host(profile, "identity.api.rackspacecloud.com", new Credentials(
            PROPERTIES.get("rackspace.user"), PROPERTIES.get("rackspace.password")
        )),
        new DisabledX509TrustManager(),
        new DefaultX509KeyManager()) {
    };
    assertEquals("IAD", session.getHost().getRegion());
    final Region location = new SwiftRegionService(session).lookup(new Path("/test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume)));
    assertNotNull(location);
    assertEquals("IAD", location.getRegionId());
}
// Exposes the tracker's changed-lines map: file path -> set of 1-based changed line numbers.
Map<Path, Set<Integer>> changedLines() {
    return tracker.changedLines();
}
@Test
public void handle_index_using_absolute_paths() throws IOException {
    // An absolute path in the "Index:" header must still be resolved relative to the
    // project base dir, and only the added lines (2 and 3) reported as changed.
    String example = "Index: /foo/sample1\n"
        + "===================================================================\n"
        + "--- a/sample1\n"
        + "+++ b/sample1\n"
        + "@@ -1 +1,3 @@\n"
        + " same line\n"
        + "+added line 1\n"
        + "+added line 2\n";
    printDiff(example);
    assertThat(underTest.changedLines()).isEqualTo(Collections.singletonMap(rootBaseDir.resolve("sample1"), new HashSet<>(Arrays.asList(2, 3))));
}
/**
 * Returns the default encryption algorithm for a file. For containers, a per-bucket
 * preference ({@code s3.encryption.key.<bucket>}) takes precedence; otherwise the
 * inherited default applies.
 *
 * @throws BackgroundException if the parent lookup fails
 */
@Override
public Algorithm getEncryption(final Path file) throws BackgroundException {
    if(containerService.isContainer(file)) {
        final String key = String.format("s3.encryption.key.%s", containerService.getContainer(file).getName());
        // Read the preference once instead of constructing HostPreferences and
        // resolving the property twice as before.
        final String algorithm = new HostPreferences(session.getHost()).getProperty(key);
        if(StringUtils.isNotBlank(algorithm)) {
            return Algorithm.fromString(algorithm);
        }
    }
    return super.getEncryption(file);
}
@Test
public void testSetEncryptionKMSCustomKeySignatureVersionV4() throws Exception {
    // Round-trip: set aws:kms encryption with a custom key ARN on a fresh object and
    // verify both algorithm and key are read back; clean up the object afterwards.
    final Path container = new Path("test-eu-west-1-cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory));
    final Path test = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(test, new TransferStatus());
    final S3EncryptionFeature feature = new S3EncryptionFeature(session, new S3AccessControlListFeature(session));
    feature.setEncryption(test, new Encryption.Algorithm("aws:kms", "arn:aws:kms:eu-west-1:930717317329:key/015fa0af-f95e-483e-8fb6-abffb46fb783"));
    final Encryption.Algorithm value = feature.getEncryption(test);
    assertEquals("aws:kms", value.algorithm);
    assertEquals("arn:aws:kms:eu-west-1:930717317329:key/015fa0af-f95e-483e-8fb6-abffb46fb783", value.key);
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Parses a feature identifier into a URI, accepting classpath URIs, plain file paths
 * (with either path separator) and Windows drive-letter paths.
 *
 * @param featureIdentifier the identifier to parse; must be non-null and non-empty
 * @return the resolved URI
 * @throws IllegalArgumentException if the identifier is empty
 */
public static URI parse(String featureIdentifier) {
    requireNonNull(featureIdentifier, "featureIdentifier may not be null");
    if (featureIdentifier.isEmpty()) {
        throw new IllegalArgumentException("featureIdentifier may not be empty");
    }

    // Legacy from the Cucumber Eclipse plugin
    // Older versions of Cucumber allowed it.
    if (CLASSPATH_SCHEME_PREFIX.equals(featureIdentifier)) {
        return rootPackageUri();
    }

    // Backslash separators mean this is a file path, never a URI.
    if (nonStandardPathSeparatorInUse(featureIdentifier)) {
        String standardized = replaceNonStandardPathSeparator(featureIdentifier);
        return parseAssumeFileScheme(standardized);
    }
    // "C:\..." style paths would otherwise be mistaken for a URI with scheme "C".
    if (isWindowsOS() && pathContainsWindowsDrivePattern(featureIdentifier)) {
        return parseAssumeFileScheme(featureIdentifier);
    }
    if (probablyURI(featureIdentifier)) {
        return parseProbableURI(featureIdentifier);
    }
    return parseAssumeFileScheme(featureIdentifier);
}
@Test
@EnabledOnOs(WINDOWS)
void can_parse_windows_path_form() {
    // Backslash-separated relative paths must normalize to a file: URI with forward slashes.
    URI uri = FeaturePath.parse("path\\to\\file.feature");
    assertAll(
        () -> assertThat(uri.getScheme(), is("file")),
        () -> assertThat(uri.getSchemeSpecificPart(), endsWith("path/to/file.feature")));
}
/**
 * Parses a single filter expression against the given entity attributes and returns
 * its MongoDB Bson representation.
 */
public Bson parseSingleExpression(final String filterExpression, final List<EntityAttribute> attributes) {
    return singleFilterParser.parseSingleExpression(filterExpression, attributes).toBson();
}
@Test
void parsesFilterExpressionCorrectlyForObjectIdType() {
    // A field declared as OBJECT_ID must have its string value converted into an ObjectId
    // inside the resulting equality filter.
    assertEquals(Filters.eq("id", new ObjectId("5f4dfb9c69be46153b9a9a7b")),
        toTest.parseSingleExpression("id:5f4dfb9c69be46153b9a9a7b",
            List.of(EntityAttribute.builder()
                .id("id")
                .title("Id")
                .type(SearchQueryField.Type.OBJECT_ID)
                .filterable(true)
                .build())
        ));
}
/**
 * Creates a product brand after validating that the brand name is unique.
 *
 * @param createReqVO the brand creation request
 * @return the generated brand id
 */
@Override
public Long createBrand(ProductBrandCreateReqVO createReqVO) {
    // Validate that the brand name is not already taken.
    validateBrandNameUnique(null, createReqVO.getName());
    // Convert the request into the persistence object and insert it.
    ProductBrandDO brand = ProductBrandConvert.INSTANCE.convert(createReqVO);
    brandMapper.insert(brand);
    // Return the database-generated id.
    return brand.getId();
}
@Test
public void testCreateBrand_success() {
    // Prepare a random creation request.
    ProductBrandCreateReqVO reqVO = randomPojo(ProductBrandCreateReqVO.class);

    // Invoke the service under test.
    Long brandId = brandService.createBrand(reqVO);
    // An id must have been generated.
    assertNotNull(brandId);
    // The persisted record must carry the same field values as the request.
    ProductBrandDO brand = brandMapper.selectById(brandId);
    assertPojoEquals(reqVO, brand);
}
// A query reading from more than one source schema is, by definition, a join.
public boolean isJoin() {
    final int sourceCount = sourceSchemas.size();
    return sourceCount > 1;
}
@Test
public void shouldBeJoinIfMultipleSchemas() {
    // When: two aliased source schemas are present
    sourceSchemas = new SourceSchemas(ImmutableMap.of(ALIAS_1, SCHEMA_1, ALIAS_2, SCHEMA_2));

    // Then: the schemas describe a join
    assertThat(sourceSchemas.isJoin(), is(true));
}
/**
 * Produces {@code length} bytes, each drawn uniformly at random from the SEED alphabet.
 *
 * @param length number of bytes to generate
 * @return a freshly allocated array of the requested length
 */
public byte[] generateRandomBytes(final int length) {
    final byte[] buffer = new byte[length];
    for (int index = 0; index < buffer.length; index++) {
        buffer[index] = SEED[random.nextInt(SEED.length)];
    }
    return buffer;
}
@Test
void assertGenerateRandomBytes() {
    // The generator must honor the requested output length exactly.
    assertThat(MySQLRandomGenerator.getINSTANCE().generateRandomBytes(8).length, is(8));
    assertThat(MySQLRandomGenerator.getINSTANCE().generateRandomBytes(12).length, is(12));
}
/**
 * Reads attributes for an SMB path. Containers (shares) report used size and quota from
 * share information; regular files/directories report attributes from file information.
 *
 * @throws NotfoundException if the entry exists but its type (file vs directory) does
 *         not match the requested path type
 * @throws BackgroundException on any other SMB failure
 */
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        // The SMB root has no attributes of its own.
        return PathAttributes.EMPTY;
    }
    final SMBSession.DiskShareWrapper share = session.openShare(file);
    try {
        if(new SMBPathContainerService(session).isContainer(file)) {
            final ShareInfo shareInformation = share.get().getShareInformation();
            final PathAttributes attributes = new PathAttributes();
            // Used space = total - free; expose both as size and quota.
            final long used = shareInformation.getTotalSpace() - shareInformation.getFreeSpace();
            attributes.setSize(used);
            attributes.setQuota(new Quota.Space(used, shareInformation.getFreeSpace()));
            return attributes;
        }
        else {
            final FileAllInformation fileInformation = share.get().getFileInformation(new SMBPathContainerService(session).getKey(file));
            // The server-side type must match the requested path type.
            if(file.isDirectory() && !fileInformation.getStandardInformation().isDirectory()) {
                throw new NotfoundException(String.format("File %s found but type is not directory", file.getName()));
            }
            else if(file.isFile() && fileInformation.getStandardInformation().isDirectory()) {
                throw new NotfoundException(String.format("File %s found but type is not file", file.getName()));
            }
            return this.toAttributes(fileInformation);
        }
    }
    catch(SMBRuntimeException e) {
        throw new SMBExceptionMappingService().map("Failure to read attributes of {0}", e, file);
    }
    finally {
        // Always return the share to the session pool.
        session.releaseShare(share);
    }
}
@Test(expected = NotfoundException.class)
public void testFindNotFound() throws Exception {
    // Looking up a random, never-created file must raise NotfoundException.
    final Path test = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final SMBAttributesFinderFeature f = new SMBAttributesFinderFeature(session);
    f.find(test);
}
// SQL cast SMALLINT -> BOOLEAN: any non-zero value is true, zero is false.
@ScalarOperator(CAST)
@SqlType(StandardTypes.BOOLEAN)
public static boolean castToBoolean(@SqlType(StandardTypes.SMALLINT) long value) {
    return value != 0;
}
@Test
public void testCastToBoolean() {
    // Non-zero smallints cast to true; zero casts to false.
    assertFunction("cast(SMALLINT'37' as boolean)", BOOLEAN, true);
    assertFunction("cast(SMALLINT'17' as boolean)", BOOLEAN, true);
    assertFunction("cast(SMALLINT'0' as boolean)", BOOLEAN, false);
}
// Resolves the given specs relative to the current working directory.
@Override
protected List<MatchResult> match(List<String> specs) throws IOException {
    return match(new File(".").getAbsolutePath(), specs);
}
@Test
public void testMatchInDirectory() throws Exception {
    // Matching a relative spec inside a specific base directory must return only the
    // exact file "a", not its siblings "aa" and "ab".
    List<String> expected = ImmutableList.of(temporaryFolder.newFile("a").toString());
    temporaryFolder.newFile("aa");
    temporaryFolder.newFile("ab");

    // Split the absolute expected path into directory + relative filename, honoring
    // the platform's path separator.
    String expectedFile = expected.get(0);
    int slashIndex = expectedFile.lastIndexOf('/');
    if (SystemUtils.IS_OS_WINDOWS) {
        slashIndex = expectedFile.lastIndexOf('\\');
    }
    String directory = expectedFile.substring(0, slashIndex);
    String relative = expectedFile.substring(slashIndex + 1);

    // This no longer works:
    // System.setProperty("user.dir", directory);
    // There is no way to set the working directory without forking. Instead we
    // call in to the helper method that gives just about as good test coverage.
    List<MatchResult> results = localFileSystem.match(directory, ImmutableList.of(relative));
    assertThat(
        toFilenames(results),
        containsInAnyOrder(expected.toArray(new String[expected.size()])));
}
/**
 * Returns a human-readable reason why the queue item for {@code nodeBlock} is blocked,
 * or null if there is no blockage or {@code nodeBlock} is not a child of {@code stage}.
 *
 * <p>Resolution order: the item's own cause first; if its description is null, fall
 * back to the task's cause.
 * NOTE(review): when the item's cause itself is null, the task's cause is never
 * consulted and null is returned — looks intentional per the unit tests, but confirm.
 */
public static @CheckForNull String getCauseOfBlockage(@NonNull FlowNode stage, @Nullable FlowNode nodeBlock) {
    if(nodeBlock != null){
        //Check and see if this node block is inside this stage
        for(FlowNode p:nodeBlock.getParents()){
            if(p.equals(stage)){
                Queue.Item item = QueueItemAction.getQueueItem(nodeBlock);
                if (item != null) {
                    CauseOfBlockage causeOfBlockage = item.getCauseOfBlockage();
                    String cause = null;
                    if (causeOfBlockage != null) {
                        cause = causeOfBlockage.getShortDescription();
                        if (cause == null) {
                            // Item-level cause had no description: fall back to the task's cause.
                            causeOfBlockage = item.task.getCauseOfBlockage();
                            if(causeOfBlockage != null) {
                                return causeOfBlockage.getShortDescription();
                            }
                        }
                    }
                    return cause;
                }
            }
        }
    }
    return null;
}
@Test
public void getCauseOfBlockage() throws Exception {
    // Walks every early-return path: null node block, node block not under the stage,
    // missing queue item, missing cause, item-level cause, and task-level fallback.
    CauseOfBlockage blockage = mock(CauseOfBlockage.class);
    CauseOfBlockage taskBlockage = mock(CauseOfBlockage.class);
    FlowNode stage = mock(FlowNode.class);
    FlowNode nodeBlock = mock(FlowNode.class);
    Queue.Item item = mock(Queue.Item.class);
    mockStatic(QueueItemAction.class);
    String cause = null;

    // Null node block -> null.
    cause = PipelineNodeUtil.getCauseOfBlockage(stage, null);
    assertNull(cause);

    // Node block with no parents -> null.
    when(nodeBlock.getParents()).thenReturn(Collections.emptyList());
    cause = PipelineNodeUtil.getCauseOfBlockage(stage, null);
    assertNull(cause);

    // Parent matches but no queue item -> null.
    when(nodeBlock.getParents()).thenReturn(Collections.singletonList(stage));
    when(QueueItemAction.getQueueItem(nodeBlock)).thenReturn(null);
    cause = PipelineNodeUtil.getCauseOfBlockage(stage, null);
    assertNull(cause);

    // Queue item exists but has no cause -> null.
    when(QueueItemAction.getQueueItem(nodeBlock)).thenReturn(item);
    when(item.getCauseOfBlockage()).thenReturn(null);
    cause = PipelineNodeUtil.getCauseOfBlockage(stage, null);
    assertNull(cause);

    // Item-level cause with description -> that description.
    when(blockage.getShortDescription()).thenReturn("test");
    when(item.getCauseOfBlockage()).thenReturn(blockage);
    cause = PipelineNodeUtil.getCauseOfBlockage(stage, nodeBlock);
    assertEquals("test", cause);

    // Item-level description null -> task-level cause is consulted.
    when(blockage.getShortDescription()).thenReturn(null);
    cause = PipelineNodeUtil.getCauseOfBlockage(stage, null);
    assertNull(cause);
    when(taskBlockage.getShortDescription()).thenReturn("test1");
    Whitebox.setInternalState(item,"task", mock(Queue.Task.class));
    when(item.task.getCauseOfBlockage()).thenReturn(taskBlockage);
    cause = PipelineNodeUtil.getCauseOfBlockage(stage, nodeBlock);
    assertEquals("test1", cause);
}
/**
 * Starts the configured command without attaching stdout/stderr collectors and
 * returns the live {@link Process}; the caller is responsible for consuming the
 * streams and waiting for termination.
 *
 * @throws IOException if the process cannot be started
 */
public Process executeWithNoStreamCollection() throws IOException, InterruptedException, ExecutionException {
    logger.atInfo().log("Executing the following command: '%s'", COMMAND_ARGS_JOINER.join(args));
    process = processBuilder.start();
    return process;
}
@Test
public void executeWithNoStreamCollection_always_startsProcessAndReturnsProcessInstance()
    throws IOException, InterruptedException, ExecutionException {
    // A trivial shell command must start, terminate, and exit with status 0.
    CommandExecutor executor = new CommandExecutor("/bin/sh", "-c", "echo 1");

    Process process = executor.executeWithNoStreamCollection();

    process.waitFor();
    assertThat(process.exitValue()).isEqualTo(0);
}
/**
 * Decodes an SMPP short-message body according to its data-coding alphabet.
 *
 * <p>Returns null for a missing body or an 8-bit (binary) payload. Known alphabets
 * map to fixed charsets; anything else is decoded with {@code defaultEncoding}.
 *
 * @throws UnsupportedEncodingException if {@code defaultEncoding} is not supported
 */
public static String decodeBody(byte[] body, byte dataCoding, String defaultEncoding)
        throws UnsupportedEncodingException {
    Alphabet alphabet = Alphabet.parseDataCoding(dataCoding);
    if (body == null || SmppUtils.is8Bit(alphabet)) {
        return null;
    }
    if (alphabet == Alphabet.ALPHA_IA5) {
        return new String(body, StandardCharsets.US_ASCII);
    }
    if (alphabet == Alphabet.ALPHA_LATIN1) {
        return new String(body, StandardCharsets.ISO_8859_1);
    }
    if (alphabet == Alphabet.ALPHA_UCS2) {
        return new String(body, StandardCharsets.UTF_16BE);
    }
    return new String(body, defaultEncoding);
}
@Test
void testDecodeBodyWithUnsupportedDefaultEncodingShouldThrow() throws UnsupportedEncodingException {
    // A default-alphabet body with an unknown fallback charset name must fail fast.
    Assertions.assertThrows(UnsupportedEncodingException.class, () -> {
        SmppUtils.decodeBody(new byte[] { 0 }, Alphabet.ALPHA_DEFAULT.value(), "X-Gsm7Bit");
    });
}
/**
 * REST endpoint: updates an existing Neutron network.
 *
 * <p>If this node is not the HA-active instance, the request is forwarded to the
 * active node instead of being applied locally.
 *
 * @param id    network identifier from the URL path
 * @param input JSON body describing the network
 * @return 200 OK on success
 * @throws IOException if the request body cannot be read
 */
@PUT
@Path("{id}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response updateNetwork(@PathParam("id") String id, InputStream input) throws IOException {
    log.trace(String.format(MESSAGE, "UPDATE " + id));

    String inputStr = IOUtils.toString(input, REST_UTF8);

    // Standby node: relay the PUT to the active HA instance.
    if (!haService.isActive() && !DEFAULT_ACTIVE_IP_ADDRESS.equals(haService.getActiveIp())) {
        return syncPut(haService, NETWORKS, id, inputStr);
    }

    final NeutronNetwork net = (NeutronNetwork) jsonToModelEntity(inputStr, NeutronNetwork.class);

    adminService.updateNetwork(net);

    return status(Response.Status.OK).build();
}
@Test
public void testUpdateNetworkWithNonexistId() {
    // The admin service rejects the update (IllegalArgumentException), which the REST
    // layer must translate into a 400 Bad Request.
    expect(mockOpenstackHaService.isActive()).andReturn(true).anyTimes();
    replay(mockOpenstackHaService);
    mockOpenstackNetworkAdminService.updateNetwork(anyObject());
    expectLastCall().andThrow(new IllegalArgumentException());
    replay(mockOpenstackNetworkAdminService);

    final WebTarget wt = target();
    InputStream jsonStream = OpenstackNetworkWebResourceTest.class
        .getResourceAsStream("openstack-network.json");

    Response response = wt.path(PATH + "/396f12f8-521e-4b91-8e21-2e003500433a")
        .request(MediaType.APPLICATION_JSON_TYPE)
        .put(Entity.json(jsonStream));
    final int status = response.getStatus();

    assertThat(status, is(400));

    verify(mockOpenstackNetworkAdminService);
}
// A disabled plugin reports the DISABLED sentinel period so the scheduler skips it;
// otherwise the fixed run period applies.
@Override
public long getPeriodMillis() {
    return enabled ? PERIOD_MILLIS : DiagnosticsPlugin.DISABLED;
}
@Test
public void testGetPeriodSeconds() {
    // An enabled plugin runs once per second (1000 ms).
    assertEquals(1000, plugin.getPeriodMillis());
}
/**
 * Replaces this device's configuration with that of {@code from}: attributes,
 * connections, application entities, web applications, Keycloak clients and
 * device extensions, in that order.
 *
 * @throws IOException              if reconfiguration I/O fails
 * @throws GeneralSecurityException if security-related settings cannot be applied
 */
public void reconfigure(Device from) throws IOException, GeneralSecurityException {
    setDeviceAttributes(from);
    reconfigureConnections(from);
    reconfigureApplicationEntities(from);
    reconfigureWebApplications(from);
    reconfigureKeycloakClients(from);
    reconfigureDeviceExtensions(from);
}
@Test
public void testReconfigure() throws Exception {
    // After reconfiguring d1 from d2, d1 must expose d2's AE ("AET2") with its connection.
    Device d1 = createDevice("test", "AET1");
    Device d2 = createDevice("test", "AET2");
    d1.reconfigure(d2);
    ApplicationEntity ae = d1.getApplicationEntity("AET2");
    assertNotNull(ae);
    List<Connection> conns = ae.getConnections();
    assertEquals(1, conns.size());
}
/**
 * Validates a config key's dataId and group: each must be non-blank and syntactically
 * valid, otherwise a CLIENT_INVALID_PARAM NacosException is thrown with the
 * corresponding message.
 *
 * @throws NacosException if dataId or group is blank or invalid
 */
public static void checkKeyParam(String dataId, String group) throws NacosException {
    // Identical validation for both components, extracted to a shared helper.
    validateKeyComponent(dataId, DATAID_INVALID_MSG);
    validateKeyComponent(group, GROUP_INVALID_MSG);
}

// Rejects a blank or syntactically invalid key component with the given message.
private static void validateKeyComponent(String value, String invalidMessage) throws NacosException {
    if (StringUtils.isBlank(value) || !ParamUtils.isValid(value)) {
        throw new NacosException(NacosException.CLIENT_INVALID_PARAM, invalidMessage);
    }
}
@Test
void testCheckKeyParam2() throws NacosException {
    // The three-argument overload must validate dataId, group and datumId in turn,
    // reporting which component is invalid when any of them is blank.
    String dataId = "b";
    String group = "c";
    String datumId = "a";
    ParamUtils.checkKeyParam(dataId, group, datumId);

    try {
        dataId = "";
        group = "c";
        ParamUtils.checkKeyParam(dataId, group, datumId);
        fail();
    } catch (NacosException e) {
        assertEquals("dataId invalid", e.getMessage());
    }

    try {
        dataId = "b";
        group = "";
        ParamUtils.checkKeyParam(dataId, group, datumId);
        fail();
    } catch (NacosException e) {
        assertEquals("group invalid", e.getMessage());
    }

    try {
        dataId = "b";
        group = "c";
        datumId = "";
        ParamUtils.checkKeyParam(dataId, group, datumId);
        fail();
    } catch (NacosException e) {
        assertEquals("datumId invalid", e.getMessage());
    }
}
/**
 * Decodes a stream of JSON data buffers into a Flux of deserialized objects.
 *
 * <p>Buffers are tokenized into complete JSON values (array elements are split into
 * individual tokens), each token is read with an ObjectReader for the target type,
 * and IO failures are mapped through {@code processException}.
 *
 * @param elementType the target element type to deserialize into
 * @param mimeType    ignored here; resolution happens via the ObjectReader
 * @param hints       decoding hints, also used for logging
 */
@Override
@NonNull
public Flux<Object> decode(@NonNull Publisher<DataBuffer> input,
                           @NonNull ResolvableType elementType,
                           @Nullable MimeType mimeType,
                           @Nullable Map<String, Object> hints) {

    ObjectMapper mapper = getObjectMapper();
    // tokenizeArrayElements=true: top-level arrays are emitted element by element.
    Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(
        Flux.from(input), mapper.getFactory(), mapper, true);

    ObjectReader reader = getObjectReader(elementType, hints);

    return tokens
        // Propagate the caller's locale context through the reactive pipeline.
        .as(LocaleUtils::transform)
        .handle((tokenBuffer, sink) -> {
            try {
                Object value = reader.readValue(tokenBuffer.asParser(getObjectMapper()));
                logValue(value, hints);
                if (value != null) {
                    sink.next(value);
                }
            } catch (IOException ex) {
                sink.error(processException(ex));
            }
        });
}
@Test
@SneakyThrows
public void testGeneric() {
    // Decoding into a generic PagerResult<MyEntity> must preserve both the generic
    // element type and the nested entity fields.
    ObjectMapper mapper = new ObjectMapper();
    CustomJackson2JsonDecoder decoder = new CustomJackson2JsonDecoder(new MapperEntityFactory(), mapper);
    ResolvableType type = ResolvableType.forClassWithGenerics(PagerResult.class, MyEntity.class);
    DataBuffer buffer = new DefaultDataBufferFactory().wrap("{\"pageSize\":1,\"data\":[{\"id\":\"test\"}]}".getBytes());
    Object object = decoder.decode(buffer, type, MediaType.APPLICATION_JSON, Collections.emptyMap());
    assertTrue(object instanceof PagerResult);
    PagerResult<MyEntity> result= ((PagerResult<MyEntity>) object);
    assertTrue(result.getData().size()>0);
    assertEquals(result.getData().get(0).getId(), "test");
}
/**
 * Reconstructs a DeletionTaskRecoveryInfo from its persisted protobuf form: the
 * deletion task itself, the ids of successor tasks, and the recorded deletion time.
 */
public static DeletionTaskRecoveryInfo convertProtoToDeletionTaskRecoveryInfo(
    DeletionServiceDeleteTaskProto proto,
    DeletionService deletionService) {
    DeletionTask deletionTask =
        NMProtoUtils.convertProtoToDeletionTask(proto, deletionService);
    List<Integer> successorTaskIds = new ArrayList<>();
    // Defensive: fall back to an empty list when the proto carries no successor ids.
    if (proto.getSuccessorIdsList() != null &&
        !proto.getSuccessorIdsList().isEmpty()) {
        successorTaskIds = proto.getSuccessorIdsList();
    }
    long deletionTimestamp = proto.getDeletionTime();
    return new DeletionTaskRecoveryInfo(deletionTask, successorTaskIds,
        deletionTimestamp);
}
@Test
public void testConvertProtoToDeletionTaskRecoveryInfo() throws Exception {
    // The recovery-info value object must return exactly what it was constructed with.
    long delTime = System.currentTimeMillis();
    List<Integer> successorTaskIds = Arrays.asList(1);
    DeletionTask deletionTask = mock(DeletionTask.class);
    DeletionTaskRecoveryInfo info =
        new DeletionTaskRecoveryInfo(deletionTask, successorTaskIds, delTime);
    assertEquals(deletionTask, info.getTask());
    assertEquals(successorTaskIds, info.getSuccessorTaskIds());
    assertEquals(delTime, info.getDeletionTimestamp());
}
/**
 * Opens a streaming upload to the remote path. The returned stream carries no
 * post-write status payload (Void).
 */
@Override
public StatusOutputStream<Void> write(final Path file, final TransferStatus status, final ConnectionCallback callback) {
    return new VoidStatusOutputStream(session.getClient().putAsOutputStream(file.getAbsolute()));
}
@Test
public void testWriteUnknownLength() throws Exception {
    // An upload with unknown length (-1) must still stream all 5 MiB and the remote
    // object must report the exact content size afterwards.
    final MantaWriteFeature feature = new MantaWriteFeature(session);
    final Path container = randomDirectory();
    new MantaDirectoryFeature(session).mkdir(container, new TransferStatus());
    final byte[] content = RandomUtils.nextBytes(5 * 1024 * 1024);
    final TransferStatus status = new TransferStatus();
    status.setLength(-1L);
    final Path file = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final StatusOutputStream<Void> out = feature.write(file, status, new DisabledConnectionCallback());
    final ByteArrayInputStream in = new ByteArrayInputStream(content);
    // Copy through a small 1 KiB buffer to exercise chunked streaming.
    final int alloc = 1024;
    final byte[] buffer = new byte[alloc];
    assertEquals(content.length, IOUtils.copyLarge(in, out, buffer));
    out.close();
    final PathAttributes found = new MantaAttributesFinderFeature(session).find(file);
    assertEquals(found.getSize(), content.length);
    new MantaDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Validates a UDF argument array against the declared parameter types.
 *
 * <p>Null arguments are accepted for any declared type; only non-null values are
 * checked for assignability. Throws KsqlFunctionException on a null array, an arity
 * mismatch, or a type mismatch.
 */
public static void ensureCorrectArgs(
    final FunctionName functionName,
    final Object[] args,
    final Class<?>... argTypes
) {
  if (args == null) {
    throw new KsqlFunctionException("Null argument list for " + functionName.text() + ".");
  }

  if (args.length != argTypes.length) {
    throw new KsqlFunctionException("Incorrect arguments for " + functionName.text() + ".");
  }

  for (int i = 0; i < argTypes.length; i++) {
    final Object arg = args[i];
    if (arg == null) {
      continue;
    }
    if (!argTypes[i].isAssignableFrom(arg.getClass())) {
      throw new KsqlFunctionException(
          String.format(
              "Incorrect arguments type for %s. "
                  + "Expected %s for arg number %d but found %s.",
              functionName.text(),
              argTypes[i].getCanonicalName(),
              i,
              arg.getClass().getCanonicalName()
          ));
    }
  }
}
@Test
public void shouldPassWithNullArgs() {
    // Null argument values are permitted for any declared parameter type.
    final Object[] args = new Object[]{"TtestArg1", null};
    UdfUtil.ensureCorrectArgs(FUNCTION_NAME, args, String.class, Long.class);
}
/**
 * Returns a paged catalog view of services matching the given namespace/group/name
 * pattern, as a JSON object with a total count and a list of per-service summaries.
 *
 * @param pageNo             1-based page number
 * @param ignoreEmptyService when true, services with zero instances are excluded
 * @throws NacosException on lookup failure
 */
@Override
public Object pageListService(String namespaceId, String groupName, String serviceName, int pageNo, int pageSize,
                              String instancePattern, boolean ignoreEmptyService) throws NacosException {
    ObjectNode result = JacksonUtils.createEmptyJsonNode();
    List<ServiceView> serviceViews = new LinkedList<>();
    Collection<Service> services = patternServices(namespaceId, groupName, serviceName);
    if (ignoreEmptyService) {
        services = services.stream().filter(each -> 0 != serviceStorage.getData(each).ipCount())
                .collect(Collectors.toList());
    }
    // Count reflects the full (filtered) set, before pagination.
    result.put(FieldsConstants.COUNT, services.size());
    // doPage expects a 0-based page index.
    services = doPage(services, pageNo - 1, pageSize);
    for (Service each : services) {
        ServiceMetadata serviceMetadata = metadataManager.getServiceMetadata(each).orElseGet(ServiceMetadata::new);
        ServiceView serviceView = new ServiceView();
        serviceView.setName(each.getName());
        serviceView.setGroupName(each.getGroup());
        serviceView.setClusterCount(serviceStorage.getClusters(each).size());
        serviceView.setIpCount(serviceStorage.getData(each).ipCount());
        serviceView.setHealthyInstanceCount(countHealthyInstance(serviceStorage.getData(each)));
        // Flag whether the healthy-instance ratio is below the protect threshold.
        serviceView.setTriggerFlag(isProtectThreshold(serviceView, serviceMetadata) ? "true" : "false");
        serviceViews.add(serviceView);
    }
    result.set(FieldsConstants.SERVICE_LIST, JacksonUtils.transferToJsonNode(serviceViews));
    return result;
}
@Test
void testPageListServiceForPage() throws NacosException {
    // With three registered services, page 2 of size 1 must report total count 3
    // and contain exactly the second service.
    ServiceInfo serviceInfo = new ServiceInfo();
    Mockito.when(serviceStorage.getData(Mockito.any())).thenReturn(serviceInfo);
    ServiceManager.getInstance().getSingleton(Service.newService("CatalogService", "CatalogService", "1"));
    ServiceManager.getInstance().getSingleton(Service.newService("CatalogService", "CatalogService", "2"));
    ServiceManager.getInstance().getSingleton(Service.newService("CatalogService", "CatalogService", "3"));
    ObjectNode obj = (ObjectNode) catalogServiceV2Impl.pageListService("CatalogService", "", "", 2, 1, null, false);
    assertEquals(3, obj.get(FieldsConstants.COUNT).asInt());
    assertEquals("2", obj.get(FieldsConstants.SERVICE_LIST).get(0).get("name").asText());
}
/**
 * Returns the object's size in bytes, fetching it lazily from object metadata on
 * first access and caching it afterwards.
 * NOTE(review): not synchronized — assumes single-threaded use or a benign duplicate
 * metadata fetch under concurrency; confirm against callers.
 */
@Override
public long getLength() {
    if (length == null) {
        length = objectMetadata().getSize();
    }
    return length;
}
@Test
public void testGetLength() {
    // Verifies the metadata call count for length resolution: zero when the size is
    // already known, exactly one when it must be fetched lazily.
    OSSURI uri = randomURI();

    int dataSize = 8;
    byte[] data = randomData(dataSize);
    writeOSSData(uri, data);

    verifyLength(ossMock, uri, data, true);
    verify(ossMock, times(0)).getSimplifiedObjectMeta(uri.bucket(), uri.key());
    reset(ossMock);

    verifyLength(ossMock, uri, data, false);
    verify(ossMock, times(1)).getSimplifiedObjectMeta(uri.bucket(), uri.key());
    reset(ossMock);
}
// A plugin counts as loaded only when a descriptor exists and passed validation.
@Override
public boolean isPluginLoaded(String pluginId) {
    final GoPluginDescriptor descriptor = getPluginDescriptorFor(pluginId);
    return descriptor != null && !descriptor.isInvalid();
}
// When the registry has no descriptor for the id, isPluginLoaded must report false.
@Test void isPluginLoaded_shouldReturnFalseWhenPluginIsNotLoaded() { when(registry.getPlugin("cd.go.elastic-agent.docker")).thenReturn(null); DefaultPluginManager pluginManager = new DefaultPluginManager(monitor, registry, mock(GoPluginOSGiFramework.class), jarChangeListener, pluginRequestProcessorRegistry, systemEnvironment, pluginLoader); assertThat(pluginManager.isPluginLoaded("cd.go.elastic-agent.docker")).isFalse(); }
// KSQL UDF: URL-decodes the input using UTF-8; null passes through as null.
// UnsupportedEncodingException cannot realistically occur for UTF-8 but is wrapped
// as a KsqlFunctionException to satisfy the checked signature of URLDecoder.decode.
@Udf public String decodeParam( @UdfParameter(description = "the value to decode") final String input) { if (input == null) { return null; } try { return URLDecoder.decode(input, UTF_8.name()); } catch (final UnsupportedEncodingException e) { throw new KsqlFunctionException( "url_decode udf encountered an encoding exception while decoding: " + input, e); } }
// Decoding an empty string yields an empty string (no null, no exception).
@Test public void shouldReturnEmptyStringForEmptyInput() { assertThat(decodeUdf.decodeParam(""), equalTo("")); }
// Derives an identifier by hashing the user's email; rejects a null user or a
// null/empty email via requireNonNull (emptyToNull collapses "" to null first).
@Override public String create(UserDto user) { UserDto userDto = requireNonNull(user, "User cannot be null"); return hash(requireNonNull(emptyToNull(userDto.getEmail()), "Email cannot be null")); }
// Same email in different letter case must hash to the same identifier.
@Test public void create_is_case_insensitive() { assertThat(underTest.create(UserTesting.newUserDto("john", "John", "john@doo.com"))).isEqualTo(underTest.create(UserTesting.newUserDto("john", "John", "John@Doo.com"))); }
// Interceptor hook: if the target's reflected "rpcClient" reports isShutdown()==true,
// skip the intercepted deregister call; otherwise let it proceed unchanged.
@Override protected ExecuteContext doBefore(ExecuteContext context) { final Optional<Object> rpcClient = ReflectUtils.getFieldValue(context.getObject(), "rpcClient"); if (!rpcClient.isPresent()) { return context; } final Object client = rpcClient.get(); final Optional<Object> isShutdownRaw = ReflectUtils.invokeMethod(client, "isShutdown", null, null); if (isShutdownRaw.isPresent() && isShutdownRaw.get() instanceof Boolean) { boolean isShutdown = (boolean) isShutdownRaw.get(); if (isShutdown) { LOGGER.info("RpcClient has been shutdown, skip deregister operation!"); context.skip(null); } } return context; }
// Covers three interceptor paths: no rpcClient field, rpcClient not shut down
// (both not skipped), and rpcClient shut down (context is skipped).
@Test public void test() throws NoSuchMethodException { final NacosGrpcDeRegisterInterceptor interceptor = new NacosGrpcDeRegisterInterceptor(); final ExecuteContext context = buildContext(new NacosGrpcProxy1()); final ExecuteContext context1 = interceptor.doBefore(context); Assert.assertFalse(context1.isSkip()); final ExecuteContext context2 = buildContext(new NacosRealGrpcProxy()); final ExecuteContext context3 = interceptor.doBefore(context2); Assert.assertFalse(context3.isSkip()); final NacosRealGrpcProxy nacosRealGrpcProxy = new NacosRealGrpcProxy(); nacosRealGrpcProxy.rpcClient.isShutdown = true; final ExecuteContext context4 = buildContext(nacosRealGrpcProxy); final ExecuteContext context5 = interceptor.doBefore(context4); Assert.assertTrue(context5.isSkip()); }
// Debug representation listing all counters currently held in the set.
@Override public String toString() { return MoreObjects.toStringHelper(this).add("counters", counters.values()).toString(); }
// Re-registering a counter name with an incompatible type must throw, and the
// message must include the existing counter, the requested one, and the whole set.
@Test public void testAddOrReuseWithIncompatibleTypesThrowsException() { Counter<?, ?> c1 = counterSet.longSum(name1); thrown.expect(IllegalArgumentException.class); // Should print the existing counter thrown.expectMessage(c1.toString()); // Should print the counter we're trying to create thrown.expectMessage(new CounterFactory().doubleSum(name1).toString()); // Should print the contents of the counter set thrown.expectMessage(counterSet.toString()); counterSet.doubleSum(name1); }
/**
 * Validates a Chinese Unified Social Credit Code (18 characters).
 *
 * @param creditCode candidate code; may be any CharSequence
 * @return {@code true} only if the code passes the structural pre-check,
 *         yields a valid parity index, and its 18th character matches the
 *         computed parity character from {@code BASE_CODE_ARRAY}
 */
public static boolean isCreditCode(CharSequence creditCode) {
    // Idiomatic negation instead of the original `false == expr` comparison.
    // Cheap structural check (length/character set) before the checksum work.
    if (!isCreditCodeSimple(creditCode)) {
        return false;
    }
    final int parityBit = getParityBit(creditCode);
    if (parityBit < 0) {
        // Checksum could not be computed (illegal character encountered).
        return false;
    }
    // The last (18th) character must equal the expected parity character.
    return creditCode.charAt(17) == BASE_CODE_ARRAY[parityBit];
}
// A known-good credit code with correct parity character must validate.
@Test public void isCreditCode() { String testCreditCode = "91310110666007217T"; assertTrue(CreditCodeUtil.isCreditCode(testCreditCode)); }
// Creates a MijnDigiD session from an app session id and returns the new session id
// in a response header; a missing body or app-session id yields 400 Bad Request.
@Operation(summary = "Request a new mijn digid session based on an app session") @PostMapping(value = "/request_session", consumes = "application/json") public ResponseEntity<?> requestSession(@RequestBody @Valid MijnDigidSessionRequest request){ if(request == null || request.getAppSessionId() == null) { return ResponseEntity.badRequest().build(); } String mijnDigiDSessionId = mijnDigiDSessionService.createSession(request.getAppSessionId()).getId(); return ResponseEntity .ok() .header(MijnDigidSession.MIJN_DIGID_SESSION_HEADER, mijnDigiDSessionId) .build(); }
// A null request body must produce HTTP 400.
@Test void validateBadRequestOnNoRequest() { ResponseEntity<?> response = mijnDigiDSessionController.requestSession(null); assertEquals(response.getStatusCode(), HttpStatus.BAD_REQUEST); }
// Test hook: evaluates the compiled term against a single row.
@VisibleForTesting Object evaluate(final GenericRow row) { return term.getValue(new TermEvaluationContext(row)); }
// CASTing map constructors: string keys/values to INT map, and double keys to
// STRING keys / BIGINT values — verifies element-wise coercion of both sides.
@Test public void shouldEvaluateCastToMap() { // Given: final Expression cast1 = new Cast( new CreateMapExpression(ImmutableMap.of( new StringLiteral("1"), new StringLiteral("2"), new StringLiteral("3"), new StringLiteral("4"))), new Type(SqlTypes.map(SqlTypes.INTEGER, SqlTypes.INTEGER)) ); final Expression cast2 = new Cast( new CreateMapExpression(ImmutableMap.of( new DoubleLiteral(2.5), new StringLiteral("2"), new DoubleLiteral(3.5), new StringLiteral("4"))), new Type(SqlTypes.map(SqlTypes.STRING, SqlTypes.BIGINT)) ); // When: InterpretedExpression interpreter1 = interpreter(cast1); InterpretedExpression interpreter2 = interpreter(cast2); // Then: assertThat(interpreter1.evaluate(ROW), is(ImmutableMap.of(1, 2, 3, 4))); assertThat(interpreter2.evaluate(ROW), is(ImmutableMap.of("2.5", 2L, "3.5", 4L))); }
// Reply callback: records the first fatal error (subsequent replies are ignored
// once a failure is set), otherwise computes latency from the send-time stored
// in the reply context and accumulates reply statistics.
@Override public void handleReply(Reply reply) { if (failure.get() != null) { return; } if (containsFatalErrors(reply.getErrors())) { failure.compareAndSet(null, new IOException(formatErrors(reply))); return; } long now = System.currentTimeMillis(); long latency = now - (long) reply.getContext(); numReplies.incrementAndGet(); accumulateReplies(now, latency); }
// Feeds an XML document stream (put, update, remove) through a message handler
// that immediately acks each message, and checks the feeder's summary output.
@Test public void requireThatXMLFeederWorks() throws Throwable { assertFeed("<vespafeed>" + " <document documenttype='simple' documentid='id:scheme:simple::0'>" + " <my_str>foo</my_str>" + " </document>" + " <update documenttype='simple' documentid='id:scheme:simple::1'>" + " <assign field='my_str'>bar</assign>" + " </update>" + " <remove documenttype='simple' documentid='id:scheme:simple::2'/>" + "</vespafeed>", new MessageHandler() { @Override public void handleMessage(Message msg) { Reply reply = ((DocumentMessage)msg).createReply(); reply.swapState(msg); reply.popHandler().handleReply(reply); } }, "", "(.+\n)+" + "\\s*\\d+,\\s*3,.+\n"); }
// Splits the input into whitespace-separated tokens while honouring quoted spans:
// text between matching quote characters (single or double, as decided by
// isSingleQuoted/isDoubleQuoted) is emitted as one token without its outer quotes.
// Whitespace inside an open quoted span is preserved; unterminated quoting throws.
// Null input yields an empty, mutable list.
public static List<String> splitToWhiteSpaceSeparatedTokens(String input) { if (input == null) { return new ArrayList<>(); } StringTokenizer tokenizer = new StringTokenizer(input.trim(), QUOTE_CHAR + WHITESPACE, true); List<String> tokens = new ArrayList<>(); StringBuilder quotedText = new StringBuilder(); while (tokenizer.hasMoreTokens()) { String token = tokenizer.nextToken(); if (QUOTE_CHAR.equals(token)) { // if we have a quote, add the next tokens to the quoted text // until the quoting has finished quotedText.append(QUOTE_CHAR); String buffer = quotedText.toString(); if (isSingleQuoted(buffer) || isDoubleQuoted(buffer)) { tokens.add(buffer.substring(1, buffer.length() - 1)); quotedText = new StringBuilder(); } } else if (WHITESPACE.equals(token)) { // a white space, if in quote, add the white space, otherwise // skip it if (quotedText.length() > 0) { quotedText.append(WHITESPACE); } } else { if (quotedText.length() > 0) { quotedText.append(token); } else { tokens.add(token); } } } if (quotedText.length() > 0) { throw new IllegalArgumentException("Invalid quoting found in args " + quotedText); } return tokens; }
// Doubled double-quotes collapse to a single quoted token containing literal quotes.
@Test public void testDoubleQuoteAndSpace() { List<String> args = splitToWhiteSpaceSeparatedTokens("\"\"arg0\"\" arg1"); assertEquals("\"arg0\"", args.get(0)); assertEquals("arg1", args.get(1)); }
// Dispatches to one of four source builders based on the data-source kind
// (KSTREAM vs KTABLE) and whether its key format is windowed.
public static SchemaKStream<?> buildSource( final PlanBuildContext buildContext, final DataSource dataSource, final QueryContext.Stacker contextStacker ) { final boolean windowed = dataSource.getKsqlTopic().getKeyFormat().isWindowed(); switch (dataSource.getDataSourceType()) { case KSTREAM: return windowed ? buildWindowedStream( buildContext, dataSource, contextStacker ) : buildStream( buildContext, dataSource, contextStacker ); case KTABLE: return windowed ? buildWindowedTable( buildContext, dataSource, contextStacker ) : buildTable( buildContext, dataSource, contextStacker ); default: throw new UnsupportedOperationException("Source type:" + dataSource.getDataSourceType()); } }
// Non-windowed KTABLE source must build a SchemaKTable backed by a TableSource
// step with a valid schema and no upstream sources.
@Test public void shouldBuildV2NonWindowedTable() { // Given: givenNonWindowedTable(); // When: final SchemaKStream<?> result = SchemaKSourceFactory.buildSource( buildContext, dataSource, contextStacker ); // Then: assertThat(result, instanceOf(SchemaKTable.class)); assertThat(result.getSourceStep(), instanceOf(TableSource.class)); assertValidSchema(result); assertThat(result.getSourceStep().getSources(), is(empty())); }
/**
 * Decides whether the current Iceberg column type must be widened to hold the
 * incoming Connect value type. Only two widenings are supported:
 * FLOAT -> DOUBLE and INTEGER -> LONG. Returns {@code null} when no widening
 * applies (the caller skips the update in that case).
 */
static PrimitiveType needsDataTypeUpdate(Type currentIcebergType, Schema valueSchema) {
    final TypeID currentTypeId = currentIcebergType.typeId();
    final Schema.Type incomingType = valueSchema.type();
    final boolean widenFloatToDouble =
        currentTypeId == TypeID.FLOAT && incomingType == Schema.Type.FLOAT64;
    if (widenFloatToDouble) {
        return DoubleType.get();
    }
    final boolean widenIntToLong =
        currentTypeId == TypeID.INTEGER && incomingType == Schema.Type.INT64;
    if (widenIntToLong) {
        return LongType.get();
    }
    return null;
}
// Only float->double and int->long widenings are reported; any other combination
// returns null so the caller skips the type update.
@Test public void testNeedsDataTypeUpdate() { // valid updates assertThat(SchemaUtils.needsDataTypeUpdate(FloatType.get(), Schema.FLOAT64_SCHEMA)) .isInstanceOf(DoubleType.class); assertThat(SchemaUtils.needsDataTypeUpdate(IntegerType.get(), Schema.INT64_SCHEMA)) .isInstanceOf(LongType.class); // other updates will be skipped assertThat(SchemaUtils.needsDataTypeUpdate(IntegerType.get(), Schema.STRING_SCHEMA)).isNull(); assertThat(SchemaUtils.needsDataTypeUpdate(FloatType.get(), Schema.STRING_SCHEMA)).isNull(); assertThat(SchemaUtils.needsDataTypeUpdate(StringType.get(), Schema.INT64_SCHEMA)).isNull(); }
// Example: extracts the plain text of the bundled "test.doc" resource via Tika;
// the stream is closed by try-with-resources.
public String parseToStringExample() throws IOException, SAXException, TikaException { Tika tika = new Tika(); try (InputStream stream = ParsingExample.class.getResourceAsStream("test.doc")) { return tika.parseToString(stream); } }
// The extracted text of the sample document, trimmed, must equal "test".
@Test public void testParseToStringExample() throws IOException, SAXException, TikaException { String result = parsingExample .parseToStringExample() .trim(); assertEquals("test", result, "enough detectors?"); }
// Truth-style assertion: fails when the subject Optional is null or present.
public void isAbsent() { if (actual == null) { failWithActual(simpleFact("expected absent optional")); } else if (actual.isPresent()) { failWithoutActual( simpleFact("expected to be absent"), fact("but was present with value", actual.get())); } }
// A null subject fails isAbsent with the "expected absent optional" fact.
@Test public void isAbsentFailingNull() { expectFailureWhenTestingThat(null).isAbsent(); assertFailureKeys("expected absent optional", "but was"); }
/**
 * Coerces a JSON node to a double: numeric nodes are read directly, text nodes
 * are parsed with {@link Double#parseDouble}. Any other node type, or
 * unparseable text, results in the serde's coercion/conversion exception.
 */
static double toDouble(final JsonNode object) {
    if (object instanceof NumericNode) {
        return object.doubleValue();
    }
    if (object instanceof TextNode) {
        final String text = object.textValue();
        try {
            return Double.parseDouble(text);
        } catch (final NumberFormatException e) {
            // Text present but not a valid double literal.
            throw failedStringCoercionException(SqlBaseType.DOUBLE);
        }
    }
    // Booleans, arrays, objects, null nodes, etc. cannot be coerced.
    throw invalidConversionException(object, SqlBaseType.DOUBLE);
}
// A boolean node cannot be coerced to double and must raise IllegalArgumentException.
@Test(expected = IllegalArgumentException.class) public void shouldFailWhenConvertingIncompatibleDouble() { JsonSerdeUtils.toDouble(JsonNodeFactory.instance.booleanNode(true)); }
// Changes an account's phone number, atomically under locks on both the old and
// new numbers. Same-number calls are a no-op (and reject a supplied PNI identity
// key — that flow belongs to updatePniKeys). Otherwise: validates the supplied
// per-device PNI key material, displaces any account already on the target
// number, clears single-use pre-keys for both PNIs, and retries the account
// update until the number change commits.
public Account changeNumber(final Account account, final String targetNumber, @Nullable final IdentityKey pniIdentityKey, @Nullable final Map<Byte, ECSignedPreKey> pniSignedPreKeys, @Nullable final Map<Byte, KEMSignedPreKey> pniPqLastResortPreKeys, @Nullable final Map<Byte, Integer> pniRegistrationIds) throws InterruptedException, MismatchedDevicesException { final String originalNumber = account.getNumber(); final UUID originalPhoneNumberIdentifier = account.getPhoneNumberIdentifier(); if (originalNumber.equals(targetNumber)) { if (pniIdentityKey != null) { throw new IllegalArgumentException("change number must supply a changed phone number; otherwise use updatePniKeys"); } return account; } validateDevices(account, pniSignedPreKeys, pniPqLastResortPreKeys, pniRegistrationIds); final AtomicReference<Account> updatedAccount = new AtomicReference<>(); accountLockManager.withLock(List.of(account.getNumber(), targetNumber), () -> { redisDelete(account); // There are three possible states for accounts associated with the target phone number: // // 1. An account exists with the target number; the caller has proved ownership of the number, so delete the // account with the target number. This will leave a "deleted account" record for the deleted account mapping // the UUID of the deleted account to the target phone number. We'll then overwrite that so it points to the // original number to facilitate switching back and forth between numbers. // 2. No account with the target number exists, but one has recently been deleted. In that case, add a "deleted // account" record that maps the ACI of the recently-deleted account to the now-abandoned original phone number // of the account changing its number (which facilitates ACI consistency in cases that a party is switching // back and forth between numbers). // 3. No account with the target number exists at all, in which case no additional action is needed. 
final Optional<UUID> recentlyDeletedAci = accounts.findRecentlyDeletedAccountIdentifier(targetNumber); final Optional<Account> maybeExistingAccount = getByE164(targetNumber); final Optional<UUID> maybeDisplacedUuid; if (maybeExistingAccount.isPresent()) { delete(maybeExistingAccount.get()).join(); maybeDisplacedUuid = maybeExistingAccount.map(Account::getUuid); } else { maybeDisplacedUuid = recentlyDeletedAci; } final UUID uuid = account.getUuid(); final UUID phoneNumberIdentifier = phoneNumberIdentifiers.getPhoneNumberIdentifier(targetNumber); CompletableFuture.allOf( keysManager.deleteSingleUsePreKeys(phoneNumberIdentifier), keysManager.deleteSingleUsePreKeys(originalPhoneNumberIdentifier)) .join(); final Collection<TransactWriteItem> keyWriteItems = buildPniKeyWriteItems(uuid, phoneNumberIdentifier, pniSignedPreKeys, pniPqLastResortPreKeys); final Account numberChangedAccount = updateWithRetries( account, a -> { setPniKeys(account, pniIdentityKey, pniRegistrationIds); return true; }, a -> accounts.changeNumber(a, targetNumber, phoneNumberIdentifier, maybeDisplacedUuid, keyWriteItems), () -> accounts.getByAccountIdentifier(uuid).orElseThrow(), AccountChangeValidator.NUMBER_CHANGE_VALIDATOR); updatedAccount.set(numberChangedAccount); }, accountLockExecutor); return updatedAccount.get(); }
// Changing to the same number is a no-op: account keeps its number and no
// single-use pre-keys are deleted.
@Test void testChangePhoneNumberSameNumber() throws InterruptedException, MismatchedDevicesException { final String number = "+14152222222"; Account account = AccountsHelper.generateTestAccount(number, UUID.randomUUID(), UUID.randomUUID(), new ArrayList<>(), new byte[UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH]); account = accountsManager.changeNumber(account, number, null, null, null, null); assertEquals(number, account.getNumber()); verify(keysManager, never()).deleteSingleUsePreKeys(any()); }
// Closes the underlying channel if it is still open; safe to call repeatedly.
@Override public void close() { if (ch.isOpen()) { ch.close(); } }
// IPv4-preferred resolver answering A-or-AAAA queries; resolver is always closed.
@Test public void testResolveAorAAAA() throws Exception { DnsNameResolver resolver = newResolver(ResolvedAddressTypes.IPV4_PREFERRED).build(); try { testResolve0(resolver, EXCLUSIONS_RESOLVE_A, AAAA); } finally { resolver.close(); } }
// Replaces (or adds) a member by id, then refreshes the group's subscribed topic
// names / subscription type and its derived state. Null members are rejected.
@Override public void updateMember(ShareGroupMember newMember) { if (newMember == null) { throw new IllegalArgumentException("newMember cannot be null."); } ShareGroupMember oldMember = members.put(newMember.memberId(), newMember); maybeUpdateSubscribedTopicNamesAndGroupSubscriptionType(oldMember, newMember); maybeUpdateGroupState(); }
// After updateMember, fetching the member by id returns the updated instance.
@Test public void testUpdateMember() { ShareGroup shareGroup = createShareGroup("foo"); ShareGroupMember member; member = shareGroup.getOrMaybeCreateMember("member", true); member = new ShareGroupMember.Builder(member) .setSubscribedTopicNames(Arrays.asList("foo", "bar")) .build(); shareGroup.updateMember(member); assertEquals(member, shareGroup.getOrMaybeCreateMember("member", false)); }
// Applies the Jolt transform chain for the target version to the JSON payload
// and returns the migrated JSON string.
public String migrate(String oldJSON, int targetVersion) { LOGGER.debug("Migrating to version {}: {}", targetVersion, oldJSON); Chainr transform = getTransformerFor(targetVersion); Object transformedObject = transform.transform(JsonUtils.jsonToMap(oldJSON), getContextMap(targetVersion)); String transformedJSON = JsonUtils.toJsonString(transformedObject); LOGGER.debug("After migration to version {}: {}", targetVersion, transformedJSON); return transformedJSON; }
// Migrating to the current contract version succeeds; one version beyond it
// must fail with a "Failed to migrate to version N" message.
@Test void currentContractVersionShouldBeTheHighestPossibleMigration() { assertThat(JsonMessageHandler1_0.CURRENT_CONTRACT_VERSION, is(JsonMessageHandler3_0.CURRENT_CONTRACT_VERSION)); assertThat(JsonMessageHandler2_0.CURRENT_CONTRACT_VERSION, is(JsonMessageHandler3_0.CURRENT_CONTRACT_VERSION)); new ConfigRepoMigrator().migrate("{}", JsonMessageHandler3_0.CURRENT_CONTRACT_VERSION); try { new ConfigRepoMigrator().migrate("{}", JsonMessageHandler3_0.CURRENT_CONTRACT_VERSION + 1); fail("Should have failed to migrate to wrong version which is one more than the current contract version"); } catch (RuntimeException e) { assertThat(e.getMessage(), is(String.format("Failed to migrate to version %s", JsonMessageHandler3_0.CURRENT_CONTRACT_VERSION + 1))); } }
// Returns the precomputed entropy of this distribution.
@Override public double entropy() { return entropy; }
// Entropy of LogNormal(1,1) must be ~2.418939 regardless of sampling.
@Test public void testEntropy() { System.out.println("entropy"); LogNormalDistribution instance = new LogNormalDistribution(1.0, 1.0); instance.rand(); assertEquals(2.418939, instance.entropy(), 1E-6); }
// Returns the local IP, cached after first resolution; a system property can
// override the detected address. NOTE(review): the check-then-assign caching is
// not synchronized — benign if getAddress() is deterministic, confirm intent.
public static String localIP() { if (!StringUtils.isEmpty(localIp)) { return localIp; } if (System.getProperties().containsKey(CLIENT_LOCAL_IP_PROPERTY)) { return localIp = System.getProperty(CLIENT_LOCAL_IP_PROPERTY, getAddress()); } localIp = getAddress(); return localIp; }
// Even when reading the IPv6-preference property throws, localIP() must still
// resolve to the local host address; System.props is restored afterwards.
@Test void testLocalIpWithException() throws Exception { Field field = System.class.getDeclaredField("props"); field.setAccessible(true); Properties properties = (Properties) field.get(null); Properties mockProperties = mock(Properties.class); when(mockProperties.getProperty("java.net.preferIPv6Addresses")).thenThrow(new RuntimeException("test")); field.set(null, mockProperties); try { System.setProperty("java.net.preferIPv6Addresses", "aaa"); InetAddress expect = InetAddress.getLocalHost(); assertEquals(expect.getHostAddress(), NetUtils.localIP()); } finally { field.set(null, properties); } }
// Builds an OSS HadoopConf: loads Hive base Hadoop config, injects the bucket,
// then copies every remaining Hadoop property into the conf's extra options.
@Override public HadoopConf buildHadoopConfWithReadOnlyConfig(ReadonlyConfig readonlyConfig) { Configuration configuration = loadHiveBaseHadoopConfig(readonlyConfig); Config config = fillBucket(readonlyConfig, configuration); HadoopConf hadoopConf = OssHadoopConf.buildWithConfig(ReadonlyConfig.fromConfig(config)); Map<String, String> propsInConfiguration = configuration.getPropsWithPrefix(StringUtils.EMPTY); hadoopConf.setExtraOptions(propsInConfiguration); return hadoopConf; }
// The OSS storage adapter must produce a HadoopConf carrying the configured bucket.
@Test void fillBucketInHadoopConf() { OSSStorage ossStorage = new OSSStorage(); HadoopConf ossnConf = ossStorage.buildHadoopConfWithReadOnlyConfig(OSS); assertHadoopConf(ossnConf); }
// Dispatches to the named Liquibase subcommand, optionally entering a Liquibase
// Scope for the duration; the scope is always exited in the finally block.
@Override public void run(Namespace namespace, Liquibase liquibase) throws Exception { String scopeId = null; if (scopedObjects != null) { scopeId = Scope.enter(scopedObjects); } final AbstractLiquibaseCommand<T> subcommand = requireNonNull(subcommands.get(namespace.getString(COMMAND_NAME_ATTR)), "Unable find the command"); try { subcommand.run(namespace, liquibase); } finally { if (scopeId != null) { Scope.exit(scopeId); } } }
// Running the "migrate" subcommand applies the migration; the persons table
// then contains exactly one row.
@Test void testRunSubCommand() throws Exception { final String databaseUrl = MigrationTestSupport.getDatabaseUrl(); final TestMigrationConfiguration conf = MigrationTestSupport.createConfiguration(databaseUrl); dbCommand.run(null, new Namespace(Collections.singletonMap("subcommand", "migrate")), conf); try (Handle handle = Jdbi.create(databaseUrl, "sa", "").open()) { assertThat(handle.createQuery("select count(*) from persons") .mapTo(Integer.class) .first()).isEqualTo(1); } }
// JWT login: requires SignedJWT credentials and an HttpServletRequest, validates
// the token's claims for the user, then resolves roles via the authorization
// service. Returns null on any parse failure, invalid token, or missing roles.
@Override public UserIdentity login(String username, Object credentials, ServletRequest request) { if (!(credentials instanceof SignedJWT)) { return null; } if (!(request instanceof HttpServletRequest)) { return null; } SignedJWT jwtToken = (SignedJWT) credentials; JWTClaimsSet claimsSet; boolean valid; try { claimsSet = jwtToken.getJWTClaimsSet(); valid = validateToken(jwtToken, claimsSet, username); } catch (ParseException e) { JWT_LOGGER.warn(String.format("%s: Couldn't parse a JWT token", username), e); return null; } if (valid) { String serializedToken = (String) request.getAttribute(JwtAuthenticator.JWT_TOKEN_REQUEST_ATTRIBUTE); UserIdentity rolesDelegate = _authorizationService.getUserIdentity((HttpServletRequest) request, username); if (rolesDelegate == null) { return null; } else { return getUserIdentity(jwtToken, claimsSet, serializedToken, username, rolesDelegate); } } else { return null; } }
// A token whose expiry is already in the past (epoch 1ms) must fail validation
// and yield a null identity.
@Test public void testFailExpirationValidation() throws Exception { UserStore testUserStore = new UserStore(); testUserStore.addUser(TEST_USER, SecurityUtils.NO_CREDENTIAL, new String[] {"USER"}); TokenGenerator.TokenAndKeys tokenAndKeys = TokenGenerator.generateToken(TEST_USER, 1L); JwtLoginService loginService = new JwtLoginService(new UserStoreAuthorizationService(testUserStore), tokenAndKeys.publicKey(), null); SignedJWT jwtToken = SignedJWT.parse(tokenAndKeys.token()); HttpServletRequest request = mock(HttpServletRequest.class); UserIdentity identity = loginService.login(TEST_USER, jwtToken, request); assertNull(identity); }
// ISO 9796-style RSA signature verification: decrypts the signature block,
// validates its structure/trailer, then digests the recovered prefix plus the
// supplied data and compares against the digest embedded in the block.
@Override public void verify(byte[] data, byte[] signature, MessageDigest digest) { final byte[] decrypted = engine.processBlock(signature, 0, signature.length); final int delta = checkSignature(decrypted, digest); final int offset = decrypted.length - digest.getDigestLength() - delta; digest.update(decrypted, 1, offset - 1); digest.update(data); if (!CryptoUtils.compare(digest.digest(), decrypted, offset)) { throw new VerificationException("Invalid signature"); } }
// A signature built with a zeroed header byte must be rejected with an
// "Invalid start byte" verification error.
@Test public void shouldThrowVerificationExceptionIfHeaderIsInvalid() { final byte[] challenge = CryptoUtils.random(40); final byte[] signature = sign(0, challenge, ISOTrailers.TRAILER_SHA1, "SHA1"); thrown.expect(VerificationException.class); thrown.expectMessage("Invalid start byte in signature"); new DssRsaSignatureVerifier(PUBLIC).verify(challenge, signature, "SHA-512"); }
// Stores the session time zone. Bare ZoneOffsets are normalized to a "GMT±xx:xx"
// id for backwards compatibility; the resulting string is validated before
// being written to the table configuration.
public void setLocalTimeZone(ZoneId zoneId) { final String zone; if (zoneId instanceof ZoneOffset) { // Give ZoneOffset a timezone for backwards compatibility reasons. // In general, advertising either TZDB ID, GMT+xx:xx, or UTC is the best we can do. zone = ZoneId.ofOffset("GMT", (ZoneOffset) zoneId).toString(); } else { zone = zoneId.toString(); } validateTimeZone(zone); configuration.set(TableConfigOptions.LOCAL_TIME_ZONE, zone); }
// "GMT-8:00" (single-digit hour) is not a valid offset id — ZoneId.of must throw.
@Test public void testInvalidGmtLocalTimeZone() { assertThatThrownBy(() -> CONFIG_BY_METHOD.setLocalTimeZone(ZoneId.of("GMT-8:00"))) .isInstanceOf(DateTimeException.class) .hasMessage("Invalid ID for offset-based ZoneId: GMT-8:00"); }
// Moves the file to the macOS Trash via NSFileManager; on failure raises a
// LocalAccessDeniedException carrying the localized NSError description when
// one is available, else the file path.
@Override public void trash(final Local file) throws LocalAccessDeniedException { if(log.isDebugEnabled()) { log.debug(String.format("Move %s to Trash", file)); } final ObjCObjectByReference error = new ObjCObjectByReference(); if(!NSFileManager.defaultManager().trashItemAtURL_resultingItemURL_error( NSURL.fileURLWithPath(file.getAbsolute()), null, error)) { final NSError f = error.getValueAs(NSError.class); if(null == f) { throw new LocalAccessDeniedException(file.getAbsolute()); } throw new LocalAccessDeniedException(String.format("%s", f.localizedDescription())); } }
// Trashing a directory that contains a nested subdirectory and file must succeed.
@Test public void testTrashNonEmpty() throws Exception { final Trash trash = new FileManagerTrashFeature(); final SupportDirectoryFinder finder = new TemporarySupportDirectoryFinder(); final Local temp = finder.find(); final Local directory = LocalFactory.get(temp, UUID.randomUUID().toString()); directory.mkdir(); final Local sub = LocalFactory.get(directory, UUID.randomUUID().toString()); sub.mkdir(); final Local file = LocalFactory.get(sub, UUID.randomUUID().toString()); final Touch touch = LocalTouchFactory.get(); touch.touch(file); trash.trash(directory); }
// Loads a custom Catalog implementation by class name via a no-arg constructor,
// wires in the Hadoop configuration, then initializes it with name/properties.
// Missing constructor or wrong supertype are reported as IllegalArgumentException.
public static Catalog loadCatalog( String impl, String catalogName, Map<String, String> properties, Object hadoopConf) { Preconditions.checkNotNull(impl, "Cannot initialize custom Catalog, impl class name is null"); DynConstructors.Ctor<Catalog> ctor; try { ctor = DynConstructors.builder(Catalog.class).impl(impl).buildChecked(); } catch (NoSuchMethodException e) { throw new IllegalArgumentException( String.format("Cannot initialize Catalog implementation %s: %s", impl, e.getMessage()), e); } Catalog catalog; try { catalog = ctor.newInstance(); } catch (ClassCastException e) { throw new IllegalArgumentException( String.format("Cannot initialize Catalog, %s does not implement Catalog.", impl), e); } configureHadoopConf(catalog, hadoopConf); catalog.initialize(catalogName, properties); return catalog; }
// A catalog whose class initialization fails must surface as an
// IllegalArgumentException wrapping the NoClassDefFoundError.
@Test public void loadCustomCatalog_ConstructorErrorCatalog() { Map<String, String> options = Maps.newHashMap(); options.put("key", "val"); Configuration hadoopConf = new Configuration(); String name = "custom"; String impl = TestCatalogErrorConstructor.class.getName(); assertThatThrownBy(() -> CatalogUtil.loadCatalog(impl, name, options, hadoopConf)) .isInstanceOf(IllegalArgumentException.class) .hasMessageStartingWith("Cannot initialize Catalog implementation") .hasMessageContaining("NoClassDefFoundError: Error while initializing class"); }
// Issues CONFIG SET param value against the given cluster node and waits for it.
@Override public void setConfig(RedisClusterNode node, String param, String value) { RedisClient entry = getEntry(node); RFuture<Void> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_SET, param, value); syncFuture(f); }
// Smoke test: CONFIG SET on the first master completes without error.
@Test public void testSetConfig() { RedisClusterNode master = getFirstMaster(); connection.setConfig(master, "timeout", "10"); }
// Operator is done once finishing was requested and the pending output is drained.
@Override public boolean isFinished() { return finishing && outputPage == null; }
// Semi-join with a NULL on the build side: probe values found in the set yield
// true; a probe value not present yields NULL (not false) because the build side
// contained a null. Runs with and without precomputed hash channels.
@Test(dataProvider = "hashEnabledValues") public void testBuildSideNulls(boolean hashEnabled) { DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext(); // build OperatorContext operatorContext = driverContext.addOperatorContext(0, new PlanNodeId("test"), ValuesOperator.class.getSimpleName()); List<Type> buildTypes = ImmutableList.of(BIGINT); RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), buildTypes); Operator buildOperator = new ValuesOperator(operatorContext, rowPagesBuilder .row(0L) .row(1L) .row(2L) .row(2L) .row(3L) .row((Object) null) .build()); SetBuilderOperatorFactory setBuilderOperatorFactory = new SetBuilderOperatorFactory( 1, new PlanNodeId("test"), buildTypes.get(0), 0, rowPagesBuilder.getHashChannel(), 10, new JoinCompiler(createTestMetadataManager())); Operator setBuilderOperator = setBuilderOperatorFactory.createOperator(driverContext); Driver driver = Driver.createDriver(driverContext, buildOperator, setBuilderOperator); while (!driver.isFinished()) { driver.process(); } // probe List<Type> probeTypes = ImmutableList.of(BIGINT); RowPagesBuilder rowPagesBuilderProbe = rowPagesBuilder(hashEnabled, Ints.asList(0), probeTypes); List<Page> probeInput = rowPagesBuilderProbe .addSequencePage(4, 1) .build(); Optional<Integer> probeHashChannel = hashEnabled ? Optional.of(probeTypes.size()) : Optional.empty(); HashSemiJoinOperatorFactory joinOperatorFactory = new HashSemiJoinOperatorFactory( 2, new PlanNodeId("test"), setBuilderOperatorFactory.getSetProvider(), rowPagesBuilderProbe.getTypes(), 0, probeHashChannel); // expected MaterializedResult expected = resultBuilder(driverContext.getSession(), concat(probeTypes, ImmutableList.of(BOOLEAN))) .row(1L, true) .row(2L, true) .row(3L, true) .row(4L, null) .build(); OperatorAssertion.assertOperatorEquals(joinOperatorFactory, driverContext, probeInput, expected, hashEnabled, ImmutableList.of(probeTypes.size())); }
// Static factory wrapping a raw long as an IntentId.
public static IntentId valueOf(long value) { return new IntentId(value); }
// valueOf must produce an id equal to one built via the constructor.
@Test public void valueOf() { IntentId id = new IntentId(0xdeadbeefL); assertEquals("incorrect valueOf", id, IntentId.valueOf(0xdeadbeefL)); }
// Parses a mandatory property value into JVM options by splitting on spaces that
// precede a dash, validates each option's format and that none overwrites a
// mandatory option, then appends them. Returns this (builder style).
public T addFromMandatoryProperty(Props props, String propertyName) { String value = props.nonNullValue(propertyName); if (!value.isEmpty()) { String splitRegex = " (?=-)"; List<String> jvmOptions = Arrays.stream(value.split(splitRegex)).map(String::trim).toList(); checkOptionFormat(propertyName, jvmOptions); checkMandatoryOptionOverwrite(propertyName, jvmOptions); options.addAll(jvmOptions); } return castThis(); }
// An option token not starting with '-' (after trimming) must raise the
// "not empty and start by dash" MessageException.
@Test @UseDataProvider("variousEmptyStrings") public void addFromMandatoryProperty_fails_with_MessageException_if_property_does_not_start_with_dash_after_trimmed(String emptyString) { properties.put(randomPropertyName, emptyString + "foo -bar"); expectJvmOptionNotEmptyAndStartByDashMessageException(() -> underTest.addFromMandatoryProperty(new Props(properties), randomPropertyName), randomPropertyName, "foo"); }
// Rewrites an owner qualifier: when the owner name equals the logical table
// name, substitute the routed actual table name (lowercased logical name if no
// actual mapping exists); otherwise fall back to the plain toString().
@Override public String toString(final RouteUnit routeUnit) { if (null != ownerName && !Strings.isNullOrEmpty(ownerName.getValue()) && tableName.getValue().equals(ownerName.getValue())) { Set<String> actualTableNames = routeUnit.getActualTableNames(tableName.getValue()); String actualTableName = actualTableNames.isEmpty() ? tableName.getValue().toLowerCase() : actualTableNames.iterator().next(); return tableName.getQuoteCharacter().wrap(actualTableName) + "."; } return toString(); }
// When owner ("u") differs from the table name, the owner is kept verbatim.
@Test void assertOwnerTokenWithOwnerNameNotEqualsTableName() { OwnerToken ownerToken = new OwnerToken(0, 1, new IdentifierValue("u"), new IdentifierValue("t_user")); assertThat(ownerToken.toString(buildRouteUnit()), is("u.")); assertTokenGrid(ownerToken); }
// Access-ordered LinkedHashMap sized for the requested logical capacity;
// eviction of the eldest entry is governed by the stored capacity.
// (No pre-validation possible here: super() must be the first statement.)
public LRUCache(int capacity) { super(calculateInitialCapacity(capacity), LOAD_FACTOR, true); this.capacity = capacity; }
// Capacity-2 cache: inserting a third key evicts the least-recently-used one,
// and a get() refreshes recency so the read key survives the next insertion.
@Test public void lruCacheTest() { LRUCache<String, String> lruCache = createLRUCache(2); lruCache.put(KEY1, VALUE1); Assert.assertEquals(VALUE1, lruCache.get(KEY1)); Assert.assertFalse(lruCache.containsKey(KEY2)); Assert.assertFalse(lruCache.containsKey(KEY3)); lruCache.put(KEY2, VALUE2); Assert.assertEquals(VALUE1, lruCache.get(KEY1)); Assert.assertEquals(VALUE2, lruCache.get(KEY2)); Assert.assertFalse(lruCache.containsKey(KEY3)); lruCache.put(KEY3, VALUE3); Assert.assertFalse(lruCache.containsKey(KEY1)); Assert.assertEquals(VALUE2, lruCache.get(KEY2)); Assert.assertEquals(VALUE3, lruCache.get(KEY3)); lruCache.put(KEY1, VALUE1); Assert.assertEquals(VALUE1, lruCache.get(KEY1)); Assert.assertFalse(lruCache.containsKey(KEY2)); Assert.assertEquals(VALUE3, lruCache.get(KEY3)); }
// Convenience overload: builds tablet locations using the default warehouse id.
public static TOlapTableLocationParam createLocation(OlapTable table, TOlapTablePartitionParam partitionParam, boolean enableReplicatedStorage) throws UserException { return createLocation(table, partitionParam, enableReplicatedStorage, WarehouseManager.DEFAULT_WAREHOUSE_ID); }
/**
 * With replicated storage enabled, every tablet location should list all three
 * replica nodes, and the first (primary) node should be balanced across backends.
 */
@Test
public void testReplicatedStorageWithLocalTablet(@Mocked GlobalStateMgr globalStateMgr,
        @Mocked SystemInfoService systemInfoService) throws Exception {
    long dbId = 1L;
    long tableId = 2L;
    long partitionId = 3L;
    long indexId = 4L;
    long tabletId = 5L;
    long replicaId = 10L;
    long backendId = 20L;
    // Columns
    List<Column> columns = new ArrayList<Column>();
    Column k1 = new Column("k1", Type.INT, true, null, "", "");
    columns.add(k1);
    columns.add(new Column("k2", Type.BIGINT, true, null, "", ""));
    columns.add(new Column("v", Type.BIGINT, false, AggregateType.SUM, "0", ""));
    // Build 9 tablets, each with 3 replicas spread over backends 20/21/22.
    MaterializedIndex index = new MaterializedIndex(indexId, MaterializedIndex.IndexState.NORMAL);
    for (int i = 0; i < 9; i++) {
        // Replica
        Replica replica1 = new Replica(replicaId, backendId, Replica.ReplicaState.NORMAL, 1, 0);
        Replica replica2 = new Replica(replicaId + 1, backendId + 1, Replica.ReplicaState.NORMAL, 1, 0);
        Replica replica3 = new Replica(replicaId + 2, backendId + 2, Replica.ReplicaState.NORMAL, 1, 0);
        // Tablet
        LocalTablet tablet = new LocalTablet(tabletId);
        tablet.addReplica(replica1);
        tablet.addReplica(replica2);
        tablet.addReplica(replica3);
        // Index
        TabletMeta tabletMeta = new TabletMeta(dbId, tableId, partitionId, indexId, 0, TStorageMedium.SSD);
        index.addTablet(tablet, tabletMeta);
    }
    // Partition info and distribution info
    DistributionInfo distributionInfo = new HashDistributionInfo(1, Lists.newArrayList(k1));
    PartitionInfo partitionInfo = new SinglePartitionInfo();
    partitionInfo.setDataProperty(partitionId, new DataProperty(TStorageMedium.SSD));
    partitionInfo.setIsInMemory(partitionId, false);
    partitionInfo.setTabletType(partitionId, TTabletType.TABLET_TYPE_DISK);
    partitionInfo.setReplicationNum(partitionId, (short) 3);
    // Partition
    Partition partition = new Partition(partitionId, "p1", index, distributionInfo);
    // Table
    OlapTable table = new OlapTable(tableId, "t1", columns, KeysType.AGG_KEYS, partitionInfo, distributionInfo);
    Deencapsulation.setField(table, "baseIndexId", indexId);
    table.addPartition(partition);
    table.setIndexMeta(indexId, "t1", columns, 0, 0, (short) 3, TStorageType.COLUMN, KeysType.AGG_KEYS);
    // Stub cluster lookups: disk capacity is fine and every backend is alive,
    // so location creation cannot fail for environmental reasons.
    new Expectations() {
        {
            GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo();
            result = systemInfoService;
            systemInfoService.checkExceedDiskCapacityLimit((Multimap<Long, Long>) any, anyBoolean);
            result = Status.OK;
            GlobalStateMgr.getCurrentState();
            result = globalStateMgr;
            globalStateMgr.getNodeMgr().getClusterInfo();
            result = systemInfoService;
            systemInfoService.checkBackendAlive(anyLong);
            result = true;
        }
    };
    TOlapTablePartitionParam partitionParam = new TOlapTablePartitionParam();
    TOlapTablePartition tPartition = new TOlapTablePartition();
    tPartition.setId(partitionId);
    partitionParam.addToPartitions(tPartition);
    TOlapTableLocationParam param = OlapTableSink.createLocation(
            table, partitionParam, true);
    System.out.println(param);
    // Check
    List<TTabletLocation> locations = param.getTablets();
    Assert.assertEquals(9, locations.size());
    // Count how often each backend is chosen as the first (primary) node;
    // with 9 tablets over 3 backends a balanced pick yields exactly 3 each.
    HashMap<Long, Integer> beCount = new HashMap<>();
    for (TTabletLocation location : locations) {
        List<Long> nodes = location.getNode_ids();
        Assert.assertEquals(3, nodes.size());
        beCount.put(nodes.get(0), beCount.getOrDefault(nodes.get(0), 0) + 1);
    }
    for (Integer v : beCount.values()) {
        Assert.assertEquals(3, v.longValue());
    }
}
/**
 * Populates the template's default constructor so its {@code super(...)} call
 * carries the scorecard-specific values: characteristics instance, initial score,
 * reason-code settings and baseline score.
 */
static void setConstructor(final ScorecardCompilationDTO compilationDTO,
                           final ClassOrInterfaceDeclaration modelTemplate,
                           final String fullCharacteristicsClassName) {
    KiePMMLModelFactoryUtils.init(compilationDTO, modelTemplate);
    final ConstructorDeclaration defaultConstructor = modelTemplate.getDefaultConstructor()
            .orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_DEFAULT_CONSTRUCTOR,
                                                                          modelTemplate.getName())));
    final BlockStmt constructorBody = defaultConstructor.getBody();
    final ExplicitConstructorInvocationStmt superInvocation =
            CommonCodegenUtils.getExplicitConstructorInvocationStmt(constructorBody)
                    .orElseThrow(() -> new KiePMMLException(String.format(MISSING_CONSTRUCTOR_IN_BODY,
                                                                          constructorBody)));
    // Argument 3: "new <FullCharacteristicsClass>()".
    ObjectCreationExpr newCharacteristics = new ObjectCreationExpr();
    newCharacteristics.setType(parseClassOrInterfaceType(fullCharacteristicsClassName));
    superInvocation.setArgument(3, newCharacteristics);
    // Arguments 4-7: initial score, reason-code usage, algorithm and baseline score.
    superInvocation.setArgument(4, getExpressionForObject(compilationDTO.getInitialScore()));
    superInvocation.setArgument(5, getExpressionForObject(compilationDTO.isUseReasonCodes()));
    REASONCODE_ALGORITHM reasonCodeAlgorithm = compilationDTO.getREASONCODE_ALGORITHM();
    superInvocation.setArgument(6, new NameExpr(REASONCODE_ALGORITHM.class.getName() + "." + reasonCodeAlgorithm.name()));
    superInvocation.setArgument(7, getExpressionForObject(compilationDTO.getBaselineScore()));
}
@Test
void setConstructor() {
    String fullCharacteristicsClassName = PACKAGE_NAME + ".fullCharacteristicsClassName";
    final CommonCompilationDTO<Scorecard> source =
            CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
                                                                   basicComplexPartialScorePmml,
                                                                   basicComplexPartialScore,
                                                                   new PMMLCompilationContextMock(),
                                                                   BASIC_COMPLEX_PARTIAL_SCORE_SOURCE);
    KiePMMLScorecardModelFactory.setConstructor(ScorecardCompilationDTO.fromCompilationDTO(source),
                                                scorecardTemplate,
                                                fullCharacteristicsClassName);
    // Re-read the super(...) invocation that setConstructor should have rewritten.
    final ConstructorDeclaration constructorDeclaration =
            scorecardTemplate.getDefaultConstructor().orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_DEFAULT_CONSTRUCTOR, scorecardTemplate.getName())));
    final BlockStmt body = constructorDeclaration.getBody();
    final ExplicitConstructorInvocationStmt retrieved =
            CommonCodegenUtils.getExplicitConstructorInvocationStmt(body)
                    .orElseThrow(() -> new KiePMMLException(String.format(MISSING_CONSTRUCTOR_IN_BODY, body)));
    // Expected form: super(fileName, modelName, Collections.emptyList(),
    //                      new <characteristics>(), initialScore, useReasonCodes,
    //                      reasonCodeAlgorithm, baselineScore) — compared both as
    //                      text and as an AST node.
    Statement expected = JavaParserUtils
            .parseStatement(String.format("super(\"%1$s\", \"%2$s\", Collections.emptyList()" +
                                                  ", new %3$s" +
                                                  "(), %4$s, %5$s, %6$s, %7$s);\n",
                                          FILE_NAME_NO_SUFFIX,
                                          getSanitizedClassName(basicComplexPartialScore.getModelName()),
                                          fullCharacteristicsClassName,
                                          basicComplexPartialScore.getInitialScore(),
                                          basicComplexPartialScore.isUseReasonCodes(),
                                          REASONCODE_ALGORITHM.class.getName() + "." + REASONCODE_ALGORITHM.byName(basicComplexPartialScore.getReasonCodeAlgorithm().value()),
                                          basicComplexPartialScore.getBaselineScore()
            ));
    assertThat(expected.toString()).isEqualTo(retrieved.toString());
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
}
/**
 * Reads OpenAPI definitions from a single resource class using default scan
 * state (no parent path/consumes/produces, fresh tag/parameter/class sets).
 */
public OpenAPI read(Class<?> cls) {
    Set<String> parentTags = new LinkedHashSet<>();
    List<Parameter> parentParameters = new ArrayList<>();
    Set<Class<?>> scannedResources = new HashSet<>();
    return read(cls, resolveApplicationPath(), null, false, null, null, parentTags, parentParameters, scannedResources);
}
@Test(description = "Test Schema AdditionalProperties annotations")
public void testSchemaAdditionalProperties() {
    // Scan the resource and compare the produced spec against the expected YAML:
    // each endpoint exercises a different way of declaring additionalProperties
    // (ArraySchema impl, inferred from response type, Schema impl, Schema ref).
    Reader reader = new Reader(new OpenAPI());
    OpenAPI openAPI = reader.read(SchemaAdditionalPropertiesResource.class);
    String yaml = "openapi: 3.0.1\n" +
            "paths:\n" +
            "  /arraySchemaImpl:\n" +
            "    get:\n" +
            "      operationId: arraySchemaImpl\n" +
            "      responses:\n" +
            "        \"200\":\n" +
            "          description: voila!\n" +
            "          content:\n" +
            "            application/json:\n" +
            "              schema:\n" +
            "                type: object\n" +
            "                additionalProperties:\n" +
            "                  type: array\n" +
            "                  items:\n" +
            "                    $ref: '#/components/schemas/Pet'\n" +
            "  /fromtResponseType:\n" +
            "    get:\n" +
            "      operationId: fromtResponseType\n" +
            "      responses:\n" +
            "        default:\n" +
            "          description: default response\n" +
            "          content:\n" +
            "            '*/*':\n" +
            "              schema:\n" +
            "                type: object\n" +
            "                additionalProperties:\n" +
            "                  type: array\n" +
            "                  items:\n" +
            "                    $ref: '#/components/schemas/Pet'\n" +
            "  /schemaImpl:\n" +
            "    get:\n" +
            "      operationId: schemaImpl\n" +
            "      responses:\n" +
            "        \"200\":\n" +
            "          description: voila!\n" +
            "          content:\n" +
            "            application/json:\n" +
            "              schema:\n" +
            "                type: object\n" +
            "                additionalProperties:\n" +
            "                  $ref: '#/components/schemas/Pet'\n" +
            "  /schemaNotImpl:\n" +
            "    get:\n" +
            "      operationId: schemaNotImpl\n" +
            "      responses:\n" +
            "        \"200\":\n" +
            "          description: voila!\n" +
            "          content:\n" +
            "            application/json:\n" +
            "              schema:\n" +
            "                type: object\n" +
            "                additionalProperties:\n" +
            "                  $ref: '#/components/schemas/Pet'\n" +
            "components:\n" +
            "  schemas:\n" +
            "    Pet:\n" +
            "      type: object\n" +
            "      properties:\n" +
            "        foo:\n" +
            "          type: string\n";
    SerializationMatchers.assertEqualsToYaml(openAPI, yaml);
}
/**
 * Re-attaches the request's URL-encoded query string to the given target URI.
 *
 * @param request   request whose query string (if any) should be carried over
 * @param targetUri URI to append the query string to
 * @return {@code targetUri} unchanged when the request has no query string,
 *         otherwise {@code targetUri + "?" + encodedQuery}
 */
public static String appendQueryParams(HttpServletRequest request, String targetUri) {
    String encodedQuery = getURLEncodedQueryString(request, null);
    return encodedQuery == null ? targetUri : targetUri + "?" + encodedQuery;
}
@Test
void testAppendQueryParams() throws Exception {
    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
    String targetUri = "/test/path";
    Mockito.when(request.getCharacterEncoding()).thenReturn(null);

    // Map each raw query string to the URI we expect after appending it.
    Map<String, String> expectedByQuery = new HashMap<>();
    expectedByQuery.put("param1=x", targetUri + "?" + "param1=x");
    expectedByQuery.put("param1=x&param2=y", targetUri + "?" + "param1=x&param2=y");
    expectedByQuery.put("param1=x&param2=y&param3=x+y", targetUri + "?" + "param1=x&param2=y&param3=x+y");

    for (Map.Entry<String, String> entry : expectedByQuery.entrySet()) {
        Mockito.when(request.getQueryString()).thenReturn(entry.getKey());
        assertEquals(entry.getValue(), WebAppUtils.appendQueryParams(request, targetUri));
    }
}
public Exporter getCompatibleExporter(TransferExtension extension, DataVertical jobType) { Exporter<?, ?> exporter = getExporterOrNull(extension, jobType); if (exporter != null) { return exporter; } switch (jobType) { case MEDIA: exporter = getMediaExporter(extension); break; case PHOTOS: exporter = getPhotosExporter(extension); break; case VIDEOS: exporter = getVideosExporter(extension); break; } if (exporter == null) { return extension.getExporter(jobType); // preserve original exception } return exporter; }
@Test
public void shouldMaintainOriginalException() {
    // The extension throws on getExporter(MEDIA); with no compatible fallback
    // available, that original exception must surface to the caller unchanged.
    TransferExtension ext = mock(TransferExtension.class);
    when(ext.getExporter(eq(MEDIA))).thenThrow(new RuntimeException());
    assertThrows(Exception.class, () -> compatibilityProvider.getCompatibleExporter(ext, MEDIA));
}