focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override
public void checkTopicAccess(
    final KsqlSecurityContext securityContext,
    final String topicName,
    final AclOperation operation
) {
  // Authorization is delegated to the cache, keyed on the complete request tuple.
  final CacheKey key = new CacheKey(securityContext, AuthObjectType.TOPIC, topicName, operation);
  checkAccess(key);
}
// [TEST] Two identical topic-access requests one second apart (fake ticker advanced) must hit
// the backend validator only once — i.e. the second request is served from the cache.
@Test public void shouldCheckCacheValidatorOnSecondTopicAccessRequest() { // When cache.checkTopicAccess(securityContext, TOPIC_1, AclOperation.READ); when(fakeTicker.read()).thenReturn(ONE_SEC_IN_NS); cache.checkTopicAccess(securityContext, TOPIC_1, AclOperation.READ); // Then verify(backendValidator, times(1)) .checkTopicAccess(securityContext, TOPIC_1, AclOperation.READ); verifyNoMoreInteractions(backendValidator); }
@Override
public Map<String, String> getTags() {
    // Build the base application tags, then layer git metadata and host info on top.
    Map<String, String> applicationTags =
            MetricsSupport.applicationTags(applicationModel, getExtraInfo());
    Map<String, String> withGitTags = gitTags(applicationTags);
    return hostTags(withGitTags);
}
// [TEST] Builds a real ApplicationModel, then checks that getTags() carries the host IP,
// hostname, application name and the git commit id under their well-known tag keys.
@Test void getTags() { ApplicationModel applicationModel = ApplicationModel.defaultModel(); String mockMetrics = "MockMetrics"; applicationModel .getApplicationConfigManager() .setApplication(new org.apache.dubbo.config.ApplicationConfig(mockMetrics)); ApplicationMetric applicationMetric = new ApplicationMetric(applicationModel); Map<String, String> tags = applicationMetric.getTags(); Assertions.assertEquals(tags.get(TAG_IP), getLocalHost()); Assertions.assertEquals(tags.get(TAG_HOSTNAME), getLocalHostName()); Assertions.assertEquals(tags.get(TAG_APPLICATION_NAME), applicationModel.getApplicationName()); Assertions.assertEquals(tags.get(METADATA_GIT_COMMITID_METRIC.getName()), Version.getLastCommitId()); }
public Stream<NoSqlMigration> getMigrations() {
    // Resolve the provider and immediately ask it for the migration stream.
    return getMigrations(getMigrationProvider());
}
// [TEST] With two storage-provider classes registered, the migration stream must contain the
// base collection migration and the DocumentDB-specific override of migration 007.
@Test void testNoSqlDatabaseMigrationsWhereMigrationsCanBeOverridden() { final NoSqlDatabaseMigrationsProvider databaseCreator = new NoSqlDatabaseMigrationsProvider(asList(MongoDBStorageProvider.class, AmazonDocumentDBStorageProvider.class)); final Stream<NoSqlMigration> databaseSpecificMigrations = databaseCreator.getMigrations(); assertThat(databaseSpecificMigrations) .anyMatch(migration -> migration.getClassName().equals(M001_CreateJobCollection.class.getSimpleName() + ".class")) .anyMatch(this::migration007IsDocumentDBMigration); }
@Private
public HistoryClientService getClientService() {
    // Plain accessor for the client service owned by this history server.
    return clientService;
}
// [TEST] Drives the JobHistoryServer through init/start/stop and checks that the embedded
// HistoryClientService follows the same lifecycle states; finally verifies the client
// handler still exposes its connect address after stop.
@Test (timeout= 50000 ) public void testStartStopServer() throws Exception { historyServer = new JobHistoryServer(); Configuration config = new Configuration(); historyServer.init(config); assertEquals(STATE.INITED, historyServer.getServiceState()); HistoryClientService historyService = historyServer.getClientService(); assertNotNull(historyServer.getClientService()); assertEquals(STATE.INITED, historyService.getServiceState()); historyServer.start(); assertEquals(STATE.STARTED, historyServer.getServiceState()); assertEquals(STATE.STARTED, historyService.getServiceState()); historyServer.stop(); assertEquals(STATE.STOPPED, historyServer.getServiceState()); assertNotNull(historyService.getClientHandler().getConnectAddress()); }
@Override
public Map<String, Object> appConfigs() {
    // Merge the raw originals with the parsed values; parsed values win on key collisions
    // because they are applied last.
    final Map<String, Object> combined = new HashMap<>(config.originals());
    combined.putAll(config.values());
    return combined;
}
// [TEST] The parsed (class-resolved) value must shadow the raw string for the RocksDB
// config-setter key, proving values() is applied after originals().
@Test public void appConfigsShouldReturnParsedValues() { assertThat( context.appConfigs().get(StreamsConfig.ROCKSDB_CONFIG_SETTER_CLASS_CONFIG), equalTo(RocksDBConfigSetter.class) ); }
// Planner rule: rewrites an uncorrelated IN-predicate subquery (ApplyNode) into
// INNER JOIN on the IN column + DISTINCT aggregation + projection of TRUE for the
// IN output. Bails out (Result.empty()) when the node may feed an anti-join, has
// more than one subquery assignment, or the single assignment is not an
// InSubqueryExpression. When the left input's columns are not already provably
// distinct, an AssignUniqueId node is inserted so the DISTINCT does not collapse
// duplicate left rows.
// NOTE(review): local variable `finalProjectNdde` looks like a typo of
// `finalProjectNode` — harmless (local name only), but worth fixing upstream.
@Override public Result apply(ApplyNode applyNode, Captures captures, Context context) { if (applyNode.getMayParticipateInAntiJoin()) { return Result.empty(); } Assignments subqueryAssignments = applyNode.getSubqueryAssignments(); if (subqueryAssignments.size() != 1) { return Result.empty(); } RowExpression expression = getOnlyElement(subqueryAssignments.getExpressions()); if (!(expression instanceof InSubqueryExpression)) { return Result.empty(); } InSubqueryExpression inPredicate = (InSubqueryExpression) expression; VariableReferenceExpression inPredicateOutputVariable = getOnlyElement(subqueryAssignments.getVariables()); PlanNode leftInput = applyNode.getInput(); // Add unique id column if the set of columns do not form a unique key already if (!((GroupReference) leftInput).getLogicalProperties().isPresent() || !((GroupReference) leftInput).getLogicalProperties().get().isDistinct(ImmutableSet.copyOf(leftInput.getOutputVariables()))) { VariableReferenceExpression uniqueKeyVariable = context.getVariableAllocator().newVariable("unique", BIGINT); leftInput = new AssignUniqueId( applyNode.getSourceLocation(), context.getIdAllocator().getNextId(), leftInput, uniqueKeyVariable); } VariableReferenceExpression leftVariableReference = inPredicate.getValue(); VariableReferenceExpression rightVariableReference = inPredicate.getSubquery(); JoinNode innerJoin = new JoinNode( applyNode.getSourceLocation(), context.getIdAllocator().getNextId(), JoinType.INNER, leftInput, applyNode.getSubquery(), ImmutableList.of(new EquiJoinClause( leftVariableReference, rightVariableReference)), ImmutableList.<VariableReferenceExpression>builder() .addAll(leftInput.getOutputVariables()) .build(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), ImmutableMap.of()); AggregationNode distinctNode = new AggregationNode( innerJoin.getSourceLocation(), context.getIdAllocator().getNextId(), innerJoin, ImmutableMap.of(), 
singleGroupingSet(ImmutableList.<VariableReferenceExpression>builder() .addAll(innerJoin.getOutputVariables()) .build()), ImmutableList.of(), SINGLE, Optional.empty(), Optional.empty(), Optional.empty()); ImmutableList<VariableReferenceExpression> referencedOutputs = ImmutableList.<VariableReferenceExpression>builder() .addAll(applyNode.getInput().getOutputVariables()) .add(inPredicateOutputVariable) .build(); ProjectNode finalProjectNdde = new ProjectNode( context.getIdAllocator().getNextId(), distinctNode, Assignments.builder() .putAll(identityAssignments(distinctNode.getOutputVariables())) .put(inPredicateOutputVariable, TRUE_CONSTANT) .build() .filter(referencedOutputs)); return Result.ofPlanNode(finalProjectNdde); }
// [TEST] The rule must not fire when the single subquery assignment is an EXISTS
// expression rather than an IN-predicate.
@Test public void testDoesNotFireOnNonInPredicateSubquery() { tester().assertThat(new TransformUncorrelatedInPredicateSubqueryToDistinctInnerJoin()) .on(p -> p.apply( assignment(p.variable("x"), new ExistsExpression(Optional.empty(), TRUE_CONSTANT)), emptyList(), p.values(), p.values())) .doesNotFire(); }
/**
 * Serializes the given {@link Writable} and returns it as an unchunked,
 * URL-safe base64 string.
 *
 * @param obj the writable to encode
 * @return URL-safe base64 encoding of the writable's serialized bytes
 * @throws IOException if the writable fails to serialize
 */
private static String encodeWritable(Writable obj) throws IOException {
    DataOutputBuffer buf = new DataOutputBuffer();
    obj.write(buf);
    // lineLength 0 + urlSafe=true => no chunking, URL-safe alphabet.
    Base64 encoder = new Base64(0, null, true);
    // The buffer's backing array may be larger than its logical length;
    // Arrays.copyOf trims it in one call instead of the manual new[]/arraycopy pair.
    byte[] raw = java.util.Arrays.copyOf(buf.getData(), buf.getLength());
    return encoder.encodeToString(raw);
}
// [TEST] Round-trips tokens of increasing payload size through encodeToUrlString /
// decodeFromUrlString, asserting equality and URL-safety of every encoding.
@Test public void testEncodeWritable() throws Exception { String[] values = new String[]{"", "a", "bb", "ccc", "dddd", "eeeee", "ffffff", "ggggggg", "hhhhhhhh", "iiiiiiiii", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLM" + "NOPQRSTUVWXYZ01234567890!@#$%^&*()-=_+[]{}|;':,./<>?"}; Token<AbstractDelegationTokenIdentifier> orig; Token<AbstractDelegationTokenIdentifier> copy = new Token<>(); // ensure that for each string the input and output values match for(int i=0; i< values.length; ++i) { String val = values[i]; Token.LOG.info("Input = {}", val); orig = new Token<>(val.getBytes(), val.getBytes(), new Text(val), new Text(val)); String encode = orig.encodeToUrlString(); copy.decodeFromUrlString(encode); assertEquals(orig, copy); checkUrlSafe(encode); } }
// Starts the precondition server only when precondition checks are required.
// NOTE(review): the log text implies shouldCheckPreconditions() returns false when
// all preconditions already passed — confirm against its implementation.
@Override public void startAsync() { if (!shouldCheckPreconditions()) { LOG.info("All preconditions passed, skipping precondition server start"); return; } LOG.info("Some preconditions not passed, starting precondition server"); server.start(); }
// [TEST] When preconditions pass, startAsync must release the service context, the admin
// client and the Vert.x client rather than starting the server.
@Test public void shouldCloseClientsIfPreconditionsPass() { // When: checker.startAsync(); // Then: verify(serviceContext).close(); verify(admin).close(); verify(clientVertx).close(); }
// Shunting-yard style conversion of an infix expression to postfix token order.
// Operators are pushed on a list used as a stack; ")" pops until the matching "(";
// higher-precedence operators on the stack are flushed before pushing a new one.
// The "temporarily fix for issue #189" pop discards the matching "(" itself.
// NOTE(review): `isOperand` here actually tests for operators/parentheses —
// naming is inverted relative to the usual terminology.
public List<String> toPrefix(String in) { List<String> tokens = buildTokens(alignINClause(in)); List<String> output = new ArrayList<>(); List<String> stack = new ArrayList<>(); for (String token : tokens) { if (isOperand(token)) { if (token.equals(")")) { while (openParanthesesFound(stack)) { output.add(stack.remove(stack.size() - 1)); } if (!stack.isEmpty()) { // temporarily fix for issue #189 stack.remove(stack.size() - 1); } } else { while (openParanthesesFound(stack) && !hasHigherPrecedence(token, stack.get(stack.size() - 1))) { output.add(stack.remove(stack.size() - 1)); } stack.add(token); } } else { output.add(token); } } while (!stack.isEmpty()) { output.add(stack.remove(stack.size() - 1)); } return output; }
// [TEST] An IN clause collapses to a single "4,5,6" operand token, and the whole query is
// emitted in postfix order.
@Test public void testIn() { String query = "a and b OR c in ( 4, 5, 6 )"; List<String> list = parser.toPrefix(query); assertEquals(Arrays.asList("a", "b", "and", "c", "4,5,6", "in", "OR"), list); }
/**
 * Returns the contract's declared methods that are callable from the tool:
 * non-synthetic, with supported parameter types, excluding event accessors and
 * the framework lifecycle methods load/kill/linkLibraries.
 *
 * <p>Fixes: raw {@code Class} parameter made generic ({@code Class<?>}), and the
 * locale-sensitive {@code toLowerCase()} pinned to {@code Locale.ROOT} so the
 * "event" filter behaves the same under every default locale (e.g. Turkish "I").
 *
 * @param contract the contract class to inspect
 * @return the filtered list of valid methods
 */
public static List<Method> extractValidMethods(Class<?> contract) {
    return Arrays.stream(contract.getDeclaredMethods())
            .filter(
                    m -> !m.isSynthetic()
                            && parametersAreMatching(m)
                            && !m.getName().toLowerCase(java.util.Locale.ROOT).contains("event")
                            && !m.getName().equals("load")
                            && !m.getName().equals("kill")
                            && !m.getName().equals("linkLibraries"))
            .collect(Collectors.toList());
}
// [TEST] The extracted "deploy" method must accept the standard web3j deployment
// parameter types (Web3j, TransactionManager, ContractGasProvider).
@Test public void testThatTheCorrectDeployMethodWasExtracted() { List<Method> filteredMethods = MethodFilter.extractValidMethods(greeterContractClass); List<Method> deployMethod = filteredMethods.stream() .filter(m -> m.getName().equals("deploy")) .collect(Collectors.toList()); List<Class<?>> deployMethodParameterTypes = Arrays.asList(deployMethod.get(0).getParameterTypes()); assertTrue( deployMethodParameterTypes.containsAll( Arrays.asList( Web3j.class, TransactionManager.class, ContractGasProvider.class))); }
// Creates a table at the given location with the supplied schema, partition spec,
// sort order and table properties, via the fluent table builder.
@Override public Table create( Schema schema, PartitionSpec spec, SortOrder order, Map<String, String> properties, String location) { return buildTable(location, schema) .withPartitionSpec(spec) .withSortOrder(order) .withProperties(properties) .create(); }
// [TEST] A table created with an ascending NULLS FIRST sort order must report order id 1,
// a single identity-transformed field with matching direction and null ordering.
@Test public void testCustomSortOrder() { PartitionSpec spec = PartitionSpec.builderFor(SCHEMA).bucket("data", 16).build(); SortOrder order = SortOrder.builderFor(SCHEMA).asc("id", NULLS_FIRST).build(); Table table = TABLES.create(SCHEMA, spec, order, Maps.newHashMap(), tableDir.toURI().toString()); SortOrder sortOrder = table.sortOrder(); assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(1); assertThat(sortOrder.fields()).as("Order must have 1 field").hasSize(1); assertThat(sortOrder.fields().get(0).direction()).as("Direction must match").isEqualTo(ASC); assertThat(sortOrder.fields().get(0).nullOrder()) .as("Null order must match") .isEqualTo(NULLS_FIRST); Transform<?, ?> transform = Transforms.identity(); assertThat(sortOrder.fields().get(0).transform()) .as("Transform must match") .isEqualTo(transform); }
// Truth assertion: checks the multiset under test contains `element` exactly
// `expectedCount` times. Negative expected counts are rejected up front.
public final void hasCount(@Nullable Object element, int expectedCount) { checkArgument(expectedCount >= 0, "expectedCount(%s) must be >= 0", expectedCount); int actualCount = checkNotNull(actual).count(element); check("count(%s)", element).that(actualCount).isEqualTo(expectedCount); }
// [TEST] hasCount passes for present elements with the right multiplicity, for absent
// elements with 0, and works with a failure-message prefix.
@Test public void hasCount() { ImmutableMultiset<String> multiset = ImmutableMultiset.of("kurt", "kurt", "kluever"); assertThat(multiset).hasCount("kurt", 2); assertThat(multiset).hasCount("kluever", 1); assertThat(multiset).hasCount("alfred", 0); assertWithMessage("name").that(multiset).hasCount("kurt", 2); }
/**
 * Returns true when the string is null, empty, or whitespace-only.
 */
public static boolean isBlank(String str) {
    return str == null || str.trim().isEmpty();
}
// [TEST] Exercises the whole blank/not-blank helper family: isBlank on null/empty/
// non-empty, varargs isNotBlank (all-or-nothing semantics), conditional consumers
// (isNotBlankThen / notBankAccept / notBankThen) and null-coalescing noNullElseGet.
@Test public void testIsBlank() { Assert.assertTrue(StringKit.isBlank("")); Assert.assertTrue(StringKit.isBlank(null)); Assert.assertFalse(StringKit.isBlank("a")); Assert.assertFalse(StringKit.isBlank("null")); Assert.assertTrue(StringKit.isNotBlank("a b")); Assert.assertTrue(StringKit.isNotBlank("a")); Assert.assertFalse(StringKit.isNotBlank("")); Assert.assertFalse(StringKit.isNotBlank()); Assert.assertFalse(StringKit.isNotBlank("a", "b", " ")); Assert.assertFalse(StringKit.isNotBlank("a", "b", null)); Assert.assertTrue(StringKit.isNotBlank("a", "b", "c")); Assert.assertTrue(StringKit.isNotBlank("abc", "d ef", "gh i")); Bar bar = new Bar(); StringKit.isNotBlankThen("barName", bar::setName); Assert.assertEquals("barName", bar.getName()); StringKit.notBankAccept("1", Integer::parseInt, bar::setAge); Assert.assertEquals(1, bar.getAge()); StringKit.notBankThen("bar", bar::doSameThing); bar.setName("bar"); Foo foo = new Foo(); String name = StringKit.noNullElseGet(foo::getName, bar::getName); Assert.assertEquals("bar", name); foo.setName("foo"); String fooName = StringKit.noNullElseGet(foo::getName, bar::getName); Assert.assertEquals("foo", fooName); }
// Builds the default OSGi permission set: package export/import, adapt, and admin
// metadata, all wildcarded. ConfigurationPermission is deliberately commented out.
// NOTE(review): the companion test expects 5 permissions including
// ConfigurationPermission, but this method returns 4 — confirm which is current.
public static List<Permission> getDefaultPerms() { List<Permission> permSet = Lists.newArrayList(); permSet.add(new PackagePermission("*", PackagePermission.EXPORTONLY)); permSet.add(new PackagePermission("*", PackagePermission.IMPORT)); permSet.add(new AdaptPermission("*", AdaptPermission.ADAPT)); //permSet.add(new ConfigurationPermission("*", ConfigurationPermission.CONFIGURE)); permSet.add(new AdminPermission("*", AdminPermission.METADATA)); return permSet; }
// [TEST] NOTE(review): this test builds its own permission list and never calls
// getDefaultPerms(), so it only verifies list mechanics — it would not catch a
// regression in the focal method. It also includes ConfigurationPermission, which
// the focal method has commented out.
@Test public void testGetDefaultPerms() { List<Permission> permSet = Lists.newArrayList(); assertTrue(permSet.isEmpty()); permSet.add(new PackagePermission("*", PackagePermission.EXPORTONLY)); permSet.add(new PackagePermission("*", PackagePermission.IMPORT)); permSet.add(new AdaptPermission("*", AdaptPermission.ADAPT)); permSet.add(new ConfigurationPermission("*", ConfigurationPermission.CONFIGURE)); permSet.add(new AdminPermission("*", AdminPermission.METADATA)); assertEquals(5, permSet.size()); }
/**
 * JSON creator / static factory. Rejects null or blank versions with an
 * IllegalArgumentException carrying the documented message.
 */
@JsonCreator
public static ModelVersion of(String version) {
    if (StringUtils.isBlank(version)) {
        // Same exception type and message that Preconditions.checkArgument would raise.
        throw new IllegalArgumentException("Version must not be blank");
    }
    return new AutoValue_ModelVersion(version);
}
// [TEST] null, empty, and whitespace-only versions must all be rejected with an
// IllegalArgumentException carrying the exact contract message.
@Test public void ensureVersionIsNotBlank() { assertThatThrownBy(() -> ModelVersion.of(null)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Version must not be blank"); assertThatThrownBy(() -> ModelVersion.of("")) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Version must not be blank"); assertThatThrownBy(() -> ModelVersion.of(" \n\r\t")) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Version must not be blank"); }
// Raises a server-health warning when the config.git repo's loose-object count reaches
// the configured threshold, advising the operator to run 'git gc'; clears the warning
// otherwise. All failures are logged and swallowed so a repo inspection error never
// breaks the caller. The message/description strings are part of the health contract —
// the companion test asserts them verbatim.
public void checkRepoAndAddWarningIfRequired() { try { if (configRepository.getLooseObjectCount() >= systemEnvironment.get(SystemEnvironment.GO_CONFIG_REPO_GC_LOOSE_OBJECT_WARNING_THRESHOLD)) { String message = "Action required: Run 'git gc' on config.git repo"; String description = "Number of loose objects in your Configuration repository(config.git) has grown beyond " + "the configured threshold. As the size of config repo increases, the config save operations tend to slow down " + "drastically. It is recommended that you run 'git gc' from " + "'&lt;go server installation directory&gt;/db/config.git/' to address this problem. Go can do this " + "automatically on a periodic basis if you enable automatic GC. <a target='_blank' href='" + docsUrl("/advanced_usage/config_repo.html") + "'>read more...</a>"; serverHealthService.update(ServerHealthState.warningWithHtml(message, description, HealthStateType.general(HealthStateScope.forConfigRepo(SCOPE)))); LOGGER.warn("{}:{}", message, description); } else { serverHealthService.removeByScope(HealthStateScope.forConfigRepo(SCOPE)); } } catch (Exception e) { LOGGER.error(e.getMessage(), e); } }
// [TEST] With loose-object count (20) above the threshold (10), a WARNING health state
// scoped to the config repo must be registered with the exact message and description.
@Test public void shouldAddWarningWhenConfigRepoLooseObjectCountGoesBeyondTheConfiguredThreshold() throws Exception { when(systemEnvironment.get(SystemEnvironment.GO_CONFIG_REPO_GC_LOOSE_OBJECT_WARNING_THRESHOLD)).thenReturn(10L); when(configRepository.getLooseObjectCount()).thenReturn(20L); service.checkRepoAndAddWarningIfRequired(); List<ServerHealthState> healthStates = serverHealthService.logsSortedForScope(HealthStateScope.forConfigRepo("GC")); String message = "Action required: Run 'git gc' on config.git repo"; String description = "Number of loose objects in your Configuration repository(config.git) has grown beyond " + "the configured threshold. As the size of config repo increases, the config save operations tend to slow down " + "drastically. It is recommended that you run 'git gc' from " + "'&lt;go server installation directory&gt;/db/config.git/' to address this problem. Go can do this " + "automatically on a periodic basis if you enable automatic GC. <a target='_blank' href='" + docsUrl("/advanced_usage/config_repo.html") + "'>read more...</a>"; assertThat(healthStates.get(0).getDescription(), is(description)); assertThat(healthStates.get(0).getLogLevel(), is(HealthStateLevel.WARNING)); assertThat(healthStates.get(0).getMessage(), is(message)); }
// Standard deviation of the t-distribution: sqrt(nu / (nu - 2)).
// NOTE(review): only finite for nu > 2; nu == 2 yields Infinity and nu < 2 yields NaN
// here — callers must guard, or a precondition should be added upstream.
@Override public double sd() { return Math.sqrt(nu / (nu - 2.0)); }
// [TEST] For 20 degrees of freedom, sd() must equal sqrt(20/18) = sqrt(10/9) within 1e-7.
@Test public void testSd() { System.out.println("sd"); TDistribution instance = new TDistribution(20); instance.rand(); assertEquals(Math.sqrt(10/9.0), instance.sd(), 1E-7); }
/**
 * Returns a copy of this context carrying the given extra list; every identity
 * field (flags, trace ids, parent/span ids) is preserved unchanged.
 */
TraceContext withExtra(List<Object> extra) {
    return new TraceContext(
        flags, traceIdHigh, traceId, localRootId, parentId, spanId, extra);
}
// [TEST] The copy returned by withExtra must carry the supplied list in its extraList field.
@Test void withExtra_notEmpty() { assertThat(context.withExtra(Arrays.asList(1L))) .extracting("extraList") .isEqualTo(Arrays.asList(1L)); }
/**
 * Adds a JSR-303 {@code @Size} annotation (min from "minItems", max from "maxItems")
 * to the field when bean-validation annotations are enabled, at least one bound is
 * present, and the field's type supports it. Returns the field unchanged otherwise.
 */
@Override
public JFieldVar apply(String nodeName, JsonNode node, JsonNode parent, JFieldVar field, Schema currentSchema) {
    // Guard clauses, evaluated in the same short-circuit order as the combined condition.
    if (!ruleFactory.getGenerationConfig().isIncludeJsr303Annotations()) {
        return field;
    }
    boolean hasBound = node.has("minItems") || node.has("maxItems");
    if (!hasBound || !isApplicableType(field)) {
        return field;
    }
    // Pick the jakarta or javax flavour of @Size per generation config.
    final Class<? extends Annotation> sizeClass =
            ruleFactory.getGenerationConfig().isUseJakartaValidation()
                    ? Size.class
                    : javax.validation.constraints.Size.class;
    JAnnotationUse annotation = field.annotate(sizeClass);
    if (node.has("minItems")) {
        annotation.param("min", node.get("minItems").asInt());
    }
    if (node.has("maxItems")) {
        annotation.param("max", node.get("maxItems").asInt());
    }
    return field;
}
// [TEST] (Parameterized) With both minItems/maxItems present and a generic field type,
// the @Size annotation and its min/max params are added exactly when the type is
// applicable, and never otherwise.
@Test public void testMaxAndMinLengthGenericsOnType() { when(config.isIncludeJsr303Annotations()).thenReturn(true); final int minValue = new Random().nextInt(); final int maxValue = new Random().nextInt(); JsonNode maxSubNode = Mockito.mock(JsonNode.class); when(subNode.asInt()).thenReturn(minValue); when(maxSubNode.asInt()).thenReturn(maxValue); when(node.get("minItems")).thenReturn(subNode); when(node.get("maxItems")).thenReturn(maxSubNode); when(fieldVar.annotate(sizeClass)).thenReturn(annotation); when(node.has("minItems")).thenReturn(true); when(node.has("maxItems")).thenReturn(true); when(fieldVar.type().boxify().fullName()).thenReturn(fieldClass.getTypeName() + "<String>"); JFieldVar result = rule.apply("node", node, null, fieldVar, null); assertSame(fieldVar, result); verify(fieldVar, times(isApplicable ? 1 : 0)).annotate(sizeClass); verify(annotation, times(isApplicable ? 1 : 0)).param("min", minValue); verify(annotation, times(isApplicable ? 1 : 0)).param("max", maxValue); }
// Returns the document count of the given index, refreshing it first so recent writes
// are visible (Elasticsearch is only near-real-time). Any failure is wrapped in a
// resource-manager exception with the index name for context.
public long count(String indexName) throws ElasticsearchResourceManagerException { LOG.info("Fetching count from {}.", indexName); try { // Elasticsearch is near realtime, so refresh will guarantee reads are consistent elasticsearchClient.indices().refresh(new RefreshRequest(indexName), RequestOptions.DEFAULT); return elasticsearchClient .count(new CountRequest(indexName), RequestOptions.DEFAULT) .getCount(); } catch (Exception e) { throw new ElasticsearchResourceManagerException( "Error fetching count from " + indexName + ".", e); } }
// [TEST] Stubs the client's count response (deep-stub chain — presumably the mock uses
// RETURNS_DEEP_STUBS; verify in the fixture) and asserts the manager passes the 100L
// count through unchanged.
@Test public void testCountDocumentsShouldReturnInt() throws IOException { when(elasticsearchClient.count(any(CountRequest.class), eq(RequestOptions.DEFAULT)).getCount()) .thenReturn(100L); assertThat(testManager.count(INDEX_NAME)).isEqualTo(100L); }
/**
 * Returns the longest common suffix of the two sequences, taken from {@code str1};
 * EMPTY when either input is null/empty or nothing matches.
 */
public static CharSequence commonSuffix(CharSequence str1, CharSequence str2) {
    if (isEmpty(str1) || isEmpty(str2)) {
        return EMPTY;
    }
    // Walk both sequences backwards in lockstep until the characters diverge
    // or either sequence is exhausted.
    int i = str1.length() - 1;
    int j = str2.length() - 1;
    while (i >= 0 && j >= 0 && str1.charAt(i) == str2.charAt(j)) {
        i--;
        j--;
    }
    // i now sits just before the matched tail of str1.
    return str1.subSequence(i + 1, str1.length());
}
// [TEST] Covers null/empty inputs, no-match pairs (including CJK), matched suffixes of
// various lengths, and whitespace-only suffixes of differing lengths.
@Test public void commonSuffixTest() throws Exception{ // -------------------------- None match ----------------------- assertEquals("", CharSequenceUtil.commonSuffix("", "abc")); assertEquals("", CharSequenceUtil.commonSuffix(null, "abc")); assertEquals("", CharSequenceUtil.commonSuffix("abc", null)); assertEquals("", CharSequenceUtil.commonSuffix("abc", "")); assertEquals("", CharSequenceUtil.commonSuffix("zzzja", "zzzjb")); assertEquals("", CharSequenceUtil.commonSuffix("中文english", "中文Korean")); // -------------------------- Matched ----------------------- assertEquals("_name", CharSequenceUtil.commonSuffix("abc_name", "efg_name")); assertEquals("zzzj", CharSequenceUtil.commonSuffix("abczzzj", "efgzzzj")); assertEquals("中文", CharSequenceUtil.commonSuffix("english中文", "Korean中文")); // "abc" + { space * 10 } final String str1 = "abc" + CharSequenceUtil.repeat(CharSequenceUtil.SPACE, 10); // "efg" + { space * 15 } final String str2 = "efg" + CharSequenceUtil.repeat(CharSequenceUtil.SPACE, 15); // Expect common suffix: { space * 10 } assertEquals(CharSequenceUtil.repeat(CharSequenceUtil.SPACE, 10), CharSequenceUtil.commonSuffix(str1, str2)); }
// REST endpoint: deletes a configuration entry. Rejects requests missing group or key
// (MISS_PARAM), returns NOT_EXISTS when a successful lookup finds nothing, otherwise
// delegates deletion to the config service.
// NOTE(review): when the lookup itself fails (result not successful) the code falls
// through and still attempts the delete — confirm this is intentional.
@DeleteMapping("/config") public Result<Boolean> deleteConfig(ConfigInfo configInfo) { if (StringUtils.isEmpty(configInfo.getGroup()) || StringUtils.isEmpty(configInfo.getKey())) { return new Result<>(ResultCodeType.MISS_PARAM.getCode(), ResultCodeType.MISS_PARAM.getMessage()); } Result<List<ConfigInfo>> result = configService.getConfigList(configInfo, PluginType.OTHER, true); if (result.isSuccess() && CollectionUtils.isEmpty(result.getData())) { return new Result<>(ResultCodeType.NOT_EXISTS.getCode(), ResultCodeType.NOT_EXISTS.getMessage()); } return configService.deleteConfig(configInfo); }
// [TEST] Happy path: deleting the fixture configInfo succeeds and reports true.
@Test public void deleteConfig() { Result<Boolean> result = configController.deleteConfig(configInfo); Assert.assertTrue(result.isSuccess()); Assert.assertTrue(result.getData()); }
// Membership test via comparator-based binary search: present iff the search index
// is non-negative. checkComparator() validates the comparator before searching.
// NOTE(review): the unchecked cast to V means an argument of the wrong runtime type
// may throw ClassCastException instead of returning false — confirm callers accept
// that deviation from the Collection.contains contract.
@Override public boolean contains(Object o) { checkComparator(); return binarySearch((V) o).getIndex() >= 0; }
// [TEST] Elements added to the priority queue (including a duplicate) are reported as
// contained; an element never added is not.
@Test public void testContains() { RPriorityQueue<TestObject> set = redisson.getPriorityQueue("set"); set.add(new TestObject("1", "2")); set.add(new TestObject("1", "2")); set.add(new TestObject("2", "3")); set.add(new TestObject("3", "4")); set.add(new TestObject("5", "6")); Assertions.assertTrue(set.contains(new TestObject("2", "3"))); Assertions.assertTrue(set.contains(new TestObject("1", "2"))); Assertions.assertFalse(set.contains(new TestObject("1", "9"))); }
// Security check: verifies the browser IP matches the desktop client IP recorded in the
// session. The first entry of a comma-separated forwarded chain is salted and SHA-256
// hashed (base64) for comparison against the anonymized IP stored on the session.
// On mismatch: throws ClientException when strict checking (sourceIpCheck) is enabled,
// otherwise only logs a warning. Sessions without a stored IP are skipped entirely.
public void ipCheck(EidSession session, String clientIp) { if (session.getClientIpAddress() != null && !session.getClientIpAddress().isEmpty()) { String[] clientIps = clientIp.split(", "); byte[] data = clientIps[0].concat(sourceIpSalt).getBytes(StandardCharsets.UTF_8); String anonimizedIp = Base64.toBase64String(DigestUtils.digest("SHA256").digest(data)); if (!anonimizedIp.equals(session.getClientIpAddress())) { String logMessage = String.format( "Security exception: Browser and Desktop client IP doesn't match: %s expected: %s", anonimizedIp, session.getClientIpAddress()); if (sourceIpCheck) { throw new ClientException(logMessage); } else { logger.warn(logMessage); } } } }
// [TEST] A session without a stored client IP must skip the check entirely — no exception
// even with an empty browser IP.
@Test public void testipCheckNotUsbReader() { EidSession session = new EidSession(); assertDoesNotThrow(() -> target.ipCheck(session, "")); }
// Static factory: validates the event then delegates ReplicationTask construction to
// the (possibly client-specific) factory instance.
public static ReplicationTask create(HCatClient client, HCatNotificationEvent event){ if (event == null){ throw new IllegalArgumentException("event should not be null"); } return getFactoryInstance(client).create(client,event); }
// [TEST] The default factory maps a CREATE_TABLE event to CreateTableReplicationTask,
// while a NoopFactory yields NoopReplicationTask; the factory is reset afterwards.
@Test public void testCreate() throws HCatException { Table t = new Table(); t.setDbName("testdb"); t.setTableName("testtable"); NotificationEvent event = new NotificationEvent(0, (int)System.currentTimeMillis(), HCatConstants.HCAT_CREATE_TABLE_EVENT, msgFactory.buildCreateTableMessage(t).toString()); event.setDbName(t.getDbName()); event.setTableName(t.getTableName()); ReplicationTask.resetFactory(null); ReplicationTask rtask = ReplicationTask.create(HCatClient.create(new HiveConf()),new HCatNotificationEvent(event)); assertTrue("Provided factory instantiation should yield CreateTableReplicationTask", rtask instanceof CreateTableReplicationTask); ReplicationTask.resetFactory(NoopFactory.class); rtask = ReplicationTask.create(HCatClient.create(new HiveConf()),new HCatNotificationEvent(event)); assertTrue("Provided factory instantiation should yield NoopReplicationTask", rtask instanceof NoopReplicationTask); ReplicationTask.resetFactory(null); }
// Streaming state machine over one line of an fsimage XML dump. Tracks section nesting
// (DEFAULT -> INODE_SECTION -> INODE -> FILE -> FILE_WITH_REPLICATION) and returns the
// BlockInfos found on this line. Lines outside <INodeSection> are ignored; a block
// string outside a replicated-file context, or multiple replication values on one
// line, is an IOException. currentReplication is instance state carried across lines.
List<BlockInfo> parseLine(String line) throws IOException { if (currentState == State.DEFAULT) { if (line.contains("<INodeSection>")) { transitionTo(State.INODE_SECTION); } else { return Collections.emptyList(); } } if (line.contains("<inode>")) { transitionTo(State.INODE); } if (line.contains("<type>FILE</type>")) { transitionTo(State.FILE); } List<String> replicationStrings = valuesFromXMLString(line, "replication"); if (!replicationStrings.isEmpty()) { if (replicationStrings.size() > 1) { throw new IOException(String.format("Found %s replication strings", replicationStrings.size())); } transitionTo(State.FILE_WITH_REPLICATION); currentReplication = Short.parseShort(replicationStrings.get(0)); } Matcher blockMatcher = BLOCK_PATTERN.matcher(line); List<BlockInfo> blockInfos = new ArrayList<>(); while (blockMatcher.find()) { if (currentState != State.FILE_WITH_REPLICATION) { throw new IOException( "Found a block string when in state: " + currentState); } long id = Long.parseLong(blockMatcher.group(1)); long gs = Long.parseLong(blockMatcher.group(2)); long size = Long.parseLong(blockMatcher.group(3)); blockInfos.add(new BlockInfo(id, gs, size, currentReplication)); } if (line.contains("</inode>")) { transitionTo(State.INODE_SECTION); } if (line.contains("</INodeSection>")) { transitionTo(State.DEFAULT); } return blockInfos; }
// [TEST] Inode-like content appearing outside <INodeSection> (inside <OtherSection>) must
// be ignored — every parsed line yields an empty block list.
@Test public void testNonInodeSectionIgnored() throws Exception { String[] lines = { "<INodeSection>", "</INodeSection>", "<OtherSection>", "<inode><id>1</id><type>FILE</type><name>fake-file</name>" + "<replication>1</replication>", "<blocks><block><id>2</id><genstamp>1</genstamp>" + "<numBytes>1</numBytes></block>", "</inode>", "<replication>3</replication>", "</OtherSection>" }; XMLParser parser = new XMLParser(); for (String line : lines) { assertTrue((parser.parseLine(line).isEmpty())); } }
/**
 * Adds the pipeline to the named group, creating the group when it does not exist.
 * When the group exists, the pipeline is offered to each group in turn and stops at
 * the first group that accepts (saves) it.
 */
public void addPipeline(String groupName, PipelineConfig pipeline) {
    String group = BasicPipelineConfigs.sanitizedGroupName(groupName);
    if (!hasGroup(group)) {
        createNewGroup(group, pipeline);
        return;
    }
    for (PipelineConfigs candidate : this) {
        if (candidate.save(pipeline, group)) {
            return;
        }
    }
}
// [TEST] Adding a pipeline to "defaultGroup" must place it only in that group, leaving the
// other group and the group count untouched.
@Test public void shouldOnlySavePipelineToTargetGroup() { PipelineConfigs defaultGroup = createGroup("defaultGroup", createPipelineConfig("pipeline1", "stage1")); PipelineConfigs defaultGroup2 = createGroup("defaultGroup2", createPipelineConfig("pipeline2", "stage2")); PipelineGroups pipelineGroups = new PipelineGroups(defaultGroup, defaultGroup2); PipelineConfig pipelineConfig = createPipelineConfig("pipeline3", "stage1"); pipelineGroups.addPipeline("defaultGroup", pipelineConfig); assertThat(defaultGroup, hasItem(pipelineConfig)); assertThat(defaultGroup2, not(hasItem(pipelineConfig))); assertThat(pipelineGroups.size(), is(2)); }
// Drops the invoker registered under this path; a no-op when the path is absent.
@Override public void remove(String path) { path2Invoker.remove(path); }
// [TEST] After remove("/abc") the previously-resolvable invoker must no longer resolve.
@Test void testRemove() { Assertions.assertEquals(INVOKER, getInvokerByPath("/abc")); PATH_RESOLVER.remove("/abc"); Assertions.assertNull(getInvokerByPath("/abc")); }
/**
 * Formats the date using the locale's default-length date style
 * (e.g. "Jan 22, 2014" for English).
 */
@Override
public String formatDate(Locale locale, Date date) {
    DateFormat format = DateFormat.getDateInstance(DateFormat.DEFAULT, locale);
    return format.format(date);
}
// [TEST] Pins the default timezone to GMT+1 for a deterministic rendering, asserts the
// English medium-style output, then restores the original timezone.
@Test public void format_date() { TimeZone initialTz = TimeZone.getDefault(); TimeZone.setDefault(TimeZone.getTimeZone("GMT+1")); assertThat(underTest.formatDate(Locale.ENGLISH, DateUtils.parseDateTime("2014-01-22T19:10:03+0100"))).isEqualTo("Jan 22, 2014"); TimeZone.setDefault(initialTz); }
/**
 * Runs the base source-connector validation, then layers the exactly-once support
 * and transaction-boundary checks onto the same result map before returning it.
 */
@Override
protected Map<String, ConfigValue> validateSourceConnectorConfig(SourceConnector connector, ConfigDef configDef, Map<String, String> config) {
    Map<String, ConfigValue> validated = super.validateSourceConnectorConfig(connector, configDef, config);
    validateSourceConnectorExactlyOnceSupport(config, validated, connector);
    validateSourceConnectorTransactionBoundary(config, validated, connector);
    return validated;
}
// [TEST] A connector whose exactlyOnceSupport() throws must not break validation; the
// thrown message must surface as exactly one error on the exactly-once config key.
@Test public void testExactlyOnceSourceSupportValidationHandlesConnectorErrorsGracefully() { herder = exactlyOnceHerder(); Map<String, String> config = new HashMap<>(); config.put(SourceConnectorConfig.EXACTLY_ONCE_SUPPORT_CONFIG, REQUIRED.toString()); SourceConnector connectorMock = mock(SourceConnector.class); String errorMessage = "time to add a new unit test :)"; when(connectorMock.exactlyOnceSupport(eq(config))).thenThrow(new NullPointerException(errorMessage)); Map<String, ConfigValue> validatedConfigs = herder.validateSourceConnectorConfig( connectorMock, SourceConnectorConfig.configDef(), config); List<String> errors = validatedConfigs.get(SourceConnectorConfig.EXACTLY_ONCE_SUPPORT_CONFIG).errorMessages(); assertFalse(errors.isEmpty()); assertTrue( errors.get(0).contains(errorMessage), "Error message did not contain expected text: " + errors.get(0)); assertEquals(1, errors.size()); }
/**
 * Returns true when the column name could be a synthetic join key, i.e. it carries
 * the reserved synthetic-join-key prefix. (The prefix constant's spelling,
 * SYNTHETIC_JOIN_KEY_COLUMN_PRIFIX, is defined elsewhere and kept as-is.)
 */
public static boolean maybeSyntheticJoinKey(final ColumnName columnName) {
    final String name = columnName.text();
    return name.startsWith(SYNTHETIC_JOIN_KEY_COLUMN_PRIFIX);
}
// [TEST] Names beginning with the exact (case-sensitive) ROWKEY prefix are synthetic join
// key candidates; differently-cased or differently-prefixed names are not.
@Test public void shouldDetectPossibleSyntheticJoinColumns() { assertThat(ColumnNames.maybeSyntheticJoinKey(ColumnName.of("ROWKEY")), is(true)); assertThat(ColumnNames.maybeSyntheticJoinKey(ColumnName.of("ROWKEY_0")), is(true)); assertThat(ColumnNames.maybeSyntheticJoinKey(ColumnName.of("ROWKEY_1")), is(true)); assertThat(ColumnNames.maybeSyntheticJoinKey(ColumnName.of("Rowkey_2")), is(false)); assertThat(ColumnNames.maybeSyntheticJoinKey(ColumnName.of("other_2")), is(false)); assertThat(ColumnNames.maybeSyntheticJoinKey(ColumnName.of("NotROWKEY_2")), is(false)); }
// Intentional no-op: this listener does not react to task start events.
@Override public void onStart(CeTask ceTask) { // nothing to do }
// [TEST] onStart must not touch the task at all — verified via a mock with no interactions.
@Test public void onStart_has_no_effect() { CeTask ceTask = mock(CeTask.class); underTest.onStart(ceTask); verifyNoInteractions(ceTask); }
@Override public DescriptiveUrlBag toUrl(final Path file) { final DescriptiveUrlBag list = new DescriptiveUrlBag(); // In one hour list.add(this.toSignedUrl(file, (int) TimeUnit.HOURS.toSeconds(1))); // Default signed URL expiring in 24 hours. list.add(this.toSignedUrl(file, (int) TimeUnit.SECONDS.toSeconds( new HostPreferences(session.getHost()).getInteger("s3.url.expire.seconds")))); // 1 Week list.add(this.toSignedUrl(file, (int) TimeUnit.DAYS.toSeconds(7))); // 1 Month list.add(this.toSignedUrl(file, (int) TimeUnit.DAYS.toSeconds(30))); // 1 Year list.add(this.toSignedUrl(file, (int) TimeUnit.DAYS.toSeconds(365))); return list; }
// [TEST] (Azure provider variant) After touching a file, the URL provider must yield
// exactly 5 signed URLs; the file is deleted afterwards.
@Test public void testGet() throws Exception { final Path container = new Path("cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path test = new Path(container, "f g", EnumSet.of(Path.Type.file)); new AzureTouchFeature(session, null).touch(test, new TransferStatus()); assertEquals(5, new AzureUrlProvider(session).toUrl(test).filter(DescriptiveUrl.Type.signed).size()); new AzureDeleteFeature(session, null).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Looks up a client by id. An empty/null id is a programming error
 * (IllegalArgumentException); an id that resolves to no client is an OAuth-level
 * failure (InvalidClientException).
 */
@Override
public ClientDetailsEntity loadClientByClientId(String clientId) throws OAuth2Exception, InvalidClientException, IllegalArgumentException {
    // Guard clause first, so the repository is only consulted for real ids.
    if (Strings.isNullOrEmpty(clientId)) {
        throw new IllegalArgumentException("Client id must not be empty!");
    }
    ClientDetailsEntity client = clientRepository.getClientByClientId(clientId);
    if (client == null) {
        throw new InvalidClientException("Client with id " + clientId + " was not found");
    }
    return client;
}
// Verifies the three failure modes: null id and empty id raise IllegalArgumentException,
// an unknown id raises InvalidClientException.
@Test
public void loadClientByClientId_badId() {
    // null id
    try {
        service.loadClientByClientId(null);
        fail("Null client id. Expected an IllegalArgumentException.");
    } catch (IllegalArgumentException e) {
        assertThat(e, is(notNullValue()));
    }
    // empty id
    try {
        service.loadClientByClientId("");
        fail("Empty client id. Expected an IllegalArgumentException.");
    } catch (IllegalArgumentException e) {
        assertThat(e, is(notNullValue()));
    }
    // id not found
    String clientId = "b00g3r";
    Mockito.when(clientRepository.getClientByClientId(clientId)).thenReturn(null);
    try {
        service.loadClientByClientId(clientId);
        fail("Client id not found. Expected an InvalidClientException.");
    } catch (InvalidClientException e) {
        assertThat(e, is(notNullValue()));
    }
}
// Starts the OpenTelemetry tracing plugin: records proxy mode, copies every plugin property
// into JVM system properties (consumed by the autoconfigured SDK), then initializes the SDK
// and eagerly obtains the tracer so it is registered at startup.
@Override public void start(final PluginConfiguration pluginConfig, final boolean isEnhancedForProxy) { PluginContext.getInstance().setEnhancedForProxy(isEnhancedForProxy); pluginConfig.getProps().forEach((key, value) -> setSystemProperty(String.valueOf(key), String.valueOf(value))); OpenTelemetrySdk openTelemetrySdk = AutoConfiguredOpenTelemetrySdk.initialize().getOpenTelemetrySdk(); openTelemetrySdk.getTracer(OpenTelemetryConstants.TRACER_NAME); }
// Starts the plugin with otel resource/exporter properties and checks a global tracer provider was installed.
@Test void assertStart() { pluginLifecycleService.start(new PluginConfiguration(null, 0, null, PropertiesBuilder.build(new Property("otel.resource.attributes", "service.name=shardingsphere-agent"), new Property("otel.traces.exporter", "zipkin"))), true); assertNotNull(GlobalOpenTelemetry.getTracerProvider()); assertNotNull(GlobalOpenTelemetry.getTracer("shardingsphere-agent")); }
// Builds the REST path "v1/{prefix}/namespaces/{encoded-namespace}/tables/{encoded-name}";
// namespace and table name are percent/unit-separator encoded by RESTUtil.
public String table(TableIdentifier ident) { return SLASH.join( "v1", prefix, "namespaces", RESTUtil.encodeNamespace(ident.namespace()), "tables", RESTUtil.encodeString(ident.name())); }
// Multipart namespaces are joined with the encoded 0x1F unit separator ("%1F") in the path.
@Test public void testTableWithMultipartNamespace() { TableIdentifier ident = TableIdentifier.of("n", "s", "table"); assertThat(withPrefix.table(ident)).isEqualTo("v1/ws/catalog/namespaces/n%1Fs/tables/table"); assertThat(withoutPrefix.table(ident)).isEqualTo("v1/namespaces/n%1Fs/tables/table"); }
// Asynchronously resolves the earliest message time for a queue, preferring the smaller of the
// next store's value and the tiered fetcher's value; records API latency metrics. If the fetcher
// returns a negative time, falls back to the next store's value, or -1 when that is absent too.
// (Long.MAX_VALUE is used internally as the "no value" sentinel for the next store.)
@Override public CompletableFuture<Long> getEarliestMessageTimeAsync(String topic, int queueId) { long nextEarliestMessageTime = next.getEarliestMessageTime(topic, queueId); long finalNextEarliestMessageTime = nextEarliestMessageTime > 0 ? nextEarliestMessageTime : Long.MAX_VALUE; Stopwatch stopwatch = Stopwatch.createStarted(); return fetcher.getEarliestMessageTimeAsync(topic, queueId) .thenApply(time -> { Attributes latencyAttributes = TieredStoreMetricsManager.newAttributesBuilder() .put(TieredStoreMetricsConstant.LABEL_OPERATION, TieredStoreMetricsConstant.OPERATION_API_GET_EARLIEST_MESSAGE_TIME) .put(TieredStoreMetricsConstant.LABEL_TOPIC, topic) .build(); TieredStoreMetricsManager.apiLatency.record(stopwatch.elapsed(TimeUnit.MILLISECONDS), latencyAttributes); if (time < 0) { log.debug("GetEarliestMessageTimeAsync failed, try to get earliest message time from next store: topic: {}, queue: {}", topic, queueId); return finalNextEarliestMessageTime != Long.MAX_VALUE ? finalNextEarliestMessageTime : -1; } return Math.min(finalNextEarliestMessageTime, time); }); }
// Checks the fetcher's value is used when valid, and the next store's value is used when the fetcher returns -1.
@Test public void testGetEarliestMessageTimeAsync() { when(fetcher.getEarliestMessageTimeAsync(anyString(), anyInt())).thenReturn(CompletableFuture.completedFuture(1L)); Assert.assertEquals(1, (long) currentStore.getEarliestMessageTimeAsync(mq.getTopic(), mq.getQueueId()).join()); when(fetcher.getEarliestMessageTimeAsync(anyString(), anyInt())).thenReturn(CompletableFuture.completedFuture(-1L)); when(defaultStore.getEarliestMessageTime(anyString(), anyInt())).thenReturn(2L); Assert.assertEquals(2, (long) currentStore.getEarliestMessageTimeAsync(mq.getTopic(), mq.getQueueId()).join()); }
/**
 * Encodes this permission as a 10-bit short: sticky bit at bit 9, then the
 * user (bits 6-8), group (bits 3-5) and other (bits 0-2) action ordinals.
 */
public short toShort() {
    // Compose the bit fields incrementally, lowest field first.
    int encoded = otheraction.ordinal();
    encoded |= groupaction.ordinal() << 3;
    encoded |= useraction.ordinal() << 6;
    if (stickyBit) {
        encoded |= 1 << 9;
    }
    return (short) encoded;
}
// Exhaustively round-trips all 0..01777 permission values through the short and octal-string
// constructors, then walks every (sticky, user, group, other) combination to pin the bit layout.
@Test
public void testConvertingPermissions() {
    for (short s = 0; s <= 01777; s++) {
        assertEquals(s, new FsPermission(s).toShort());
        // check string formats
        assertEquals(s, new FsPermission(String.format("%03o", s)).toShort());
    }
    short s = 0;
    for (boolean sb : new boolean[] { false, true }) {
        for (FsAction u : FsAction.values()) {
            for (FsAction g : FsAction.values()) {
                for (FsAction o : FsAction.values()) {
                    // Cover constructor with sticky bit.
                    FsPermission f = new FsPermission(u, g, o, sb);
                    assertEquals(s, f.toShort());
                    FsPermission f2 = new FsPermission(f);
                    assertEquals(s, f2.toShort());
                    s++;
                }
            }
        }
    }
    assertEquals(02000, s);
}
// Registers the "memtable-flush-pending" RocksDB gauge with the streams metrics registry.
public static void addMemTableFlushPending(final StreamsMetricsImpl streamsMetrics, final RocksDBMetricContext metricContext, final Gauge<BigInteger> valueProvider) { addMutableMetric( streamsMetrics, metricContext, valueProvider, MEMTABLE_FLUSH_PENDING, MEMTABLE_FLUSH_PENDING_DESCRIPTION ); }
// Verifies the gauge is registered under the expected metric name and description.
@Test public void shouldAddMemTableFlushPendingMetric() { final String name = "mem-table-flush-pending"; final String description = "Reports 1 if a memtable flush is pending, otherwise it reports 0"; runAndVerifyMutableMetric( name, description, () -> RocksDBMetrics.addMemTableFlushPending(streamsMetrics, ROCKSDB_METRIC_CONTEXT, VALUE_PROVIDER) ); }
// Returns this provider's constant identifier.
@Override @NonNull public String getId() { return ID; }
// End-to-end: validates git SCM credentials via the org SCM validate endpoint, then verifies the
// default credentialId ("git:<repo-hash>") is returned for the repository.
@Test
public void shouldCreateCredentialsWithDefaultId() throws Exception {
    User user = login("Ken", "Create", "ken@create.item");
    MockAuthorizationStrategy a = new MockAuthorizationStrategy();
    a.grant(Jenkins.READ, Item.CREATE).everywhere().to(user.getId());
    j.jenkins.setAuthorizationStrategy(a);
    String scmPath = "/organizations/" + getOrgName() + "/scm/git/";
    // First create a credential
    String scmValidatePath = scmPath + "validate";
    // We're relying on github letting you do a git-ls for repos with bad creds so long as they're public
    Map params = MapsHelper.of(
            "userName", "someguy",
            "password", "password",
            "repositoryUrl", HTTPS_GITHUB_PUBLIC
    );
    Map resp = new RequestBuilder(baseUrl)
            .status(200)
            .jwtToken(getJwtToken(j.jenkins, user.getId(), user.getId()))
            .crumb( crumb )
            .data(params)
            .post(scmValidatePath)
            .build(Map.class);
    assertEquals("ok", resp.get("status"));
    // Now get the default credentialId
    String repoPath = scmPath + "?repositoryUrl=" + HTTPS_GITHUB_PUBLIC;
    Map resp2 = new RequestBuilder(baseUrl)
            .status(200)
            .jwtToken(getJwtToken(j.jenkins, user.getId(), user.getId()))
            .crumb( crumb )
            .get(repoPath)
            .build(Map.class);
    assertEquals("git:" + HTTPS_GITHUB_PUBLIC_HASH, resp2.get("credentialId"));
}
/**
 * Parses a Set-Cookie style header string into {@link HttpCookie}s.
 * A null or blank input yields an empty, immutable list.
 */
public static List<HttpCookie> parseCookies(String cookieStr) {
    return StrUtil.isBlank(cookieStr)
            ? Collections.emptyList()
            : HttpCookie.parse(cookieStr);
}
// Parses a quoted cookie string and checks name, value, path, domain and version are extracted.
@Test public void parseCookiesTest(){ final String cookieStr = "cookieName=\"cookieValue\";Path=\"/\";Domain=\"cookiedomain.com\""; final List<HttpCookie> httpCookies = NetUtil.parseCookies(cookieStr); assertEquals(1, httpCookies.size()); final HttpCookie httpCookie = httpCookies.get(0); assertEquals(0, httpCookie.getVersion()); assertEquals("cookieName", httpCookie.getName()); assertEquals("cookieValue", httpCookie.getValue()); assertEquals("/", httpCookie.getPath()); assertEquals("cookiedomain.com", httpCookie.getDomain()); }
// Authorization-code grant: delegates code creation to oauth2CodeService and returns the new code string.
@Override public String grantAuthorizationCodeForCode(Long userId, Integer userType, String clientId, List<String> scopes, String redirectUri, String state) { return oauth2CodeService.createAuthorizationCode(userId, userType, clientId, scopes, redirectUri, state).getCode(); }
// Verifies the grant delegates to oauth2CodeService with the exact arguments and returns the created code.
@Test
public void testGrantAuthorizationCodeForCode() {
    // Prepare parameters
    Long userId = randomLongId();
    Integer userType = randomEle(UserTypeEnum.values()).getValue();
    String clientId = randomString();
    List<String> scopes = Lists.newArrayList("read", "write");
    String redirectUri = randomString();
    String state = randomString();
    // Mock collaborator behavior
    OAuth2CodeDO codeDO = randomPojo(OAuth2CodeDO.class);
    when(oauth2CodeService.createAuthorizationCode(eq(userId), eq(userType), eq(clientId), eq(scopes), eq(redirectUri), eq(state))).thenReturn(codeDO);
    // Invoke and assert
    assertEquals(codeDO.getCode(), oauth2GrantService.grantAuthorizationCodeForCode(userId, userType, clientId, scopes, redirectUri, state));
}
// Resets the poll timer on an application-thread poll. If the timer had already expired, the
// member was considered stale and may need to rejoin before the timer is restarted with the
// max poll interval.
public void resetPollTimer(final long pollMs) { if (pollTimer.isExpired()) { logger.debug("Poll timer has been reset after it had expired"); shareMembershipManager.maybeRejoinStaleMember(); } pollTimer.update(pollMs); pollTimer.reset(maxPollIntervalMs); }
// After poll-timer expiry the member sends a final leave heartbeat and stops heartbeating;
// resetPollTimer then triggers stale-member rejoin and heartbeating resumes.
@Test
public void testPollTimerExpiration() {
    heartbeatRequestManager = createHeartbeatRequestManager(
            coordinatorRequestManager, membershipManager, heartbeatState, heartbeatRequestState, backgroundEventHandler);
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(false);
    // On poll timer expiration, the member should send a last heartbeat to leave the group
    // and notify the membership manager
    time.sleep(DEFAULT_MAX_POLL_INTERVAL_MS);
    assertHeartbeat(heartbeatRequestManager, DEFAULT_HEARTBEAT_INTERVAL_MS);
    verify(membershipManager).transitionToSendingLeaveGroup(true);
    verify(heartbeatState).reset();
    verify(heartbeatRequestState).reset();
    verify(membershipManager).onHeartbeatRequestGenerated();
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(true);
    assertNoHeartbeat(heartbeatRequestManager);
    heartbeatRequestManager.resetPollTimer(time.milliseconds());
    assertTrue(pollTimer.notExpired());
    verify(membershipManager).maybeRejoinStaleMember();
    when(membershipManager.shouldSkipHeartbeat()).thenReturn(false);
    assertHeartbeat(heartbeatRequestManager, DEFAULT_HEARTBEAT_INTERVAL_MS);
}
// Replaces the value-metadata list under the write lock, invalidating and rebuilding the
// name -> index cache, and clearing the cached clone flag.
@Override public void setValueMetaList( List<ValueMetaInterface> valueMetaList ) { lock.writeLock().lock(); try { this.valueMetaList = valueMetaList; this.cache.invalidate(); for ( int i = 0, len = valueMetaList.size(); i < len; i++ ) { ValueMetaInterface valueMeta = valueMetaList.get( i ); cache.storeMapping( valueMeta.getName(), i ); } this.needRealClone = null; } finally { lock.writeLock().unlock(); } }
// Verifies setValueMetaList installs the new list and that name lookups and the index cache agree.
@Test
public void testSetValueMetaList() throws KettlePluginException {
    List<ValueMetaInterface> setList = this.generateVList( new String[] { "alpha", "bravo" }, new int[] { 2, 2 } );
    rowMeta.setValueMetaList( setList );
    assertTrue( setList.contains( rowMeta.searchValueMeta( "alpha" ) ) );
    assertTrue( setList.contains( rowMeta.searchValueMeta( "bravo" ) ) );
    // check that it is available by index:
    assertEquals( 0, rowMeta.indexOfValue( "alpha" ) );
    assertEquals( 1, rowMeta.indexOfValue( "bravo" ) );
}
// Resolves the exact parameterized supertype of `type` at `rawSupertype`, stripping captured
// wildcards from the result via uncapture.
public static Type getExactSupertype(Type type, Class<?> rawSupertype) { return uncapture(GenericTypeReflector.getExactSuperType(type, rawSupertype)); }
// ArrayList<Person> resolved at Collection must yield Collection<Person>.
@Test public void getExactSupertype() { assertThat( Reflection.getExactSupertype( Types.parameterizedType(ArrayList.class, Person.class), Collection.class)) .isEqualTo(Types.parameterizedType(Collection.class, Person.class)); }
// Handles an EXPLAIN statement by delegating to ExplainExecutor and wrapping the resulting
// entity in a handled response.
public static StatementExecutorResponse execute( final ConfiguredStatement<Explain> statement, final SessionProperties sessionProperties, final KsqlExecutionContext executionContext, final ServiceContext serviceContext ) { return StatementExecutorResponse.handled(Optional .of(ExplainExecutor.explain( serviceContext, statement, executionContext, sessionProperties))); }
// EXPLAIN of a SELECT returns a QueryDescription carrying the statement text, its source,
// and an empty per-host status map.
@Test
public void shouldExplainStatement() {
    // Given:
    engine.givenSource(DataSourceType.KSTREAM, "Y");
    final String statementText = "SELECT * FROM Y EMIT CHANGES;";
    final ConfiguredStatement<?> explain = engine.configure("EXPLAIN " + statementText);
    // When:
    final QueryDescriptionEntity query = (QueryDescriptionEntity) CustomExecutors.EXPLAIN.execute(
            explain,
            sessionProperties,
            engine.getEngine(),
            engine.getServiceContext()
    ).getEntity().orElseThrow(IllegalStateException::new);
    // Then:
    assertThat(query.getQueryDescription().getStatementText(), equalTo(statementText));
    assertThat(query.getQueryDescription().getSources(), containsInAnyOrder("Y"));
    assertThat(query.getQueryDescription().getKsqlHostQueryStatus(), equalTo(Collections.emptyMap()));
}
// REST PATCH handler: rejects empty offset payloads, then asks the herder to alter the
// connector's offsets, forwarding to the leader when required.
@PATCH @Path("/{connector}/offsets") @Operation(summary = "Alter the offsets for the specified connector") public Response alterConnectorOffsets(final @Parameter(hidden = true) @QueryParam("forward") Boolean forward, final @Context HttpHeaders headers, final @PathParam("connector") String connector, final ConnectorOffsets offsets) throws Throwable { if (offsets.offsets() == null || offsets.offsets().isEmpty()) { throw new BadRequestException("Partitions / offsets need to be provided for an alter offsets request"); } FutureCallback<Message> cb = new FutureCallback<>(); herder.alterConnectorOffsets(connector, offsets.toMap(), cb); Message msg = requestHandler.completeOrForwardRequest(cb, "/connectors/" + connector + "/offsets", "PATCH", headers, offsets, new TypeReference<Message>() { }, new IdentityTranslator<>(), forward); return Response.ok().entity(msg).build(); }
// A successful herder callback should produce a 200 response carrying the success message.
@Test public void testAlterOffsets() throws Throwable { Map<String, ?> partition = Collections.singletonMap("partitionKey", "partitionValue"); Map<String, ?> offset = Collections.singletonMap("offsetKey", "offsetValue"); ConnectorOffset connectorOffset = new ConnectorOffset(partition, offset); ConnectorOffsets body = new ConnectorOffsets(Collections.singletonList(connectorOffset)); final ArgumentCaptor<Callback<Message>> cb = ArgumentCaptor.forClass(Callback.class); Message msg = new Message("The offsets for this connector have been altered successfully"); doAnswer(invocation -> { cb.getValue().onCompletion(null, msg); return null; }).when(herder).alterConnectorOffsets(eq(CONNECTOR_NAME), eq(body.toMap()), cb.capture()); Response response = connectorsResource.alterConnectorOffsets(null, NULL_HEADERS, CONNECTOR_NAME, body); assertEquals(200, response.getStatus()); assertEquals(msg, response.getEntity()); }
/**
 * Maps an ETL column type name to the Java class used to represent its values.
 *
 * @param column ETL column whose {@code columnType} drives the mapping
 * @return representation class (unknown types default to {@code String.class})
 * @throws SparkDppException if the column type is LARGEINT, which is unsupported
 */
public static Class getClassFromColumn(EtlJobConfig.EtlColumn column) throws SparkDppException {
    switch (column.columnType) {
        case "BOOLEAN":
            return Boolean.class;
        case "TINYINT":
        case "SMALLINT":
            return Short.class;
        case "INT":
            return Integer.class;
        case "DATETIME":
            return Timestamp.class;
        case "BIGINT":
            return Long.class;
        case "LARGEINT":
            throw new SparkDppException("LARGEINT is not supported now");
        case "FLOAT":
            return Float.class;
        case "DOUBLE":
            return Double.class;
        case "DATE":
            return Date.class;
        case "HLL":
        case "CHAR":
        case "VARCHAR":
        case "BITMAP":
        case "OBJECT":
        case "PERCENTILE":
            return String.class;
        case "DECIMALV2":
        case "DECIMAL32":
        case "DECIMAL64":
        case "DECIMAL128":
            // BigDecimal.valueOf(precision, scale).getClass() always yields BigDecimal.class;
            // return the class literal directly instead of allocating a throwaway value.
            return BigDecimal.class;
        default:
            return String.class;
    }
}
// Verifies getClassFromColumn maps every supported ETL column type to its expected Java class.
@Test
public void testGetClassFromColumn() {
    DppUtils dppUtils = new DppUtils();
    try {
        EtlJobConfig.EtlColumn column = new EtlJobConfig.EtlColumn();
        column.columnType = "CHAR";
        Class charResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(String.class, charResult);
        column.columnType = "HLL";
        Class hllResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(String.class, hllResult);
        column.columnType = "OBJECT";
        Class objectResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(String.class, objectResult);
        column.columnType = "BOOLEAN";
        Class booleanResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(Boolean.class, booleanResult);
        column.columnType = "TINYINT";
        Class tinyResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(Short.class, tinyResult);
        column.columnType = "SMALLINT";
        Class smallResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(Short.class, smallResult);
        column.columnType = "INT";
        Class integerResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(Integer.class, integerResult);
        column.columnType = "DATETIME";
        Class datetimeResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(java.sql.Timestamp.class, datetimeResult);
        column.columnType = "FLOAT";
        Class floatResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(Float.class, floatResult);
        column.columnType = "DOUBLE";
        Class doubleResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(Double.class, doubleResult);
        column.columnType = "DATE";
        Class dateResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(Date.class, dateResult);
        column.columnType = "DECIMALV2";
        column.precision = 10;
        column.scale = 2;
        Class decimalResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(BigDecimal.valueOf(10, 2).getClass(), decimalResult);
        column.columnType = "DECIMAL32";
        column.precision = 7;
        column.scale = 2;
        decimalResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(BigDecimal.valueOf(7, 2).getClass(), decimalResult);
        column.columnType = "DECIMAL64";
        column.precision = 15;
        column.scale = 3;
        decimalResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(BigDecimal.valueOf(15, 3).getClass(), decimalResult);
        column.columnType = "DECIMAL128";
        column.precision = 34;
        column.scale = 4;
        decimalResult = dppUtils.getClassFromColumn(column);
        Assert.assertEquals(BigDecimal.valueOf(34, 4).getClass(), decimalResult);
    } catch (Exception e) {
        // BUG FIX: was Assert.assertFalse(false), which always passes and silently
        // swallowed any unexpected exception; fail the test explicitly instead.
        Assert.fail(e.getMessage());
    }
}
// Creates a transform evaluator for the applied transform; the raw cast bridges the
// factory's generic signature to the caller's element type.
@Override public <InputT> TransformEvaluator<InputT> forApplication( AppliedPTransform<?, ?, ?> application, CommittedBundle<?> inputBundle) { @SuppressWarnings({"cast", "unchecked", "rawtypes"}) TransformEvaluator<InputT> evaluator = createEvaluator((AppliedPTransform) application); return evaluator; }
// Feeds six KVs through the GroupByKeyOnly evaluator and verifies each key's bundle
// receives a single KeyedWorkItem containing exactly that key's values.
@Test
public void testInMemoryEvaluator() throws Exception {
    KV<String, Integer> firstFoo = KV.of("foo", -1);
    KV<String, Integer> secondFoo = KV.of("foo", 1);
    KV<String, Integer> thirdFoo = KV.of("foo", 3);
    KV<String, Integer> firstBar = KV.of("bar", 22);
    KV<String, Integer> secondBar = KV.of("bar", 12);
    KV<String, Integer> firstBaz = KV.of("baz", Integer.MAX_VALUE);
    PCollection<KV<String, Integer>> values = p.apply(Create.of(firstFoo, firstBar, secondFoo, firstBaz, secondBar, thirdFoo));
    PCollection<KeyedWorkItem<String, Integer>> groupedKvs = values.apply(new DirectGroupByKeyOnly<>());
    CommittedBundle<KV<String, Integer>> inputBundle = bundleFactory.createBundle(values).commit(Instant.now());
    EvaluationContext evaluationContext = mock(EvaluationContext.class);
    StructuralKey<String> fooKey = StructuralKey.of("foo", StringUtf8Coder.of());
    UncommittedBundle<KeyedWorkItem<String, Integer>> fooBundle = bundleFactory.createKeyedBundle(fooKey, groupedKvs);
    StructuralKey<String> barKey = StructuralKey.of("bar", StringUtf8Coder.of());
    UncommittedBundle<KeyedWorkItem<String, Integer>> barBundle = bundleFactory.createKeyedBundle(barKey, groupedKvs);
    StructuralKey<String> bazKey = StructuralKey.of("baz", StringUtf8Coder.of());
    UncommittedBundle<KeyedWorkItem<String, Integer>> bazBundle = bundleFactory.createKeyedBundle(bazKey, groupedKvs);
    when(evaluationContext.createKeyedBundle(fooKey, groupedKvs)).thenReturn(fooBundle);
    when(evaluationContext.createKeyedBundle(barKey, groupedKvs)).thenReturn(barBundle);
    when(evaluationContext.createKeyedBundle(bazKey, groupedKvs)).thenReturn(bazBundle);
    // The input to a GroupByKey is assumed to be a KvCoder
    @SuppressWarnings("unchecked")
    Coder<String> keyCoder = ((KvCoder<String, Integer>) values.getCoder()).getKeyCoder();
    TransformEvaluator<KV<String, Integer>> evaluator = new GroupByKeyOnlyEvaluatorFactory(evaluationContext)
            .forApplication(DirectGraphs.getProducer(groupedKvs), inputBundle);
    evaluator.processElement(WindowedValue.valueInGlobalWindow(firstFoo));
    evaluator.processElement(WindowedValue.valueInGlobalWindow(secondFoo));
    evaluator.processElement(WindowedValue.valueInGlobalWindow(thirdFoo));
    evaluator.processElement(WindowedValue.valueInGlobalWindow(firstBar));
    evaluator.processElement(WindowedValue.valueInGlobalWindow(secondBar));
    evaluator.processElement(WindowedValue.valueInGlobalWindow(firstBaz));
    evaluator.finishBundle();
    assertThat(
        fooBundle.commit(Instant.now()).getElements(),
        contains(
            new KeyedWorkItemMatcher<>(
                KeyedWorkItems.elementsWorkItem(
                    "foo",
                    ImmutableSet.of(
                        WindowedValue.valueInGlobalWindow(-1),
                        WindowedValue.valueInGlobalWindow(1),
                        WindowedValue.valueInGlobalWindow(3))),
                keyCoder)));
    assertThat(
        barBundle.commit(Instant.now()).getElements(),
        contains(
            new KeyedWorkItemMatcher<>(
                KeyedWorkItems.elementsWorkItem(
                    "bar",
                    ImmutableSet.of(
                        WindowedValue.valueInGlobalWindow(12),
                        WindowedValue.valueInGlobalWindow(22))),
                keyCoder)));
    assertThat(
        bazBundle.commit(Instant.now()).getElements(),
        contains(
            new KeyedWorkItemMatcher<>(
                KeyedWorkItems.elementsWorkItem(
                    "baz",
                    ImmutableSet.of(WindowedValue.valueInGlobalWindow(Integer.MAX_VALUE))),
                keyCoder)));
}
// DELETE endpoint: normalizes the namespace, validates all identifiers, then delegates the
// deletion (with caller ip/user for auditing) to configOperationService.
@DeleteMapping
@Secured(action = ActionTypes.WRITE, signType = SignType.CONFIG)
public Result<Boolean> deleteConfig(HttpServletRequest request, @RequestParam("dataId") String dataId, @RequestParam("group") String group,
        @RequestParam(value = "namespaceId", required = false, defaultValue = StringUtils.EMPTY) String namespaceId,
        @RequestParam(value = "tag", required = false) String tag) throws NacosException {
    //fix issue #9783
    namespaceId = NamespaceUtil.processNamespaceParameter(namespaceId);
    // check namespaceId
    ParamUtils.checkTenantV2(namespaceId);
    ParamUtils.checkParam(dataId, group, "datumId", "rm");
    ParamUtils.checkParamV2(tag);
    String clientIp = RequestUtil.getRemoteIp(request);
    String srcUser = RequestUtil.getSrcUserName(request);
    return Result.success(configOperationService.deleteConfig(dataId, group, namespaceId, tag, clientIp, srcUser));
}
// Verifies the controller forwards delete parameters to configOperationService and wraps the result.
@Test void testDeleteConfig() throws Exception { MockHttpServletRequest request = new MockHttpServletRequest(); when(configOperationService.deleteConfig(eq(TEST_DATA_ID), eq(TEST_GROUP), eq(TEST_NAMESPACE_ID), eq(TEST_TAG), any(), any())).thenReturn(true); Result<Boolean> booleanResult = configControllerV2.deleteConfig(request, TEST_DATA_ID, TEST_GROUP, TEST_NAMESPACE_ID, TEST_TAG); verify(configOperationService).deleteConfig(eq(TEST_DATA_ID), eq(TEST_GROUP), eq(TEST_NAMESPACE_ID), eq(TEST_TAG), any(), any()); assertEquals(ErrorCode.SUCCESS.getCode(), booleanResult.getCode()); assertTrue(booleanResult.getData()); }
// Adapts a checked-exception-throwing consumer to java.util.function.Consumer by rethrowing any
// caught exception via throwAsUnchecked (presumably a sneaky-throw helper defined elsewhere in
// this file — the declared E lets callers keep a checked signature; confirm against that helper).
public static <T, E extends Exception> Consumer<T> rethrowConsumer(ConsumerWithExceptions<T, E> consumer) throws E { return t -> { try { consumer.accept(t); } catch (Exception exception) { throwAsUnchecked(exception); } }; }
// Demonstrates that rethrowConsumer lets Class.forName (a checked-exception call) be used
// directly inside forEach; the test passes as long as no exception propagates.
@Test public void test_Consumer_with_checked_exceptions() throws ClassNotFoundException { Stream.of("java.lang.Object", "java.lang.Integer", "java.lang.String") .forEach(rethrowConsumer(className -> System.out.println(Class.forName(className)))); Stream.of("java.lang.Object", "java.lang.Integer", "java.lang.String") .forEach(rethrowConsumer(System.out::println)); }
/**
 * Returns the single database name referenced by this context, or empty when none is referenced.
 * Fails fast with an IllegalStateException when tables span more than one database.
 */
public Optional<String> getDatabaseName() {
    Preconditions.checkState(databaseNames.size() <= 1, "Can not support multiple different database.");
    if (databaseNames.isEmpty()) {
        return Optional.empty();
    }
    return Optional.of(databaseNames.iterator().next());
}
// Two tables owned by the same database should resolve to that single database name.
@Test void assertGetSchemaNameWithSameSchemaAndDifferentTable() { SimpleTableSegment tableSegment1 = createTableSegment("table_1", "tbl_1"); tableSegment1.setOwner(new OwnerSegment(0, 0, new IdentifierValue("sharding_db_1"))); SimpleTableSegment tableSegment2 = createTableSegment("table_2", "tbl_2"); tableSegment2.setOwner(new OwnerSegment(0, 0, new IdentifierValue("sharding_db_1"))); TablesContext tablesContext = new TablesContext(Arrays.asList(tableSegment1, tableSegment2), TypedSPILoader.getService(DatabaseType.class, "FIXTURE"), DefaultDatabase.LOGIC_NAME); assertTrue(tablesContext.getDatabaseName().isPresent()); assertThat(tablesContext.getDatabaseName().get(), is("sharding_db_1")); }
// Stores a metric sample keyed by app -> resource -> timestamp under the write lock.
// Entities with a null/blank app are ignored. The per-resource map is an age-bounded
// LinkedHashMap that evicts entries older than MAX_METRIC_LIVE_TIME_MS on insertion.
@Override
public void save(MetricEntity entity) {
    if (entity == null || StringUtil.isBlank(entity.getApp())) {
        return;
    }
    readWriteLock.writeLock().lock();
    try {
        allMetrics.computeIfAbsent(entity.getApp(), e -> new HashMap<>(16))
                .computeIfAbsent(entity.getResource(), e -> new LinkedHashMap<Long, MetricEntity>() {
                    @Override
                    protected boolean removeEldestEntry(Entry<Long, MetricEntity> eldest) {
                        // Metric older than {@link #MAX_METRIC_LIVE_TIME_MS} will be removed.
                        return eldest.getKey() < TimeUtil.currentTimeMillis() - MAX_METRIC_LIVE_TIME_MS;
                    }
                }).put(entity.getTimestamp().getTime(), entity);
    } finally {
        readWriteLock.writeLock().unlock();
    }
}
// Saving one entity should make its resource listable under its app.
@Test public void testSave() { MetricEntity entry = new MetricEntity(); entry.setApp("testSave"); entry.setResource("testResource"); entry.setTimestamp(new Date(System.currentTimeMillis())); entry.setPassQps(1L); entry.setExceptionQps(1L); entry.setBlockQps(0L); entry.setSuccessQps(1L); inMemoryMetricsRepository.save(entry); List<String> resources = inMemoryMetricsRepository.listResourcesOfApp("testSave"); Assert.assertTrue(resources.size() == 1 && "testResource".equals(resources.get(0))); }
// Merges an update request into an existing SinkConfig, producing the effective config.
// Identity fields (tenant/namespace/name), subscription name, input topic set and each topic's
// regex flag, processing guarantees, ordering flags and auto-ack are immutable — any attempt to
// change them throws IllegalArgumentException. Inputs/topicsPattern/serde/schema settings are
// normalized into inputSpecs before validation; all other non-empty fields simply overwrite the
// merged copy. Comments only — the statement ordering below is load-bearing and left untouched.
public static SinkConfig validateUpdate(SinkConfig existingConfig, SinkConfig newConfig) { SinkConfig mergedConfig = clone(existingConfig); if (!existingConfig.getTenant().equals(newConfig.getTenant())) { throw new IllegalArgumentException("Tenants differ"); } if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) { throw new IllegalArgumentException("Namespaces differ"); } if (!existingConfig.getName().equals(newConfig.getName())) { throw new IllegalArgumentException("Sink Names differ"); } if (!StringUtils.isEmpty(newConfig.getClassName())) { mergedConfig.setClassName(newConfig.getClassName()); } if (!StringUtils.isEmpty(newConfig.getSourceSubscriptionName()) && !newConfig.getSourceSubscriptionName() .equals(existingConfig.getSourceSubscriptionName())) { throw new IllegalArgumentException("Subscription Name cannot be altered"); } if (newConfig.getInputSpecs() == null) { newConfig.setInputSpecs(new HashMap<>()); } if (mergedConfig.getInputSpecs() == null) { mergedConfig.setInputSpecs(new HashMap<>()); } if (!StringUtils.isEmpty(newConfig.getLogTopic())) { mergedConfig.setLogTopic(newConfig.getLogTopic()); } if (newConfig.getInputs() != null) { newConfig.getInputs().forEach((topicName -> { newConfig.getInputSpecs().putIfAbsent(topicName, ConsumerConfig.builder().isRegexPattern(false).build()); })); } if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) { newConfig.getInputSpecs().put(newConfig.getTopicsPattern(), ConsumerConfig.builder() .isRegexPattern(true) .build()); } if (newConfig.getTopicToSerdeClassName() != null) { newConfig.getTopicToSerdeClassName().forEach((topicName, serdeClassName) -> { newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder() .serdeClassName(serdeClassName) .isRegexPattern(false) .build()); }); } if (newConfig.getTopicToSchemaType() != null) { newConfig.getTopicToSchemaType().forEach((topicName, schemaClassname) -> { newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder() 
.schemaType(schemaClassname) .isRegexPattern(false) .build()); }); } if (!newConfig.getInputSpecs().isEmpty()) { SinkConfig finalMergedConfig = mergedConfig; newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> { if (!existingConfig.getInputSpecs().containsKey(topicName)) { throw new IllegalArgumentException("Input Topics cannot be altered"); } if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) { throw new IllegalArgumentException( "isRegexPattern for input topic " + topicName + " cannot be altered"); } finalMergedConfig.getInputSpecs().put(topicName, consumerConfig); }); } if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees() .equals(existingConfig.getProcessingGuarantees())) { throw new IllegalArgumentException("Processing Guarantees cannot be altered"); } if (newConfig.getConfigs() != null) { mergedConfig.setConfigs(newConfig.getConfigs()); } if (newConfig.getSecrets() != null) { mergedConfig.setSecrets(newConfig.getSecrets()); } if (newConfig.getParallelism() != null) { mergedConfig.setParallelism(newConfig.getParallelism()); } if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering() .equals(existingConfig.getRetainOrdering())) { throw new IllegalArgumentException("Retain Ordering cannot be altered"); } if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering() .equals(existingConfig.getRetainKeyOrdering())) { throw new IllegalArgumentException("Retain Key Ordering cannot be altered"); } if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) { throw new IllegalArgumentException("AutoAck cannot be altered"); } if (newConfig.getResources() != null) { mergedConfig .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources())); } if (newConfig.getTimeoutMs() != null) { mergedConfig.setTimeoutMs(newConfig.getTimeoutMs()); } if 
(newConfig.getCleanupSubscription() != null) { mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription()); } if (!StringUtils.isEmpty(newConfig.getArchive())) { mergedConfig.setArchive(newConfig.getArchive()); } if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) { mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags()); } if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) { mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions()); } if (newConfig.getTransformFunction() != null) { mergedConfig.setTransformFunction(newConfig.getTransformFunction()); } if (newConfig.getTransformFunctionClassName() != null) { mergedConfig.setTransformFunctionClassName(newConfig.getTransformFunctionClassName()); } if (newConfig.getTransformFunctionConfig() != null) { mergedConfig.setTransformFunctionConfig(newConfig.getTransformFunctionConfig()); } return mergedConfig; }
// Flipping isRegexPattern on an existing input topic must be rejected by validateUpdate.
@Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "isRegexPattern for input topic test-input cannot be altered") public void testMergeDifferentInputSpecWithRegexChange() { SinkConfig sinkConfig = createSinkConfig(); Map<String, ConsumerConfig> inputSpecs = new HashMap<>(); inputSpecs.put("test-input", ConsumerConfig.builder().isRegexPattern(false).serdeClassName("my-serde").build()); SinkConfig newSinkConfig = createUpdatedSinkConfig("inputSpecs", inputSpecs); SinkConfigUtils.validateUpdate(sinkConfig, newSinkConfig); }
// Convenience overload: builds the Pekko config with no bind address override and the
// fork-join executor configuration derived from the Flink configuration.
public static Config getConfig( Configuration configuration, @Nullable HostAndPort externalAddress) { return getConfig( configuration, externalAddress, null, PekkoUtils.getForkJoinExecutorConfig( ActorSystemBootstrapTools.getForkJoinExecutorConfiguration(configuration))); }
// An empty external hostname should fall back to a loopback address in the generated config.
@Test void getConfigDefaultsToLocalHost() throws UnknownHostException { final Config config = PekkoUtils.getConfig(new Configuration(), new HostAndPort("", 0)); final String hostname = config.getString("pekko.remote.classic.netty.tcp.hostname"); assertThat(InetAddress.getByName(hostname).isLoopbackAddress()).isTrue(); }
// Runs the runnable with the given thread context class loader; the previous loader is
// restored afterwards by the try-with-resources TemporaryClassLoaderContext.
public static <T extends Throwable> void runWithContextClassLoader( ThrowingRunnable<T> runnable, ClassLoader contextClassLoader) throws T { try (TemporaryClassLoaderContext ignored = TemporaryClassLoaderContext.of(contextClassLoader)) { runnable.run(); } }
// The supplier variant must observe the injected class loader as the thread context loader.
@Test void testRunSupplierWithContextClassLoader() throws Exception { SupplierWithException<ClassLoader, Exception> runnable = () -> Thread.currentThread().getContextClassLoader(); final ClassLoader contextClassLoader = ClassLoadingUtils.runWithContextClassLoader(runnable, TEST_CLASS_LOADER); assertThat(contextClassLoader).isSameAs(TEST_CLASS_LOADER); }
/**
 * Right-pads (or truncates) the string to the given length; a null input is treated as empty.
 */
public static String rightPad( String ret, int limit ) {
    // Normalize null to an empty builder, then delegate to the StringBuilder overload.
    final StringBuilder builder = ret == null ? new StringBuilder() : new StringBuilder( ret );
    return rightPad( builder, limit );
}
// Covers null padding, truncation, and both StringBuffer/StringBuilder overloads.
@Test public void testRightPad() { final String s = "Pad me baby one more time"; assertEquals( "     ", Const.rightPad( (String) null, 5 ) ); assertEquals( "Pad", Const.rightPad( s, 3 ) ); final StringBuffer sb = new StringBuffer( s ); assertEquals( s + "   ", Const.rightPad( sb, 28 ) ); assertEquals( "Pad me baby", Const.rightPad( sb, 11 ) ); final StringBuilder sb2 = new StringBuilder( s ); assertEquals( s + "   ", Const.rightPad( sb2, 28 ) ); assertEquals( "Pad me baby", Const.rightPad( sb2, 11 ) ); }
/**
 * Tears down the Spanner resources this manager owns. When a static (pre-existing)
 * instance is used only the database is dropped; otherwise the whole instance is
 * deleted. Deletion is retried on quota errors via Failsafe. The Spanner client is
 * always closed in the finally block, even when deletion fails.
 *
 * @throws SpannerResourceManagerException wrapping any SpannerException from deletion
 */
@Override public synchronized void cleanupAll() { try { if (usingStaticInstance) { if (databaseAdminClient != null) { Failsafe.with(retryOnQuotaException()) .run(() -> databaseAdminClient.dropDatabase(instanceId, databaseId)); } } else { LOG.info("Deleting instance {}...", instanceId); if (instanceAdminClient != null) { Failsafe.with(retryOnQuotaException()) .run(() -> instanceAdminClient.deleteInstance(instanceId)); } hasInstance = false; } hasDatabase = false; } catch (SpannerException e) { throw new SpannerResourceManagerException("Failed to delete instance.", e); } finally { if (!spanner.isClosed()) { spanner.close(); } } LOG.info("Manager successfully cleaned up."); }
// For a non-static instance, cleanupAll should delete the instance and close the Spanner client.
@Test public void testCleanupAllShouldWorkWhenSpannerDeleteInstanceSucceeds() { // arrange doNothing().when(instanceAdminClient).deleteInstance(any()); when(spanner.getInstanceAdminClient()).thenReturn(instanceAdminClient); testManager = new SpannerResourceManager( spanner, TEST_ID, PROJECT_ID, REGION, DIALECT, false, null, NODE_COUNT); // act testManager.cleanupAll(); // assert verify(spanner.getInstanceAdminClient()).deleteInstance(any()); verify(spanner).close(); }
/**
 * Loads the user/password table for basic authentication. The config is read from the
 * service property {@code basicAuthConf}, falling back to the system property; the data
 * may be a file path or inline/base64 content (decoded by {@code readData}). Each line
 * must be exactly {@code user:password}; anything else fails with IOException.
 *
 * NOTE(review): {@code @Cleanup} is applied to a variable initialized to null and
 * reassigned inside the try — confirm Lombok closes the final value correctly here.
 *
 * @param config service configuration carrying the auth data
 * @throws IOException if no config is provided or a line is malformed
 */
@Override public void initialize(ServiceConfiguration config) throws IOException { String data = config.getProperties().getProperty(CONF_PULSAR_PROPERTY_KEY); if (StringUtils.isEmpty(data)) { data = System.getProperty(CONF_SYSTEM_PROPERTY_KEY); } if (StringUtils.isEmpty(data)) { throw new IOException("No basic authentication config provided"); } @Cleanup BufferedReader reader = null; try { byte[] bytes = readData(data); reader = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(bytes))); } catch (Exception e) { throw new IllegalArgumentException(e); } users = new HashMap<>(); for (String line : reader.lines().toArray(s -> new String[s])) { List<String> splitLine = Arrays.asList(line.split(":")); if (splitLine.size() != 2) { throw new IOException("The format of the password auth conf file is invalid"); } users.put(splitLine.get(0), splitLine.get(1)); } }
// The provider should accept base64-encoded basic-auth config supplied via pulsar properties.
@Test public void testLoadBase64FromPulsarProperties() throws Exception { @Cleanup AuthenticationProviderBasic provider = new AuthenticationProviderBasic(); ServiceConfiguration serviceConfiguration = new ServiceConfiguration(); Properties properties = new Properties(); properties.setProperty("basicAuthConf", basicAuthConfBase64); serviceConfiguration.setProperties(properties); provider.initialize(serviceConfiguration); testAuthenticate(provider); }
/**
 * Formats a monetary value according to this formatter's configuration: shifts the
 * decimal point by {@code shift}, rounds to at most minDecimals plus the sum of
 * decimalGroups visible decimals, trims trailing zeros down to minDecimals, pads the
 * last started decimal group with zeros, then applies sign, optional currency code
 * (prefixed or suffixed), and finally remaps digits when a non-'0' zeroDigit is set.
 *
 * @param monetary the value to format; must have enough smallest-unit precision for
 *                 the configured number of visible decimals
 * @return the formatted string
 */
public CharSequence format(Monetary monetary) { // determine maximum number of decimals that can be visible in the formatted string // (if all decimal groups were to be used) int max = minDecimals; if (decimalGroups != null) for (int group : decimalGroups) max += group; final int maxVisibleDecimals = max; int smallestUnitExponent = monetary.smallestUnitExponent(); checkState(maxVisibleDecimals <= smallestUnitExponent, () -> "maxVisibleDecimals cannot exceed " + smallestUnitExponent + ": " + maxVisibleDecimals); // convert to decimal long satoshis = Math.abs(monetary.getValue()); int decimalShift = smallestUnitExponent - shift; DecimalNumber decimal = satoshisToDecimal(satoshis, roundingMode, decimalShift, maxVisibleDecimals); long numbers = decimal.numbers; long decimals = decimal.decimals; // formatting String decimalsStr = decimalShift > 0 ? String.format(Locale.US, "%0" + Integer.toString(decimalShift) + "d", decimals) : ""; StringBuilder str = new StringBuilder(decimalsStr); while (str.length() > minDecimals && str.charAt(str.length() - 1) == '0') str.setLength(str.length() - 1); // trim trailing zero int i = minDecimals; if (decimalGroups != null) { for (int group : decimalGroups) { if (str.length() > i && str.length() < i + group) { while (str.length() < i + group) str.append('0'); break; } i += group; } } if (str.length() > 0) str.insert(0, decimalMark); str.insert(0, numbers); if (monetary.getValue() < 0) str.insert(0, negativeSign); else if (positiveSign != 0) str.insert(0, positiveSign); if (codes != null) { if (codePrefixed) { str.insert(0, codeSeparator); str.insert(0, code()); } else { str.append(codeSeparator); str.append(code()); } } // Convert to non-arabic digits. if (zeroDigit != '0') { int offset = zeroDigit - '0'; for (int d = 0; d < str.length(); d++) { char c = str.charAt(d); if (Character.isDigit(c)) str.setCharAt(d, (char) (c + offset)); } } return str; }
// Zero coin formatted with symbol style should render as "<BTC symbol> 0.00".
@Test public void standardSymbol() { assertEquals(MonetaryFormat.SYMBOL_BTC + " 0.00", new MonetaryFormat(true).format(Coin.ZERO).toString()); }
/**
 * Lists RANGE / RANGE COLUMNS partition boundary descriptions for a MySQL table,
 * ordered ascending by partition description.
 *
 * <p>Fixes: the previous {@code rs == null} branch was dead code — per the JDBC
 * contract {@code executeQuery} never returns {@code null} — and the ResultSet is
 * now closed via try-with-resources instead of relying on statement close.
 *
 * @param connection live JDBC connection to the MySQL server
 * @param databaseName schema name
 * @param tableName table name
 * @return partition boundary values (quotes stripped, comma-split for RANGE COLUMNS)
 * @throws StarRocksConnectorException on any SQL failure
 */
@Override
public List<String> listPartitionNames(Connection connection, String databaseName, String tableName) {
    String partitionNamesQuery =
            "SELECT PARTITION_DESCRIPTION as NAME FROM INFORMATION_SCHEMA.PARTITIONS WHERE TABLE_SCHEMA = ? "
                    + "AND TABLE_NAME = ? AND PARTITION_NAME IS NOT NULL "
                    + "AND ( PARTITION_METHOD = 'RANGE' or PARTITION_METHOD = 'RANGE COLUMNS') ORDER BY PARTITION_DESCRIPTION";
    try (PreparedStatement ps = connection.prepareStatement(partitionNamesQuery)) {
        ps.setString(1, databaseName);
        ps.setString(2, tableName);
        try (ResultSet rs = ps.executeQuery()) {
            ImmutableList.Builder<String> list = ImmutableList.builder();
            while (rs.next()) {
                // RANGE COLUMNS descriptions can hold several quoted, comma-separated values.
                String[] partitionNames = rs.getString("NAME").replace("'", "").split(",");
                for (String partitionName : partitionNames) {
                    list.add(partitionName);
                }
            }
            return list.build();
        }
    } catch (SQLException | NullPointerException e) {
        throw new StarRocksConnectorException(e.getMessage(), e);
    }
}
// Partition names should be served (non-empty) with cache enabled and vanish after refreshCache.
@Test public void testListPartitionNamesWithCache() { try { JDBCCacheTestUtil.openCacheEnable(connectContext); JDBCMetadata jdbcMetadata = new JDBCMetadata(properties, "catalog", dataSource); List<String> partitionNames = jdbcMetadata.listPartitionNames("test", "tbl1", TableVersionRange.empty()); Assert.assertFalse(partitionNames.isEmpty()); List<String> partitionNamesWithCache = jdbcMetadata.listPartitionNames("test", "tbl1", TableVersionRange.empty()); Assert.assertFalse(partitionNamesWithCache.isEmpty()); JDBCCacheTestUtil.closeCacheEnable(connectContext); Map<String, String> properties = new HashMap<>(); jdbcMetadata.refreshCache(properties); List<String> partitionNamesWithOutCache = jdbcMetadata.listPartitionNames("test", "tbl1", TableVersionRange.empty()); Assert.assertTrue(partitionNamesWithOutCache.isEmpty()); } catch (Exception e) { System.out.println(e.getMessage()); Assert.fail(); } }
/** Returns the Truth {@code Subject.Factory} used to make Re2j-based string assertions. */
public static Subject.Factory<Re2jStringSubject, String> re2jString() { return Re2jStringSubject.FACTORY; }
// A string matching the pattern should make the Re2j subject assertion pass.
@Test public void matches_string_succeeds() { assertAbout(re2jString()).that("hello world").matches(PATTERN_STR); }
/**
 * Reports whether the given ksqlDB server version supports multi-key pull queries.
 * An unparseable version string is treated conservatively as unsupported, with a
 * warning logged.
 *
 * @param ksqlServerVersion the server-reported version string
 * @return true if the version is at least 6.1
 */
public static boolean versionSupportsMultiKeyPullQuery(final String ksqlServerVersion) {
    KsqlVersion parsed;
    try {
        parsed = new KsqlVersion(ksqlServerVersion);
    } catch (IllegalArgumentException e) {
        LOGGER.warn("Could not parse ksqlDB server version to verify whether multi-key pull queries "
            + "are supported. Falling back to single-key pull queries only.");
        return false;
    }
    return parsed.isAtLeast(new KsqlVersion("6.1."));
}
// All listed versions (with/without "v" prefix and rc suffixes) should report multi-key support.
@Test public void shouldReturnMultiKeyPullQueriesSupported() { assertThat(versionSupportsMultiKeyPullQuery("v6.1.0"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("v6.2.1"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("v0.14.0"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("v0.14.1"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("v0.15.0"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("v0.15.0-rc899"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("v0.15.0-rc899-ksqldb"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("6.1.0"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("6.2.1"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("0.14.0"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("0.14.1"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("0.15.0"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("0.15.0-rc899"), is(true)); assertThat(versionSupportsMultiKeyPullQuery("0.15.0-rc899-ksqldb"), is(true)); }
/**
 * Parses a logical expression over fully-qualified Predicate class names into a single
 * composed Predicate, using a two-stack (operand/operator) shunting-yard style
 * algorithm with parentheses and operator precedence. Non-operator tokens are
 * instantiated reflectively via their no-arg constructor. {@code isTokenMode} toggles
 * between consuming a new token and re-processing a popped operator after a
 * precedence-driven reduction.
 *
 * @param expression the expression, e.g. "a.B & (c.D | !e.F)"
 * @return the composed predicate
 * @throws RuntimeException if a class is not a Predicate, cannot be instantiated,
 *         or the expression is structurally invalid
 */
public static Predicate parse(String expression) { final Stack<Predicate> predicateStack = new Stack<>(); final Stack<Character> operatorStack = new Stack<>(); final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll(""); final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true); boolean isTokenMode = true; while (true) { final Character operator; final String token; if (isTokenMode) { if (tokenizer.hasMoreTokens()) { token = tokenizer.nextToken(); } else { break; } if (OPERATORS.contains(token)) { operator = token.charAt(0); } else { operator = null; } } else { operator = operatorStack.pop(); token = null; } isTokenMode = true; if (operator == null) { try { predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance()); } catch (ClassCastException e) { throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e); } catch (Exception e) { throw new RuntimeException(e); } } else { if (operatorStack.empty() || operator == '(') { operatorStack.push(operator); } else if (operator == ')') { while (operatorStack.peek() != '(') { evaluate(predicateStack, operatorStack); } operatorStack.pop(); } else { if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek())) { evaluate(predicateStack, operatorStack); isTokenMode = false; } operatorStack.push(operator); } } } while (!operatorStack.empty()) { evaluate(predicateStack, operatorStack); } if (predicateStack.size() > 1) { throw new RuntimeException("Invalid logical expression"); } return predicateStack.pop(); }
// A trailing '|' with no right-hand operand should surface as EmptyStackException.
@Test(expectedExceptions = EmptyStackException.class) public void testOrMissingOperand2() { PredicateExpressionParser.parse("com.linkedin.data.it.AlwaysFalsePredicate | "); }
/**
 * Deletes committed offsets for the given partitions of a consumer group by driving a
 * coordinator-aware OffsetDelete request; the returned result exposes both per-partition
 * and aggregate futures.
 */
@Override public DeleteConsumerGroupOffsetsResult deleteConsumerGroupOffsets( String groupId, Set<TopicPartition> partitions, DeleteConsumerGroupOffsetsOptions options) { SimpleAdminApiFuture<CoordinatorKey, Map<TopicPartition, Errors>> future = DeleteConsumerGroupOffsetsHandler.newFuture(groupId); DeleteConsumerGroupOffsetsHandler handler = new DeleteConsumerGroupOffsetsHandler(groupId, partitions, logContext); invokeDriver(handler, future, options.timeoutMs); return new DeleteConsumerGroupOffsetsResult(future.get(CoordinatorKey.byGroupId(groupId)), partitions); }
// Per-partition outcomes: tp1 succeeds, tp2 fails with GroupSubscribedToTopic, and a
// partition not in the request (tp3) throws IllegalArgumentException.
@Test public void testDeleteConsumerGroupOffsets() throws Exception { // Happy path final TopicPartition tp1 = new TopicPartition("foo", 0); final TopicPartition tp2 = new TopicPartition("bar", 0); final TopicPartition tp3 = new TopicPartition("foobar", 0); try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(1, 0))) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); env.kafkaClient().prepareResponse( prepareFindCoordinatorResponse(Errors.NONE, env.cluster().controller())); env.kafkaClient().prepareResponse(new OffsetDeleteResponse( new OffsetDeleteResponseData() .setTopics(new OffsetDeleteResponseTopicCollection(Stream.of( new OffsetDeleteResponseTopic() .setName("foo") .setPartitions(new OffsetDeleteResponsePartitionCollection(Collections.singletonList( new OffsetDeleteResponsePartition() .setPartitionIndex(0) .setErrorCode(Errors.NONE.code()) ).iterator())), new OffsetDeleteResponseTopic() .setName("bar") .setPartitions(new OffsetDeleteResponsePartitionCollection(Collections.singletonList( new OffsetDeleteResponsePartition() .setPartitionIndex(0) .setErrorCode(Errors.GROUP_SUBSCRIBED_TO_TOPIC.code()) ).iterator())) ).collect(Collectors.toList()).iterator())) ) ); final DeleteConsumerGroupOffsetsResult errorResult = env.adminClient().deleteConsumerGroupOffsets( GROUP_ID, Stream.of(tp1, tp2).collect(Collectors.toSet())); assertNull(errorResult.partitionResult(tp1).get()); TestUtils.assertFutureError(errorResult.all(), GroupSubscribedToTopicException.class); TestUtils.assertFutureError(errorResult.partitionResult(tp2), GroupSubscribedToTopicException.class); assertThrows(IllegalArgumentException.class, () -> errorResult.partitionResult(tp3)); } }
/**
 * Returns the configured worker id, or the default when none is configured.
 * Values outside [0, MAX_WORKER_ID] are rejected.
 *
 * @param props properties that may carry {@code WORKER_ID_KEY}
 * @return the worker id
 * @throws WorkerIdAssignedException if the configured id is out of range
 */
@Override
public int generate(final Properties props) {
    final Object configured = props.get(WORKER_ID_KEY);
    if (null == configured) {
        return DEFAULT_WORKER_ID;
    }
    final int workerId = Integer.parseInt(configured.toString());
    ShardingSpherePreconditions.checkState(workerId >= 0 && workerId <= MAX_WORKER_ID, WorkerIdAssignedException::new);
    return workerId;
}
// A configured worker id of 1024 exceeds the allowed range and must throw WorkerIdAssignedException.
@Test void assertGenerateWithInvalidProperties() { assertThrows(WorkerIdAssignedException.class, () -> new StandaloneWorkerIdGenerator().generate(PropertiesBuilder.build(new Property(WorkerIdGenerator.WORKER_ID_KEY, "1024")))); }
/**
 * Refreshes the public ids of the given extension and its namespace, skipping built-in
 * extensions entirely. Id updates are accumulated per entity id and persisted in bulk
 * only when something actually changed.
 *
 * @param namespaceName the extension's namespace
 * @param extensionName the extension name
 * @throws InterruptedException if the upstream id lookup is interrupted
 */
public void update(String namespaceName, String extensionName) throws InterruptedException { if(BuiltInExtensionUtil.isBuiltIn(namespaceName)) { LOGGER.debug("SKIP BUILT-IN EXTENSION {}", NamingUtil.toExtensionId(namespaceName, extensionName)); return; } var extension = repositories.findPublicId(namespaceName, extensionName); var extensionUpdates = new HashMap<Long, String>(); updateExtensionPublicId(extension, extensionUpdates, false); if(!extensionUpdates.isEmpty()) { repositories.updateExtensionPublicIds(extensionUpdates); } var namespaceUpdates = new HashMap<Long, String>(); updateNamespacePublicId(extension, namespaceUpdates, false); if(!namespaceUpdates.isEmpty()) { repositories.updateNamespacePublicIds(namespaceUpdates); } }
// When the upstream public ids collide with ids already held by another extension/namespace,
// the colliding entity should receive freshly generated random ids in the bulk updates.
@Test public void testMustUpdateRandom() throws InterruptedException { var namespaceName1 = "foo"; var namespacePublicId1 = UUID.randomUUID().toString(); var extensionName1 = "bar"; var extensionPublicId1 = UUID.randomUUID().toString(); var namespace1 = new Namespace(); namespace1.setId(1L); namespace1.setName(namespaceName1); var extension1 = new Extension(); extension1.setId(2L); extension1.setName(extensionName1); extension1.setNamespace(namespace1); var namespaceName2 = "baz"; var namespacePublicId2 = UUID.randomUUID().toString(); var extensionName2 = "foobar"; var extensionPublicId2 = UUID.randomUUID().toString(); var namespace2 = new Namespace(); namespace2.setId(3L); namespace2.setName(namespaceName2); namespace2.setPublicId(namespacePublicId1); var extension2 = new Extension(); extension2.setId(4L); extension2.setName(extensionName2); extension2.setPublicId(extensionPublicId1); extension2.setNamespace(namespace2); Mockito.when(repositories.findPublicId(namespaceName1, extensionName1)).thenReturn(extension1); Mockito.when(repositories.findPublicId(extensionPublicId1)).thenReturn(extension2); Mockito.when(repositories.findNamespacePublicId(namespacePublicId1)).thenReturn(extension2); var upstreamPublicIds = new PublicIds(namespacePublicId1, extensionPublicId1); Mockito.when(idService.getUpstreamPublicIds(extension1)).thenReturn(upstreamPublicIds); Mockito.when(idService.getUpstreamPublicIds(extension2)).thenReturn(upstreamPublicIds); Mockito.when(idService.getRandomPublicId()).thenReturn(extensionPublicId2, namespacePublicId2); updateService.update(namespaceName1, extensionName1); Mockito.verify(repositories).updateExtensionPublicIds(Mockito.argThat((Map<Long, String> map) -> { return map.size() == 2 && map.get(extension1.getId()).equals(extensionPublicId1) && map.get(extension2.getId()).equals(extensionPublicId2); })); Mockito.verify(repositories).updateNamespacePublicIds(Mockito.argThat((Map<Long, String> map) -> { return map.size() == 2 && 
map.get(namespace1.getId()).equals(namespacePublicId1) && map.get(namespace2.getId()).equals(namespacePublicId2); })); }
/**
 * Builds the key-value store: the inner supplier store is optionally wrapped with
 * change-logging and caching layers, and always with an outer metering layer that
 * records store-level metrics.
 */
@Override public KeyValueStore<K, V> build() { return new MeteredKeyValueStore<>( maybeWrapCaching(maybeWrapLogging(storeSupplier.get())), storeSupplier.metricsScope(), time, keySerde, valueSerde); }
// By default the metered store should wrap a change-logging store.
@Test public void shouldHaveChangeLoggingStoreByDefault() { setUp(); final KeyValueStore<String, String> store = builder.build(); assertThat(store, instanceOf(MeteredKeyValueStore.class)); final StateStore next = ((WrappedStateStore) store).wrapped(); assertThat(next, instanceOf(ChangeLoggingKeyValueBytesStore.class)); }
/**
 * Handles one CLI input line: null or whitespace-only lines are ignored,
 * anything else is trimmed and passed on as statements.
 *
 * @param line the raw input line, may be null
 */
void handleLine(final String line) {
    if (line == null) {
        return;
    }
    final String trimmed = line.trim();
    if (!trimmed.isEmpty()) {
        handleStatements(trimmed);
    }
}
// On a non-CCloud server, "list connectors;" should go through makeConnectorRequest.
@Test public void shouldIssueNonCCloudConnectorRequest() throws Exception { // Given: final KsqlRestClient mockRestClient = givenMockRestClient(); when(mockRestClient.getIsCCloudServer()).thenReturn(false); when(mockRestClient.makeConnectorRequest(anyString(), anyLong())) .thenReturn(RestResponse.successful( OK.code(), new KsqlEntityList(Collections.singletonList( new ConnectorList("list connectors;", Collections.emptyList(), Collections.emptyList()))) )); // When: localCli.handleLine("list connectors;"); // Then: verify(mockRestClient).makeConnectorRequest(anyString(), anyLong()); }
/**
 * Returns a column-name reviser for the table when the rule has encrypt configuration
 * for it; empty otherwise.
 *
 * @param rule the encrypt rule to consult
 * @param tableName the table being revised
 * @return an {@code EncryptColumnNameReviser} if the table is encrypted
 */
@Override
public Optional<EncryptColumnNameReviser> getColumnNameReviser(final EncryptRule rule, final String tableName) {
    return rule.findEncryptTable(tableName).map(encryptTable -> new EncryptColumnNameReviser(encryptTable));
}
// A table with encrypt configuration should yield a present EncryptColumnNameReviser.
@Test void assertGetColumnNameReviser() { Optional<EncryptColumnNameReviser> columnNameReviser = new EncryptMetaDataReviseEntry().getColumnNameReviser(createEncryptRule(), TABLE_NAME); assertTrue(columnNameReviser.isPresent()); assertThat(columnNameReviser.get().getClass(), is(EncryptColumnNameReviser.class)); }
/**
 * Applies variable substitution to every word of a command line, returning a new list
 * with each element passed through {@code PulsarShell.substituteVariables}.
 *
 * @param line the tokenized command line
 * @param vars variable name to value mapping
 * @return a new list with substitutions applied
 */
static List<String> substituteVariables(List<String> line, Map<String, String> vars) {
    final List<String> substituted = new ArrayList<>(line.size());
    for (String word : line) {
        substituted.add(PulsarShell.substituteVariables(word, vars));
    }
    return substituted;
}
// ${var} and $var forms are substituted; escaped \${var}/\$var lose the backslash but keep
// the literal text, and unknown variable names are left untouched.
@Test public void testSubstituteVariables() throws Exception { Map<String, String> vars = new HashMap<>(); vars.put("mytopic", "the-topic"); assertEquals( PulsarShell.substituteVariables(Arrays.asList("admin", "topics", "create", "${mytopic}"), vars), Arrays.asList("admin", "topics", "create", "the-topic") ); assertEquals( PulsarShell.substituteVariables(Arrays.asList("admin", "topics", "create", "\\${mytopic}"), vars), Arrays.asList("admin", "topics", "create", "${mytopic}") ); assertEquals( PulsarShell.substituteVariables(Arrays.asList("admin", "topics", "create", "${MYTOPIC}"), vars), Arrays.asList("admin", "topics", "create", "${MYTOPIC}") ); assertEquals( PulsarShell.substituteVariables(Arrays.asList("admin", "topics", "create", "$mytopic"), vars), Arrays.asList("admin", "topics", "create", "the-topic") ); assertEquals( PulsarShell.substituteVariables(Arrays.asList("admin", "topics", "create", "\\$mytopic"), vars), Arrays.asList("admin", "topics", "create", "$mytopic") ); }
/**
 * Filters service instances by routing rules before delegating to the parent handler.
 * When the instance list should not be handled, it is returned unchanged.
 *
 * @param targetName the target service name
 * @param instances candidate instances
 * @param requestData the current request context
 * @return the instances selected by the rules and the parent handler chain
 */
@Override
public List<Object> handle(String targetName, List<Object> instances, RequestData requestData) {
    if (shouldHandle(instances)) {
        final List<Object> matched = getTargetInstancesByRules(targetName, instances);
        return super.handle(targetName, matched, requestData);
    }
    return instances;
}
// With global+service tag rules loaded and metadata group=red, only the 1.0.1 instance matches.
@Test public void testGetTargetInstancesByTagRulesWithGlobalRules() { RuleInitializationUtils.initGlobalAndServiceTagMatchRules(); List<Object> instances = new ArrayList<>(); ServiceInstance instance1 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.0"); instances.add(instance1); ServiceInstance instance2 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.1"); instances.add(instance2); Map<String, String> metadata = new HashMap<>(); metadata.put("group", "red"); AppCache.INSTANCE.setMetadata(metadata); List<Object> targetInstances = tagRouteHandler.handle("foo", instances, new RequestData(null, null, null)); Assert.assertEquals(1, targetInstances.size()); Assert.assertEquals(instance2, targetInstances.get(0)); ConfigCache.getLabel(RouterConstant.SPRING_CACHE_NAME).resetRouteRule(Collections.emptyMap()); ConfigCache.getLabel(RouterConstant.SPRING_CACHE_NAME).resetGlobalRule(Collections.emptyList()); }
/** Returns the wrapped Shiro {@code Subject} backing this principal. */
public Subject getSubject() { return subject; }
// getSubject should hand back the exact Subject instance given to the constructor.
@Test public void testGetSubject() throws Exception { final Subject subject = mock(Subject.class); final ShiroPrincipal shiroPrincipal = new ShiroPrincipal(subject); assertThat(shiroPrincipal.getSubject()).isSameAs(subject); }
/**
 * Maps a Hive type string to the internal {@code Type}. Scalars are switched on the
 * uppercased type keyword; VARCHAR/CHAR/DECIMAL parse length/precision from the full
 * string; ARRAY/MAP/STRUCT delegate to dedicated converters and fall back to
 * UNKNOWN_TYPE when the converted type is not of the expected kind. DECIMAL/NUMERIC
 * become a unified decimal type with the parsed precision and scale.
 *
 * @param hiveType the Hive type string, e.g. "varchar(64)" or "map&lt;string,int&gt;"
 * @return the corresponding internal type, or UNKNOWN_TYPE if unrecognized
 */
public static Type fromHiveType(String hiveType) { String typeUpperCase = getTypeKeyword(hiveType).toUpperCase(); PrimitiveType primitiveType; switch (typeUpperCase) { case "TINYINT": primitiveType = PrimitiveType.TINYINT; break; case "SMALLINT": primitiveType = PrimitiveType.SMALLINT; break; case "INT": case "INTEGER": primitiveType = PrimitiveType.INT; break; case "BIGINT": primitiveType = PrimitiveType.BIGINT; break; case "FLOAT": primitiveType = PrimitiveType.FLOAT; break; case "DOUBLE": case "DOUBLE PRECISION": primitiveType = PrimitiveType.DOUBLE; break; case "DECIMAL": case "NUMERIC": primitiveType = PrimitiveType.DECIMAL32; break; case "TIMESTAMP": primitiveType = PrimitiveType.DATETIME; break; case "DATE": primitiveType = PrimitiveType.DATE; break; case "STRING": return ScalarType.createDefaultCatalogString(); case "VARCHAR": return ScalarType.createVarcharType(getVarcharLength(hiveType)); case "CHAR": return ScalarType.createCharType(getCharLength(hiveType)); case "BINARY": return Type.VARBINARY; case "BOOLEAN": primitiveType = PrimitiveType.BOOLEAN; break; case "ARRAY": Type type = fromHiveTypeToArrayType(hiveType); if (type.isArrayType()) { return type; } else { return Type.UNKNOWN_TYPE; } case "MAP": Type mapType = fromHiveTypeToMapType(hiveType); if (mapType.isMapType()) { return mapType; } else { return Type.UNKNOWN_TYPE; } case "STRUCT": Type structType = fromHiveTypeToStructType(hiveType); if (structType.isStructType()) { return structType; } else { return Type.UNKNOWN_TYPE; } default: primitiveType = PrimitiveType.UNKNOWN_TYPE; break; } if (primitiveType != PrimitiveType.DECIMAL32) { return ScalarType.createType(primitiveType); } else { int[] parts = getPrecisionAndScale(hiveType); return ScalarType.createUnifiedDecimalType(parts[0], parts[1]); } }
// char(100) maps to CharType(100); a different declared length must not compare equal.
@Test public void testCharString() { Type charType = ScalarType.createCharType(100); String typeStr = "char(100)"; Type resType = ColumnTypeConverter.fromHiveType(typeStr); Assert.assertEquals(resType, charType); typeStr = "char(50)"; resType = ColumnTypeConverter.fromHiveType(typeStr); Assert.assertNotEquals(resType, charType); }
/**
 * Opens (or reuses) the file system behind the jar URI, resolves a path inside it via
 * {@code pathProvider}, and returns a {@code CloseablePath} whose close releases this
 * reference to the shared file system.
 *
 * @param jarUri the jar: URI to open
 * @param pathProvider maps the opened file system to the path of interest
 * @return a closeable handle on the resolved path
 * @throws IOException if the file system cannot be opened
 */
private static CloseablePath open(URI jarUri, Function<FileSystem, Path> pathProvider) throws IOException {
    FileSystem fs = openFileSystem(jarUri);
    Path resolved = pathProvider.apply(fs);
    return CloseablePath.open(resolved, () -> closeFileSystem(jarUri));
}
// Opening the same jar URI twice must share one FileSystem, which stays open until the
// last CloseablePath is closed (reference counting).
@Test void canOpenMultipleConcurrently() throws IOException, URISyntaxException { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); URI first = getUrisForPackage(classLoader, "io.cucumber").stream() .filter(JarUriFileSystemService::supports) .findFirst() .orElseThrow(IllegalStateException::new); CloseablePath path1 = JarUriFileSystemService.open(first); FileSystem fileSystem1 = path1.getPath().getFileSystem(); CloseablePath path2 = JarUriFileSystemService.open(first); FileSystem fileSystem2 = path2.getPath().getFileSystem(); assertThat(fileSystem1, is(fileSystem2)); path1.close(); assertTrue(fileSystem1.isOpen()); assertTrue(fileSystem2.isOpen()); path2.close(); assertFalse(fileSystem1.isOpen()); assertFalse(fileSystem2.isOpen()); }
/**
 * Marks the given blob as recently used. The lone {@code caches.get(...)} call looks
 * like a no-op but presumably refreshes the entry's recency in an access-ordered map
 * so LRU eviction (see checkLimit) prefers untouched entries — confirm against the
 * declaration of {@code caches}.
 *
 * @param jobId the owning job, must not be null
 * @param blobKey the blob to touch, must not be null
 */
public void update(JobID jobId, BlobKey blobKey) { checkNotNull(jobId); checkNotNull(blobKey); synchronized (lock) { caches.get(Tuple2.of(jobId, blobKey)); } }
// Entries 1 and 2 are touched via update(), so LRU eviction should pick entries 0 and 3.
@Test void testUpdate() { BlobCacheSizeTracker tracker = new BlobCacheSizeTracker(5L); List<JobID> jobIds = new ArrayList<>(); List<BlobKey> blobKeys = new ArrayList<>(); for (int i = 0; i < 5; i++) { jobIds.add(new JobID()); blobKeys.add(BlobKey.createKey(BlobType.PERMANENT_BLOB)); } for (int i = 0; i < 5; i++) { tracker.track(jobIds.get(i), blobKeys.get(i), 1); } tracker.update(jobIds.get(1), blobKeys.get(1)); tracker.update(jobIds.get(2), blobKeys.get(2)); List<Tuple2<JobID, BlobKey>> blobsToDelete = tracker.checkLimit(2); assertThat(blobsToDelete) .contains( Tuple2.of(jobIds.get(0), blobKeys.get(0)), Tuple2.of(jobIds.get(3), blobKeys.get(3))); }
/**
 * Recursively deletes {@code dir} and its contents without attempting to grant write
 * permissions first; delegates to {@code fullyDelete(File, boolean)}.
 */
public static boolean fullyDelete(final File dir) { return fullyDelete(dir, false); }
// With grantPermissions=true, directories whose write permission was revoked should still be deleted.
@Test (timeout = 30000) public void testFailFullyDeleteGrantPermissions() throws IOException { setupDirsAndNonWritablePermissions(); boolean ret = FileUtil.fullyDelete(new MyFile(del), true); // this time the directories with revoked permissions *should* be deleted: validateAndSetWritablePermissions(false, ret); }
/** Returns the public elliptic-curve point {@code Q} of this key. */
public ECPoint getQ() { return q; }
// The EC public point should round-trip unchanged through EcPublicKey.
@Test public void shouldConvertPublicPoint() { assertEquals(Q, new EcPublicKey( new ECPublicKeyParameters(Q, PARAMS)).getQ()); }
public static <T> List<T> copyToList(Collection<?> collection, Class<T> targetType, CopyOptions copyOptions) { if (null == collection) { return null; } if (collection.isEmpty()) { return new ArrayList<>(0); } // issue#3091 if(ClassUtil.isBasicType(targetType) || String.class == targetType){ return Convert.toList(targetType, collection); } return collection.stream().map((source) -> { final T target = ReflectUtil.newInstanceIfPossible(targetType); copyProperties(source, target, copyOptions); return target; }).collect(Collectors.toList()); }
// Verifies BeanUtil.copyToList copies every element of a bean list into the target type.
@Test
public void copyListTest() {
	final Student student = new Student();
	student.setName("张三");
	student.setAge(123);
	student.setNo(3158L);

	final Student student2 = new Student();
	// Fixed copy-paste bug: these three setters previously mutated `student` instead of
	// `student2`, overwriting student's values and leaving student2's fields unset.
	student2.setName("李四");
	student2.setAge(125);
	student2.setNo(8848L);

	final List<Student> studentList = ListUtil.of(student, student2);
	final List<Person> people = BeanUtil.copyToList(studentList, Person.class);

	assertEquals(studentList.size(), people.size());
	for (int i = 0; i < studentList.size(); i++) {
		assertEquals(studentList.get(i).getName(), people.get(i).getName());
		assertEquals(studentList.get(i).getAge(), people.get(i).getAge());
	}
}
/**
 * Creates a batching task dispatcher: an AcceptorExecutor buffers and batches incoming
 * tasks (with congestion/network-failure retry delays), and a pool of batch workers
 * feeds them to {@code taskProcessor}. The returned facade forwards process() to the
 * acceptor and shuts down both components on shutdown().
 *
 * @param id dispatcher name used for metrics/threads
 * @param maxBufferSize maximum number of buffered tasks
 * @param workloadSize maximum tasks per batch
 * @param workerCount number of batch worker threads
 * @param maxBatchingDelay longest time a task may wait to fill a batch
 * @param congestionRetryDelayMs back-off after congestion responses
 * @param networkFailureRetryMs back-off after network failures
 * @param taskProcessor the processor that executes each batch
 * @return the composed dispatcher
 */
public static <ID, T> TaskDispatcher<ID, T> createBatchingTaskDispatcher(String id, int maxBufferSize, int workloadSize, int workerCount, long maxBatchingDelay, long congestionRetryDelayMs, long networkFailureRetryMs, TaskProcessor<T> taskProcessor) { final AcceptorExecutor<ID, T> acceptorExecutor = new AcceptorExecutor<>( id, maxBufferSize, workloadSize, maxBatchingDelay, congestionRetryDelayMs, networkFailureRetryMs ); final TaskExecutors<ID, T> taskExecutor = TaskExecutors.batchExecutors(id, workerCount, taskProcessor, acceptorExecutor); return new TaskDispatcher<ID, T>() { @Override public void process(ID id, T task, long expiryTime) { acceptorExecutor.process(id, task, expiryTime); } @Override public void shutdown() { acceptorExecutor.shutdown(); taskExecutor.shutdown(); } }; }
// Two queued tasks should be delivered to the processor and complete as batch successes.
@Test public void testBatchingDispatcher() throws Exception { dispatcher = TaskDispatchers.createBatchingTaskDispatcher( "TEST", MAX_BUFFER_SIZE, WORK_LOAD_SIZE, 1, MAX_BATCHING_DELAY_MS, SERVER_UNAVAILABLE_SLEEP_TIME_MS, RETRY_SLEEP_TIME_MS, processor ); dispatcher.process(1, ProcessingResult.Success, System.currentTimeMillis() + 60 * 1000); dispatcher.process(2, ProcessingResult.Success, System.currentTimeMillis() + 60 * 1000); processor.expectSuccesses(2); }
/**
 * Describes the Raft metadata quorum by sending a DescribeQuorum request for the
 * cluster metadata topic-partition to the least-loaded broker or active controller.
 * The response is validated strictly — exactly one topic named
 * {@code CLUSTER_METADATA_TOPIC_NAME} with exactly one partition at the expected
 * index — and any mismatch surfaces as UnknownServerException; top-level and
 * partition-level error codes are translated to their exceptions. Replica states
 * map -1 timestamps to empty optionals and null directory ids to Uuid.ZERO_UUID.
 */
@Override public DescribeMetadataQuorumResult describeMetadataQuorum(DescribeMetadataQuorumOptions options) { NodeProvider provider = new LeastLoadedBrokerOrActiveKController(); final KafkaFutureImpl<QuorumInfo> future = new KafkaFutureImpl<>(); final long now = time.milliseconds(); final Call call = new Call( "describeMetadataQuorum", calcDeadlineMs(now, options.timeoutMs()), provider) { private QuorumInfo.ReplicaState translateReplicaState(DescribeQuorumResponseData.ReplicaState replica) { return new QuorumInfo.ReplicaState( replica.replicaId(), replica.replicaDirectoryId() == null ? Uuid.ZERO_UUID : replica.replicaDirectoryId(), replica.logEndOffset(), replica.lastFetchTimestamp() == -1 ? OptionalLong.empty() : OptionalLong.of(replica.lastFetchTimestamp()), replica.lastCaughtUpTimestamp() == -1 ? OptionalLong.empty() : OptionalLong.of(replica.lastCaughtUpTimestamp())); } private QuorumInfo createQuorumResult(final DescribeQuorumResponseData.PartitionData partition, DescribeQuorumResponseData.NodeCollection nodeCollection) { List<QuorumInfo.ReplicaState> voters = partition.currentVoters().stream() .map(this::translateReplicaState) .collect(Collectors.toList()); List<QuorumInfo.ReplicaState> observers = partition.observers().stream() .map(this::translateReplicaState) .collect(Collectors.toList()); Map<Integer, QuorumInfo.Node> nodes = nodeCollection.stream().map(n -> { List<RaftVoterEndpoint> endpoints = n.listeners().stream() .map(l -> new RaftVoterEndpoint(l.name(), l.host(), l.port())) .collect(Collectors.toList()); return new QuorumInfo.Node(n.nodeId(), endpoints); }).collect(Collectors.toMap(QuorumInfo.Node::nodeId, Function.identity())); return new QuorumInfo( partition.leaderId(), partition.leaderEpoch(), partition.highWatermark(), voters, observers, nodes ); } @Override DescribeQuorumRequest.Builder createRequest(int timeoutMs) { return new Builder(DescribeQuorumRequest.singletonRequest( new TopicPartition(CLUSTER_METADATA_TOPIC_NAME, CLUSTER_METADATA_TOPIC_PARTITION.partition()))); } @Override void handleResponse(AbstractResponse response) { final DescribeQuorumResponse quorumResponse = (DescribeQuorumResponse) response; if (quorumResponse.data().errorCode() != Errors.NONE.code()) { throw Errors.forCode(quorumResponse.data().errorCode()).exception(quorumResponse.data().errorMessage()); } if (quorumResponse.data().topics().size() != 1) { String msg = String.format("DescribeMetadataQuorum received %d topics when 1 was expected", quorumResponse.data().topics().size()); log.debug(msg); throw new UnknownServerException(msg); } DescribeQuorumResponseData.TopicData topic = quorumResponse.data().topics().get(0); if (!topic.topicName().equals(CLUSTER_METADATA_TOPIC_NAME)) { String msg = String.format("DescribeMetadataQuorum received a topic with name %s when %s was expected", topic.topicName(), CLUSTER_METADATA_TOPIC_NAME); log.debug(msg); throw new UnknownServerException(msg); } if (topic.partitions().size() != 1) { String msg = String.format("DescribeMetadataQuorum received a topic %s with %d partitions when 1 was expected", topic.topicName(), topic.partitions().size()); log.debug(msg); throw new UnknownServerException(msg); } DescribeQuorumResponseData.PartitionData partition = topic.partitions().get(0); if (partition.partitionIndex() != CLUSTER_METADATA_TOPIC_PARTITION.partition()) { String msg = String.format("DescribeMetadataQuorum received a single partition with index %d when %d was expected", partition.partitionIndex(), CLUSTER_METADATA_TOPIC_PARTITION.partition()); log.debug(msg); throw new UnknownServerException(msg); } if (partition.errorCode() != Errors.NONE.code()) { throw Errors.forCode(partition.errorCode()).exception(partition.errorMessage()); } future.complete(createQuorumResult(partition, quorumResponse.data().nodes())); } @Override void handleFailure(Throwable throwable) { future.completeExceptionally(throwable); } }; runnable.call(call, now); return new DescribeMetadataQuorumResult(future); }
/**
 * Exercises the error paths of {@code describeMetadataQuorum()}: RPC-level and
 * partition-level error codes must surface as the matching exception, and any
 * structurally malformed response (wrong topic count, wrong topic name, wrong
 * partition count, wrong partition index) must fail with UnknownServerException.
 * The boolean flags passed to prepareDescribeQuorumResponse toggle, in order:
 * topic-count, topic-name, partition-count, partition-index corruption.
 */
@Test
public void testDescribeMetadataQuorumFailure() {
    try (final AdminClientUnitTestEnv env = mockClientEnv()) {
        env.kafkaClient().setNodeApiVersions(
                NodeApiVersions.create(ApiKeys.DESCRIBE_QUORUM.id,
                        ApiKeys.DESCRIBE_QUORUM.oldestVersion(),
                        ApiKeys.DESCRIBE_QUORUM.latestVersion()));

        // Test top level error
        env.kafkaClient().prepareResponse(
                body -> body instanceof DescribeQuorumRequest,
                prepareDescribeQuorumResponse(Errors.INVALID_REQUEST, Errors.NONE, false, false, false, false, false));
        KafkaFuture<QuorumInfo> future = env.adminClient().describeMetadataQuorum().quorumInfo();
        TestUtils.assertFutureThrows(future, InvalidRequestException.class);

        // Test incorrect topic count
        env.kafkaClient().prepareResponse(
                body -> body instanceof DescribeQuorumRequest,
                prepareDescribeQuorumResponse(Errors.NONE, Errors.NONE, true, false, false, false, false));
        future = env.adminClient().describeMetadataQuorum().quorumInfo();
        TestUtils.assertFutureThrows(future, UnknownServerException.class);

        // Test incorrect topic name
        env.kafkaClient().prepareResponse(
                body -> body instanceof DescribeQuorumRequest,
                prepareDescribeQuorumResponse(Errors.NONE, Errors.NONE, false, true, false, false, false));
        future = env.adminClient().describeMetadataQuorum().quorumInfo();
        TestUtils.assertFutureThrows(future, UnknownServerException.class);

        // Test incorrect partition count
        env.kafkaClient().prepareResponse(
                body -> body instanceof DescribeQuorumRequest,
                prepareDescribeQuorumResponse(Errors.NONE, Errors.NONE, false, false, true, false, false));
        future = env.adminClient().describeMetadataQuorum().quorumInfo();
        TestUtils.assertFutureThrows(future, UnknownServerException.class);

        // Test incorrect partition index
        env.kafkaClient().prepareResponse(
                body -> body instanceof DescribeQuorumRequest,
                prepareDescribeQuorumResponse(Errors.NONE, Errors.NONE, false, false, false, true, false));
        future = env.adminClient().describeMetadataQuorum().quorumInfo();
        TestUtils.assertFutureThrows(future, UnknownServerException.class);

        // Test partition level error
        env.kafkaClient().prepareResponse(
                body -> body instanceof DescribeQuorumRequest,
                prepareDescribeQuorumResponse(Errors.NONE, Errors.INVALID_REQUEST, false, false, false, false, false));
        future = env.adminClient().describeMetadataQuorum().quorumInfo();
        TestUtils.assertFutureThrows(future, InvalidRequestException.class);

        // Test all incorrect and no errors
        env.kafkaClient().prepareResponse(
                body -> body instanceof DescribeQuorumRequest,
                prepareDescribeQuorumResponse(Errors.NONE, Errors.NONE, true, true, true, true, false));
        future = env.adminClient().describeMetadataQuorum().quorumInfo();
        TestUtils.assertFutureThrows(future, UnknownServerException.class);

        // Test all incorrect and both errors
        // Note: the top-level error wins over any structural corruption.
        env.kafkaClient().prepareResponse(
                body -> body instanceof DescribeQuorumRequest,
                prepareDescribeQuorumResponse(Errors.INVALID_REQUEST, Errors.INVALID_REQUEST, true, true, true, true, false));
        future = env.adminClient().describeMetadataQuorum().quorumInfo();
        TestUtils.assertFutureThrows(future, Errors.INVALID_REQUEST.exception().getClass());
    }
}
@Override public void accept(final DataType data) { if (data instanceof StartingData) { handleEvent((StartingData) data); } else if (data instanceof StoppingData) { handleEvent((StoppingData) data); } }
@Test void statusRecordsTheStopTime() { //given final var stop = LocalDateTime.of(2017, Month.APRIL, 1, 19, 12); final var stoppingData = new StoppingData(stop); stoppingData.setDataBus(DataBus.getInstance()); final var statusMember = new StatusMember(1); //when statusMember.accept(stoppingData); //then assertEquals(stop, statusMember.getStopped()); }
@SuppressWarnings("MethodMayBeStatic") // Non-static to support DI. public long parse(final String text) { final String date; final String time; final String timezone; if (text.contains("T")) { date = text.substring(0, text.indexOf('T')); final String withTimezone = text.substring(text.indexOf('T') + 1); timezone = getTimezone(withTimezone); time = completeTime(withTimezone.substring(0, withTimezone.length() - timezone.length()) .replaceAll("Z$","")); } else { date = completeDate(text); time = completeTime(""); timezone = ""; } try { final ZoneId zoneId = parseTimezone(timezone); return PARSER.parse(date + "T" + time, zoneId); } catch (final RuntimeException e) { throw new KsqlException("Failed to parse timestamp '" + text + "': " + e.getMessage() + HELP_MESSAGE, e ); } }
@Test public void shouldThrowOnIncorrectlyFormattedDateTime() { // When: final KsqlException e = assertThrows( KsqlException.class, () -> parser.parse("2017-1-1") ); // Then assertThat(e.getMessage(), containsString("Failed to parse timestamp '2017-1-1'")); }
public FunctionName getFunctionName() { return functionName; }
@Test public void testNormal() throws Exception { String dropFunctionSql = "DROP FUNCTION ABC.MY_UDF_JSON_GET(string, string)"; DropFunctionStmt stmt = (DropFunctionStmt) com.starrocks.sql.parser.SqlParser.parse( dropFunctionSql, 32).get(0); // com.starrocks.sql.analyzer.Analyzer.analyze(stmt, ctx); Assert.assertEquals("ABC", stmt.getFunctionName().getDb()); Assert.assertEquals("my_udf_json_get", stmt.getFunctionName().getFunction()); }
/**
 * Moves and/or renames a file on EUE storage.
 *
 * <p>Three steps, each applied only when needed: (1) if the target exists and is
 * not just a case-variant of the source, it is trashed first; (2) if the parent
 * directory changes, a server-side move to the new parent is issued; (3) if the
 * display name changes, a metadata (uifs) patch performs the rename. Finally the
 * cached resource id for the source path is invalidated.
 *
 * @param file     source path
 * @param target   destination path
 * @param status   transfer status; {@code isExists()} signals an existing target
 * @param delete   callback used when trashing an existing target
 * @param callback connection prompt callback
 * @return the target path
 * @throws BackgroundException on any mapped API failure
 */
@Override
public Path move(final Path file, final Path target, final TransferStatus status,
                 final Delete.Callback delete, final ConnectionCallback callback) throws BackgroundException {
    try {
        final EueApiClient client = new EueApiClient(session);
        if(status.isExists()) {
            // Replacing an existing target: trash it first, unless source and target
            // only differ in case (treated as the same resource).
            if(!new CaseInsensitivePathPredicate(file).test(target)) {
                if(log.isWarnEnabled()) {
                    log.warn(String.format("Trash file %s to be replaced with %s", target, file));
                }
                new EueTrashFeature(session, fileid).delete(Collections.singletonMap(target, status), callback, delete);
            }
        }
        final String resourceId = fileid.getFileId(file);
        if(!new SimplePathPredicate(file.getParent()).test(target.getParent())) {
            // Parent directory differs: issue a server-side move to the new parent.
            final ResourceMoveResponseEntries resourceMoveResponseEntries;
            final String parentResourceId = fileid.getFileId(target.getParent());
            switch(parentResourceId) {
                case EueResourceIdProvider.ROOT:
                case EueResourceIdProvider.TRASH:
                    // Well-known aliases (root, trash) require the alias-based move endpoint.
                    resourceMoveResponseEntries = new MoveChildrenForAliasApiApi(client)
                            .resourceAliasAliasChildrenMovePost(parentResourceId,
                                    Collections.singletonList(String.format("%s/resource/%s", session.getBasePath(), resourceId)),
                                    null, null, null, "rename", null);
                    break;
                default:
                    resourceMoveResponseEntries = new MoveChildrenApi(client)
                            .resourceResourceIdChildrenMovePost(parentResourceId,
                                    Collections.singletonList(String.format("%s/resource/%s", session.getBasePath(), resourceId)),
                                    null, null, null, "rename", null);
            }
            if(null == resourceMoveResponseEntries) {
                // Move of single file will return 200 status code with empty response body
            }
            else {
                for(ResourceMoveResponseEntry resourceMoveResponseEntry : resourceMoveResponseEntries.values()) {
                    switch(resourceMoveResponseEntry.getStatusCode()) {
                        case HttpStatus.SC_OK:
                            break;
                        default:
                            log.warn(String.format("Failure %s moving file %s", resourceMoveResponseEntries, file));
                            final ResourceCreationResponseEntryEntity entity = resourceMoveResponseEntry.getEntity();
                            if(null == entity) {
                                // No entity in the failure entry: fall back to the HTTP reason phrase.
                                throw new EueExceptionMappingService().map(new ApiException(resourceMoveResponseEntry.getReason(),
                                        null, resourceMoveResponseEntry.getStatusCode(), client.getResponseHeaders()));
                            }
                            throw new EueExceptionMappingService().map(new ApiException(resourceMoveResponseEntry.getEntity().getError(),
                                    null, resourceMoveResponseEntry.getStatusCode(), client.getResponseHeaders()));
                    }
                }
            }
        }
        if(!StringUtils.equals(file.getName(), target.getName())) {
            // Display name differs: apply the rename via a metadata (uifs) patch.
            final ResourceUpdateModel resourceUpdateModel = new ResourceUpdateModel();
            final ResourceUpdateModelUpdate resourceUpdateModelUpdate = new ResourceUpdateModelUpdate();
            final Uifs uifs = new Uifs();
            uifs.setName(target.getName());
            resourceUpdateModelUpdate.setUifs(uifs);
            resourceUpdateModel.setUpdate(resourceUpdateModelUpdate);
            final ResourceMoveResponseEntries resourceMoveResponseEntries = new UpdateResourceApi(client)
                    .resourceResourceIdPatch(resourceId, resourceUpdateModel, null, null, null);
            if(null == resourceMoveResponseEntries) {
                // Move of single file will return 200 status code with empty response body
            }
            else {
                for(ResourceMoveResponseEntry resourceMoveResponseEntry : resourceMoveResponseEntries.values()) {
                    switch(resourceMoveResponseEntry.getStatusCode()) {
                        case HttpStatus.SC_CREATED:
                            break;
                        default:
                            log.warn(String.format("Failure %s renaming file %s", resourceMoveResponseEntry, file));
                            throw new EueExceptionMappingService().map(new ApiException(resourceMoveResponseEntry.getReason(),
                                    null, resourceMoveResponseEntry.getStatusCode(), client.getResponseHeaders()));
                    }
                }
            }
        }
        // Invalidate the cached id for the source path; it is re-resolved on demand.
        fileid.cache(file, null);
        return target;
    }
    catch(ApiException e) {
        throw new EueExceptionMappingService().map("Cannot rename {0}", e, file);
    }
}
@Test(expected = NotfoundException.class) public void testRenameInvalidResourceId() throws Exception { final EueResourceIdProvider fileid = new EueResourceIdProvider(session); final Path file = new EueTouchFeature(session, fileid).touch(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); final String resourceId = file.attributes().getFileId(); new EueDeleteFeature(session, fileid).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback()); new EueMoveFeature(session, fileid).move(file.withAttributes(new PathAttributes().withFileId(resourceId)), new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback()); }
public static List<Criterion> parse(String filter) { return StreamSupport.stream(CRITERIA_SPLITTER.split(filter).spliterator(), false) .map(FilterParser::parseCriterion) .toList(); }
@Test public void parse_filter_having_value_containing_non_alphanumeric_characters() { List<Criterion> criterion = FilterParser.parse("q = \"+*ç%&/()\""); assertThat(criterion) .extracting(Criterion::getKey, Criterion::getOperator, Criterion::getValue) .containsOnly( tuple("q", EQ, "+*ç%&/()")); }
/**
 * Sorts, normalizes and de-duplicates the key=value lines of a translation file.
 *
 * <p>Comment and blank lines are skipped; broken lines (no '=' or empty key) and
 * empty/placeholder-only translations are dropped with a warning. Legacy $1/$2
 * placeholders are rewritten to {0}/{1}. When the same key occurs more than once,
 * the value with the higher {@code quality(..)} wins; equal-quality conflicts keep
 * the first value.
 *
 * @param filename used in log/warn messages only
 * @param lines    raw lines; sorted in place by {@code KEY_COMPARATOR}
 * @return the cleaned lines, one per surviving key
 */
ArrayList<String> processLines(final String filename, ArrayList<String> lines) {
    Collections.sort(lines, KEY_COMPARATOR);
    ArrayList<String> result = new ArrayList<String>(lines.size());
    String lastKey = null;
    String lastValue = null;
    for (final String line : lines) {
        // Skip comment lines and lines that are all whitespace.
        if (line.indexOf('#') == 0 || line.matches("\\s*"))
            continue;
        final String standardUnicodeLine = convertUnicodeCharacterRepresentation(line);
        final String[] keyValue = standardUnicodeLine.split("\\s*=\\s*", 2);
        if (keyValue.length != 2 || keyValue[0].length() == 0) {
            // broken line: no '=' sign or empty key (we had " = ======")
            warn(filename + ": no key/val: " + line);
            continue;
        }
        final String thisKey = keyValue[0];
        String thisValue = keyValue[1].trim();
        // Drop values that are empty or consist only of an [auto]/[translate me] marker.
        if (thisValue.matches("(\\[auto\\]|\\[translate me\\])?")) {
            warn(filename + ": drop empty translation: " + line);
            continue;
        }
        if (thisValue.indexOf("{1}") != -1 && keyValue[1].indexOf("{0}") == -1) {
            warn(filename + ": errorneous placeholders usage: {1} used without {0}: " + line);
        }
        if (thisValue.matches(".*\\$\\d.*")) {
            warn(filename + ": use '{0}' instead of '$1' as placeholder! (likewise for $2...): " + line);
            // Auto-convert legacy $1/$2 placeholders to the {0}/{1} style.
            thisValue = thisValue.replaceAll("\\$1", "{0}").replaceAll("\\$2", "{1}");
        }
        if (thisValue.matches(".*\\{\\d[^},]*")) {
            warn(filename + ": mismatched braces in placeholder: '{' not closed by '}': " + line);
        }
        if (lastKey != null && thisKey.equals(lastKey)) {
            // Duplicate key: decide which translation to keep based on quality.
            if (quality(thisValue) < quality(lastValue)) {
                log(filename + ": drop " + TaskUtils.toLine(lastKey, thisValue));
                continue;
            }
            else if (quality(thisValue) == quality(lastValue)) {
                if (thisValue.equals(lastValue)) {
                    log(filename + ": drop duplicate " + TaskUtils.toLine(lastKey, thisValue));
                }
                else if (quality(thisValue) == QUALITY_MANUALLY_TRANSLATED) {
                    // Two different manual translations: keep the first, flag both for review.
                    warn(filename //
                        + ": drop one of two of equal quality (revisit!):keep: " + TaskUtils.toLine(lastKey, lastValue));
                    warn(filename //
                        + ": drop one of two of equal quality (revisit!):drop: " + TaskUtils.toLine(thisKey, thisValue));
                }
                else {
                    log(filename + ": drop " + TaskUtils.toLine(lastKey, thisValue));
                }
                continue;
            }
            else {
                // This value has higher quality: drop the previously kept one.
                log(filename + ": drop " + TaskUtils.toLine(lastKey, lastValue));
            }
            lastValue = thisValue;
        }
        else {
            // New key: flush the previous pair before tracking the new one.
            if (lastKey != null)
                result.add(TaskUtils.toLine(lastKey, lastValue));
            lastKey = thisKey;
            lastValue = thisValue;
        }
    }
    // Flush the final pending pair.
    if (lastKey != null)
        result.add(TaskUtils.toLine(lastKey, lastValue));
    return result;
}
@Test public void testPlaceholderCheck() throws Exception { final FormatTranslation formatTranslation = new FormatTranslation(); String input; ArrayList<String> lines = new ArrayList<String>(); // input = "x = a {1} without a 0\n" // + "y = a $1 instead of a {0}"; // no actual test as long as those tests are not treated as failures TaskUtils.checkEolStyleAndReadLines(input, lines, unix); formatTranslation.processLines("a_file", new ArrayList<String>(lines)); }
public byte[] getHl7MessageBytes() { return hl7MessageBytes; }
@Test public void testGetHl7MessageBytes() { instance = new MllpException(EXCEPTION_MESSAGE, LOG_PHI_TRUE); assertNull(instance.getHl7MessageBytes()); instance = new MllpException(EXCEPTION_MESSAGE, NULL_BYTE_ARRAY, LOG_PHI_TRUE); assertNull(instance.getHl7MessageBytes()); instance = new MllpException(EXCEPTION_MESSAGE, NULL_BYTE_ARRAY, NULL_BYTE_ARRAY, LOG_PHI_TRUE); assertNull(instance.getHl7MessageBytes()); instance = new MllpException(EXCEPTION_MESSAGE, NULL_BYTE_ARRAY, EMPTY_BYTE_ARRAY, LOG_PHI_TRUE); assertNull(instance.getHl7MessageBytes()); instance = new MllpException(EXCEPTION_MESSAGE, EMPTY_BYTE_ARRAY, LOG_PHI_TRUE); assertNull(instance.getHl7MessageBytes()); instance = new MllpException(EXCEPTION_MESSAGE, EMPTY_BYTE_ARRAY, NULL_BYTE_ARRAY, LOG_PHI_TRUE); assertNull(instance.getHl7MessageBytes()); instance = new MllpException(EXCEPTION_MESSAGE, EMPTY_BYTE_ARRAY, EMPTY_BYTE_ARRAY, LOG_PHI_TRUE); assertNull(instance.getHl7MessageBytes()); instance = new MllpException(EXCEPTION_MESSAGE, HL7_MESSAGE_BYTES, LOG_PHI_TRUE); assertArrayEquals(HL7_MESSAGE_BYTES, instance.getHl7MessageBytes()); instance = new MllpException(EXCEPTION_MESSAGE, HL7_MESSAGE_BYTES, NULL_BYTE_ARRAY, LOG_PHI_TRUE); assertArrayEquals(HL7_MESSAGE_BYTES, instance.getHl7MessageBytes()); instance = new MllpException(EXCEPTION_MESSAGE, HL7_MESSAGE_BYTES, EMPTY_BYTE_ARRAY, LOG_PHI_TRUE); assertArrayEquals(HL7_MESSAGE_BYTES, instance.getHl7MessageBytes()); instance = new MllpException(EXCEPTION_MESSAGE, HL7_MESSAGE_BYTES, HL7_ACKNOWLEDGEMENT_BYTES, LOG_PHI_TRUE); assertArrayEquals(HL7_MESSAGE_BYTES, instance.getHl7MessageBytes()); }
public V get(K1 key1, K2 key2) { checkNotNull(key1, "Key1 cannot be null"); checkNotNull(key2, "Key2 cannot be null"); Map<K2, V> innerMap = backingMap.get(key1); if (innerMap == null) { return null; } return innerMap.get(key2); }
@Test public void givenEmpty_whenGet_thenReturnNull() { String value = map.get("key1", "key2"); assertNull(value); }
public Map<String, String> mergeOptions( MergingStrategy mergingStrategy, Map<String, String> sourceOptions, Map<String, String> derivedOptions) { Map<String, String> options = new HashMap<>(); if (mergingStrategy != MergingStrategy.EXCLUDING) { options.putAll(sourceOptions); } derivedOptions.forEach( (key, value) -> { if (mergingStrategy != MergingStrategy.OVERWRITING && options.containsKey(key)) { throw new ValidationException( String.format( "There already exists an option ['%s' -> '%s'] in the " + "base table. You might want to specify EXCLUDING OPTIONS or OVERWRITING OPTIONS.", key, options.get(key))); } options.put(key, value); }); return options; }
@Test void mergeOverwritingOptionsDuplicate() { Map<String, String> sourceOptions = new HashMap<>(); sourceOptions.put("offset", "1"); sourceOptions.put("format", "json"); Map<String, String> derivedOptions = new HashMap<>(); derivedOptions.put("offset", "2"); derivedOptions.put("format.ignore-errors", "true"); Map<String, String> mergedOptions = util.mergeOptions(MergingStrategy.OVERWRITING, sourceOptions, derivedOptions); Map<String, String> expectedOptions = new HashMap<>(); expectedOptions.put("offset", "2"); expectedOptions.put("format", "json"); expectedOptions.put("format.ignore-errors", "true"); assertThat(mergedOptions).isEqualTo(expectedOptions); }
public static List<FieldSchema> convert(Schema schema) { return schema.columns().stream() .map(col -> new FieldSchema(col.name(), convertToTypeString(col.type()), col.doc())) .collect(Collectors.toList()); }
@Test public void testComplexSchemaConvertToIcebergSchema() { assertThat(HiveSchemaUtil.convert(COMPLEX_HIVE_SCHEMA).asStruct()).isEqualTo(COMPLEX_ICEBERG_SCHEMA.asStruct()); }
/**
 * Dispatches an incoming "verstrekking aan afnemer" message to the handler that
 * matches its event type name, after correlating it with the request message we
 * sent earlier. Uncorrelated messages are only logged remotely; each processed
 * message also triggers a remote audit-log entry.
 *
 * @param verstrekkingAanAfnemer the incoming message from the DGL interface
 */
public void processVerstrekkingAanAfnemer(VerstrekkingAanAfnemer verstrekkingAanAfnemer){
    if (logger.isDebugEnabled())
        logger.debug("Processing verstrekkingAanAfnemer: {}", marshallElement(verstrekkingAanAfnemer));
    // Correlate the incoming message with the message we sent, via our reference id.
    Afnemersbericht afnemersbericht = afnemersberichtRepository.findByOnzeReferentie(verstrekkingAanAfnemer.getReferentieId());
    if(mismatch(verstrekkingAanAfnemer, afnemersbericht)){
        // No valid relation to a sent message: remote-log and stop.
        digidXClient.remoteLogBericht(Log.NO_RELATION_TO_SENT_MESSAGE, verstrekkingAanAfnemer, afnemersbericht);
        return;
    }
    switch (verstrekkingAanAfnemer.getGebeurtenissoort().getNaam()) {
        case "Null" -> {
            logger.info("Start processing Null message");
            dglResponseService.processNullMessage(verstrekkingAanAfnemer.getGebeurtenisinhoud().getNull(), afnemersbericht);
            digidXClient.remoteLogWithoutRelatingToAccount(Log.MESSAGE_PROCESSED, "Null");
        }
        case "Ag01" -> {
            logger.info("Start processing Ag01 message");
            dglResponseService.processAg01(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAg01(), afnemersbericht);
            digidXClient.remoteLogBericht(Log.MESSAGE_PROCESSED, verstrekkingAanAfnemer, afnemersbericht);
        }
        case "Ag31" -> {
            logger.info("Start processing Ag31 message");
            dglResponseService.processAg31(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAg31(), afnemersbericht);
            digidXClient.remoteLogBericht(Log.MESSAGE_PROCESSED, verstrekkingAanAfnemer, afnemersbericht);
        }
        case "Af01" -> {
            logger.info("Start processing Af01 message");
            dglResponseService.processAf01(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAf01(), afnemersbericht);
            digidXClient.remoteLogBericht(Log.MESSAGE_PROCESSED, verstrekkingAanAfnemer, afnemersbericht);
        }
        case "Af11" -> {
            logger.info("Start processing Af11 message");
            dglResponseService.processAf11(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAf11(), afnemersbericht);
            digidXClient.remoteLogWithoutRelatingToAccount(Log.MESSAGE_PROCESSED, "Af11");
        }
        case "Gv01" -> {
            logger.info("Start processing Gv01 message");
            Gv01 gv01 = verstrekkingAanAfnemer.getGebeurtenisinhoud().getGv01();
            dglResponseService.processGv01(gv01);
            // Prefer the previous BSN value; fall back to the current BSN when absent.
            String bsn = CategorieUtil.findBsnOudeWaarde(gv01.getCategorie());
            if (bsn == null) {
                bsn = CategorieUtil.findBsn(gv01.getCategorie());
            }
            digidXClient.remoteLogSpontaneVerstrekking(Log.MESSAGE_PROCESSED, "Gv01", gv01.getANummer(), bsn);
        }
        case "Ng01" -> {
            logger.info("Start processing Ng01 message");
            Ng01 ng01 = verstrekkingAanAfnemer.getGebeurtenisinhoud().getNg01();
            dglResponseService.processNg01(ng01);
            digidXClient.remoteLogSpontaneVerstrekking(Log.MESSAGE_PROCESSED, "Ng01", CategorieUtil.findANummer(ng01.getCategorie()), "");
        }
        case "Wa11" -> {
            // NOTE(review): unlike the other cases, Wa11 triggers no remote audit log;
            // unknown event names also fall through silently — confirm both are intentional.
            logger.info("Start processing Wa11 message");
            dglResponseService.processWa11(verstrekkingAanAfnemer.getGebeurtenisinhoud().getWa11());
        }
    }
}
@Test public void testProcessGv01(){ String testAnummer = "SSSSSSSSSS"; String testBsnOud = "SSSSSSSSS"; String testBsnNieuw = "SSSSSSSSS"; Gv01 testGv01 = TestDglMessagesUtil.createTestGv01(testAnummer, "O", testBsnOud, testBsnNieuw); VerstrekkingInhoudType inhoudType = new VerstrekkingInhoudType(); inhoudType.setGv01(testGv01); GeversioneerdType type = new GeversioneerdType(); type.setNaam("Gv01"); when(verstrekkingAanAfnemer.getReferentieId()).thenReturn(null); when(verstrekkingAanAfnemer.getGebeurtenissoort()).thenReturn(type); when(verstrekkingAanAfnemer.getGebeurtenisinhoud()).thenReturn(inhoudType); classUnderTest.processVerstrekkingAanAfnemer(verstrekkingAanAfnemer); verify(dglResponseService, times(1)).processGv01(testGv01); }
@Override public long approximateNumEntries() { final List<ReadOnlyKeyValueStore<K, V>> stores = storeProvider.stores(storeName, storeType); long total = 0; for (final ReadOnlyKeyValueStore<K, V> store : stores) { total += store.approximateNumEntries(); if (total < 0) { return Long.MAX_VALUE; } } return total; }
@Test public void shouldReturnLongMaxValueOnUnderflow() { stubProviderTwo.addStore(storeName, new NoOpReadOnlyStore<Object, Object>() { @Override public long approximateNumEntries() { return Long.MAX_VALUE; } }); stubProviderTwo.addStore("my-storeA", new NoOpReadOnlyStore<Object, Object>() { @Override public long approximateNumEntries() { return Long.MAX_VALUE; } }); assertEquals(Long.MAX_VALUE, theStore.approximateNumEntries()); }
@Override public Object clone() { Class<? extends BasePacket> packetClass = this.getClass(); Method[] allMethods = packetClass.getDeclaredMethods(); Method deserializerFactory = null; for (Method m : allMethods) { String mname = m.getName(); if (mname.equals("deserializer")) { deserializerFactory = m; break; } } if (deserializerFactory == null) { throw new IllegalStateException("No Deserializer found for " + packetClass.getName()); } byte[] data = serialize(); try { Deserializer deserializer = (Deserializer) deserializerFactory.invoke(this); return deserializer.deserialize(data, 0, data.length); } catch (IllegalAccessException | InvocationTargetException | DeserializationException ex) { throw new IllegalStateException(ex); } }
@Test public void testClone() { Ethernet p1 = new Ethernet(); p1.sourceMACAddress = MacAddress.ONOS; p1.destinationMACAddress = MacAddress.ZERO; p1.payload = new Data("xyzzy".getBytes()); BasePacket copy1 = (BasePacket) p1.clone(); assertThat(p1, equalTo(copy1)); }
@Override public void execute( RunConfiguration runConfiguration, ExecutionConfiguration executionConfiguration, AbstractMeta meta, VariableSpace variableSpace, Repository repository ) throws KettleException { DefaultRunConfiguration defaultRunConfiguration = (DefaultRunConfiguration) runConfiguration; if ( executionConfiguration instanceof TransExecutionConfiguration ) { configureTransExecution( (TransExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta, repository ); } if ( executionConfiguration instanceof JobExecutionConfiguration ) { configureJobExecution( (JobExecutionConfiguration) executionConfiguration, defaultRunConfiguration, variableSpace, meta, repository ); } variableSpace.setVariable( "engine", null ); variableSpace.setVariable( "engine.remote", null ); variableSpace.setVariable( "engine.scheme", null ); variableSpace.setVariable( "engine.url", null ); }
@Test public void testExecuteRemoteNotFoundJob() throws Exception { DefaultRunConfiguration defaultRunConfiguration = new DefaultRunConfiguration(); defaultRunConfiguration.setName( "Default Configuration" ); defaultRunConfiguration.setLocal( false ); defaultRunConfiguration.setRemote( true ); defaultRunConfiguration.setServer( "Test Server" ); JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration(); lenient().doReturn( slaveServer ).when( abstractMeta ).findSlaveServer( null ); try { defaultRunConfigurationExecutor .execute( defaultRunConfiguration, jobExecutionConfiguration, abstractMeta, variableSpace, null ); fail(); } catch ( KettleException e ) { // expected } }
@JsonProperty public Map<String, Object> getFields() { Map<String, Object> genericFields = Maps.newHashMap(); // strip out common "fields" that we report as individual properties for (Map.Entry<String, Object> entry : message.getFieldsEntries()) { if (!RESERVED_FIELDS.contains(entry.getKey())) { genericFields.put(entry.getKey(), entry.getValue()); } } return genericFields; }
@Test public void testGetFields() throws Exception { assertEquals(new HashMap<String, Object>(), messageSummary.getFields()); message.addField("foo", "bar"); assertEquals(ImmutableMap.of("foo", "bar"), messageSummary.getFields()); }
@VisibleForTesting Manifest createManifest(Class mainClass, String defaultJobName) { Manifest manifest = new Manifest(); manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0"); boolean classHasMainMethod = false; try { Class returnType = mainClass.getMethod("main", String[].class).getReturnType(); if (returnType == Void.TYPE) { classHasMainMethod = true; } else { LOG.warn( "No Main-Class will be set in jar because main method in {} returns {}, expected void", mainClass, returnType); } } catch (NoSuchMethodException e) { LOG.warn("No Main-Class will be set in jar because {} lacks a main method.", mainClass); } if (classHasMainMethod) { manifest.getMainAttributes().put(Name.MAIN_CLASS, mainClass.getName()); } return manifest; }
@Test public void testCreateManifest_withoutMainMethod() { Manifest manifest = jarCreator.createManifest(EmptyPipelineRunner.class, "job"); assertNull(manifest.getMainAttributes().getValue(Name.MAIN_CLASS)); }
@VisibleForTesting static void validateFips(final KsqlConfig config, final KsqlRestConfig restConfig) { if (config.getBoolean(ConfluentConfigs.ENABLE_FIPS_CONFIG)) { final FipsValidator fipsValidator = ConfluentConfigs.buildFipsValidator(); // validate cipher suites and TLS version validateCipherSuites(fipsValidator, restConfig); // validate broker validateBroker(fipsValidator, config); // validate ssl endpoint algorithm validateSslEndpointAlgo(fipsValidator, restConfig); // validate schema registry url validateSrUrl(fipsValidator, restConfig); // validate all listeners validateListeners(fipsValidator, restConfig); log.info("FIPS mode enabled for ksqlDB!"); } }
@Test public void shouldFailOnInvalidEnabledProtocols() { // Given: final KsqlConfig config = configWith(ImmutableMap.of( ConfluentConfigs.ENABLE_FIPS_CONFIG, true )); final String wrongEnabledProtocols = "TLSv1.0"; final KsqlRestConfig restConfig = new KsqlRestConfig(ImmutableMap.<String, Object>builder() .put(KsqlRestConfig.SSL_CIPHER_SUITES_CONFIG, Collections.singletonList("TLS_RSA_WITH_AES_256_CCM")) .put(KsqlRestConfig.SSL_ENABLED_PROTOCOLS_CONFIG, Collections.singletonList(wrongEnabledProtocols)) .build() ); // When: final Exception e = assertThrows( SecurityException.class, () -> KsqlServerMain.validateFips(config, restConfig) ); // Then: assertThat(e.getMessage(), containsString( "FIPS 140-2 Configuration Error, invalid TLS versions: " + wrongEnabledProtocols)); }
public void flush() throws IOException { FileChannel channel = channelOrNull(); if (channel != null) channel.force(true); }
@Test public void testFlush() throws IOException { File nonExistentFile = TestUtils.tempFile(); assertTrue(nonExistentFile.delete()); try (TransactionIndex testIndex = new TransactionIndex(0, nonExistentFile)) { testIndex.flush(); testIndex.append(new AbortedTxn(0L, 0, 10, 2)); testIndex.flush(); assertNotEquals(0, testIndex.file().length()); } }
public SearchSourceBuilder create(SearchesConfig config) { return create(SearchCommand.from(config)); }
@Test void scrollSearchDoesNotHighlight() { final SearchSourceBuilder search = this.searchRequestFactory.create(ChunkCommand.builder() .indices(Collections.singleton("graylog_0")) .range(RANGE) .build()); assertThat(search.toString()).doesNotContain("\"highlight\":"); }