focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Validates that the supplied polymorphic info uses the supported PCA version
 * and carries both required encoding flags.
 *
 * <p>NOTE(review): the method name keeps its historical typo ("Polymorhpic")
 * because it is public API and callers/tests reference it by this name.
 *
 * @param info the parsed polymorphic info structure
 * @throws ClientException if the PCA version is not 1, or if the randomized-PIP
 *         and compressed-encoding flags are not both set
 */
public static void validatePolymorhpicInfo(PolymorphicInfo info) {
    if (info.getPcaVersion() != 1) {
        logger.error("Unsupported PCA version {}", info.getPcaVersion());
        throw new ClientException("Polymorphic info is not correct");
    }
    // Flag bit masks named for readability (were inline magic numbers 32 and 4).
    final int randomizedPipMask = 32;     // bit 5
    final int compressedEncodingMask = 4; // bit 2
    int polymorphicFlags = info.getFlags().intValue();
    boolean randomizedPip = (polymorphicFlags & randomizedPipMask) != 0;
    boolean compressedEncoding = (polymorphicFlags & compressedEncodingMask) != 0;
    if (!randomizedPip || !compressedEncoding) {
        logger.error("Polymorphic flags incorrect randomizedPip: {} compressedEncoding: {}", randomizedPip, compressedEncoding);
        throw new ClientException("Polymorphic info is not correct");
    }
}
/** A well-formed polymorphic info blob (hex redacted) must pass validation without throwing. */
@Test
public void validPolymorhpicInfo() {
    final PolymorphicInfo info = mapper.read(
            Hex.decode("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS"),
            PolymorphicInfo.class);
    CardValidations.validatePolymorhpicInfo(info);
}
@Override public ProxyInvocationHandler parserInterfaceToProxy(Object target, String objectName) { // eliminate the bean without two phase annotation. Set<String> methodsToProxy = this.tccProxyTargetMethod(target); if (methodsToProxy.isEmpty()) { return null; } // register resource and enhance with interceptor DefaultResourceRegisterParser.get().registerResource(target, objectName); return new TccActionInterceptorHandler(target, methodsToProxy); }
/** Nested TCC action with REQUIRES_NEW: both the inner and outer action must commit. */
@Test
public void testNestTcc_required_new_should_both_commit() throws Exception {
    // Build a proxied inner TCC action and nest it inside an outer action.
    TccActionImpl tccAction = new TccActionImpl();
    TccAction tccActionProxy = ProxyUtil.createProxy(tccAction, "oldtccAction");
    Assertions.assertNotNull(tccActionProxy);
    NestTccActionImpl nestTccAction = new NestTccActionImpl();
    nestTccAction.setTccAction(tccActionProxy);
    //when
    ProxyInvocationHandler proxyInvocationHandler = DefaultInterfaceParser.get().parserInterfaceToProxy(nestTccAction, nestTccAction.getClass().getName());
    //then
    Assertions.assertNotNull(proxyInvocationHandler);
    //when
    NestTccAction nestTccActionProxy = ProxyUtil.createProxy(nestTccAction, "oldnestTccActionProxy");
    //then
    Assertions.assertNotNull(nestTccActionProxy);
    // transaction commit test
    GlobalTransaction tx = GlobalTransactionContext.getCurrentOrCreate();
    try {
        tx.begin(60000, "testBiz");
        boolean result = nestTccActionProxy.prepareNestRequiredNew(null, 2);
        Assertions.assertTrue(result);
        if (result) {
            tx.commit();
        } else {
            tx.rollback();
        }
    } catch (Exception exx) {
        tx.rollback();
        throw exx;
    }
    // REQUIRES_NEW semantics: both levels must have committed.
    Assertions.assertTrue(nestTccAction.isCommit());
    Assertions.assertTrue(tccAction.isCommit());
}
/**
 * Accepts a pickle when no line filter is configured for its file, or when
 * any configured line coincides with one of the pickle's source locations
 * (pickle, scenario, examples row, rule, or feature line).
 */
@Override
public boolean test(Pickle pickle) {
    URI picklePath = pickle.getUri();
    if (!lineFilters.containsKey(picklePath)) {
        // No filter for this file: everything passes.
        return true;
    }
    for (Integer line : lineFilters.get(picklePath)) {
        if (matchesAnyLocation(pickle, line)) {
            return true;
        }
    }
    return false;
}

/** True when {@code line} equals any of the pickle's known location lines. */
private static boolean matchesAnyLocation(Pickle pickle, Integer line) {
    return Objects.equals(line, pickle.getLocation().getLine())
            || Objects.equals(line, pickle.getScenarioLocation().getLine())
            || pickle.getExamplesLocation().map(Location::getLine).map(line::equals).orElse(false)
            || pickle.getRuleLocation().map(Location::getLine).map(line::equals).orElse(false)
            || pickle.getFeatureLocation().map(Location::getLine).map(line::equals).orElse(false);
}
/** Line 6 points at the examples table header, which identifies no concrete pickle. */
@Test
void does_not_match_example_header() {
    LinePredicate predicate = new LinePredicate(singletonMap(
        featurePath,
        singletonList(6)));
    assertFalse(predicate.test(firstPickle));
    assertFalse(predicate.test(secondPickle));
    assertFalse(predicate.test(thirdPickle));
    assertFalse(predicate.test(fourthPickle));
}
/**
 * Empties the record: resets the field count and marks position 0 as the
 * first modified position. A no-op when the record is already empty.
 */
public void clear() {
    if (this.numFields > 0) {
        this.numFields = 0;
        this.firstModifiedPos = 0;
    }
}
/** clear() must zero the field count; previously serialized copies remain readable. */
@Test
void testClear() throws IOException {
    Record record = new Record(new IntValue(42));
    record.write(this.out);
    assertThat(record.getField(0, IntValue.class).getValue()).isEqualTo(42);
    record.setField(0, new IntValue(23));
    record.write(this.out);
    assertThat(record.getField(0, IntValue.class).getValue()).isEqualTo(23);
    record.clear();
    assertThat(record.getNumFields()).isZero();
    // Re-read the two snapshots written above; clear() must not affect them.
    Record record2 = new Record(new IntValue(42));
    record2.read(in);
    assertThat(record2.getField(0, IntValue.class).getValue()).isEqualTo(42);
    record2.read(in);
    assertThat(record2.getField(0, IntValue.class).getValue()).isEqualTo(23);
}
/**
 * Emits a deprecation warning when the legacy login/password authentication
 * properties are still configured. The password warning takes precedence
 * over the login warning; Scanner-for-.NET installs get an extra hint.
 */
public void execute() {
    final Optional<String> login = configuration.get(CoreProperties.LOGIN);
    final Optional<String> password = configuration.get(CoreProperties.PASSWORD);

    String warningMessage;
    if (password.isPresent()) {
        warningMessage = PASSWORD_WARN_MESSAGE;
    } else if (login.isPresent()) {
        warningMessage = LOGIN_WARN_MESSAGE;
    } else {
        // Neither legacy property is set: nothing to warn about.
        return;
    }
    if (isScannerDotNet()) {
        warningMessage += SCANNER_DOTNET_WARN_MESSAGE;
    }
    LOG.warn(warningMessage);
    analysisWarnings.addUnique(warningMessage);
}
/** Setting the deprecated password property must surface the password warning (it wins over login). */
@Test
public void execute_whenUsingPassword_shouldAddWarning() {
    settings.setProperty(CoreProperties.LOGIN, "test");
    settings.setProperty(CoreProperties.PASSWORD, "winner winner chicken dinner");
    underTest.execute();
    verify(analysisWarnings, times(1)).addUnique(PASSWORD_WARN_MESSAGE);
    Assertions.assertThat(logger.logs(Level.WARN)).contains(PASSWORD_WARN_MESSAGE);
}
/**
 * Returns a live {@link RSet} view over the values stored under {@code key}.
 * Mutating operations on the returned set are rerouted through the multimap
 * so its key index stays consistent; TTL and rename operations are
 * unsupported on this view.
 */
@Override
public RSet<V> get(final K key) {
    String keyHash = keyHash(key);
    final String setName = getValuesName(keyHash);

    return new RedissonSet<V>(codec, commandExecutor, setName, null) {

        @Override
        public RFuture<Boolean> addAsync(V value) {
            // Route through the multimap so the key index is updated too.
            return RedissonSetMultimap.this.putAsync(key, value);
        }

        @Override
        public RFuture<Boolean> addAllAsync(Collection<? extends V> c) {
            return RedissonSetMultimap.this.putAllAsync(key, c);
        }

        @Override
        public RFuture<Boolean> removeAsync(Object value) {
            return RedissonSetMultimap.this.removeAsync(key, value);
        }

        @Override
        public RFuture<Boolean> removeAllAsync(Collection<?> c) {
            if (c.isEmpty()) {
                return new CompletableFutureWrapper<>(false);
            }
            List<Object> args = new ArrayList<Object>(c.size() + 1);
            args.add(encodeMapKey(key));
            encode(args, c);
            // SREM in chunks of 5000 values (Lua unpack argument limit); drop
            // the key from the index hash once the value set becomes empty.
            return commandExecutor.evalWriteAsync(RedissonSetMultimap.this.getRawName(), codec, RedisCommands.EVAL_BOOLEAN_AMOUNT,
                    "local count = 0;" +
                    "for i=2, #ARGV, 5000 do " +
                        "count = count + redis.call('srem', KEYS[2], unpack(ARGV, i, math.min(i+4999, table.getn(ARGV)))) " +
                    "end; " +
                    "if count > 0 then " +
                        "if redis.call('scard', KEYS[2]) == 0 then " +
                            "redis.call('hdel', KEYS[1], ARGV[1]); " +
                        "end; " +
                        "return 1;" +
                    "end;" +
                    "return 0; ",
                    Arrays.<Object>asList(RedissonSetMultimap.this.getRawName(), setName), args.toArray());
        }

        @Override
        public RFuture<Boolean> deleteAsync() {
            // Remove both the value set and its entry in the key index.
            ByteBuf keyState = encodeMapKey(key);
            return RedissonSetMultimap.this.fastRemoveAsync(Arrays.asList(keyState),
                    Arrays.asList(RedissonSetMultimap.this.getRawName(), setName),
                    RedisCommands.EVAL_BOOLEAN_AMOUNT);
        }

        @Override
        public RFuture<Boolean> clearExpireAsync() {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }

        @Override
        public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... keys) {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }

        @Override
        protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }

        @Override
        public RFuture<Long> remainTimeToLiveAsync() {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }

        @Override
        public RFuture<Void> renameAsync(String newName) {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }

        @Override
        public RFuture<Boolean> renamenxAsync(String newName) {
            throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
        }
    };
}
/** keySize() must count distinct keys and shrink after fastRemove of a key. */
@Test
public void testKeySize() {
    RSetMultimap<SimpleKey, SimpleValue> map = redisson.getSetMultimap("test1");
    map.put(new SimpleKey("0"), new SimpleValue("1"));
    map.put(new SimpleKey("0"), new SimpleValue("2"));
    map.put(new SimpleKey("1"), new SimpleValue("3"));
    assertThat(map.keySize()).isEqualTo(2);
    assertThat(map.keySet().size()).isEqualTo(2);
    map.fastRemove(new SimpleKey("0"));
    // The removed key's value view must now be empty.
    Set<SimpleValue> s = map.get(new SimpleKey("0"));
    assertThat(s).isEmpty();
    assertThat(map.keySize()).isEqualTo(1);
}
/**
 * Scans a jar for licensing problems and returns the number of severe issues:
 * a missing/invalid META-INF/NOTICE or META-INF/LICENSE, license files found
 * outside META-INF, and files under incompatible licenses. Empty test jars
 * are accepted with zero issues.
 */
@VisibleForTesting
static int checkJar(Path file) throws Exception {
    final URI uri = file.toUri();
    int numSevereIssues = 0;
    // Mount the jar as a file system to inspect its entries without extraction.
    try (final FileSystem fileSystem =
            FileSystems.newFileSystem(
                    new URI("jar:file", uri.getHost(), uri.getPath(), uri.getFragment()),
                    Collections.emptyMap())) {
        if (isTestJarAndEmpty(file, fileSystem.getPath("/"))) {
            return 0;
        }
        if (!noticeFileExistsAndIsValid(fileSystem.getPath("META-INF", "NOTICE"), file)) {
            numSevereIssues++;
        }
        if (!licenseFileExistsAndIsValid(fileSystem.getPath("META-INF", "LICENSE"), file)) {
            numSevereIssues++;
        }
        numSevereIssues += getNumLicenseFilesOutsideMetaInfDirectory(file, fileSystem.getPath("/"));
        numSevereIssues += getFilesWithIncompatibleLicenses(file, fileSystem.getPath("/"));
    }
    return numSevereIssues;
}
/** A license file in the jar root (outside META-INF) must count as exactly one severe issue. */
@Test
void testRejectedOnLicenseFileInRoot(@TempDir Path tempDir) throws Exception {
    assertThat(
            JarFileChecker.checkJar(
                    createJar(
                            tempDir,
                            Entry.fileEntry(VALID_NOTICE_CONTENTS, VALID_NOTICE_PATH),
                            Entry.fileEntry(VALID_LICENSE_CONTENTS, VALID_LICENSE_PATH),
                            Entry.fileEntry(
                                    VALID_LICENSE_CONTENTS, Arrays.asList("some_custom_license")))))
            .isEqualTo(1);
}
/**
 * Returns the names of the sources that contain {@code target}. With a
 * qualifying {@code source} the search is restricted to that source alone;
 * otherwise every known source is scanned.
 */
public Set<SourceName> sourcesWithField(
    final Optional<SourceName> source,
    final ColumnName target
) {
    if (source.isPresent()) {
        // Qualified lookup: only the named source can match.
        final SourceName sourceName = source.get();
        final LogicalSchema schema = sourceSchemas.get(sourceName);
        if (schema == null || !schema.findColumn(target).isPresent()) {
            return ImmutableSet.of();
        }
        return ImmutableSet.of(sourceName);
    }
    // Unqualified lookup: collect every source exposing the column.
    return sourceSchemas.entrySet().stream()
        .filter(e -> e.getValue().findColumn(target).isPresent())
        .map(Entry::getKey)
        .collect(Collectors.toSet());
}
/** A column absent from the qualified source must yield no matching sources. */
@Test
public void shouldFindNoQualifiedField() {
    assertThat(sourceSchemas.sourcesWithField(Optional.of(ALIAS_1), V2), is(empty()));
}
/**
 * Aborts an open transaction on the partition named in {@code spec} by
 * driving a WriteTxnMarkers request; completion is exposed through the
 * returned result's futures.
 */
@Override
public AbortTransactionResult abortTransaction(AbortTransactionSpec spec, AbortTransactionOptions options) {
    AdminApiFuture.SimpleAdminApiFuture<TopicPartition, Void> future =
            AbortTransactionHandler.newFuture(Collections.singleton(spec.topicPartition()));
    AbortTransactionHandler handler = new AbortTransactionHandler(spec, logContext);
    invokeDriver(handler, future, options.timeoutMs);
    return new AbortTransactionResult(future.all());
}
/**
 * After a disconnect from the first leader, the client must refresh
 * metadata, locate the new leader, retry the WriteTxnMarkers request there,
 * and complete successfully.
 */
@Test
public void testAbortTransactionFindLeaderAfterDisconnect() throws Exception {
    MockTime time = new MockTime();
    int retryBackoffMs = 100;
    Cluster cluster = mockCluster(3, 0);
    Map<String, Object> configOverride = newStrMap(AdminClientConfig.RETRY_BACKOFF_MS_CONFIG,
            "" + retryBackoffMs);
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(time, cluster, configOverride)) {
        TopicPartition topicPartition = new TopicPartition("foo", 13);
        AbortTransactionSpec abortSpec = new AbortTransactionSpec(
                topicPartition, 12345L, (short) 15, 200);
        Iterator<Node> nodeIterator = env.cluster().nodes().iterator();
        Node firstLeader = nodeIterator.next();
        expectMetadataRequest(env, topicPartition, firstLeader);
        WriteTxnMarkersResponse response = writeTxnMarkersResponse(abortSpec, Errors.NONE);
        // First attempt: respond then disconnect (last arg 'true').
        env.kafkaClient().prepareResponseFrom(
                request -> {
                    // We need a sleep here because the client will attempt to
                    // backoff after the disconnect
                    time.sleep(retryBackoffMs);
                    return request instanceof WriteTxnMarkersRequest;
                },
                response,
                firstLeader,
                true
        );
        // Retry path: new metadata points at a different leader.
        Node retryLeader = nodeIterator.next();
        expectMetadataRequest(env, topicPartition, retryLeader);
        env.kafkaClient().prepareResponseFrom(
                request -> request instanceof WriteTxnMarkersRequest,
                response,
                retryLeader
        );
        AbortTransactionResult result = env.adminClient().abortTransaction(abortSpec);
        assertNull(result.all().get());
    }
}
/**
 * Parses XML with a SAX parser. Uses the parser supplied via the
 * ParseContext when present; otherwise borrows one from the internal pool
 * and returns it in the finally block. The OfflineContentHandler wrapper
 * blocks external entity resolution (XXE defense).
 */
public static void parseSAX(InputStream is, ContentHandler contentHandler, ParseContext context)
        throws TikaException, IOException, SAXException {
    SAXParser saxParser = context.get(SAXParser.class);
    PoolSAXParser poolSAXParser = null;
    if (saxParser == null) {
        poolSAXParser = acquireSAXParser();
        saxParser = poolSAXParser.getSAXParser();
    }
    try {
        saxParser.parse(is, new OfflineContentHandler(contentHandler));
    } finally {
        // Only pool-acquired parsers are released; context-supplied ones stay
        // owned by the caller.
        if (poolSAXParser != null) {
            releaseParser(poolSAXParser);
        }
    }
}
/** The parser must not fetch external entities (XXE defense). */
@Test
public void testExternalEntity() throws Exception {
    // Declares an external entity pointing at an unreachable host; a secure
    // parser must never attempt the connection.
    String xml = "<!DOCTYPE foo [" +
            " <!ENTITY bar SYSTEM \"http://127.234.172.38:7845/bar\">" +
            " ]><foo>&bar;</foo>";
    try {
        XMLReaderUtils.parseSAX(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)),
                new ToTextContentHandler(), new ParseContext());
    } catch (ConnectException e) {
        fail("Parser tried to access the external DTD:" + e);
    }
}
/**
 * Removes the given attempt of a subtask's reader. When it was the last
 * registered attempt, the subtask entry is dropped entirely. Unknown
 * subtasks/attempts are ignored silently.
 */
void unregisterSourceReader(int subtaskId, int attemptNumber) {
    final Map<Integer, ReaderInfo> attempts = registeredReaders.get(subtaskId);
    if (attempts == null) {
        // Nothing registered for this subtask: no-op.
        return;
    }
    attempts.remove(attemptNumber);
    if (attempts.isEmpty()) {
        registeredReaders.remove(subtaskId);
    }
}
/** Unregistering a reader that never registered must be a silent no-op (no exception). */
@Test
void testUnregisterUnregisteredReader() {
    context.unregisterSourceReader(0, 0);
}
/**
 * Always {@code false}. NOTE(review): constant result — presumably this is
 * an empty/no-value headers implementation; confirm against the class.
 */
@Override
public boolean containsDouble(K name, double value) {
    return false;
}
/** containsDouble must never report a match on this headers constant. */
@Test
public void testContainsDouble() {
    assertFalse(HEADERS.containsDouble("name1", 1));
}
@ConstantFunction(name = "mod", argTypes = {TINYINT, TINYINT}, returnType = TINYINT) public static ConstantOperator modTinyInt(ConstantOperator first, ConstantOperator second) { if (second.getTinyInt() == 0) { return ConstantOperator.createNull(Type.TINYINT); } return ConstantOperator.createTinyInt((byte) (first.getTinyInt() % second.getTinyInt())); }
/**
 * 10 % 10 == 0. NOTE(review): a non-zero expectation (e.g. 10 % 3) would
 * distinguish a correct result from a buggy constant zero.
 */
@Test
public void modTinyInt() {
    assertEquals(0, ScalarOperatorFunctions.modTinyInt(O_TI_10, O_TI_10).getTinyInt());
}
@PUT @Path("{id}/add_router_interface") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response addRouterInterface(@PathParam("id") String id, InputStream input) throws IOException { log.trace(String.format(MESSAGE_ROUTER_IFACE, "UPDATE " + id)); String inputStr = IOUtils.toString(input, REST_UTF8); if (!haService.isActive() && !DEFAULT_ACTIVE_IP_ADDRESS.equals(haService.getActiveIp())) { return syncPut(haService, ROUTERS, "add_router_interface", id, inputStr); } final NeutronRouterInterface osRouterIface = (NeutronRouterInterface) jsonToModelEntity(inputStr, NeutronRouterInterface.class); adminService.addRouterInterface(osRouterIface); return status(Response.Status.OK).build(); }
/**
 * When the admin service rejects the interface (IllegalArgumentException),
 * the REST endpoint must map it to HTTP 400.
 */
@Test
public void testAddRouterInterfaceWithNonexistId() {
    expect(mockOpenstackHaService.isActive()).andReturn(true).anyTimes();
    replay(mockOpenstackHaService);
    mockOpenstackRouterAdminService.addRouterInterface(anyObject());
    expectLastCall().andThrow(new IllegalArgumentException());
    replay(mockOpenstackRouterAdminService);
    final WebTarget wt = target();
    InputStream jsonStream = OpenstackRouterWebResourceTest.class
            .getResourceAsStream("openstack-router-interface.json");
    Response response = wt.path(PATH + "/f49a1319-423a-4ee6-ba54-1d95a4f6cc68/add_router_interface")
            .request(MediaType.APPLICATION_JSON_TYPE)
            .put(Entity.json(jsonStream));
    final int status = response.getStatus();
    assertThat(status, is(400));
    verify(mockOpenstackRouterAdminService);
}
/**
 * Rewrites the AST rooted at {@code node} by delegating to the configured
 * rewriter with the given context.
 */
public AstNode rewrite(final AstNode node, final C context) {
    return rewriter.process(node, context);
}
/** Rewriting a CTAS must swap in the rewritten query and preserve every other attribute. */
@Test
public void shouldRewriteCTAS() {
    // Given:
    final CreateTableAsSelect ctas = new CreateTableAsSelect(
        location,
        sourceName,
        query,
        false,
        false,
        csasProperties
    );
    when(mockRewriter.apply(query, context)).thenReturn(rewrittenQuery);

    // When:
    final AstNode rewritten = rewriter.rewrite(ctas, context);

    // Then:
    assertThat(
        rewritten,
        equalTo(
            new CreateTableAsSelect(
                location,
                sourceName,
                rewrittenQuery,
                false,
                false,
                csasProperties
            )
        )
    );
}
/**
 * For this criterion a strictly greater value is the better one.
 */
@Override
public boolean betterThan(Num criterionValue1, Num criterionValue2) {
    // Higher is better: the first value wins when it strictly exceeds the second.
    final boolean firstIsBetter = criterionValue1.isGreaterThan(criterionValue2);
    return firstIsBetter;
}
/** Higher criterion values must rank as better, and the relation must be asymmetric. */
@Test
public void betterThan() {
    AnalysisCriterion criterion = getCriterion();
    assertTrue(criterion.betterThan(numOf(50), numOf(45)));
    assertFalse(criterion.betterThan(numOf(45), numOf(50)));
}
/**
 * Finds the binding table rule of the first logical table that has one,
 * short-circuiting as soon as a match is found.
 */
private Optional<BindingTableRule> findBindingTableRule(final Collection<String> logicTableNames) {
    return logicTableNames.stream()
            .map(this::findBindingTableRule)
            .filter(Optional::isPresent)
            .map(Optional::get)
            .findFirst();
}
/** Without any binding-table configuration no rule may be found (case-insensitive lookup input). */
@Test
void assertGetBindingTableRuleForNotConfiguration() {
    assertFalse(createMinimumShardingRule().findBindingTableRule("logic_Table").isPresent());
}
/** Returns the key of the file this duplicate was found in. */
public String getFileKey() {
    return fileKey;
}
/** The accessor must echo the constructor argument unchanged. */
@Test
public void getFileKey_returns_constructor_argument() {
    assertThat(new CrossProjectDuplicate(FILE_KEY_1, new TextBlock(2, 3)).getFileKey()).isEqualTo(FILE_KEY_1);
}
/**
 * Writes {@code data} to {@code destFilePath} through the given broker.
 * The writer is always closed, even when opening or writing fails.
 * NOTE(review): an exception thrown by close() in the finally block would
 * mask a prior open/write failure — confirm BrokerWriter.close() semantics.
 */
public static void writeFile(byte[] data, String destFilePath, BrokerDesc brokerDesc) throws UserException {
    BrokerWriter writer = new BrokerWriter(destFilePath, brokerDesc);
    try {
        writer.open();
        ByteBuffer byteBuffer = ByteBuffer.wrap(data);
        writer.write(byteBuffer, data.length);
    } finally {
        writer.close();
    }
}
/**
 * Happy-path broker write: with the thrift connection pool and RPC executor
 * mocked, writeFile must complete without throwing.
 */
@Test
public void testWriteFile(@Mocked TFileBrokerService.Client client,
                          @Mocked GlobalStateMgr globalStateMgr,
                          @Injectable BrokerMgr brokerMgr) throws TException, UserException {
    // open writer response
    TBrokerOpenWriterResponse openWriterResponse = new TBrokerOpenWriterResponse();
    TBrokerOperationStatus status = new TBrokerOperationStatus();
    status.statusCode = TBrokerOperationStatusCode.OK;
    openWriterResponse.opStatus = status;
    openWriterResponse.fd = new TBrokerFD(1, 2);
    // NOTE(review): fsBroker appears unused in this test — candidate for removal.
    FsBroker fsBroker = new FsBroker("127.0.0.1", 99999);

    // Stub the connection pool so no real broker connection is made.
    new MockUp<ThriftConnectionPool<TFileBrokerService.Client>>() {
        @Mock
        public TFileBrokerService.Client borrowObject(TNetworkAddress address, int timeoutMs) throws Exception {
            return client;
        }

        @Mock
        public void returnObject(TNetworkAddress address, TFileBrokerService.Client object) {
            return;
        }

        @Mock
        public void invalidateObject(TNetworkAddress address, TFileBrokerService.Client object) {
            return;
        }
    };
    try (MockedStatic<ThriftRPCRequestExecutor> thriftConnectionPoolMockedStatic =
            Mockito.mockStatic(ThriftRPCRequestExecutor.class)) {
        // First call returns the open-writer response, subsequent calls the plain status.
        thriftConnectionPoolMockedStatic.when(() -> ThriftRPCRequestExecutor.call(Mockito.any(), Mockito.any(), Mockito.any()))
                .thenReturn(openWriterResponse, status);
        BrokerDesc brokerDesc = new BrokerDesc("broker0", Maps.newHashMap());
        byte[] configs = "{'label': 'label0'}".getBytes(StandardCharsets.UTF_8);
        String destFilePath = "hdfs://127.0.0.1:10000/starrocks/jobs/1/label6/9/configs/jobconfig.json";
        try {
            BrokerUtil.writeFile(configs, destFilePath, brokerDesc);
        } catch (Exception e) {
            Assert.fail(e.getMessage());
        }
    }
}
/**
 * Returns the final path segment of {@code path}, with any query string
 * removed.
 *
 * <p>Fix: segments are now split on the literal {@code '/'} instead of the
 * platform-dependent {@code File.separator}. On Windows the separator is
 * {@code '\'}, which made URL inputs such as {@code http://foo/bar} return
 * the whole string instead of {@code bar}.
 *
 * @param path a URI or slash-separated path, may be {@code null}
 * @return the last segment, the empty string when the path ends with a
 *         separator (or is only a query), or {@code null} for {@code null} input
 */
public static String getLastPart(String path) {
    if (path == null) {
        return null;
    }
    // Strip the query string, if any.
    int parameterIndex = path.indexOf('?');
    if (parameterIndex >= 0) {
        path = path.substring(0, parameterIndex);
    }
    // lastIndexOf('/') is -1 when no separator exists, yielding the whole string.
    return path.substring(path.lastIndexOf('/') + 1);
}
/** Last-segment extraction across URLs, bare query strings, and slash-separated paths. */
@Test
public void testGetLastPart() {
    assertNull(URIUtils.getLastPart(null));
    assertEquals(URIUtils.getLastPart(""), "");
    assertEquals(URIUtils.getLastPart("http://foo/bar"), "bar");
    assertEquals(URIUtils.getLastPart("http://foo/bar?moo=x"), "bar");
    assertEquals(URIUtils.getLastPart("?"), "");
    assertEquals(URIUtils.getLastPart("?moo=x"), "");
    assertEquals(URIUtils.getLastPart("/foo/bar"), "bar");
    assertEquals(URIUtils.getLastPart("file:/foo/bar"), "bar");
}
/**
 * Human-readable description including the metastore URI. Tolerates an
 * unset Hive configuration by printing an empty URI instead of throwing.
 */
@Override
public String toString() {
    return MoreObjects.toStringHelper(this)
            .add("name", name)
            .add("uri", this.conf == null ? "" : this.conf.get(HiveConf.ConfVars.METASTORE_URIS.varname))
            .toString();
}
/** toString() must not throw before setConf() has been called. */
@Test
public void testToStringWithoutSetConf() {
    assertThatNoException()
            .isThrownBy(
                    () -> {
                        HiveCatalog hiveCatalog = new HiveCatalog();
                        hiveCatalog.toString();
                    });
}
/**
 * Returns the SQL clause for skipping locked rows, or the empty string when
 * the dialect does not support {@code FOR UPDATE SKIP LOCKED}.
 */
@Override
public String selectForUpdateSkipLocked() {
    if (supportsSelectForUpdateSkipLocked) {
        return " FOR UPDATE SKIP LOCKED";
    }
    return "";
}
/** MariaDB 10.5 is treated as not supporting SKIP LOCKED, so the clause must be empty. */
@Test
void mariaDB10DoesNotSupportSelectForUpdateSkipLocked() {
    assertThat(new MariaDbDialect("MariaDB", "10.5.0").selectForUpdateSkipLocked()).isEmpty();
}
/**
 * Runs every known crash rule against the log and collects a result for each
 * rule whose pattern matches.
 *
 * <p>NOTE(review): the name keeps its historical typo ("anaylze") because it
 * is public API referenced by callers and tests.
 */
public static Set<Result> anaylze(String log) {
    Set<Result> results = new HashSet<>();
    for (Rule candidate : Rule.values()) {
        Matcher matcher = candidate.pattern.matcher(log);
        if (!matcher.find()) {
            continue;
        }
        results.add(new Result(candidate, log, matcher));
    }
    return results;
}
/**
 * The Night Config Fixes crash log must be matched by NIGHT_CONFIG_FIXES.
 * The previously stored local was unused; findBindingTableRule-style lookup
 * helper findResultByRule presumably fails the test itself when the rule is
 * absent — TODO confirm that it asserts internally.
 */
@Test
public void nightconfigfixes() throws IOException {
    findResultByRule(
            CrashReportAnalyzer.anaylze(loadLog("/crash-report/night_config_fixes.txt")),
            CrashReportAnalyzer.Rule.NIGHT_CONFIG_FIXES);
}
/**
 * Starts the span for a request. Request-parsing errors are logged and
 * swallowed (unless fatal); the span is always started regardless, using the
 * request's own timestamp when it provides a non-zero one.
 */
Span handleStart(Req request, Span span) {
    if (span.isNoop()) return span;
    try {
        parseRequest(request, span);
    } catch (Throwable t) {
        // Fatal errors (e.g. VirtualMachineError) propagate; others are logged.
        propagateIfFatal(t);
        Platform.get().log("error parsing request {0}", request, t);
    } finally {
        // all of the above parsing happened before a timestamp on the span
        long timestamp = request.startTimestamp();
        if (timestamp == 0L) {
            span.start();
        } else {
            span.start(timestamp);
        }
    }
    return span;
}
/** Values set by parseRequest must land on the span handled by handleStart. */
@Test
void handleStart_addsRemoteEndpointWhenParsed() {
    handler = new RpcHandler(RpcRequestParser.DEFAULT, RpcResponseParser.DEFAULT) {
        @Override
        void parseRequest(RpcRequest request, Span span) {
            span.remoteIpAndPort("1.2.3.4", 0);
        }
    };

    handler.handleStart(request, span);

    verify(span).remoteIpAndPort("1.2.3.4", 0);
}
/**
 * Records the current position as the mark. The {@code readAheadLimit} is
 * intentionally ignored — valid for an in-memory stream whose mark never
 * expires.
 */
@Override
public void mark(int readAheadLimit) {
    mMark = mPosition;
}
/** mark/reset must restore the stream to the marked position (offset 1 here). */
@Test
void testMark() {
    UnsafeByteArrayInputStream stream = new UnsafeByteArrayInputStream("abc".getBytes(), 1);
    assertThat(stream.markSupported(), is(true));
    stream.mark(2);
    stream.read();
    assertThat(stream.position(), is(2));
    stream.reset();
    assertThat(stream.position(), is(1));
}
/** Returns the number of keys in the database of the given cluster node (DBSIZE). */
@Override
public Long dbSize(RedisClusterNode node) {
    return execute(node, RedisCommands.DBSIZE);
}
/** A fresh master node must report an empty database. */
@Test
public void testDbSize() {
    RedisClusterNode master = getFirstMaster();
    Long size = connection.dbSize(master);
    assertThat(size).isZero();
}
/** Exposes this action's monitor object. */
@Override
public Object lock() {
    return lock;
}
/** Each action instance must own a distinct lock object (no shared static monitor). */
@Test
public void testLock() {
    assertNotSame(new DefaultMainAction() {
        @Override
        public void run() {
        }
    }.lock(), new DefaultMainAction() {
        @Override
        public void run() {
        }
    }.lock());
}
/**
 * Assigns the task's partitions to the consumer and seeks each previously
 * committed partition to (committed offset + 1). Partitions without a
 * committed offset are left at the consumer's default position.
 */
void initializeConsumer(Set<TopicPartition> taskTopicPartitions) {
    Map<TopicPartition, Long> topicPartitionOffsets = loadOffsets(taskTopicPartitions);
    consumer.assign(topicPartitionOffsets.keySet());
    log.info("Starting with {} previously uncommitted partitions.",
            topicPartitionOffsets.values().stream().filter(this::isUncommitted).count());
    topicPartitionOffsets.forEach((topicPartition, offset) -> {
        // Do not call seek on partitions that don't have an existing offset committed.
        if (isUncommitted(offset)) {
            log.trace("Skipping seeking offset for topicPartition: {}", topicPartition);
            return;
        }
        // Resume one past the committed offset so that record is not re-replicated.
        long nextOffsetToCommittedOffset = offset + 1L;
        log.trace("Seeking to offset {} for topicPartition: {}", nextOffsetToCommittedOffset, topicPartition);
        consumer.seek(topicPartition, nextOffsetToCommittedOffset);
    });
}
/**
 * On start, all task partitions must be assigned, and seek must be invoked
 * only for partitions with a committed offset (to committed offset + 1).
 */
@Test
public void testSeekBehaviorDuringStart() {
    // Setting up mock behavior.
    @SuppressWarnings("unchecked")
    KafkaConsumer<byte[], byte[]> mockConsumer = mock(KafkaConsumer.class);
    SourceTaskContext mockSourceTaskContext = mock(SourceTaskContext.class);
    OffsetStorageReader mockOffsetStorageReader = mock(OffsetStorageReader.class);
    when(mockSourceTaskContext.offsetStorageReader()).thenReturn(mockOffsetStorageReader);
    Set<TopicPartition> topicPartitions = new HashSet<>(Arrays.asList(
            new TopicPartition("previouslyReplicatedTopic", 8),
            new TopicPartition("previouslyReplicatedTopic1", 0),
            new TopicPartition("previouslyReplicatedTopic", 1),
            new TopicPartition("newTopicToReplicate1", 1),
            new TopicPartition("newTopicToReplicate1", 4),
            new TopicPartition("newTopicToReplicate2", 0)
    ));
    long arbitraryCommittedOffset = 4L;
    long offsetToSeek = arbitraryCommittedOffset + 1L;
    when(mockOffsetStorageReader.offset(anyMap())).thenAnswer(testInvocation -> {
        Map<String, Object> topicPartitionOffsetMap = testInvocation.getArgument(0);
        String topicName = topicPartitionOffsetMap.get("topic").toString();
        // Only return the offset for previously replicated topics.
        // For others, there is no value set.
        if (topicName.startsWith("previouslyReplicatedTopic")) {
            topicPartitionOffsetMap.put("offset", arbitraryCommittedOffset);
        }
        return topicPartitionOffsetMap;
    });
    MirrorSourceTask mirrorSourceTask = new MirrorSourceTask(mockConsumer, null, null,
            new DefaultReplicationPolicy(), null);
    mirrorSourceTask.initialize(mockSourceTaskContext);
    // Call test subject
    mirrorSourceTask.initializeConsumer(topicPartitions);
    // Verifications
    // Ensure all the topic partitions are assigned to consumer
    verify(mockConsumer, times(1)).assign(topicPartitions);
    // Ensure seek is only called for previously committed topic partitions.
    verify(mockConsumer, times(1))
            .seek(new TopicPartition("previouslyReplicatedTopic", 8), offsetToSeek);
    verify(mockConsumer, times(1))
            .seek(new TopicPartition("previouslyReplicatedTopic", 1), offsetToSeek);
    verify(mockConsumer, times(1))
            .seek(new TopicPartition("previouslyReplicatedTopic1", 0), offsetToSeek);
    verifyNoMoreInteractions(mockConsumer);
}
/** Returns the most recently computed server status. */
public ServerStatus getServerStatus() {
    return serverStatus;
}
/** Running the updater against the mocked CP protocol must leave the server status DOWN. */
@Test
void testUpdaterFromConsistency2() {
    when(protocolManager.getCpProtocol()).thenReturn(cpProtocol);
    ServerStatusManager serverStatusManager = new ServerStatusManager(protocolManager, switchDomain);
    ServerStatusManager.ServerStatusUpdater updater = serverStatusManager.new ServerStatusUpdater();
    //then
    updater.run();
    //then
    assertEquals(ServerStatus.DOWN, serverStatusManager.getServerStatus());
}
/**
 * Builder pattern demo: assembles three heroes with differing sets of
 * optional attributes and logs each one.
 */
public static void main(String[] args) {
    var mage = new Hero.Builder(Profession.MAGE, "Riobard")
            .withHairColor(HairColor.BLACK)
            .withWeapon(Weapon.DAGGER)
            .build();
    LOGGER.info(mage.toString());
    var warrior = new Hero.Builder(Profession.WARRIOR, "Amberjill")
            .withHairColor(HairColor.BLOND)
            .withHairType(HairType.LONG_CURLY).withArmor(Armor.CHAIN_MAIL).withWeapon(Weapon.SWORD)
            .build();
    LOGGER.info(warrior.toString());
    var thief = new Hero.Builder(Profession.THIEF, "Desmond")
            .withHairType(HairType.BALD)
            .withWeapon(Weapon.BOW)
            .build();
    LOGGER.info(thief.toString());
}
/** The demo entry point must run to completion without throwing. */
@Test
void shouldExecuteApplicationWithoutException() {
    assertDoesNotThrow(() -> App.main(new String[]{}));
}
/**
 * Estimates the source size in bytes. For file patterns the total size of
 * all matched files is computed once and cached in {@code filesSizeBytes};
 * for a single file the size is the configured offset range capped at the
 * actual end offset.
 */
@Override
public final long getEstimatedSizeBytes(PipelineOptions options) throws IOException {
    // This implementation of method getEstimatedSizeBytes is provided to simplify subclasses. Here
    // we perform the size estimation of files and file patterns using the interface provided by
    // FileSystem.
    String fileOrPattern = fileOrPatternSpec.get();
    if (mode == Mode.FILEPATTERN) {
        Long maybeNumBytes = filesSizeBytes.get();
        if (maybeNumBytes != null) {
            // Cached from a previous call; avoids repeating the filesystem match.
            return maybeNumBytes;
        }
        long totalSize = 0;
        List<Metadata> allMatches = FileSystems.match(fileOrPattern, emptyMatchTreatment).metadata();
        for (Metadata metadata : allMatches) {
            totalSize += metadata.sizeBytes();
        }
        LOG.info(
                "Filepattern {} matched {} files with total size {}",
                fileOrPattern,
                allMatches.size(),
                totalSize);
        // compareAndSet: the first computation wins under concurrent calls.
        filesSizeBytes.compareAndSet(null, totalSize);
        return totalSize;
    } else {
        long start = getStartOffset();
        long end = Math.min(getEndOffset(), getMaxEndOffset(options));
        return end - start;
    }
}
/** First call must match the filesystem (and log); second call must be served from cache (no log). */
@Test
public void testEstimatedSizeOfFile() throws Exception {
    List<String> data = createStringDataset(3, 50);
    String fileName = "file";
    File file = createFileWithData(fileName, data);
    TestFileBasedSource source = new TestFileBasedSource(file.getPath(), 64, null);
    ExpectedLogs.LogSaver logSaver = new ExpectedLogs.LogSaver();
    LogManager.getLogManager().getLogger("").addHandler(logSaver);
    assertEquals(file.length(), source.getEstimatedSizeBytes(null));
    ExpectedLogs.verifyLogged(
            ExpectedLogs.matcher(
                    Level.INFO, String.format("matched 1 files with total size %d", file.length())),
            logSaver);
    LogManager.getLogManager().getLogger("").removeHandler(logSaver);

    logSaver = new ExpectedLogs.LogSaver();
    LogManager.getLogManager().getLogger("").addHandler(logSaver);
    assertEquals(file.length(), source.getEstimatedSizeBytes(null));
    // Second call get result from cache and does not send match request
    ExpectedLogs.verifyNotLogged(
            ExpectedLogs.matcher(
                    Level.INFO, String.format("matched 1 files with total size %d", file.length())),
            logSaver);
    LogManager.getLogManager().getLogger("").removeHandler(logSaver);
}
/**
 * Configures launcher logging, then runs the actual launch under that
 * logback configuration, returning its exit code.
 */
@Override
public int launch(AgentLaunchDescriptor descriptor) {
    LogConfigurator logConfigurator = new LogConfigurator("agent-launcher-logback.xml");
    return logConfigurator.runWithLogger(() -> doLaunch(descriptor));
}
/**
 * With a stale local launcher jar, launch must refresh the launcher jar but
 * must not touch the agent binary or proceed to launch it.
 */
@Test
public void shouldDownloadLauncherJarIfLocalCopyIsStale_butShouldReturnWithoutDownloadingOrLaunchingAgent() throws Exception {
    File launcher = randomFile(AGENT_LAUNCHER_JAR);
    long original = launcher.length();
    File agentFile = randomFile(AGENT_BINARY_JAR);
    long originalAgentLength = agentFile.length();
    new AgentLauncherImpl().launch(launchDescriptor());
    // Launcher replaced (size changed); agent binary untouched.
    assertThat(launcher.length(), not(original));
    assertThat(agentFile.length(), is(originalAgentLength));
}
/**
 * Compiles a Groovy smart-filter and evaluates it against a synthetic
 * message built from the supplied execution data. Compilation failures and
 * runtime failures are captured as error results rather than propagated.
 */
public static SmartFilterTestExecutionResultDTO execSmartFilterTest(SmartFilterTestExecutionDTO execData) {
    Predicate<TopicMessageDTO> predicate;
    try {
        predicate = MessageFilters.createMsgFilter(
                execData.getFilterCode(),
                MessageFilterTypeDTO.GROOVY_SCRIPT
        );
    } catch (Exception e) {
        log.info("Smart filter '{}' compilation error", execData.getFilterCode(), e);
        return new SmartFilterTestExecutionResultDTO()
                .error("Compilation error : " + e.getMessage());
    }
    try {
        var result = predicate.test(
                new TopicMessageDTO()
                        .key(execData.getKey())
                        .content(execData.getValue())
                        .headers(execData.getHeaders())
                        .offset(execData.getOffset())
                        .partition(execData.getPartition())
                        .timestamp(
                                // Epoch millis, if provided, are interpreted as UTC.
                                Optional.ofNullable(execData.getTimestampMs())
                                        .map(ts -> OffsetDateTime.ofInstant(Instant.ofEpochMilli(ts), ZoneOffset.UTC))
                                        .orElse(null))
        );
        return new SmartFilterTestExecutionResultDTO()
                .result(result);
    } catch (Exception e) {
        log.info("Smart filter {} execution error", execData, e);
        return new SmartFilterTestExecutionResultDTO()
                .error("Execution error : " + e.getMessage());
    }
}
/** A filter that throws at evaluation time must yield an execution error, not a result. */
@Test
void execSmartFilterTestReturnsErrorOnFilterApplyError() {
    var result = execSmartFilterTest(
            new SmartFilterTestExecutionDTO()
                    .filterCode("return 1/0")
    );
    assertThat(result.getResult()).isNull();
    assertThat(result.getError()).containsIgnoringCase("execution error");
}
/**
 * One iteration of the conductor duty cycle. Returns the amount of work
 * done so the agent framework can adapt its idle strategy.
 */
public int doWork() {
    final long nowNs = nanoClock.nanoTime();
    trackTime(nowNs);

    int workCount = 0;
    workCount += processTimers(nowNs);
    // Skip polling new client commands while an async command is still in flight.
    if (!asyncClientCommandInFlight) {
        workCount += clientCommandAdapter.receive();
    }
    workCount += drainCommandQueue();
    workCount += trackStreamPositions(workCount, nowNs);
    workCount += nameResolver.doWork(cachedEpochClock.time());
    workCount += freeEndOfLifeResources(ctx.resourceFreeLimit());

    return workCount;
}
/** Adding a counter must notify the client and allocate it with the supplied key and label. */
@Test
void shouldAddSingleCounter() {
    final long registrationId = driverProxy.addCounter(
            COUNTER_TYPE_ID,
            counterKeyAndLabel,
            COUNTER_KEY_OFFSET,
            COUNTER_KEY_LENGTH,
            counterKeyAndLabel,
            COUNTER_LABEL_OFFSET,
            COUNTER_LABEL_LENGTH);

    driverConductor.doWork();

    verify(mockClientProxy).onCounterReady(eq(registrationId), anyInt());
    verify(spyCountersManager).newCounter(
            eq(COUNTER_TYPE_ID),
            any(),
            anyInt(),
            eq(COUNTER_KEY_LENGTH),
            any(),
            anyInt(),
            eq(COUNTER_LABEL_LENGTH));
}
/**
 * Validates that {@code filename} may be created inside {@code workdir}.
 * File creation requires either the CTERA write permission or the
 * create-directories permission on the working directory.
 *
 * @throws InvalidFilenameException if the filename fails validation
 * @throws AccessDeniedException if neither permission is granted
 */
@Override
public void preflight(final Path workdir, final String filename) throws BackgroundException {
    if(!validate(filename)) {
        throw new InvalidFilenameException(MessageFormat.format(LocaleFactory.localizedString("Cannot create {0}", "Error"), filename));
    }
    // File/directory creation summary:
    // - Directories with ctera:writepermission but no ctera:createdirectoriespermission allow for file creation only.
    // - Directories with ctera:createdirectoriespermission but no ctera:writepermission allow for directory and file creation.
    // - Directories with only ctera:readpermission do not allow for file nor directory creation, for listing only.
    // In other words:
    // - file creation is allowed if either ctera:createdirectoriespermission or ctera:writepermission is set or both are set
    // - directory creation is allowed if ctera:createdirectoriespermission is set.
    // ctera:createdirectoriespermission or ctera:writepermission
    try {
        assumeRole(workdir, WRITEPERMISSION);
    }
    catch(AccessDeniedException e) {
        // ignore and try second option
        assumeRole(workdir, CREATEDIRECTORIESPERMISSION);
    }
}
/**
 * A directory carrying an empty ACL (no CTERA permissions at all) must reject
 * file creation in preflight with {@link AccessDeniedException}.
 */
@Test
public void testPreflightNoPermissions() throws Exception {
    final Path file = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory));
    // Empty ACL: neither write nor create-directories permission present.
    file.setAttributes(file.attributes().withAcl(new Acl(new Acl.CanonicalUser())));
    assertThrows(AccessDeniedException.class, () -> new CteraTouchFeature(session).preflight(file, new AlphanumericRandomStringService().random()));
}
/**
 * Resolves a symbol table by name: the local cache is consulted first; on a
 * miss, remote table names trigger an HTTP fetch from the server node encoded
 * in the name, and the result is cached.
 *
 * @param symbolTableName table name as produced by the metadata extractor; for
 *                        remote tables it embeds the server node URI
 * @return the resolved symbol table
 * @throws IllegalStateException if the table is neither cached (non-remote
 *         name) nor fetchable from the remote service
 */
@Override
public SymbolTable getSymbolTable(String symbolTableName)
{
  try
  {
    SymbolTableMetadata metadata = METADATA_EXTRACTOR.extractMetadata(symbolTableName);
    String serverNodeUri = metadata.getServerNodeUri();
    String tableName = metadata.getSymbolTableName();
    boolean isRemote = metadata.isRemote();

    // First check the cache.
    SymbolTable symbolTable = _cache.getIfPresent(tableName);
    if (symbolTable != null)
    {
      return symbolTable;
    }

    // If this is not a remote table, and we didn't find it in the cache, cry foul.
    if (!isRemote)
    {
      throw new IllegalStateException("Unable to fetch symbol table with name: " + symbolTableName);
    }

    // Ok, we didn't find it in the cache, let's go query the service the table was served from.
    String url = serverNodeUri + "/" + SYMBOL_TABLE_URI_PATH + "/" + tableName;
    HttpURLConnection connection = openConnection(url);
    try
    {
      // Apply static default headers first, then any provider-supplied headers.
      if (DEFAULT_HEADERS != null)
      {
        DEFAULT_HEADERS.entrySet().forEach(entry -> connection.setRequestProperty(entry.getKey(), entry.getValue()));
      }
      if (HEADER_PROVIDER != null)
      {
        HEADER_PROVIDER.getHeaders().entrySet().forEach(entry -> connection.setRequestProperty(entry.getKey(), entry.getValue()));
      }
      connection.setRequestProperty(ACCEPT_HEADER, ProtobufDataCodec.DEFAULT_HEADER);
      connection.setRequestProperty(SYMBOL_TABLE_HEADER, Boolean.toString(true));
      int responseCode = connection.getResponseCode();
      if (responseCode == HttpURLConnection.HTTP_OK)
      {
        InputStream inputStream = connection.getInputStream();
        // Deserialize
        symbolTable = SymbolTableSerializer.fromInputStream(inputStream, CODEC, null);
      }
      else
      {
        throw new IOException("Unexpected response status: " + responseCode);
      }
    }
    finally
    {
      connection.disconnect();
    }

    // Cache the retrieved table.
    _cache.put(tableName, symbolTable);
    return symbolTable;
  }
  catch (MalformedURLException ex)
  {
    LOGGER.error("Failed to construct symbol table URL from symbol table name: " + symbolTableName, ex);
  }
  catch (Exception e)
  {
    // Any failure (metadata parse, network, deserialize) falls through to the
    // IllegalStateException below after being logged.
    LOGGER.error("Failed to fetch remote symbol table with name: " + symbolTableName, e);
  }

  throw new IllegalStateException("Unable to fetch symbol table with name: " + symbolTableName);
}
/**
 * A malformed server node URI embedded in the symbol table name must surface
 * as an {@link IllegalStateException} from the provider.
 */
@Test(expectedExceptions = IllegalStateException.class)
public void testRemoteSymbolTableMalformedUrl()
{
    final String malformedTableName = "https\\someservice:100|tableName";
    final DefaultSymbolTableProvider provider = new DefaultSymbolTableProvider();
    provider.getSymbolTable(malformedTableName);
}
/**
 * Builds the RFC 3164 syslog header prefix for the given event in the form
 * {@code <PRI>TIMESTAMP HOSTNAME } (note the trailing space).
 *
 * @param event the logging event to render the header for
 * @return the syslog header prefix string
 */
public String convert(ILoggingEvent event) {
    // PRI = facility plus the severity derived from the event's level.
    final int priority = facility + LevelToSyslogSeverity.convert(event);
    final StringBuilder header = new StringBuilder();
    header.append("<").append(priority).append(">");
    header.append(computeTimeStampString(event.getTimeStamp())).append(' ');
    header.append(localHostName).append(' ');
    return header.toString();
}
/**
 * RFC 3164, section 4.1.2: the HOSTNAME field must not include the domain, so
 * the converter output is compared against the host's short name.
 * NOTE(review): disabled — presumably environment-dependent (relies on the
 * machine's hostname); confirm before re-enabling.
 */
@Test
@Disabled
public void hostnameShouldNotIncludeDomain() throws Exception {
    // RFC 3164, section 4.1.2:
    // The Domain Name MUST NOT be included in the HOSTNAME field.
    String host = HOSTNAME;
    final int firstPeriod = host.indexOf(".");
    if (firstPeriod != -1) {
        // Strip the domain part, keeping only the short host name.
        host = host.substring(0, firstPeriod);
    }
    LoggingEvent le = createLoggingEvent();
    calendar.set(2012, Calendar.OCTOBER, 11, 22, 14, 15);
    le.setTimeStamp(calendar.getTimeInMillis());
    assertEquals("<191>Oct 11 22:14:15 " + host + " ", converter.convert(le));
}
/**
 * Parses the plugin's JSON response into a {@link TaskView}, validating that
 * both 'displayValue' and 'template' are present, non-null strings. All
 * validation failures are collected and reported together in one message.
 *
 * @param responseBody raw JSON returned by the task plugin
 * @return a view exposing the parsed display value and template
 * @throws RuntimeException wrapping any parse or validation failure
 */
@Override
public TaskView toTaskView(String responseBody) {
    ArrayList<String> exceptions = new ArrayList<>();
    try {
        // Raw Map: the JSON structure is dynamic and only two keys are inspected.
        final Map map = (Map) GSON.fromJson(responseBody, Object.class);
        if (map.isEmpty()) {
            exceptions.add("The Json for Task View cannot be empty");
        } else {
            // instanceof String also rejects null values, not just missing keys.
            if (!(map.containsKey("displayValue") && map.get("displayValue") instanceof String)) {
                exceptions.add("The Json for Task View must contain a not-null 'displayValue' of type String");
            }
            if (!(map.containsKey("template") && map.get("template") instanceof String)) {
                exceptions.add("The Json for Task View must contain a not-null 'template' of type String");
            }
        }
        if (!exceptions.isEmpty()) {
            throw new RuntimeException(StringUtils.join(exceptions, ", "));
        }
        // The anonymous view captures the parsed map; values were validated above.
        return new TaskView() {
            @Override
            public String displayValue() {
                return (String) map.get("displayValue");
            }

            @Override
            public String template() {
                return (String) map.get("template");
            }
        };
    } catch (Exception e) {
        LOGGER.error("Error occurred while converting the Json to Task View. Error: {}. The Json received was '{}'.", e.getMessage(), responseBody);
        throw new RuntimeException(String.format("Error occurred while converting the Json to Task View. Error: %s.", e.getMessage()));
    }
}
/**
 * Each malformed Task View JSON variant must be rejected with an error
 * message naming every missing or invalid field.
 */
@Test
public void shouldThrowExceptionForWrongJsonWhileCreatingTaskViewFromResponse() {
    // Empty JSON object.
    String jsonResponse1 = "{}";
    try {
        new JsonBasedTaskExtensionHandler_V1().toTaskView(jsonResponse1);
        fail("should throw exception");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Error occurred while converting the Json to Task View. Error: The Json for Task View cannot be empty."));
    }

    // Missing 'displayValue'.
    String jsonResponse2 = "{\"template\":\"<html>junk</html>\"}";
    try {
        new JsonBasedTaskExtensionHandler_V1().toTaskView(jsonResponse2);
        fail("should throw exception");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Error occurred while converting the Json to Task View. Error: The Json for Task View must contain a not-null 'displayValue' of type String."));
    }

    // Missing 'template'.
    String jsonResponse3 = "{\"displayValue\":\"MyTaskPlugin\"}";
    try {
        new JsonBasedTaskExtensionHandler_V1().toTaskView(jsonResponse3);
        fail("should throw exception");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Error occurred while converting the Json to Task View. Error: The Json for Task View must contain a not-null 'template' of type String."));
    }

    // Null 'displayValue'.
    String jsonResponse4 = "{\"displayValue\":null, \"template\":\"<html>junk</html>\"}";
    try {
        new JsonBasedTaskExtensionHandler_V1().toTaskView(jsonResponse4);
        fail("should throw exception");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Error occurred while converting the Json to Task View. Error: The Json for Task View must contain a not-null 'displayValue' of type String."));
    }

    // Non-string 'template'.
    String jsonResponse5 = "{\"displayValue\":\"MyTaskPlugin\", \"template\":true}";
    try {
        new JsonBasedTaskExtensionHandler_V1().toTaskView(jsonResponse5);
        fail("should throw exception");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Error occurred while converting the Json to Task View. Error: The Json for Task View must contain a not-null 'template' of type String."));
    }

    // Both fields invalid: both errors are reported, comma-separated.
    String jsonResponse6 = "{\"displayValue\":true, \"template\":null}";
    try {
        new JsonBasedTaskExtensionHandler_V1().toTaskView(jsonResponse6);
        fail("should throw exception");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Error occurred while converting the Json to Task View. Error: The Json for Task View must contain a not-null 'displayValue' of type String, The Json for Task View must contain a not-null 'template' of type String."));
    }
}
/**
 * Kills a Spark ETL job. For YARN-master resources the YARN application is
 * killed (falling back to killing the launcher process when no application id
 * exists yet); otherwise the Spark launcher handle is stopped.
 *
 * @param handle    launcher handle for the job; may be null in non-YARN mode
 * @param appId     YARN application id, possibly empty while the job is PENDING
 * @param loadJobId id of the load job being cancelled
 * @param resource  Spark resource describing the target cluster
 * @throws UserException if killing the YARN application fails
 */
public void killEtlJob(SparkLoadAppHandle handle, String appId, long loadJobId, SparkResource resource) throws UserException {
    if (resource.isYarnMaster()) {
        // The appId may be empty when the load job is in PENDING phase. This is because the appId is
        // parsed from the spark launcher process's output (spark launcher process submit job and then
        // return appId). In this case, the spark job has still not been submitted, we only need to kill
        // the spark launcher process.
        if (Strings.isNullOrEmpty(appId)) {
            appId = handle.getAppId();
            if (Strings.isNullOrEmpty(appId)) {
                handle.kill();
                return;
            }
        }
        killYarnApplication(appId, loadJobId, resource);
    } else {
        if (handle != null) {
            handle.stop();
        }
    }
}
/**
 * Kills a YARN-master ETL job end to end with mocked yarn config files and a
 * successful yarn kill command; no exception may escape.
 */
@Test
public void testKillEtlJob(@Mocked Util util, @Mocked CommandResult commandResult,
        @Mocked SparkYarnConfigFiles sparkYarnConfigFiles) throws IOException, UserException {
    // Yarn config preparation is stubbed to a local directory.
    new Expectations() {
        {
            sparkYarnConfigFiles.prepare();
            sparkYarnConfigFiles.getConfigDir();
            result = "./yarn_config";
        }
    };

    // The yarn kill command reports success.
    new Expectations() {
        {
            commandResult.getReturnCode();
            result = 0;
        }
    };

    new Expectations() {
        {
            Util.executeCommand(anyString, (String[]) any, anyLong);
            minTimes = 0;
            result = commandResult;
        }
    };

    // Resource configured for YARN cluster mode.
    SparkResource resource = new SparkResource(resourceName);
    Map<String, String> sparkConfigs = resource.getSparkConfigs();
    sparkConfigs.put("spark.master", "yarn");
    sparkConfigs.put("spark.submit.deployMode", "cluster");
    sparkConfigs.put("spark.hadoop.yarn.resourcemanager.address", "127.0.0.1:9999");
    new Expectations(resource) {
        {
            resource.getYarnClientPath();
            result = Config.yarn_client_path;
        }
    };

    SparkEtlJobHandler handler = new SparkEtlJobHandler();
    try {
        // Null handle is valid here because a non-empty appId is supplied.
        handler.killEtlJob(null, appId, loadJobId, resource);
    } catch (Exception e) {
        Assert.fail(e.getMessage());
    }
}
/**
 * Builds the full argument array for invoking a resource method: positional
 * arguments are copied through, and each remaining declared parameter is
 * resolved from the request context according to its {@code ParamType}.
 * Parameters that cannot be resolved fall through to optional/default-value
 * handling at the bottom of the loop.
 *
 * @param positionalArguments  arguments already resolved by the caller
 * @param resourceMethod       descriptor of the target resource method
 * @param context              server-side request context
 * @param template             record holding action parameters, or null for non-actions
 * @param resourceMethodConfig per-method configuration (e.g. query param validation)
 * @return the complete argument array, index-aligned with the method's parameters
 * @throws RoutingException       (400) on invalid or missing required parameters
 * @throws RestLiServiceException (500) on invalid parameter default values, or
 *         (400) when unexpected request attachments are present
 */
@SuppressWarnings("deprecation")
static Object[] buildArgs(final Object[] positionalArguments,
                          final ResourceMethodDescriptor resourceMethod,
                          final ServerResourceContext context,
                          final DynamicRecordTemplate template,
                          final ResourceMethodConfig resourceMethodConfig)
{
  List<Parameter<?>> parameters = resourceMethod.getParameters();
  Object[] arguments = Arrays.copyOf(positionalArguments, parameters.size());

  fixUpComplexKeySingletonArraysInArguments(arguments);

  boolean attachmentsDesired = false;
  // Resolve every parameter after the positional ones.
  for (int i = positionalArguments.length; i < parameters.size(); ++i)
  {
    Parameter<?> param = parameters.get(i);
    try
    {
      if (param.getParamType() == Parameter.ParamType.KEY || param.getParamType() == Parameter.ParamType.ASSOC_KEY_PARAM)
      {
        Object value = context.getPathKeys().get(param.getName());
        if (value != null)
        {
          arguments[i] = value;
          continue;
        }
        // Absent key: fall through to optional/default handling below.
      }
      else if (param.getParamType() == Parameter.ParamType.CALLBACK)
      {
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PARSEQ_CONTEXT_PARAM || param.getParamType() == Parameter.ParamType.PARSEQ_CONTEXT)
      {
        continue; // don't know what to fill in yet
      }
      else if (param.getParamType() == Parameter.ParamType.HEADER)
      {
        HeaderParam headerParam = param.getAnnotations().get(HeaderParam.class);
        String value = context.getRequestHeaders().get(headerParam.value());
        arguments[i] = value;
        continue;
      }
      //Since we have multiple different types of MaskTrees that can be passed into resource methods,
      //we must evaluate based on the param type (annotation used)
      else if (param.getParamType() == Parameter.ParamType.PROJECTION || param.getParamType() == Parameter.ParamType.PROJECTION_PARAM)
      {
        arguments[i] = context.getProjectionMask();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.METADATA_PROJECTION_PARAM)
      {
        arguments[i] = context.getMetadataProjectionMask();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PAGING_PROJECTION_PARAM)
      {
        arguments[i] = context.getPagingProjectionMask();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.CONTEXT || param.getParamType() == Parameter.ParamType.PAGING_CONTEXT_PARAM)
      {
        PagingContext ctx = RestUtils.getPagingContext(context, (PagingContext) param.getDefaultValue());
        arguments[i] = ctx;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PATH_KEYS || param.getParamType() == Parameter.ParamType.PATH_KEYS_PARAM)
      {
        arguments[i] = context.getPathKeys();
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.PATH_KEY_PARAM)
      {
        Object value = context.getPathKeys().get(param.getName());
        if (value != null)
        {
          arguments[i] = value;
          continue;
        }
      }
      else if (param.getParamType() == Parameter.ParamType.RESOURCE_CONTEXT || param.getParamType() == Parameter.ParamType.RESOURCE_CONTEXT_PARAM)
      {
        arguments[i] = context;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.VALIDATOR_PARAM)
      {
        RestLiDataValidator validator = new RestLiDataValidator(resourceMethod.getResourceModel().getResourceClass().getAnnotations(), resourceMethod.getResourceModel().getValueClass(), resourceMethod.getMethodType());
        arguments[i] = validator;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.RESTLI_ATTACHMENTS_PARAM)
      {
        arguments[i] = context.getRequestAttachmentReader();
        // Remember that attachments were requested so the check below passes.
        attachmentsDesired = true;
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.UNSTRUCTURED_DATA_WRITER_PARAM)
      {
        // The OutputStream is passed to the resource implementation in a synchronous call. Upon return of the
        // resource method, all the bytes would haven't written to the OutputStream. The EntityStream would have
        // contained all the bytes by the time data is requested. The ownership of the OutputStream is passed to
        // the ByteArrayOutputStreamWriter, which is responsible of closing the OutputStream if necessary.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        context.setResponseEntityStream(EntityStreams.newEntityStream(new ByteArrayOutputStreamWriter(out)));
        arguments[i] = new UnstructuredDataWriter(out, context);
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.UNSTRUCTURED_DATA_REACTIVE_READER_PARAM)
      {
        arguments[i] = new UnstructuredDataReactiveReader(context.getRequestEntityStream(), context.getRawRequest().getHeader(RestConstants.HEADER_CONTENT_TYPE));
        continue;
      }
      else if (param.getParamType() == Parameter.ParamType.POST)
      {
        // handle action parameters
        if (template != null)
        {
          DataMap data = template.data();
          if (data.containsKey(param.getName()))
          {
            arguments[i] = template.getValue(param);
            continue;
          }
        }
      }
      else if (param.getParamType() == Parameter.ParamType.QUERY)
      {
        Object value;
        if (DataTemplate.class.isAssignableFrom(param.getType()))
        {
          value = buildDataTemplateArgument(context.getStructuredParameter(param.getName()), param, resourceMethodConfig.shouldValidateQueryParams());
        }
        else
        {
          value = buildRegularArgument(context, param, resourceMethodConfig.shouldValidateQueryParams());
        }

        if (value != null)
        {
          arguments[i] = value;
          continue;
        }
      }
      else if (param.getParamType() == Parameter.ParamType.BATCH || param.getParamType() == Parameter.ParamType.RESOURCE_KEY)
      {
        // should not come to this routine since it should be handled by passing in positionalArguments
        throw new RoutingException("Parameter '" + param.getName() + "' should be passed in as a positional argument",
            HttpStatus.S_400_BAD_REQUEST.getCode());
      }
      else
      {
        // unknown param type
        throw new RoutingException(
            "Parameter '" + param.getName() + "' has an unknown parameter type '" + param.getParamType().name() + "'",
            HttpStatus.S_400_BAD_REQUEST.getCode());
      }
    }
    catch (TemplateRuntimeException e)
    {
      throw new RoutingException("Parameter '" + param.getName() + "' is invalid", HttpStatus.S_400_BAD_REQUEST.getCode());
    }

    try
    {
      // Handling null-valued parameters not provided in resource context or entity body
      // check if it is optional parameter
      if (param.isOptional() && param.hasDefaultValue())
      {
        arguments[i] = param.getDefaultValue();
      }
      else if (param.isOptional() && !param.getType().isPrimitive())
      {
        // optional primitive parameter must have default value or provided
        arguments[i] = null;
      }
      else
      {
        throw new RoutingException("Parameter '" + param.getName() + "' is required", HttpStatus.S_400_BAD_REQUEST.getCode());
      }
    }
    catch (ResourceConfigException e)
    {
      // Parameter default value format exception should result in server error code 500.
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Parameter '" + param.getName() + "' default value is invalid", e);
    }
  }

  //Verify that if the resource method did not expect attachments, and attachments were present, that we drain all
  //incoming attachments and send back a bad request. We must take precaution here since simply ignoring the request
  //attachments is not correct behavior here. Ignoring other request level constructs such as headers or query parameters
  //that were not needed is safe, but not for request attachments.
  if (!attachmentsDesired && context.getRequestAttachmentReader() != null)
  {
    throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST,
        "Resource method endpoint invoked does not accept any request attachments.");
  }

  return arguments;
}
/**
 * An UNSTRUCTURED_DATA_WRITER_PARAM must be materialized as an
 * {@link UnstructuredDataWriter} backed by a ByteArrayOutputStream, with the
 * response entity stream installed on the resource context exactly once.
 */
@Test
public void testUnstructuredDataWriterParam() {
    ServerResourceContext mockResourceContext = EasyMock.createMock(ServerResourceContext.class);
    mockResourceContext.setResponseEntityStream(EasyMock.anyObject());
    EasyMock.expectLastCall().once();
    // No attachments present, so the no-attachments check in buildArgs passes.
    EasyMock.expect(mockResourceContext.getRequestAttachmentReader()).andReturn(null);
    EasyMock.replay(mockResourceContext);

    @SuppressWarnings({"unchecked","rawtypes"})
    final Parameter<UnstructuredDataWriterParam> param = new Parameter("RestLi Unstructured Data Writer",
        UnstructuredDataWriter.class,
        null,
        false,
        null,
        Parameter.ParamType.UNSTRUCTURED_DATA_WRITER_PARAM,
        false,
        AnnotationSet.EMPTY);
    List<Parameter<?>> parameters = Collections.singletonList(param);
    Object[] results = ArgumentBuilder.buildArgs(new Object[0], getMockResourceMethod(parameters), mockResourceContext, null, getMockResourceMethodConfig(false));
    UnstructuredDataWriter result = (UnstructuredDataWriter) results[0];
    Assert.assertNotNull(result);
    Assert.assertTrue(result.getOutputStream() instanceof ByteArrayOutputStream);
    EasyMock.verify(mockResourceContext);
}
public <T> void execute(final AsyncTask<T> task) { try { // some small tasks such as validation can be performed here. task.onPreCall(); } catch (Exception e) { task.onError(e); return; } service.submit(new FutureTask<>(task) { @Override protected void done() { super.done(); try { /* * called in context of background thread. There is other variant possible where result is * posted back and sits in the queue of caller thread which then picks it up for * processing. An example of such a system is Android OS, where the UI elements can only * be updated using UI thread. So result must be posted back in UI thread. */ task.onPostCall(get()); } catch (InterruptedException e) { // should not occur } catch (ExecutionException e) { task.onError(e.getCause()); } } }); }
/**
 * Happy path: onPreCall, call, and onPostCall(result) fire exactly once, in
 * that order, with no other interactions on the task.
 */
@Test
void testPerfectExecution() throws Exception {
    final var result = new Object();
    when(task.call()).thenReturn(result);
    service.execute(task);
    // Wait up to 2s for the background thread to deliver the result.
    verify(task, timeout(2000)).onPostCall(eq(result));

    final var inOrder = inOrder(task);
    inOrder.verify(task, times(1)).onPreCall();
    inOrder.verify(task, times(1)).call();
    inOrder.verify(task, times(1)).onPostCall(eq(result));

    verifyNoMoreInteractions(task);
}
/**
 * Returns the configured executor pool size.
 *
 * @return the pool size
 */
public int getPoolSize() {
    return poolSize;
}
/**
 * A freshly constructed {@link ExecutorConfig} must report the default pool size.
 */
@Test
public void testGetCorePoolSize() {
    final ExecutorConfig executorConfig = new ExecutorConfig();
    final int poolSize = executorConfig.getPoolSize();
    assertTrue(poolSize == ExecutorConfig.DEFAULT_POOL_SIZE);
}
/**
 * Loads a menu record by its id.
 *
 * @param id menu id
 * @return the menu, or {@code null} when no row matches
 */
@Override
public MenuDO getMenu(Long id) {
    return menuMapper.selectById(id);
}
/**
 * Inserts a random menu row and verifies that {@code getMenu} returns an
 * equal record for its id.
 */
@Test
public void testGetMenu() {
    // mock data
    MenuDO menu = randomPojo(MenuDO.class);
    menuMapper.insert(menu);
    // prepare parameters
    Long id = menu.getId();
    // invoke
    MenuDO dbMenu = menuService.getMenu(id);
    // assert
    assertPojoEquals(menu, dbMenu);
}
/**
 * Resolves the {@link Resource} for a gRPC request. An explicitly declared
 * resource on the {@link Secured} annotation takes precedence; otherwise the
 * parser registered for the annotation's sign type is used, falling back to
 * the specified-resource parser when no parser is registered for that type.
 *
 * @param request the gRPC request being authorized
 * @param secured annotation describing the secured endpoint
 * @return the parsed resource
 */
@Override
public Resource parseResource(Request request, Secured secured) {
    if (StringUtils.isNotBlank(secured.resource())) {
        return parseSpecifiedResource(secured);
    }
    String type = secured.signType();
    AbstractGrpcResourceParser parser = resourceParserMap.get(type);
    if (parser == null) {
        Loggers.AUTH.warn("Can't find Grpc request resourceParser for type {}", type);
        return useSpecifiedParserToParse(secured, request);
    }
    return parser.parse(request, secured);
}
/**
 * A naming-typed request must be parsed into a NAMING resource carrying the
 * expected service name, namespace, group, and non-null properties.
 */
@Test
@Secured()
void testParseResourceWithNamingType() throws NoSuchMethodException {
    Secured secured = getMethodSecure("testParseResourceWithNamingType");
    Resource actual = protocolAuthService.parseResource(namingRequest, secured);
    assertEquals(SignType.NAMING, actual.getType());
    assertEquals("testS", actual.getName());
    assertEquals("testNNs", actual.getNamespaceId());
    assertEquals("testNG", actual.getGroup());
    assertNotNull(actual.getProperties());
}
/**
 * Registers the accelerator for the action attached to the given menu entry.
 * An entry with an explicit accelerator installs it as the default; an entry
 * without one falls back to a user-defined accelerator. Entries carrying no
 * action are ignored.
 *
 * @param entry the menu structure entry to inspect
 */
public void visit(Entry entry) {
	// Reuse one accessor for both lookups instead of constructing a second
	// EntryAccessor inside the branch (the original created two).
	final EntryAccessor entryAccessor = new EntryAccessor();
	final AFreeplaneAction action = entryAccessor.getAction(entry);
	if (action != null) {
		final String accelerator = entryAccessor.getAccelerator(entry);
		if(accelerator != null) {
			map.setDefaultAccelerator(action, accelerator);
		}
		else
			map.setUserDefinedAccelerator(action);
		entries.registerEntry(action, entry);
	}
}
/**
 * An entry whose action carries no accelerator must be registered with a
 * user-defined accelerator on the map.
 */
@Test
public void givenEntryWithoutAccelerator_setsUserDefinedAccelerator() {
    Entry actionEntry = new Entry();
    final AFreeplaneAction action = mock(AFreeplaneAction.class);
    // Attach the action but deliberately no accelerator.
    new EntryAccessor().setAction(actionEntry, action);
    IAcceleratorMap map = mock(IAcceleratorMap.class);
    final AcceleratorBuilder acceleratorBuilder = new AcceleratorBuilder(map, mock(EntriesForAction.class));
    acceleratorBuilder.visit(actionEntry);
    Mockito.verify(map).setUserDefinedAccelerator(action);
}
/**
 * Infers {@link TypeInformation} from a concrete runtime object instance.
 *
 * @param value the object to analyze
 * @param <X>   the object's type
 * @return the extracted type information
 */
public static <X> TypeInformation<X> getForObject(X value) {
    return new TypeExtractor().privateGetForObject(value);
}
/**
 * Extracting type information from a raw {@code Either} instance is not
 * supported and must fail with {@link InvalidTypesException}.
 */
@Test
void testEitherFromObjectException() {
    final Either<String, Tuple1<Integer>> either = Either.Left("test");
    assertThatThrownBy(() -> TypeExtractor.getForObject(either))
            .isInstanceOf(InvalidTypesException.class);
}
/**
 * Shadow implementation of request execution: when the fake HTTP layer is
 * intercepting, the request is emulated; otherwise it is forwarded to the
 * real redirector while request/response info is recorded so tests can make
 * assertions on the traffic.
 *
 * @throws HttpException on protocol errors from the real execution path
 * @throws IOException   on transport errors from the real execution path
 */
@Implementation
protected HttpResponse execute(
    HttpHost httpHost, HttpRequest httpRequest, HttpContext httpContext)
    throws HttpException, IOException {
  if (FakeHttp.getFakeHttpLayer().isInterceptingHttpRequests()) {
    return FakeHttp.getFakeHttpLayer()
        .emulateRequest(httpHost, httpRequest, httpContext, realObject);
  } else {
    // Record the outgoing request before executing it for real.
    FakeHttp.getFakeHttpLayer()
        .addRequestInfo(new HttpRequestInfo(httpRequest, httpHost, httpContext, redirector));
    HttpResponse response = redirector.execute(httpHost, httpRequest, httpContext);

    if (FakeHttp.getFakeHttpLayer().isInterceptingResponseContent()) {
      interceptResponseContent(response);
    }

    FakeHttp.getFakeHttpLayer().addHttpResponse(response);
    return response;
  }
}
/**
 * Two pending fake responses must be delivered to two consecutive requests in
 * FIFO order.
 */
@Test
public void shouldHandleMultipleInvocations() throws Exception {
    FakeHttp.addPendingHttpResponse(200, "a happy response body");
    FakeHttp.addPendingHttpResponse(201, "another happy response body");

    HttpResponse response1 = requestDirector.execute(null, new HttpGet("http://example.com"), null);
    HttpResponse response2 = requestDirector.execute(null, new HttpGet("www.example.com"), null);

    assertThat(response1.getStatusLine().getStatusCode()).isEqualTo(200);
    assertThat(getStringContent(response1)).isEqualTo("a happy response body");

    assertThat(response2.getStatusLine().getStatusCode()).isEqualTo(201);
    assertThat(getStringContent(response2)).isEqualTo("another happy response body");
}
/**
 * Creates the next (processor) span for the given record. Trace-context
 * headers are extracted and cleared from the record's headers; tags are only
 * added to spans that start a new context (no upstream context extracted)
 * and are sampled.
 *
 * @param record record whose headers may carry an upstream trace context
 * @return the next span, possibly continuing an extracted context
 */
public Span nextSpan(ConsumerRecord<?, ?> record) {
    // Even though the type is ConsumerRecord, this is not a (remote) consumer span. Only "poll"
    // events create consumer spans. Since this is a processor span, we use the normal sampler.
    TraceContextOrSamplingFlags extracted =
        extractAndClearTraceIdHeaders(processorExtractor, record.headers(), record.headers());
    Span result = tracer.nextSpan(extracted);
    // A non-null extracted context means this span continues an existing trace;
    // record tags only on fresh, sampled spans.
    if (extracted.context() == null && !result.isNoop()) {
        addTags(record, result);
    }
    return result;
}
/**
 * When invoked inside an active scope, nextSpan must create a child of the
 * current trace context.
 */
@Test
void nextSpan_uses_current_context() {
    Span child;
    try (Scope scope = tracing.currentTraceContext().newScope(parent)) {
        child = kafkaTracing.nextSpan(consumerRecord);
    }
    child.finish();

    assertThat(spans.get(0).id()).isEqualTo(child.context().spanIdString());
    assertChildOf(spans.get(0), parent);
}
/**
 * Resolves the tenant (namespace) used for ANS: the configured user tenant
 * when present, otherwise the {@code ans.namespace} property.
 *
 * @return the resolved tenant; may be null or blank if neither source is set
 */
public static String getUserTenantForAns() {
    if (StringUtils.isBlank(USER_TENANT)) {
        return NacosClientProperties.PROTOTYPE.getProperty(SystemPropertyKeyConst.ANS_NAMESPACE);
    }
    return USER_TENANT;
}
/**
 * With the {@code ans.namespace} system property set and no user tenant
 * configured, the ANS tenant resolves to the property value.
 * NOTE(review): the system property is not restored after the test — it may
 * leak into other tests; confirm isolation.
 */
@Test
void testGetUserTenantForAns() {
    String expect = "test";
    System.setProperty("ans.namespace", expect);
    String actual = TenantUtil.getUserTenantForAns();
    assertEquals(expect, actual);
}
/**
 * Returns the enclosing scope of this cache scope, derived by trimming the id
 * back to the previous separator.
 *
 * @return the parent scope; {@link #GLOBAL} for first-level scopes, or
 *         {@code null} when there is no parent (tests assert
 *         {@code GLOBAL.parent() == null})
 */
@Nullable
public CacheScope parent() {
    // Last separator strictly within this scope's effective id prefix.
    int r = mId.lastIndexOf(SEPARATOR, mLength - 1);
    if (r < 0) {
        // No separator at all: a first-level scope whose parent is global.
        return GLOBAL;
    } else if (r == 0) {
        return null;
    }
    // Share the id string; only the effective length and level shrink.
    return new CacheScope(mId, r, mLevel.parent());
}
/**
 * Exercises parent() across the hierarchy: GLOBAL has no parent,
 * schema -> GLOBAL, table -> schema, partition -> table, and sibling scopes
 * share the same parent regardless of their own leaf segment.
 */
@Test
public void parent() {
    CacheScope schema = CacheScope.create("schema");
    CacheScope table = CacheScope.create("schema.table");
    CacheScope partition = CacheScope.create("schema.table.partition");
    assertNull(CacheScope.GLOBAL.parent());
    assertNull(schema.parent().parent());
    assertEquals(CacheScope.GLOBAL, schema.parent());
    assertEquals(schema, table.parent());
    assertEquals(table, partition.parent());
    // Siblings (and different-length leaves) must resolve to equal parents.
    assertEquals(CacheScope.create("schema.table.partition1").parent(), CacheScope.create("schema.table.partition2").parent());
    assertEquals(CacheScope.create("schema.table.part").parent(), CacheScope.create("schema.table.partition").parent());
    assertEquals(CacheScope.create("schema.table.part").parent().parent(), CacheScope.create("schema.table").parent());
}
/**
 * Coerces the left/right typed expressions so a comparison between their two
 * types is valid: same-class or unification expressions pass through
 * unchanged; incompatible types fail fast; otherwise the right (and in one
 * character-vs-string case the left) side is wrapped with the appropriate
 * cast or conversion.
 *
 * @return the (possibly coerced) pair, plus whether the right side must be
 *         emitted as a static field (date/time literals)
 * @throws CoercedExpressionException if the two types cannot be coerced
 */
public CoercedExpressionResult coerce() {

    final Class<?> leftClass = left.getRawClass();
    final Class<?> nonPrimitiveLeftClass = toNonPrimitiveType(leftClass);
    final Class<?> rightClass = right.getRawClass();
    final Class<?> nonPrimitiveRightClass = toNonPrimitiveType(rightClass);

    boolean sameClass = leftClass == rightClass;
    boolean isUnificationExpression = left instanceof UnificationTypedExpression || right instanceof UnificationTypedExpression;

    if (sameClass || isUnificationExpression) {
        // Nothing to do: identical types, or unification handles typing itself.
        return new CoercedExpressionResult(left, right);
    }

    if (!canCoerce()) {
        throw new CoercedExpressionException(new InvalidExpressionErrorResult("Comparison operation requires compatible types. Found " + leftClass + " and " + rightClass));
    }

    // int/long compared against double: widen the left side to double.
    if ((nonPrimitiveLeftClass == Integer.class || nonPrimitiveLeftClass == Long.class) && nonPrimitiveRightClass == Double.class) {
        CastExpr castExpression = new CastExpr(PrimitiveType.doubleType(), this.left.getExpression());
        return new CoercedExpressionResult(
                new TypedExpression(castExpression, double.class, left.getType()),
                right,
                false);
    }

    final boolean leftIsPrimitive = leftClass.isPrimitive() || Number.class.isAssignableFrom( leftClass );
    final boolean canCoerceLiteralNumberExpr = canCoerceLiteralNumberExpr(leftClass);

    boolean rightAsStaticField = false;
    final Expression rightExpression = right.getExpression();

    // Pick exactly one coercion strategy for the right-hand side.
    final TypedExpression coercedRight;
    if (leftIsPrimitive && canCoerceLiteralNumberExpr && rightExpression instanceof LiteralStringValueExpr) {
        // Numeric literal on the right: rewrite the literal to the left's type.
        final Expression coercedLiteralNumberExprToType = coerceLiteralNumberExprToType((LiteralStringValueExpr) right.getExpression(), leftClass);
        coercedRight = right.cloneWithNewExpression(coercedLiteralNumberExprToType);
        coercedRight.setType( leftClass );
    } else if (shouldCoerceBToString(left, right)) {
        coercedRight = coerceToString(right);
    } else if (isNotBinaryExpression(right) && canBeNarrowed(leftClass, rightClass) && right.isNumberLiteral()) {
        coercedRight = castToClass(leftClass);
    } else if (leftClass == long.class && rightClass == int.class) {
        coercedRight = right.cloneWithNewExpression(new CastExpr(PrimitiveType.longType(), right.getExpression()));
    } else if (leftClass == Date.class && rightClass == String.class) {
        // Date/time string literals become static fields holding the parsed value.
        coercedRight = coerceToDate(right);
        rightAsStaticField = true;
    } else if (leftClass == LocalDate.class && rightClass == String.class) {
        coercedRight = coerceToLocalDate(right);
        rightAsStaticField = true;
    } else if (leftClass == LocalDateTime.class && rightClass == String.class) {
        coercedRight = coerceToLocalDateTime(right);
        rightAsStaticField = true;
    } else if (shouldCoerceBToMap()) {
        coercedRight = castToClass(toNonPrimitiveType(leftClass));
    } else if (isBoolean(leftClass) && !isBoolean(rightClass)) {
        coercedRight = coerceBoolean(right);
    } else {
        coercedRight = right;
    }

    // Character on the left compared against a string-ish right: coerce left too.
    final TypedExpression coercedLeft;
    if (nonPrimitiveLeftClass == Character.class && shouldCoerceBToString(right, left)) {
        coercedLeft = coerceToString(left);
    } else {
        coercedLeft = left;
    }

    return new CoercedExpressionResult(coercedLeft, coercedRight, rightAsStaticField);
}
/**
 * Binary (boolean) expressions on both sides must pass through coercion
 * unchanged — no string or numeric coercion may be applied to them.
 */
@Test
public void avoidCoercingBinaryExpressions () {
    final TypedExpression left = expr(THIS_PLACEHOLDER + ".getAddress().getCity() == \"Brno\" && _this.getAddress().getStreet() == \"Technology Park\"", String.class);
    final TypedExpression right = expr(THIS_PLACEHOLDER + ".getAddress().getNumber() == 1", int.class);
    final CoercedExpression.CoercedExpressionResult coerce = new CoercedExpression(left, right, false).coerce();
    assertThat(coerce.getCoercedLeft()).isEqualTo(expr(THIS_PLACEHOLDER + ".getAddress().getCity() == \"Brno\" && _this.getAddress().getStreet() == \"Technology Park\"", String.class));
    assertThat(coerce.getCoercedRight()).isEqualTo(expr(THIS_PLACEHOLDER + ".getAddress().getNumber() == 1", int.class));
}
/**
 * Serializes the given parameter map as a {@code key=value} query string
 * joined by {@code &}, with keys in natural (sorted) order. Entries whose key
 * or value is empty are skipped; values are not URL-encoded.
 *
 * @param ps parameters to serialize; may be null or empty
 * @return the query string, or an empty string for null/empty input
 */
public static String toQueryString(Map<String, String> ps) {
    if (ps == null || ps.isEmpty()) {
        return "";
    }
    StringBuilder query = new StringBuilder();
    // TreeMap gives a deterministic, sorted key order.
    for (Map.Entry<String, String> entry : new TreeMap<String, String>(ps).entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        if (!isNoneEmpty(key, value)) {
            continue;
        }
        if (query.length() > 0) {
            query.append('&');
        }
        query.append(key).append('=').append(value);
    }
    return query.toString();
}
/**
 * The rendered query string must contain every entry of the input map in
 * {@code key=value} form.
 */
@Test
void testToQueryString() throws Exception {
    final Map<String, String> parameters = new HashMap<String, String>();
    parameters.put("key1", "value1");
    parameters.put("key2", "value2");

    final String queryString = StringUtils.toQueryString(parameters);

    assertThat(queryString, containsString("key1=value1"));
    assertThat(queryString, containsString("key2=value2"));
}
/**
 * Lists the directory using the default path delimiter, delegating to the
 * delimiter-aware overload.
 *
 * @param directory directory to list
 * @param listener  progress listener notified while listing
 * @return attributed listing of the directory's children
 * @throws BackgroundException on connection or protocol failure
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    return this.list(directory, listener, String.valueOf(Path.DELIMITER));
}
/**
 * Listing a non-existing folder must fail with {@link NotfoundException},
 * both when nothing shares the prefix and when a sibling object exists whose
 * key merely starts with the folder name.
 */
@Test
public void testListNotFoundFolder() throws Exception {
    final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.volume, Path.Type.directory));
    final String name = new AlphanumericRandomStringService().random();
    try {
        new S3ObjectListService(session, new S3AccessControlListFeature(session)).list(new Path(container, name, EnumSet.of(Path.Type.directory)), new DisabledListProgressListener());
        fail();
    }
    catch(NotfoundException e) {
        // Expected
    }
    // A sibling object whose key starts with the folder name must not make the
    // folder appear to exist.
    final Path file = new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(new Path(container, String.format("%s-", name), EnumSet.of(Path.Type.file)), new TransferStatus());
    try {
        new S3ObjectListService(session, new S3AccessControlListFeature(session)).list(new Path(container, name, EnumSet.of(Path.Type.directory)), new DisabledListProgressListener());
        fail();
    }
    catch(NotfoundException e) {
        // Expected
    }
    // Cleanup of the sibling object created above.
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Checks whether any workflow instance row exists for the given workflow id.
 *
 * @param workflowId workflow id to look up
 * @return true if at least one instance row exists
 */
public boolean existWorkflowIdInInstances(String workflowId) {
    return withMetricLogError(
        () ->
            withRetryableQuery(
                CHECK_WORKFLOW_ID_IN_INSTANCES_QUERY,
                stmt -> stmt.setString(1, workflowId),
                // ResultSet::next is true iff the query returned at least one row.
                ResultSet::next),
        "existWorkflowIdInInstances",
        "Failed to check the existence of the workflow instance for workflow id [{}]",
        workflowId);
}
/**
 * existWorkflowIdInInstances is true for a persisted instance's workflow id
 * and false for an unknown id.
 */
@Test
public void testExistWorkflowIdInInstances() {
    assertTrue(instanceDao.existWorkflowIdInInstances(wfi.getWorkflowId()));
    assertFalse(instanceDao.existWorkflowIdInInstances("not-existing"));
}
/**
 * Verifies that the statements in the execution context are allowed to run in
 * the current transaction, failing with
 * {@link TableModifyInTransactionException} when the prerequisites are not met.
 *
 * @param executionContext context holding the statements to be executed
 */
public void checkExecutePrerequisites(final ExecutionContext executionContext) {
    ShardingSpherePreconditions.checkState(isValidExecutePrerequisites(executionContext),
            () -> new TableModifyInTransactionException(getTableName(executionContext)));
}
/**
 * TRUNCATE inside a local PostgreSQL transaction is permitted, so the
 * prerequisite check must pass without throwing.
 */
@Test
void assertCheckExecutePrerequisitesWhenExecuteTruncateInPostgreSQLTransaction() {
    when(transactionRule.getDefaultType()).thenReturn(TransactionType.LOCAL);
    ExecutionContext executionContext = new ExecutionContext(
            new QueryContext(createPostgreSQLTruncateStatementContext(), "", Collections.emptyList(), new HintValueContext(), mockConnectionContext(), mock(ShardingSphereMetaData.class)),
            Collections.emptyList(), mock(RouteContext.class));
    new ProxySQLExecutor(JDBCDriverType.STATEMENT, databaseConnectionManager, mock(DatabaseConnector.class), mockQueryContext()).checkExecutePrerequisites(executionContext);
}
/**
 * Type information is not supported by this implementation.
 * NOTE(review): returning {@code null} (rather than throwing
 * SQLFeatureNotSupportedException) is the behavior the unit test relies on.
 *
 * @return always {@code null}
 */
@Override
public ResultSet getTypeInfo() {
    return null;
}
/**
 * getTypeInfo is unsupported and must return null.
 */
@Test
void assertGetTypeInfo() {
    assertNull(metaData.getTypeInfo());
}
/**
 * Unsubscribes from a service: the subscriber is first removed from the redo
 * (retry) service so it will not be re-registered on reconnect, then the
 * unsubscribe request is sent to the server.
 *
 * @param serviceName service to unsubscribe from
 * @param groupName   group of the service
 * @param clusters    cluster filter of the original subscription
 * @throws NacosException if the server request fails
 */
@Override
public void unsubscribe(String serviceName, String groupName, String clusters) throws NacosException {
    NAMING_LOGGER.info("[GRPC-UNSUBSCRIBE] service:{}, group:{}, cluster:{} ", serviceName, groupName, clusters);
    redoService.subscriberDeregister(serviceName, groupName, clusters);
    doUnsubscribe(serviceName, groupName, clusters);
}
// Verifies unsubscribe sends exactly one SubscribeServiceRequest with subscribe=false and the expected service/group/cluster/namespace fields.
@Test void testUnsubscribe() throws Exception { SubscribeServiceResponse res = new SubscribeServiceResponse(); ServiceInfo info = new ServiceInfo(GROUP_NAME + "@@" + SERVICE_NAME + "@@" + CLUSTERS); res.setServiceInfo(info); when(this.rpcClient.request(any())).thenReturn(res); client.unsubscribe(SERVICE_NAME, GROUP_NAME, CLUSTERS); verify(this.rpcClient, times(1)).request(argThat(request -> { if (request instanceof SubscribeServiceRequest) { SubscribeServiceRequest request1 = (SubscribeServiceRequest) request; // verify request fields return !request1.isSubscribe() && SERVICE_NAME.equals(request1.getServiceName()) && GROUP_NAME.equals( request1.getGroupName()) && CLUSTERS.equals(request1.getClusters()) && NAMESPACE_ID.equals( request1.getNamespace()); } return false; })); }
// Returns a SerializableCoder for VarianceAccumulator; the registry and input coder arguments are intentionally ignored.
@Override public Coder<VarianceAccumulator> getAccumulatorCoder( CoderRegistry registry, Coder<T> inputCoder) { return SerializableCoder.of(VarianceAccumulator.class); }
// Smoke test: the accumulator coder must be non-null for any input coder.
@Test public void testCreatesAccumulatorCoder() { assertNotNull(varianceFn.getAccumulatorCoder(CoderRegistry.createDefault(), VarIntCoder.of())); }
// Deregisters from YARN: maps the Flink status to a YARN FinalApplicationStatus, unregisters the AM (failures are logged, not rethrown), then deletes the application files. The tracking URL is the history server URL if configured, else empty.
@Override public void deregisterApplication( ApplicationStatus finalStatus, @Nullable String optionalDiagnostics) { // first, de-register from YARN final FinalApplicationStatus yarnStatus = getYarnStatus(finalStatus); log.info( "Unregister application from the YARN Resource Manager with final status {}.", yarnStatus); final Optional<URL> historyServerURL = HistoryServerUtils.getHistoryServerURL(flinkConfig); final String appTrackingUrl = historyServerURL.map(URL::toString).orElse(""); try { resourceManagerClient.unregisterApplicationMaster( yarnStatus, optionalDiagnostics, appTrackingUrl); } catch (YarnException | IOException e) { log.error("Could not unregister the application master.", e); } Utils.deleteApplicationFiles(configuration.getYarnFiles()); }
// Verifies that deregisterApplication removes the application files directory pointed to by FLINK_YARN_FILES.
@Test void testDeleteApplicationFiles() throws Exception { new Context() { { final File applicationDir = Files.createTempDirectory(tmpFolder, ".flink").toFile(); env.put(FLINK_YARN_FILES, applicationDir.getCanonicalPath()); runTest( () -> { getDriver().deregisterApplication(ApplicationStatus.SUCCEEDED, null); assertThat(applicationDir.toPath()).doesNotExist(); }); } }; }
/**
 * Whether new-issue classification is enabled for this analysis: always for pull
 * requests, when a period date is present, or when there is a period on a branch
 * using the reference-branch new code definition. Evaluation order (and its
 * short-circuiting) matches the original single boolean expression.
 */
public boolean isEnabled() {
    if (analysisMetadataHolder.isPullRequest()) {
        return true;
    }
    if (periodHolder.hasPeriodDate()) {
        return true;
    }
    return periodHolder.hasPeriod() && isOnBranchUsingReferenceBranch();
}
// With a NUMBER_OF_DAYS period set (which carries a period date), isEnabled() must be true.
@Test public void isEnabled_returns_true_when_periodDate_present() { periodHolder.setPeriod(new Period(NewCodePeriodType.NUMBER_OF_DAYS.name(), "10", 1000L)); assertThat(newIssueClassifier.isEnabled()).isTrue(); }
// Secured GET endpoint (roles:READ): pages through roles matching the given username/role patterns via a LIKE-style search; username and role default to "" (match all).
@GetMapping(params = "search=blur") @Secured(resource = AuthConstants.CONSOLE_RESOURCE_NAME_PREFIX + "roles", action = ActionTypes.READ) public Page<RoleInfo> fuzzySearchRole(@RequestParam int pageNo, @RequestParam int pageSize, @RequestParam(name = "username", defaultValue = "") String username, @RequestParam(name = "role", defaultValue = "") String role) { return roleService.findRolesLike4Page(username, role, pageNo, pageSize); }
// Stubs the role service and asserts the controller returns the service's page unchanged.
@Test void testFuzzySearchRole() { Page<RoleInfo> rolesTest = new Page<RoleInfo>(); when(roleService.findRolesLike4Page(anyString(), anyString(), anyInt(), anyInt())).thenReturn(rolesTest); Page<RoleInfo> roleInfoPage = roleController.fuzzySearchRole(1, 10, "nacos", "test"); assertEquals(rolesTest, roleInfoPage); }
// Thin delegate: returns all roles via the mapper's selectList().
@Override public List<RoleDO> getRoleList() { return roleMapper.selectList(); }
// Inserts two roles (enabled and disabled) and asserts both come back in insertion order — getRoleList does not filter by status.
@Test public void testGetRoleList() { // mock 数据 RoleDO dbRole01 = randomPojo(RoleDO.class, o -> o.setStatus(CommonStatusEnum.ENABLE.getStatus())); roleMapper.insert(dbRole01); RoleDO dbRole02 = randomPojo(RoleDO.class, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())); roleMapper.insert(dbRole02); // 调用 List<RoleDO> list = roleService.getRoleList(); // 断言 assertEquals(2, list.size()); assertPojoEquals(dbRole01, list.get(0)); assertPojoEquals(dbRole02, list.get(1)); }
// Maintainer constructor: passes repository/interval/metric to the superclass and creates the Autoscaler used to compute suggestions.
public ScalingSuggestionsMaintainer(NodeRepository nodeRepository, Duration interval, Metric metric) { super(nodeRepository, interval, metric); this.autoscaler = new Autoscaler(nodeRepository); }
// End-to-end maintainer scenario: deploys two apps, feeds load measurements, then asserts the primary and secondary scaling suggestions; afterwards checks that suggestions peak-hold on low load, expire after a week, and settle to the current allocation once it matches demand. (NOTE: string literals below are split across lines by extraction; kept verbatim.)
@Test public void testScalingSuggestionsMaintainer() { ProvisioningTester tester = new ProvisioningTester.Builder().zone(new Zone(Environment.prod, RegionName.from("us-east3"))) .flavorsConfig(flavorsConfig()) .build(); ApplicationId app1 = ProvisioningTester.applicationId("app1"); ApplicationId app2 = ProvisioningTester.applicationId("app2"); ClusterSpec cluster1 = ProvisioningTester.containerClusterSpec(); ClusterSpec cluster2 = ProvisioningTester.contentClusterSpec(); tester.makeReadyNodes(20, "flt", NodeType.host, 8); tester.activateTenantHosts(); tester.deploy(app1, cluster1, Capacity.from(new ClusterResources(5, 1, new NodeResources(4, 4, 10, 0.1)), new ClusterResources(5, 1, new NodeResources(4, 4, 10, 0.1)), IntRange.empty(), false, true, Optional.empty(), ClusterInfo.empty())); storeCompletion(app1, cluster1.id(), tester.nodeRepository()); tester.deploy(app2, cluster2, Capacity.from(new ClusterResources(5, 1, new NodeResources(4, 4, 10, 0.1)), new ClusterResources(10, 1, new NodeResources(6.5, 5, 15, 0.1)), IntRange.empty(), false, true, Optional.empty(), ClusterInfo.empty())); storeCompletion(app2, cluster2.id(), tester.nodeRepository()); tester.clock().advance(Duration.ofHours(13)); Duration timeAdded = addMeasurements(0.90f, 0.90f, 0.90f, 0, 500, app1, tester.nodeRepository()); tester.clock().advance(timeAdded.negated()); addMeasurements(0.99f, 0.99f, 0.99f, 0, 500, app2, tester.nodeRepository()); ScalingSuggestionsMaintainer maintainer = new ScalingSuggestionsMaintainer(tester.nodeRepository(), Duration.ofMinutes(1), new TestMetric()); maintainer.maintain(); assertEquals("8 nodes with [vcpu: 3.3, memory: 4.0 Gb, disk: 10.0 Gb, bandwidth: 0.1 Gbps, architecture: any]", suggestionOf(app1, cluster1, tester).resources().get().toString()); assertEquals("7 nodes with [vcpu: 4.4, memory: 5.3 Gb, disk: 16.5 Gb, bandwidth: 0.1 Gbps, architecture: any]", suggestionOf(app2, cluster2, tester).resources().get().toString()); // Secondary suggestions assertEquals("9 
nodes with [vcpu: 2.9, memory: 4.0 Gb, disk: 10.0 Gb, bandwidth: 0.1 Gbps, architecture: any]", suggestionsOf(app1, cluster1, tester).get(1).resources().get().toString()); assertEquals("8 nodes with [vcpu: 3.8, memory: 4.7 Gb, disk: 14.2 Gb, bandwidth: 0.1 Gbps, architecture: any]", suggestionsOf(app2, cluster2, tester).get(1).resources().get().toString()); // Utilization goes way down tester.clock().advance(Duration.ofHours(13)); addMeasurements(0.10f, 0.10f, 0.10f, 0, 500, app1, tester.nodeRepository()); maintainer.maintain(); assertEquals("Suggestion stays at the peak value observed", "8 nodes with [vcpu: 3.3, memory: 4.0 Gb, disk: 10.0 Gb, bandwidth: 0.1 Gbps, architecture: any]", suggestionOf(app1, cluster1, tester).resources().get().toString()); // Utilization is still way down and a week has passed tester.clock().advance(Duration.ofDays(7)); addMeasurements(0.10f, 0.10f, 0.10f, 0, 500, app1, tester.nodeRepository()); maintainer.maintain(); assertEquals("Peak suggestion has been outdated", "3 nodes with [vcpu: 1.3, memory: 4.0 Gb, disk: 10.0 Gb, bandwidth: 0.1 Gbps, architecture: any]", suggestionOf(app1, cluster1, tester).resources().get().toString()); assertTrue(shouldSuggest(app1, cluster1, tester)); tester.clock().advance(Duration.ofDays(3)); addMeasurements(0.7f, 0.7f, 0.7f, 0, 500, app1, tester.nodeRepository()); maintainer.maintain(); var suggested = tester.nodeRepository().applications().get(app1).get().cluster(cluster1.id()).get().suggestions().stream().findFirst().flatMap(Autoscaling::resources).get(); tester.deploy(app1, cluster1, Capacity.from(suggested, suggested, IntRange.empty(), false, true, Optional.empty(), ClusterInfo.empty())); tester.clock().advance(Duration.ofDays(2)); addMeasurements(0.2f, 0.65f, 0.6f, 0, 500, app1, tester.nodeRepository()); maintainer.maintain(); assertEquals("Suggestion is to keep the current allocation", suggested, suggestionOf(app1, cluster1, tester).resources().get()); assertFalse("Suggestion is not made as it 
matches what we have", shouldSuggest(app1, cluster1, tester)); }
// Formats an "alerts" notification into an EmailMessage (message id "alerts/<projectId>"); returns null for any other notification type so other templates can handle it.
@Override @CheckForNull public EmailMessage format(Notification notification) { if (!"alerts".equals(notification.getType())) { return null; } // Retrieve useful values String projectId = notification.getFieldValue("projectId"); String projectKey = notification.getFieldValue("projectKey"); String projectName = notification.getFieldValue("projectName"); String projectVersion = notification.getFieldValue("projectVersion"); String branchName = notification.getFieldValue("branch"); String alertName = notification.getFieldValue("alertName"); String alertText = notification.getFieldValue("alertText"); String alertLevel = notification.getFieldValue("alertLevel"); String ratingMetricsInOneString = notification.getFieldValue("ratingMetrics"); boolean isNewAlert = Boolean.parseBoolean(notification.getFieldValue("isNewAlert")); String fullProjectName = computeFullProjectName(projectName, branchName); // Generate text String subject = generateSubject(fullProjectName, alertLevel, isNewAlert); String messageBody = generateMessageBody(projectName, projectKey, projectVersion, branchName, alertName, alertText, isNewAlert, ratingMetricsInOneString); // And finally return the email that will be sent return new EmailMessage() .setMessageId("alerts/" + projectId) .setSubject(subject) .setPlainTextMessage(messageBody); }
// A non-"alerts" notification must produce a null EmailMessage.
@Test public void shouldNotFormatIfNotCorrectNotification() { Notification notification = new Notification("other-notif"); EmailMessage message = template.format(notification); assertThat(message, nullValue()); }
/**
 * True iff the (trimmed) item name matches any configured wildcard threshold
 * whose quantity condition also holds for the key's quantity. Null/empty names
 * are rejected up front. Stream anyMatch short-circuits on the first match,
 * exactly like the original for-loop with early return.
 */
@Override
public Boolean load(@Nonnull final NamedQuantity key) {
    final String rawName = key.getName();
    if (Strings.isNullOrEmpty(rawName)) {
        return false;
    }
    final String trimmedName = rawName.trim();
    return itemThresholds.stream()
            .anyMatch(threshold -> WildcardMatcher.matches(threshold.getItemName(), trimmedName)
                    && threshold.quantityHolds(key.getQuantity()));
}
// Regression guard against pathological (exponential) wildcard matching: a ~50k-character name must match/mismatch within the 1s timeout.
@Test(timeout = 1000) public void testExplosive() { String name = "archer" + Strings.repeat('e', 50000) + "s ring"; WildcardMatchLoader loader = new WildcardMatchLoader(Arrays.asList(name + "* < 100")); assertTrue(loader.load(new NamedQuantity(name, 50))); assertFalse(loader.load(new NamedQuantity(name, 150))); }
// Extracts the DeliveryReceipt extension from the message; presumably null when the message carries no such extension — confirm against Message.getExtension contract.
public static DeliveryReceipt from(Message message) { return message.getExtension(DeliveryReceipt.class); }
// Feeds a message carrying a DeliveryReceipt through a dummy connection and waits for the receipt-received listener to be invoked (or times out).
@Test public void receiptManagerListenerTest() throws Exception { DummyConnection c = new DummyConnection(); c.connect(); DeliveryReceiptManager drm = DeliveryReceiptManager.getInstanceFor(c); TestReceiptReceivedListener rrl = new TestReceiptReceivedListener(); drm.addReceiptReceivedListener(rrl); Message m = StanzaBuilder.buildMessage("reply-id") .from("julia@capulet.com") .to("romeo@montague.com") .ofType(Message.Type.normal) .addExtension(new DeliveryReceipt("original-test-id")) .build(); c.processStanza(m); rrl.waitUntilInvocationOrTimeout(); }
// Dispatch: when neither source nor target is compressed, either convert (magic mismatch) or validate/assign offsets in place; any compression on either side goes through the compressed path.
public ValidationResult validateMessagesAndAssignOffsets(PrimitiveRef.LongRef offsetCounter, MetricsRecorder metricsRecorder, BufferSupplier bufferSupplier) { if (sourceCompressionType == CompressionType.NONE && targetCompression.type() == CompressionType.NONE) { // check the magic value if (!records.hasMatchingMagic(toMagic)) return convertAndAssignOffsetsNonCompressed(offsetCounter, metricsRecorder); else // Do in-place validation, offset assignment and maybe set timestamp return assignOffsetsNonCompressed(offsetCounter, metricsRecorder); } else return validateMessagesAndAssignOffsetsCompressed(offsetCounter, metricsRecorder, bufferSupplier); }
// Down-converts uncompressed V1 records to V0 and checks offsets are assigned starting at the given base offset.
@Test public void testOffsetAssignmentAfterDownConversionV1ToV0NonCompressed() { long offset = 1234567; long now = System.currentTimeMillis(); MemoryRecords records = createRecords(RecordBatch.MAGIC_VALUE_V1, now, Compression.NONE); checkOffsets(records, 0); checkOffsets(new LogValidator( records, new TopicPartition("topic", 0), time, CompressionType.NONE, Compression.NONE, false, RecordBatch.MAGIC_VALUE_V0, TimestampType.CREATE_TIME, 5000L, 5000L, RecordBatch.NO_PARTITION_LEADER_EPOCH, AppendOrigin.CLIENT, MetadataVersion.latestTesting() ).validateMessagesAndAssignOffsets( PrimitiveRef.ofLong(offset), metricsRecorder, RequestLocal.withThreadConfinedCaching().bufferSupplier() ).validatedRecords, offset); }
// SQL chr(): converts a code point to a single-character varchar; the long is saturated to int range first, and invalid code points surface as PrestoException(INVALID_FUNCTION_ARGUMENT) with the original cause preserved.
@Description("convert Unicode code point to a string") @ScalarFunction @SqlType("varchar(1)") public static Slice chr(@SqlType(StandardTypes.BIGINT) long codepoint) { try { return SliceUtf8.codePointToUtf8(Ints.saturatedCast(codepoint)); } catch (InvalidCodePointException e) { throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Not a valid Unicode code point: " + codepoint, e); } }
// Exercises chr() across ASCII, BMP, supplementary plane, NUL, and invalid code points (negative, too large, beyond int range after saturation).
@Test public void testChr() { assertFunction("CHR(65)", createVarcharType(1), "A"); assertFunction("CHR(9731)", createVarcharType(1), "\u2603"); assertFunction("CHR(131210)", createVarcharType(1), new String(Character.toChars(131210))); assertFunction("CHR(0)", createVarcharType(1), "\0"); assertInvalidFunction("CHR(-1)", "Not a valid Unicode code point: -1"); assertInvalidFunction("CHR(1234567)", "Not a valid Unicode code point: 1234567"); assertInvalidFunction("CHR(8589934592)", "Not a valid Unicode code point: 8589934592"); }
Request createGetRequest(GHRequest ghRequest) { if (ghRequest.getCustomModel() != null) throw new IllegalArgumentException("Custom models cannot be used for GET requests. Use setPostRequest(true)"); boolean tmpInstructions = ghRequest.getHints().getBool(INSTRUCTIONS, instructions); boolean tmpCalcPoints = ghRequest.getHints().getBool(CALC_POINTS, calcPoints); String tmpOptimize = ghRequest.getHints().getString("optimize", optimize); if (tmpInstructions && !tmpCalcPoints) { throw new IllegalStateException("Cannot calculate instructions without points (only points without instructions). " + "Use calc_points=false and instructions=false to disable point and instruction calculation"); } boolean tmpElevation = ghRequest.getHints().getBool("elevation", elevation); String places = ""; for (GHPoint p : ghRequest.getPoints()) { places += "&point=" + round6(p.lat) + "," + round6(p.lon); } String type = ghRequest.getHints().getString("type", "json"); String url = routeServiceUrl + "?" + "profile=" + ghRequest.getProfile() + places + "&type=" + type + "&instructions=" + tmpInstructions + "&points_encoded=true" + "&points_encoded_multiplier=1000000" + "&calc_points=" + tmpCalcPoints + "&algorithm=" + ghRequest.getAlgorithm() + "&locale=" + ghRequest.getLocale().toString() + "&elevation=" + tmpElevation + "&optimize=" + tmpOptimize; for (String details : ghRequest.getPathDetails()) { url += "&" + Parameters.Details.PATH_DETAILS + "=" + encodeURL(details); } // append *all* point hints if at least one is not empty if (ghRequest.getPointHints().stream().anyMatch(h -> !h.isEmpty())) for (String hint : ghRequest.getPointHints()) url += "&" + Parameters.Routing.POINT_HINT + "=" + encodeURL(hint); // append *all* curbsides if at least one is not empty if (ghRequest.getCurbsides().stream().anyMatch(c -> !c.isEmpty())) for (String curbside : ghRequest.getCurbsides()) url += "&" + Parameters.Routing.CURBSIDE + "=" + encodeURL(curbside); // append *all* headings only if at least *one* is 
not NaN if (ghRequest.getHeadings().stream().anyMatch(h -> !Double.isNaN(h))) for (Double heading : ghRequest.getHeadings()) url += "&heading=" + heading; for (String snapPrevention : ghRequest.getSnapPreventions()) { url += "&" + Parameters.Routing.SNAP_PREVENTION + "=" + encodeURL(snapPrevention); } if (!key.isEmpty()) { url += "&key=" + encodeURL(key); } for (Map.Entry<String, Object> entry : ghRequest.getHints().toMap().entrySet()) { String urlKey = entry.getKey(); String urlValue = entry.getValue().toString(); // use lower case conversion for check only! if (ignoreSetForGet.contains(toLowerCase(urlKey))) { continue; } if (urlValue != null && !urlValue.isEmpty()) { url += "&" + encodeURL(urlKey) + "=" + encodeURL(urlValue); } } return new Request.Builder().url(url) .header(X_GH_CLIENT_VERSION, GH_VERSION_FROM_MAVEN) .build(); }
// Pins the exact GET URL for an empty request (profile=) and for an explicit profile (profile=my_car) — effectively freezes parameter order and defaults.
@Test public void profileIncludedAsGiven() { GraphHopperWeb hopper = new GraphHopperWeb("https://localhost:8000/route"); // no vehicle -> no vehicle assertEquals("https://localhost:8000/route?profile=&type=json&instructions=true&points_encoded=true&points_encoded_multiplier=1000000" + "&calc_points=true&algorithm=&locale=en_US&elevation=false&optimize=false", hopper.createGetRequest(new GHRequest()).url().toString()); // vehicle given -> vehicle used in url assertEquals("https://localhost:8000/route?profile=my_car&type=json&instructions=true&points_encoded=true&points_encoded_multiplier=1000000" + "&calc_points=true&algorithm=&locale=en_US&elevation=false&optimize=false", hopper.createGetRequest(new GHRequest().setProfile("my_car")).url().toString()); }
// Formats an epoch-millis timestamp using DATE_FORMAT.get() — the .get() suggests a ThreadLocal-held (non-thread-safe) formatter; confirm at the DATE_FORMAT declaration.
public static String formatTime(long millis) { return DATE_FORMAT.get().format(millis); }
// formatTime must agree with formatting the same instant directly through DATE_FORMAT.
@Test public void testFormatTime() { long time = Time.now(); assertEquals(Time.formatTime(time), DATE_FORMAT.get().format(time)); }
// Appends a "/" right after the domain root, but only when the URL has no path at all; query strings and fragments are preserved because only the root portion is replaced (first occurrence).
public URLNormalizer addDomainTrailingSlash() { String urlRoot = HttpURL.getRoot(url); String path = toURL().getPath(); if (StringUtils.isNotBlank(path)) { // there is a path so do nothing return this; } String urlRootAndPath = urlRoot + "/"; url = StringUtils.replaceOnce(url, urlRoot, urlRootAndPath); return this; }
// Covers all cases: bare domain gains "/", existing slash/path left untouched, and query/fragment forms get the slash inserted before "?"/"#".
@Test public void testAddDomainTrailingSlash() { s = "http://www.example.com"; t = "http://www.example.com/"; assertEquals(t, n(s).addDomainTrailingSlash().toString()); s = "http://www.example.com/"; t = "http://www.example.com/"; assertEquals(t, n(s).addDomainTrailingSlash().toString()); s = "http://www.example.com/blah"; t = "http://www.example.com/blah"; assertEquals(t, n(s).addDomainTrailingSlash().toString()); s = "http://www.example.com/blah/path"; t = "http://www.example.com/blah/path"; assertEquals(t, n(s).addDomainTrailingSlash().toString()); s = "http://www.example.com?param1=value1&param2=value2"; t = "http://www.example.com/?param1=value1&param2=value2"; assertEquals(t, n(s).addDomainTrailingSlash().toString()); s = "http://www.example.com/?param1=value1&param2=value2"; t = "http://www.example.com/?param1=value1&param2=value2"; assertEquals(t, n(s).addDomainTrailingSlash().toString()); s = "http://www.example.com#hash"; t = "http://www.example.com/#hash"; assertEquals(t, n(s).addDomainTrailingSlash().toString()); s = "http://www.example.com/#hash"; t = "http://www.example.com/#hash"; assertEquals(t, n(s).addDomainTrailingSlash().toString()); }
// Deletes the jar named in the path parameter, asynchronously on the handler executor: 400 (BAD_REQUEST) if the file is absent, 500 with cause preserved if the delete itself fails; RestHandlerExceptions are wrapped in CompletionException per CompletableFuture convention.
@Override protected CompletableFuture<EmptyResponseBody> handleRequest( @Nonnull final HandlerRequest<EmptyRequestBody> request, @Nonnull final RestfulGateway gateway) throws RestHandlerException { final String jarId = request.getPathParameter(JarIdPathParameter.class); return CompletableFuture.supplyAsync( () -> { final Path jarToDelete = jarDir.resolve(jarId); if (!Files.exists(jarToDelete)) { throw new CompletionException( new RestHandlerException( String.format( "File %s does not exist in %s.", jarId, jarDir), HttpResponseStatus.BAD_REQUEST)); } else { try { Files.delete(jarToDelete); return EmptyResponseBody.getInstance(); } catch (final IOException e) { throw new CompletionException( new RestHandlerException( String.format("Failed to delete jar %s.", jarToDelete), HttpResponseStatus.INTERNAL_SERVER_ERROR, e)); } } }, executor); }
// Makes the jar directory read-only so deletion fails, then asserts a RestHandlerException with INTERNAL_SERVER_ERROR and the "Failed to delete jar" message. (Skipped in containers running as root, where read-only does not block deletion.)
@Tag("org.apache.flink.testutils.junit.FailsInGHAContainerWithRootUser") @Test void testFailedDelete() throws Exception { makeJarDirReadOnly(); final HandlerRequest<EmptyRequestBody> request = createRequest(TEST_JAR_NAME); assertThatThrownBy(() -> jarDeleteHandler.handleRequest(request, restfulGateway).get()) .satisfies( e -> { final Throwable throwable = ExceptionUtils.stripCompletionException(e.getCause()); assertThat(throwable).isInstanceOf(RestHandlerException.class); final RestHandlerException restHandlerException = (RestHandlerException) throwable; assertThat(restHandlerException.getMessage()) .contains("Failed to delete jar"); assertThat(restHandlerException.getHttpResponseStatus()) .isEqualTo(HttpResponseStatus.INTERNAL_SERVER_ERROR); }); }
// Reacts to metadata changes: only when the cached list is null or its size differs, it clears CPU-allocation problems, and for a non-empty list raises a new CpuAllocationIrregularityProblem (also clearing the poll-interval problem and its stored notification), then caches the list.
// NOTE(review): with a null cache an EMPTY list still triggers problems.removeProblemsOfType — verify this is intended given the no-interaction expectation in the companion test.
@Override public void onChange(List<JobRunrMetadata> metadataList) { if (this.serversWithLongGCCyclesMetadataList == null || this.serversWithLongGCCyclesMetadataList.size() != metadataList.size()) { problems.removeProblemsOfType(CpuAllocationIrregularityProblem.PROBLEM_TYPE); if (!metadataList.isEmpty()) { problems.addProblem(new CpuAllocationIrregularityProblem(metadataList)); problems.removeProblemsOfType(PollIntervalInSecondsTimeBoxIsTooSmallProblem.PROBLEM_TYPE); storageProvider.deleteMetadata(PollIntervalInSecondsTimeBoxIsTooSmallNotification.class.getSimpleName()); } this.serversWithLongGCCyclesMetadataList = metadataList; } }
// Expects onChange(emptyList()) to leave the problems collaborator untouched — presumably the handler's cached list is already initialized by the fixture; verify against the handler's null-cache branch.
@Test void ifNoChangesOnCpuAllocationIrregularitiesThenNoProblemsCreated() { cpuAllocationIrregularityProblemHandler.onChange(emptyList()); verifyNoInteractions(problems); }
// Convenience overload: delegates to normalize(url, false) (second-argument semantics defined at the two-arg overload, not visible here).
public static String normalize(String url) { return normalize(url, false); }
// Normalization replaces backslashes with slashes, keeps duplicate slashes in the path, and prefixes "http://" when the scheme is missing.
@Test public void normalizeTest2() { String url = "http://www.hutool.cn//aaa/\\bbb?a=1&b=2"; String normalize = URLUtil.normalize(url); assertEquals("http://www.hutool.cn//aaa//bbb?a=1&b=2", normalize); url = "www.hutool.cn//aaa/bbb?a=1&b=2"; normalize = URLUtil.normalize(url); assertEquals("http://www.hutool.cn//aaa/bbb?a=1&b=2", normalize); }
// Builds a JibContainerBuilder from a base-image reference: "docker://" uses the local daemon, "tar://" a tarball path, anything else (optionally "registry://"-prefixed) a registry image with credential retrievers wired in; platforms are applied only when non-empty.
public static JibContainerBuilder create( String baseImageReference, Set<Platform> platforms, CommonCliOptions commonCliOptions, ConsoleLogger logger) throws InvalidImageReferenceException, FileNotFoundException { if (baseImageReference.startsWith(DOCKER_DAEMON_IMAGE_PREFIX)) { return Jib.from( DockerDaemonImage.named(baseImageReference.replaceFirst(DOCKER_DAEMON_IMAGE_PREFIX, ""))); } if (baseImageReference.startsWith(TAR_IMAGE_PREFIX)) { return Jib.from( TarImage.at(Paths.get(baseImageReference.replaceFirst(TAR_IMAGE_PREFIX, "")))); } ImageReference imageReference = ImageReference.parse(baseImageReference.replaceFirst(REGISTRY_IMAGE_PREFIX, "")); RegistryImage registryImage = RegistryImage.named(imageReference); DefaultCredentialRetrievers defaultCredentialRetrievers = DefaultCredentialRetrievers.init( CredentialRetrieverFactory.forImage( imageReference, logEvent -> logger.log(logEvent.getLevel(), logEvent.getMessage()))); Credentials.getFromCredentialRetrievers(commonCliOptions, defaultCredentialRetrievers) .forEach(registryImage::addCredentialRetriever); JibContainerBuilder containerBuilder = Jib.from(registryImage); if (!platforms.isEmpty()) { containerBuilder.setPlatforms(platforms); } return containerBuilder; }
// Verifies the supplied platform set is carried into the resulting container build plan.
@Test public void testCreate_platforms() throws IOException, InvalidImageReferenceException { JibContainerBuilder containerBuilder = ContainerBuilders.create( "registry://registry-image-ref", ImmutableSet.of(new Platform("arch1", "os1"), new Platform("arch2", "os2")), mockCommonCliOptions, mockLogger); assertThat(containerBuilder.toContainerBuildPlan().getPlatforms()) .isEqualTo(ImmutableSet.of(new Platform("arch1", "os1"), new Platform("arch2", "os2"))); }
// Default schema is the constant "public" for this dialect.
@Override public Optional<String> getDefaultSchema() { return Optional.of("public"); }
// Confirms the default schema is Optional.of("public").
@Test void assertGetDefaultSchema() { assertThat(dialectDatabaseMetaData.getDefaultSchema(), is(Optional.of("public"))); }
/**
 * Deletes a template identified by namespace and id within the resolved tenant.
 * Responds 204 (NO_CONTENT) on success, 404 (NOT_FOUND) when no such template
 * exists. Behavior is identical to the original isPresent() branch; only the
 * guard is inverted to return early on the missing case.
 */
@ExecuteOn(TaskExecutors.IO) @Delete(uri = "{namespace}/{id}") @Operation(tags = {"Templates"}, summary = "Delete a template") @ApiResponse(responseCode = "204", description = "On success")
public HttpResponse<Void> delete(
    @Parameter(description = "The template namespace") @PathVariable String namespace,
    @Parameter(description = "The template id") @PathVariable String id
) {
    final Optional<Template> maybeTemplate = templateRepository.findById(tenantService.resolveTenant(), namespace, id);
    if (!maybeTemplate.isPresent()) {
        return HttpResponse.status(HttpStatus.NOT_FOUND);
    }
    templateRepository.delete(maybeTemplate.get());
    return HttpResponse.status(HttpStatus.NO_CONTENT);
}
// Bulk-deletes three templates via the by-ids endpoint, asserts count=3, then verifies each individual GET now returns 404.
@Test void deleteTemplatesByIds() { postTemplate("template-a", "kestra.test.delete"); postTemplate("template-b", "kestra.test.delete"); postTemplate("template-c", "kestra.test.delete"); List<IdWithNamespace> ids = List.of( new IdWithNamespace("kestra.test.delete", "template-a"), new IdWithNamespace("kestra.test.delete", "template-b"), new IdWithNamespace("kestra.test.delete", "template-c") ); HttpResponse<BulkResponse> response = client .toBlocking() .exchange(DELETE("/api/v1/templates/delete/by-ids", ids), BulkResponse.class); assertThat(response.getBody().get().getCount(), is(3)); HttpClientResponseException templateA = assertThrows(HttpClientResponseException.class, () -> { client.toBlocking().retrieve(HttpRequest.GET("/api/v1/templates/kestra.test.delete/template-a")); }); HttpClientResponseException templateB = assertThrows(HttpClientResponseException.class, () -> { client.toBlocking().retrieve(HttpRequest.GET("/api/v1/templates/kestra.test.delete/template-b")); }); HttpClientResponseException templateC = assertThrows(HttpClientResponseException.class, () -> { client.toBlocking().retrieve(HttpRequest.GET("/api/v1/templates/kestra.test.delete/template-c")); }); assertThat(templateA.getStatus(), is(HttpStatus.NOT_FOUND)); assertThat(templateB.getStatus(), is(HttpStatus.NOT_FOUND)); assertThat(templateC.getStatus(), is(HttpStatus.NOT_FOUND)); }
// Writes a 4-byte int at the current position in the configured endianness (isBigEndian), growing the buffer if needed, then advances pos.
@Override public void writeInt(final int v) throws IOException { ensureAvailable(INT_SIZE_IN_BYTES); Bits.writeInt(buffer, pos, v, isBigEndian); pos += INT_SIZE_IN_BYTES; }
// Writes via the explicit-byte-order overload with LITTLE_ENDIAN and reads the raw buffer back as little-endian to confirm the layout.
@Test public void testWriteIntForVByteOrder() throws Exception { int expected = 100; out.writeInt(expected, LITTLE_ENDIAN); int actual = Bits.readIntL(out.buffer, 0); assertEquals(expected, actual); }
// Decodes a SPDY header block, accumulating partial input across calls: with no pending data it decodes directly and buffers any leftover bytes in `cumulation`; with pending data it appends first, decodes from the combined buffer, then either compacts the remainder (discardReadBytes) or releases the buffer when fully consumed. Statement order is essential to the cumulation invariant.
@Override void decode(ByteBufAllocator alloc, ByteBuf headerBlock, SpdyHeadersFrame frame) throws Exception { ObjectUtil.checkNotNull(headerBlock, "headerBlock"); ObjectUtil.checkNotNull(frame, "frame"); if (cumulation == null) { decodeHeaderBlock(headerBlock, frame); if (headerBlock.isReadable()) { cumulation = alloc.buffer(headerBlock.readableBytes()); cumulation.writeBytes(headerBlock); } } else { cumulation.writeBytes(headerBlock); decodeHeaderBlock(cumulation, frame); if (cumulation.isReadable()) { cumulation.discardReadBytes(); } else { releaseBuffer(); } } }
// A header block advertising a negative name length must be fully consumed, mark the frame invalid, and yield no headers.
@Test public void testNegativeNameLength() throws Exception { ByteBuf headerBlock = Unpooled.buffer(8); headerBlock.writeInt(1); headerBlock.writeInt(-1); decoder.decode(ByteBufAllocator.DEFAULT, headerBlock, frame); assertFalse(headerBlock.isReadable()); assertTrue(frame.isInvalid()); assertEquals(0, frame.headers().names().size()); headerBlock.release(); }
// Returns the available one-time prekey counts (EC and PQ, fetched concurrently and combined) for the authenticated device, for the identity type given by the "identity" query param (default "aci").
@GET @Produces(MediaType.APPLICATION_JSON) @Operation(summary = "Get prekey count", description = "Gets the number of one-time prekeys uploaded for this device and still available") @ApiResponse(responseCode = "200", description = "Body contains the number of available one-time prekeys for the device.", useReturnTypeSchema = true) @ApiResponse(responseCode = "401", description = "Account authentication check failed.") public CompletableFuture<PreKeyCount> getStatus(@ReadOnly @Auth final AuthenticatedDevice auth, @QueryParam("identity") @DefaultValue("aci") final IdentityType identityType) { final CompletableFuture<Integer> ecCountFuture = keysManager.getEcCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId()); final CompletableFuture<Integer> pqCountFuture = keysManager.getPqCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId()); return ecCountFuture.thenCombine(pqCountFuture, PreKeyCount::new); }
// PUTs one-time and signed EC prekeys against the PNI identity and verifies 204 plus that both key stores were called with the PNI and exact key values.
@Test void putKeysByPhoneNumberIdentifierTestV2() { final ECPreKey preKey = KeysHelper.ecPreKey(31337); final ECSignedPreKey signedPreKey = KeysHelper.signedECPreKey(31338, AuthHelper.VALID_PNI_IDENTITY_KEY_PAIR); final SetKeysRequest setKeysRequest = new SetKeysRequest(List.of(preKey), signedPreKey, null, null); Response response = resources.getJerseyTest() .target("/v2/keys") .queryParam("identity", "pni") .request() .header("Authorization", AuthHelper.getAuthHeader(AuthHelper.VALID_UUID, AuthHelper.VALID_PASSWORD)) .put(Entity.entity(setKeysRequest, MediaType.APPLICATION_JSON_TYPE)); assertThat(response.getStatus()).isEqualTo(204); ArgumentCaptor<List<ECPreKey>> listCaptor = ArgumentCaptor.forClass(List.class); verify(KEYS).storeEcOneTimePreKeys(eq(AuthHelper.VALID_PNI), eq(SAMPLE_DEVICE_ID), listCaptor.capture()); assertThat(listCaptor.getValue()).containsExactly(preKey); verify(KEYS).storeEcSignedPreKeys(AuthHelper.VALID_PNI, AuthHelper.VALID_DEVICE.getId(), signedPreKey); }
// Simple accessor for the `found` flag.
public boolean isFound() { return found; }
// At a fork with one slight turn and one sharp turn, following the slight turn must produce only start and finish instructions (no turn instruction).
@Test public void testIgnoreInstructionsForSlightTurnWithOtherTurn() { // Test for a fork with one slight turn and one actual turn. We are going along the slight turn. No turn instruction needed in this case Weighting weighting = new SpeedWeighting(mixedCarSpeedEnc); Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED) .calcPath(16, 19); assertTrue(p.isFound()); InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr); // Contain start, and finish instruction assertEquals(2, wayList.size()); }
/**
 * Milliseconds from {@code start} (inclusive) to {@code end} (exclusive);
 * negative when {@code end} precedes {@code start}.
 * {@code ChronoUnit.MILLIS.between(start, end)} delegates to
 * {@code start.until(end, ChronoUnit.MILLIS)}, so this is exactly
 * equivalent to the original temporal-first form.
 */
public static long acquireMillisBetween(final LocalDateTime start, final LocalDateTime end) {
    return ChronoUnit.MILLIS.between(start, end);
}
// One minute between start and end must yield 60,000 ms.
@Test public void testAcquireMillisBetween() { LocalDateTime start = LocalDateTime.now(); LocalDateTime end = start.plusMinutes(1); assertEquals(60 * 1000, DateUtils.acquireMillisBetween(start, end)); }
// Reads metrics from the given sequence: throws immediately if collection is disabled; otherwise registers the future in pendingReads (self-removing on completion, via CALLER_RUNS) and attempts to complete it right away if data is already available.
public CompletableFuture<RingbufferSlice<Map.Entry<Long, byte[]>>> readMetrics(long startSequence) { if (!config.isEnabled()) { throw new IllegalArgumentException("Metrics collection is not enabled"); } CompletableFuture<RingbufferSlice<Map.Entry<Long, byte[]>>> future = new CompletableFuture<>(); future.whenCompleteAsync(withTryCatch(logger, (s, e) -> pendingReads.remove(future)), CALLER_RUNS); pendingReads.put(future, startSequence); tryCompleteRead(future, startSequence); return future; }
// Reading from a sequence beyond the ringbuffer head must fail with SequenceOutOfBoundsException (wrapped in ExecutionException) mentioning both sequences.
@Test public void testReadMetricsThrowsOnFutureSequence() { MetricsService metricsService = prepareMetricsService(); MetricConsumer metricConsumerMock = mock(MetricConsumer.class); long futureSequence = 42; long headSequence = 0; assertThatExceptionOfType(ExecutionException.class) .isThrownBy(() -> readMetrics(metricsService, futureSequence, metricConsumerMock)) .withCauseExactlyInstanceOf(ConcurrentArrayRingbuffer.SequenceOutOfBoundsException.class) .withMessageContainingAll(Long.toString(futureSequence), Long.toString(headSequence)); }
// Determines whether the node health script should run: requires a non-blank path pointing to an existing, executable file; each failing condition is logged and returns false.
static boolean shouldRun(String script, String healthScript) { if (healthScript == null || healthScript.trim().isEmpty()) { LOG.info("Missing location for the node health check script \"{}\".", script); return false; } File f = new File(healthScript); if (!f.exists()) { LOG.warn("File {} for script \"{}\" does not exist.", healthScript, script); return false; } if (!FileUtil.canExecute(f)) { LOG.warn("File {} for script \"{}\" can not be executed.", healthScript, script); return false; } return true; }
// Verifies shouldRun(): false while the script file does not exist, false when it
// exists but is not executable, true once it is executable.
// Fix: the first two assertFalse failure messages previously read
// "Node health script should start", which is the opposite of what those
// assertions check; they now describe the expected negative outcome.
@Test public void testNodeHealthScriptShouldRun() throws IOException { assertFalse("Node health script should not run when the script file does not exist", NodeHealthScriptRunner.shouldRun("script", nodeHealthscriptFile.getAbsolutePath())); writeNodeHealthScriptFile("", false); // Node health script should not start if the node health script is not // executable. assertFalse("Node health script should not run when the script file is not executable", NodeHealthScriptRunner.shouldRun("script", nodeHealthscriptFile.getAbsolutePath())); writeNodeHealthScriptFile("", true); assertTrue("Node health script should start", NodeHealthScriptRunner.shouldRun("script", nodeHealthscriptFile.getAbsolutePath())); }
// Bean factory: creates the ZooKeeper data-changed initializer, only when no ZookeeperDataChangedInit bean is already present.
@Bean @ConditionalOnMissingBean(ZookeeperDataChangedInit.class) public DataChangedInit zookeeperDataChangedInit(final ZookeeperClient zkClient) { return new ZookeeperDataChangedInit(zkClient); }
// The configuration must produce a non-null DataChangedInit from a zookeeper client.
@Test public void testZookeeperDataInit() { ZookeeperSyncConfiguration zookeeperListener = new ZookeeperSyncConfiguration(); assertNotNull(zookeeperListener.zookeeperDataChangedInit(zkClient)); }
/**
 * Returns the process-wide application key.
 *
 * @return the current app key value
 */
public static String getAppKey() {
    final String currentKey = appKey;
    return currentKey;
}
// getAppKey returns the default until overwritten, then reflects setAppKey.
@Test void testGetAppKey() { String defaultVal = ParamUtil.getAppKey(); assertEquals(defaultAppKey, defaultVal); String expect = "test"; ParamUtil.setAppKey(expect); assertEquals(expect, ParamUtil.getAppKey()); }
/**
 * Creates a {@link NewIssuesNotification} whose details are resolved against the
 * provided assignee map. The map is validated (via {@code verifyAssigneesByUuid})
 * before being wrapped.
 *
 * @param assigneesByUuid assignee users keyed by uuid
 * @return a freshly built notification
 */
public NewIssuesNotification newNewIssuesNotification(Map<String, UserDto> assigneesByUuid) {
    verifyAssigneesByUuid(assigneesByUuid);
    DetailsSupplierImpl detailsSupplier = new DetailsSupplierImpl(assigneesByUuid);
    return new NewIssuesNotification(detailsSupplier);
}
// The notification's DetailsSupplier must fail fast on a null uuid with an NPE
// whose message names the argument.
@Test public void newNewIssuesNotification_DetailsSupplier_getUserNameByUuid_fails_with_NPE_if_uuid_is_null() { NewIssuesNotification underTest = this.underTest.newNewIssuesNotification(emptyMap()); DetailsSupplier detailsSupplier = readDetailsSupplier(underTest); assertThatThrownBy(() -> detailsSupplier.getUserNameByUuid(null)) .isInstanceOf(NullPointerException.class) .hasMessage("uuid can't be null"); }
@Override public final long getEstimatedSizeBytes(PipelineOptions options) throws IOException { // This implementation of method getEstimatedSizeBytes is provided to simplify subclasses. Here // we perform the size estimation of files and file patterns using the interface provided by // FileSystem. String fileOrPattern = fileOrPatternSpec.get(); if (mode == Mode.FILEPATTERN) { Long maybeNumBytes = filesSizeBytes.get(); if (maybeNumBytes != null) { return maybeNumBytes; } long totalSize = 0; List<Metadata> allMatches = FileSystems.match(fileOrPattern, emptyMatchTreatment).metadata(); for (Metadata metadata : allMatches) { totalSize += metadata.sizeBytes(); } LOG.info( "Filepattern {} matched {} files with total size {}", fileOrPattern, allMatches.size(), totalSize); filesSizeBytes.compareAndSet(null, totalSize); return totalSize; } else { long start = getStartOffset(); long end = Math.min(getEndOffset(), getMaxEndOffset(options)); return end - start; } }
// Estimated size of a "file*" pattern source must equal the summed lengths of
// exactly the matched files (file1..3); otherfile/anotherfile are excluded.
@Test public void testEstimatedSizeOfFilePattern() throws Exception { List<String> data1 = createStringDataset(3, 20); File file1 = createFileWithData("file1", data1); List<String> data2 = createStringDataset(3, 40); File file2 = createFileWithData("file2", data2); List<String> data3 = createStringDataset(3, 30); File file3 = createFileWithData("file3", data3); List<String> data4 = createStringDataset(3, 45); createFileWithData("otherfile", data4); List<String> data5 = createStringDataset(3, 53); createFileWithData("anotherfile", data5); TestFileBasedSource source = new TestFileBasedSource(new File(file1.getParent(), "file*").getPath(), 64, null); // Estimated size of the file pattern based source should be the total size of files that the // corresponding pattern is expanded into. assertEquals( file1.length() + file2.length() + file3.length(), source.getEstimatedSizeBytes(null)); }
// Builds a Statement from the parse tree, first resolving the data sources the
// tree references and passing them along to build().
public Statement buildStatement(final ParserRuleContext parseTree) { return build(Optional.of(getSources(parseTree)), parseTree); }
// TIMESTAMPADD's first argument (MINUTES) must be parsed as an IntervalUnit
// literal, not as a column reference.
@Test public void shouldBuildIntervalUnit() { // Given: final SingleStatementContext stmt = givenQuery("SELECT TIMESTAMPADD(MINUTES, 5, Col4) FROM TEST1;"); // When: final Query result = (Query) builder.buildStatement(stmt); // Then: assertThat(result.getSelect(), is(new Select(ImmutableList.of( new SingleColumn( new FunctionCall( FunctionName.of("TIMESTAMPADD"), ImmutableList.of( new IntervalUnit(TimeUnit.MINUTES), new IntegerLiteral(5), column("COL4") ) ), Optional.empty()) )))); }
// Re-declared abstract: every concrete subclass is forced to supply its own
// toString rather than inheriting Object's default.
@Override public abstract String toString();
// Exercises the concrete toString() override on the LENGTHS instance.
@Test public void testTransforming_actual_toString() { assertThat(LENGTHS.toString()).isEqualTo("has a length of"); }
/**
 * Publishes a batch of aggregate config datums: each entry in {@code datumMap} is
 * staged as an upsert in the thread-bound embedded-storage sql context, then the
 * whole context is flushed as a single batch update.
 *
 * @param dataId   config data id
 * @param group    config group
 * @param tenant   tenant namespace
 * @param datumMap datum id -> content to publish
 * @param appName  owning application name
 * @return {@code true} only when the batch update reports success; a {@code null}
 *         update result is treated as failure
 */
@Override
public boolean batchPublishAggr(final String dataId, final String group, final String tenant, final Map<String, String> datumMap, final String appName) {
    try {
        // Stage one aggregate upsert per datum into the shared sql context.
        for (Map.Entry<String, String> entry : datumMap.entrySet()) {
            addAggrConfigInfo(dataId, group, tenant, entry.getKey(), appName, entry.getValue());
        }
        // Boolean.TRUE.equals(...) maps a null result to false without the dead
        // "isPublishOk = false" initialization and manual null branch the boxed
        // flag previously required (and without any unboxing NPE risk).
        return Boolean.TRUE.equals(databaseOperate.update(EmbeddedStorageContextHolder.getCurrentSqlContext()));
    } finally {
        // Always clear the thread-bound sql context, on success or failure.
        EmbeddedStorageContextHolder.cleanAllContext();
    }
}
// With the batch update stubbed to true, batchPublishAggr over three datums
// (d1..d3 whose stored contents already match) must report success.
@Test void testBatchPublishAggrSuccess() { String dataId = "dataId111"; String group = "group"; String tenant = "tenant"; //mock query datumId and equal with current content param. Mockito.when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, "d1"}), eq(String.class))) .thenReturn("c1"); Mockito.when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, "d2"}), eq(String.class))) .thenReturn("c2"); Mockito.when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, "d3"}), eq(String.class))) .thenReturn("c3"); Mockito.when(databaseOperate.update(any(List.class))).thenReturn(true); Map<String, String> datumMap = new HashMap<>(); datumMap.put("d1", "c1"); datumMap.put("d2", "c2"); datumMap.put("d3", "c3"); String appName = "appname1234"; boolean result = embededConfigInfoAggrPersistService.batchPublishAggr(dataId, group, tenant, datumMap, appName); assertTrue(result); }
/**
 * Runs the statistic on the visible graph view, choosing the undirected or
 * directed view according to the {@code useUndirected} flag.
 */
@Override
public void execute(GraphModel graphModel) {
    final Graph graph = useUndirected
            ? graphModel.getUndirectedGraphVisible()
            : graphModel.getDirectedGraphVisible();
    execute(graph);
}
// Smoke test: Hits.execute must not throw when a HUB column already exists on the
// node table with a different (String) type.
@Test public void testColumnReplace() { GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(1); graphModel.getNodeTable().addColumn(Hits.HUB, String.class); Hits h = new Hits(); h.execute(graphModel); }
/**
 * Looks up a rule by key, lazily initializing the cache first.
 *
 * @throws IllegalArgumentException if the key is invalid or no rule exists for it
 */
@Override
public Rule getByKey(RuleKey key) {
    verifyKeyArgument(key);
    ensureInitialized();
    Rule rule = rulesByKey.get(key);
    if (rule == null) {
        // Same message the previous checkArgument produced for %s substitution.
        throw new IllegalArgumentException(
            "Can not find rule for key " + key + ". This rule does not exist in DB");
    }
    return rule;
}
// A deprecated key that maps to no existing rule must be rejected with IAE.
@Test public void getByKey_throws_IAE_if_argument_is_deprecated_key_in_DB_of_non_existing_rule() { expectIAERuleNotFound(() -> underTest.getByKey(DEPRECATED_KEY_OF_NON_EXITING_RULE), DEPRECATED_KEY_OF_NON_EXITING_RULE); }
// Wraps the original list state with latency-tracking metrics configured from the
// supplied config (metric group, sample interval, history size, and whether the
// state name becomes a metric variable). Pure delegation to the superclass.
LatencyTrackingListState( String stateName, InternalListState<K, N, T> original, LatencyTrackingStateConfig latencyTrackingStateConfig) { super( original, new ListStateLatencyMetrics( stateName, latencyTrackingStateConfig.getMetricGroup(), latencyTrackingStateConfig.getSampleInterval(), latencyTrackingStateConfig.getHistorySize(), latencyTrackingStateConfig.isStateNameAsVariable())); }
// Each tracked list-state operation (add/addAll/update/get/mergeNamespaces) must
// increment its own counter, and the counters wrap back to 0 at SAMPLE_INTERVAL.
@Test @SuppressWarnings({"unchecked", "rawtypes"}) void testLatencyTrackingListState() throws Exception { AbstractKeyedStateBackend<Integer> keyedBackend = createKeyedBackend(getKeySerializer()); try { LatencyTrackingListState<Integer, VoidNamespace, Long> latencyTrackingState = (LatencyTrackingListState) createLatencyTrackingState(keyedBackend, getStateDescriptor()); latencyTrackingState.setCurrentNamespace(VoidNamespace.INSTANCE); LatencyTrackingListState.ListStateLatencyMetrics latencyTrackingStateMetric = latencyTrackingState.getLatencyTrackingStateMetric(); assertThat(latencyTrackingStateMetric.getAddCount()).isZero(); assertThat(latencyTrackingStateMetric.getAddAllCount()).isZero(); assertThat(latencyTrackingStateMetric.getGetCount()).isZero(); assertThat(latencyTrackingStateMetric.getUpdateCount()).isZero(); assertThat(latencyTrackingStateMetric.getMergeNamespaceCount()).isZero(); setCurrentKey(keyedBackend); for (int index = 1; index <= SAMPLE_INTERVAL; index++) { int expectedResult = index == SAMPLE_INTERVAL ? 0 : index; latencyTrackingState.add(ThreadLocalRandom.current().nextLong()); assertThat(latencyTrackingStateMetric.getAddCount()).isEqualTo(expectedResult); latencyTrackingState.addAll( Collections.singletonList(ThreadLocalRandom.current().nextLong())); assertThat(latencyTrackingStateMetric.getAddAllCount()).isEqualTo(expectedResult); latencyTrackingState.update( Collections.singletonList(ThreadLocalRandom.current().nextLong())); assertThat(latencyTrackingStateMetric.getUpdateCount()).isEqualTo(expectedResult); latencyTrackingState.get(); assertThat(latencyTrackingStateMetric.getGetCount()).isEqualTo(expectedResult); latencyTrackingState.mergeNamespaces( VoidNamespace.INSTANCE, Collections.emptyList()); assertThat(latencyTrackingStateMetric.getMergeNamespaceCount()) .isEqualTo(expectedResult); } } finally { if (keyedBackend != null) { keyedBackend.close(); keyedBackend.dispose(); } } }
@Override public void collect(MetricsEmitter metricsEmitter) { for (Map.Entry<MetricKey, KafkaMetric> entry : ledger.getMetrics()) { MetricKey metricKey = entry.getKey(); KafkaMetric metric = entry.getValue(); try { collectMetric(metricsEmitter, metricKey, metric); } catch (Exception e) { // catch and log to continue processing remaining metrics log.error("Error processing Kafka metric {}", metricKey, e); } } }
// In delta-temporality mode a windowed counter must emit a monotonic sum of 2 with
// start/end timestamps taken from the mocked clock (1s start, 61s end).
@Test public void testMeasurableCounterDeltaMetrics() { Sensor sensor = metrics.sensor("test"); sensor.add(metricName, new WindowedCount()); sensor.record(); sensor.record(); time.sleep(60 * 1000L); // Collect delta metrics. testEmitter.onlyDeltaMetrics(true); collector.collect(testEmitter); List<SinglePointMetric> result = testEmitter.emittedMetrics(); // Should get exactly 2 Kafka measurables since Metrics always includes a count measurable. assertEquals(2, result.size()); Metric counter = result.stream() .flatMap(metrics -> Stream.of(metrics.builder().build())) .filter(metric -> metric.getName().equals("test.domain.group1.name1")).findFirst().get(); assertTrue(counter.hasSum()); assertEquals(tags, getTags(counter.getSum().getDataPoints(0).getAttributesList())); assertEquals(AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA, counter.getSum().getAggregationTemporality()); assertTrue(counter.getSum().getIsMonotonic()); NumberDataPoint point = counter.getSum().getDataPoints(0); assertEquals(2d, point.getAsDouble(), 0.0); assertEquals(TimeUnit.SECONDS.toNanos(Instant.ofEpochSecond(61L).getEpochSecond()) + Instant.ofEpochSecond(61L).getNano(), point.getTimeUnixNano()); assertEquals(TimeUnit.SECONDS.toNanos(Instant.ofEpochSecond(1L).getEpochSecond()) + Instant.ofEpochSecond(1L).getNano(), point.getStartTimeUnixNano()); }
// Reads the record delimiters and tuning knobs from the job configuration under the
// CONF_NS prefix: required "begin"/"end" marks, optional "maxrec" (default 50k),
// "lookahead" (default 2*maxrec) and "slowmatch". Regex patterns for the marks are
// compiled only in slowmatch mode, then init() completes setup.
public StreamXmlRecordReader(FSDataInputStream in, FileSplit split, TaskAttemptContext context, Configuration conf, FileSystem fs) throws IOException { super(in, split, context, conf, fs); beginMark_ = checkJobGet(CONF_NS + "begin"); endMark_ = checkJobGet(CONF_NS + "end"); maxRecSize_ = conf_.getInt(CONF_NS + "maxrec", 50 * 1000); lookAhead_ = conf_.getInt(CONF_NS + "lookahead", 2 * maxRecSize_); synched_ = false; slowMatch_ = conf_.getBoolean(CONF_NS + "slowmatch", false); if (slowMatch_) { beginPat_ = makePatternCDataOrMark(beginMark_); endPat_ = makePatternCDataOrMark(endMark_); } init(); }
// End-to-end job: StreamXmlRecordReader configured with <PATTERN>/</PATTERN> marks
// must split target/input.xml into records; the job completes and output is checked.
@Test public void testStreamXmlRecordReader() throws Exception { Job job = Job.getInstance(); Configuration conf = job.getConfiguration(); job.setJarByClass(TestStreamXmlRecordReader.class); job.setMapperClass(Mapper.class); conf.set("stream.recordreader.class", "org.apache.hadoop.streaming.mapreduce.StreamXmlRecordReader"); conf.set("stream.recordreader.begin", "<PATTERN>"); conf.set("stream.recordreader.end", "</PATTERN>"); job.setInputFormatClass(StreamInputFormat.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(Text.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); FileInputFormat.addInputPath(job, new Path("target/input.xml")); OUTPUT_DIR = new Path("target/output"); fs = FileSystem.get(conf); if (fs.exists(OUTPUT_DIR)) { fs.delete(OUTPUT_DIR, true); } FileOutputFormat.setOutputPath(job, OUTPUT_DIR); boolean ret = job.waitForCompletion(true); assertEquals(true, ret); checkOutput(); }
/**
 * Fails the assertion when the subject is empty. A null subject is a usage
 * error, rejected by {@code checkNotNull} before the emptiness check.
 */
public void isNotEmpty() {
    final boolean subjectIsEmpty = checkNotNull(actual).isEmpty();
    if (subjectIsEmpty) {
        failWithoutActual(simpleFact("expected not to be empty"));
    }
}
// A single-cell table must pass the isNotEmpty assertion.
@Test public void tableIsNotEmpty() { ImmutableTable<Integer, Integer, Integer> table = ImmutableTable.of(1, 5, 7); assertThat(table).isNotEmpty(); }
/**
 * Parses filter expressions into a single predicate: expressions targeting the
 * same field are OR-ed together, and the per-field predicates are AND-ed across
 * fields. A null or empty expression list matches everything.
 *
 * @param filterExpressions raw "field:value" style expressions (may be null)
 * @param attributes        attribute metadata used to validate/resolve fields
 * @return the combined predicate
 */
public Predicate<InMemoryFilterable> parse(final List<String> filterExpressions, final List<EntityAttribute> attributes) {
    if (filterExpressions == null || filterExpressions.isEmpty()) {
        return Predicates.alwaysTrue();
    }
    // Single pipeline: parse, group by field, OR within a field, AND across fields.
    return filterExpressions.stream()
            .map(expression -> singleFilterParser.parseSingleExpression(expression, attributes))
            .collect(groupingBy(Filter::field))
            .values().stream()
            .map(sameFieldFilters -> sameFieldFilters.stream()
                    .map(Filter::toPredicate)
                    .reduce(Predicate::or)
                    .orElse(Predicates.alwaysTrue()))
            .reduce(Predicate::and)
            .orElse(Predicates.alwaysTrue());
}
// Filtering on a field id that is not in the attribute list must raise IAE.
@Test void throwsExceptionOnFieldThatDoesNotExistInAttributeList() { assertThrows(IllegalArgumentException.class, () -> toTest.parse(List.of("strange_field:blabla"), List.of(EntityAttribute.builder() .id("owner") .title("Owner") .filterable(true) .build()) )); }
/**
 * Appends {@code INSERT INTO <quoted-table> (<quoted-columns>)} to the builder,
 * quoting the table identifier through the dialect and the column list through
 * {@code appendFieldNames}.
 */
void appendInsertClause(StringBuilder builder) {
    builder.append("INSERT INTO ");
    builder.append(' ');
    appendFieldNames(builder, jdbcTable.dbFieldNames());
}
// The generated INSERT clause must double-quote both the table and column names.
@Test void testAppendInsertClause() { PostgresUpsertQueryBuilder builder = new PostgresUpsertQueryBuilder(jdbcTable, dialect); StringBuilder sb = new StringBuilder(); builder.appendInsertClause(sb); String insertClause = sb.toString(); assertThat(insertClause).isEqualTo("INSERT INTO \"table1\" (\"field1\",\"field2\")"); }
/**
 * Atomically updates the value with the given function, retrying on contention,
 * and returns the value that was replaced (the previous value).
 *
 * @param updateFunction side-effect-free function of the current value
 * @return the value observed before the successful update
 */
public long getAndUpdate(LongUnaryOperator updateFunction) {
    // Classic CAS retry loop: recompute from a fresh read until the swap wins.
    for (;;) {
        final long current = lvVal();
        final long updated = updateFunction.applyAsLong(current);
        if (casVal(current, updated)) {
            return current;
        }
    }
}
// getAndUpdate returns the previous value (0) while storing the updated one (2).
@Test public void testGetAndUpdate() { PaddedAtomicLong counter = new PaddedAtomicLong(); long value = counter.getAndUpdate(operand -> operand + 2); assertEquals(0, value); assertEquals(2, counter.get()); }