focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/** Resets every auth-data subscriber via refresh(), then re-publishes each AppAuthData to all subscribers. */
@Override
protected void doRefresh(final List<AppAuthData> dataList) {
    // First let every subscriber drop its stale state.
    for (AuthDataSubscriber subscriber : authDataSubscribers) {
        subscriber.refresh();
    }
    // Then deliver each auth entry to every subscriber.
    for (AppAuthData appAuthData : dataList) {
        for (AuthDataSubscriber subscriber : authDataSubscribers) {
            subscriber.onSubscribe(appAuthData);
        }
    }
}
// Verifies doRefresh() first calls refresh() on every subscriber, then onSubscribe(..) once per (data, subscriber) pair.
@Test public void testDoRefresh() { List<AppAuthData> appAuthDataList = createFakerAppAuthDataObjects(3); authDataHandler.doRefresh(appAuthDataList); authDataSubscribers.forEach(authDataSubscriber -> verify(authDataSubscriber).refresh()); appAuthDataList.forEach(appAuthData -> authDataSubscribers.forEach(authDataSubscriber -> verify(authDataSubscriber).onSubscribe(appAuthData))); }
/**
 * Returns this plugin's ordering value.
 *
 * @return the code associated with {@code PluginEnum.CACHE}
 */
@Override
public int getOrder() {
    return PluginEnum.CACHE.getCode();
}
// Asserts that CachePlugin reports the order code defined by PluginEnum.CACHE.
@Test public void getOrderTest() { final CachePlugin cachePlugin = new CachePlugin(); Assertions.assertEquals(cachePlugin.getOrder(), PluginEnum.CACHE.getCode()); }
// Blocking wrapper: awaits the async get-and-delete operation and returns its result.
@Override public long getAndDelete() { return get(getAndDeleteAsync()); }
// getAndDelete() on a set counter returns its value and removes the key; on a missing key it returns zero.
@Test public void testGetAndDelete() { RAtomicLong al = redisson.getAtomicLong("test"); al.set(10); assertThat(al.getAndDelete()).isEqualTo(10); assertThat(al.isExists()).isFalse(); RAtomicLong ad2 = redisson.getAtomicLong("test2"); assertThat(ad2.getAndDelete()).isZero(); }
/**
 * Looks up the BSN (burgerservicenummer) element within the identification-numbers categorie.
 *
 * @param categorieList the containers to search
 * @return whatever {@code findValue} yields for the BSN element
 */
public static String findBsn(List<Container> categorieList) {
    return findValue(
            categorieList,
            CATEGORIE_IDENTIFICATIENUMMERS,
            ELEMENT_BURGERSERVICENUMMER);
}
// An empty Container yields no BSN: findBsn returns null.
@Test public void testEmptyCategorie() { Container container = new Container(); assertThat(CategorieUtil.findBsn(List.of(container)), nullValue()); }
static void waitUntilFinish(@Nullable StreamGobbler gobbler) { if (gobbler != null) { try { gobbler.join(); } catch (InterruptedException ignored) { // consider as finished, restore the interrupted flag Thread.currentThread().interrupt(); } } }
// Lines prefixed with "[startup]" are routed to the startup logger (as warn, prefix stripped); other lines are not.
@Test public void startupLogIsLoggedWhenJSONFormatIsNotActive() { InputStream stream = IOUtils.toInputStream("[startup] Admin is still using default credentials\nsecond log\n", StandardCharsets.UTF_8); Logger startupLogger = mock(Logger.class); Logger logger = mock(Logger.class); StreamGobbler gobbler = new StreamGobbler(stream, "WEB", appSettings, logger, startupLogger); verifyNoInteractions(startupLogger); gobbler.start(); StreamGobbler.waitUntilFinish(gobbler); verify(startupLogger).warn("Admin is still using default credentials"); verifyNoMoreInteractions(startupLogger); }
/**
 * Returns the sketch produced by unioning all accumulated inputs.
 *
 * @return the merged sketch, never null
 */
@Nonnull
@Override
public Sketch getResult() {
    return unionAll();
}
// A freshly-created accumulator is empty and its result sketch estimates zero.
@Test public void testEmptyAccumulator() { ThetaSketchAccumulator accumulator = new ThetaSketchAccumulator(_setOperationBuilder, 2); Assert.assertTrue(accumulator.isEmpty()); Assert.assertEquals(accumulator.getResult().getEstimate(), 0.0); }
/**
 * Copies headers and attachments from the exchange's AttachmentMessage onto the outgoing
 * response. A null exchange is silently ignored.
 */
@Override
public void filterProducer(Exchange exchange, WebServiceMessage response) {
    if (exchange == null) {
        return;
    }
    processHeaderAndAttachments(exchange.getIn(AttachmentMessage.class), response);
}
// An attachment added to the exchange message is propagated onto the WebServiceMessage by filterProducer.
@Test public void producerWithAttachment() throws Exception { exchange.getIn(AttachmentMessage.class).addAttachment("testAttachment", new DataHandler(this.getClass().getResource("/sampleAttachment.txt"))); filter.filterProducer(exchange, message); Assertions.assertThat(message.getAttachments()).isNotNull().isNotEmpty(); Assertions.assertThat(message.getAttachment("testAttachment")).isNotNull(); }
// Deletes a product SPU (and its SKUs) inside one transaction; only SPUs already in the recycle-bin status may be deleted.
// NOTE(review): assumes validateSpuExists throws when the id is absent, so selectById below is non-null — confirm.
@Override @Transactional(rollbackFor = Exception.class) public void deleteSpu(Long id) { // 校验存在 validateSpuExists(id); // 校验商品状态不是回收站不能删除 ProductSpuDO spuDO = productSpuMapper.selectById(id); // 判断 SPU 状态是否为回收站 if (ObjectUtil.notEqual(spuDO.getStatus(), ProductSpuStatusEnum.RECYCLE.getStatus())) { throw exception(SPU_NOT_RECYCLE); } // TODO 芋艿:【可选】参与活动中的商品,不允许删除??? // 删除 SPU productSpuMapper.deleteById(id); // 删除关联的 SKU productSkuService.deleteSkuBySpuId(id); }
// Inserts a recycle-bin SPU (status -1), deletes it via the service, and verifies it is gone from the table.
@Test void deleteSpu() { // 准备参数 ProductSpuDO createReqVO = randomPojo(ProductSpuDO.class,o->{ o.setCategoryId(generateId()); o.setBrandId(generateId()); o.setDeliveryTemplateId(generateId()); o.setSort(RandomUtil.randomInt(1,100)); // 限制排序范围 o.setGiveIntegral(generaInt()); // 限制范围为正整数 o.setVirtualSalesCount(generaInt()); // 限制范围为正整数 o.setPrice(generaInt()); // 限制范围为正整数 o.setMarketPrice(generaInt()); // 限制范围为正整数 o.setCostPrice(generaInt()); // 限制范围为正整数 o.setStock(generaInt()); // 限制范围为正整数 o.setGiveIntegral(generaInt()); // 限制范围为正整数 o.setSalesCount(generaInt()); // 限制范围为正整数 o.setBrowseCount(generaInt()); // 限制范围为正整数 o.setStatus(-1); // 加入回收站才可删除 }); productSpuMapper.insert(createReqVO); // 调用 productSpuService.deleteSpu(createReqVO.getId()); Assertions.assertNull(productSpuMapper.selectById(createReqVO.getId())); }
// Batch-deregisters instances via the gRPC proxy, with begin/finish logging.
// An empty instance list only logs a warning — the gRPC call is still made (the test below relies on this).
@Override public void batchDeregisterService(String serviceName, String groupName, List<Instance> instances) throws NacosException { NAMING_LOGGER.info("batch DeregisterInstance instances: {} ,serviceName: {} begin.", instances, serviceName); if (CollectionUtils.isEmpty(instances)) { NAMING_LOGGER.warn("batch DeregisterInstance instances is Empty:{}", instances); } grpcClientProxy.batchDeregisterService(serviceName, groupName, instances); NAMING_LOGGER.info("batch DeregisterInstance instances: {} ,serviceName: {} finish.", instances, serviceName); }
// The delegate forwards batch deregistration to the gRPC client for both empty and non-empty instance lists.
@Test void testBatchDeregisterServiceByGrpc() throws NacosException { String serviceName = "service1"; String groupName = "group1"; List<Instance> instanceList = new ArrayList<>(); delegate.batchDeregisterService(serviceName, groupName, instanceList); verify(mockGrpcClient, times(1)).batchDeregisterService(serviceName, groupName, instanceList); reset(mockGrpcClient); instanceList.add(new Instance()); delegate.batchDeregisterService(serviceName, groupName, instanceList); verify(mockGrpcClient, times(1)).batchDeregisterService(serviceName, groupName, instanceList); }
/**
 * Timestamp values have no Boolean representation; this conversion always fails.
 *
 * @throws KettleValueException always, regardless of the argument
 */
@Override
public Boolean getBoolean( Object object ) throws KettleValueException {
    final String message = toStringMeta() + ": it's not possible to convert from Timestamp to Boolean";
    throw new KettleValueException( message );
}
// getBoolean on a timestamp meta always throws KettleValueException.
@Test( expected = KettleValueException.class ) public void testConvertTimestampToBoolean_Null() throws KettleValueException { ValueMetaTimestamp valueMetaTimestamp = new ValueMetaTimestamp(); valueMetaTimestamp.getBoolean( TIMESTAMP_WITH_MILLISECONDS ); }
// Serializes a ScriptDefinition into the binary script format:
// a leading null-string byte, then per-instruction opcode (short) plus operand
// (string for SCONST; int for opcodes < 100 excluding RETURN/POP_INT/POP_STRING; otherwise a single byte),
// followed by counts, the switch tables, and finally the switch-section length (short).
// Statement order matches the loader's expectations — do not reorder writes.
public byte[] save(ScriptDefinition script) { int[] instructions = script.getInstructions(); int[] intOperands = script.getIntOperands(); String[] stringOperands = script.getStringOperands(); Map<Integer, Integer>[] switches = script.getSwitches(); OutputStream out = new OutputStream(); out.writeByte(0); // null string for (int i = 0; i < instructions.length; ++i) { int opcode = instructions[i]; out.writeShort(opcode); if (opcode == SCONST) { out.writeString(stringOperands[i]); } else if (opcode < 100 && opcode != RETURN && opcode != POP_INT && opcode != POP_STRING) { out.writeInt(intOperands[i]); } else { out.writeByte(intOperands[i]); } } out.writeInt(instructions.length); out.writeShort(script.getLocalIntCount()); out.writeShort(script.getLocalStringCount()); out.writeShort(script.getIntStackCount()); out.writeShort(script.getStringStackCount()); int switchStart = out.getOffset(); if (switches == null) { out.writeByte(0); } else { out.writeByte(switches.length); for (Map<Integer, Integer> s : switches) { out.writeShort(s.size()); for (Entry<Integer, Integer> e : s.entrySet()) { out.writeInt(e.getKey()); out.writeInt(e.getValue()); } } } int switchLength = out.getOffset() - switchStart; out.writeShort(switchLength); return out.flip(); }
/** Round-trips a script containing unicode strings through save()/load() and asserts equality. */
@Test
public void testSaveUnicode() throws IOException {
    Instructions instructions = new Instructions();
    instructions.init();
    ScriptDefinition script = new Assembler(instructions).assemble(getClass().getResourceAsStream(SCRIPT_RESOURCE_UNICODE));
    byte[] saved = new ScriptSaver().save(script);
    // fixed local-variable typo: "loadedScripot" -> "loadedScript"
    ScriptDefinition loadedScript = new ScriptLoader().load(1001, saved);
    assertEquals(script, loadedScript);
}
// Delegates segment creation to the parent, then expires old segments based on the observed stream time.
@Override public KeyValueSegment getOrCreateSegmentIfLive(final long segmentId, final ProcessorContext context, final long streamTime) { final KeyValueSegment segment = super.getOrCreateSegmentIfLive(segmentId, context, streamTime); cleanupExpiredSegments(streamTime); return segment; }
// Closing the segments container closes every segment previously opened through it.
@Test public void shouldCloseAllOpenSegments() { final KeyValueSegment first = segments.getOrCreateSegmentIfLive(0, context, -1L); final KeyValueSegment second = segments.getOrCreateSegmentIfLive(1, context, -1L); final KeyValueSegment third = segments.getOrCreateSegmentIfLive(2, context, -1L); segments.close(); assertFalse(first.isOpen()); assertFalse(second.isOpen()); assertFalse(third.isOpen()); }
/**
 * Parses the token list into a syntax tree.
 *
 * @return the root node, or null when there are no tokens to parse
 * @throws ScanException if the token stream is malformed
 */
public Node parse() throws ScanException {
    final boolean nothingToParse = tokenList == null || tokenList.isEmpty();
    if (nothingToParse) {
        return null;
    }
    return E();
}
// Parsing an empty token list must complete without throwing (returns null per parse()'s contract).
@Test public void emptyTokenListDoesNotThrowNullPointerException() throws ScanException { // An empty token list would be returned from Tokenizer.tokenize() // if it were constructed with an empty string. The parser should // be able to handle this. Parser parser = new Parser(new ArrayList<Token>()); parser.parse(); }
/**
 * Finds the unique TransferExtension whose service id matches {@code service}, ignoring case.
 *
 * @throws IllegalStateException if multiple extensions match, or none do
 */
@VisibleForTesting
static TransferExtension findTransferExtension(
    ImmutableList<TransferExtension> transferExtensions, String service) {
  try {
    return transferExtensions
        .stream()
        // equalsIgnoreCase avoids the locale-sensitive toLowerCase() pitfall
        // (e.g. the Turkish dotless-i problem with default-locale lowercasing).
        .filter(ext -> ext.getServiceId().equalsIgnoreCase(service))
        .collect(onlyElement());
  } catch (IllegalArgumentException e) {
    throw new IllegalStateException(
        "Found multiple transfer extensions for service " + service, e);
  } catch (NoSuchElementException e) {
    throw new IllegalStateException(
        "Did not find a valid transfer extension for service " + service, e);
  }
}
// Lookup is case-insensitive: both "FOO" and "foo" resolve to the upper-cased extension.
@Test public void findTransferExtension() { ImmutableList<TransferExtension> transferExtensions = ImmutableList.of(FOO_UPPER, BAR_UPPER); assertThat(WorkerModule.findTransferExtension(transferExtensions, "FOO")).isEqualTo(FOO_UPPER); assertThat(WorkerModule.findTransferExtension(transferExtensions, "foo")).isEqualTo(FOO_UPPER); }
// Loads and deserializes an object from the SQL repository matching the given type.
// Each supported type queries its own repository; a missing row raises NotFoundException,
// and an unsupported type raises IllegalArgumentException. accountName is currently unused here.
@Override public <T> T loadObject(String accountName, ObjectType objectType, String objectKey) throws IllegalArgumentException, NotFoundException { if (objectType.equals(ObjectType.CANARY_RESULT_ARCHIVE)) { var record = sqlCanaryArchiveRepo .findById(objectKey) .orElseThrow(() -> new NotFoundException("Not found object for id: " + objectKey)); return mapToObject(record.getContent(), objectType); } if (objectType.equals(ObjectType.CANARY_CONFIG)) { var record = sqlCanaryConfigRepo .findById(objectKey) .orElseThrow(() -> new NotFoundException("Not found object for id: " + objectKey)); return mapToObject(record.getContent(), objectType); } if (objectType.equals(ObjectType.METRIC_SET_PAIR_LIST)) { var record = sqlMetricSetPairsRepo .findById(objectKey) .orElseThrow(() -> new NotFoundException("Not found object for id: " + objectKey)); return mapToObject(record.getContent(), objectType); } if (objectType.equals(ObjectType.METRIC_SET_LIST)) { var record = sqlMetricSetsRepo .findById(objectKey) .orElseThrow(() -> new NotFoundException("Not found object for id: " + objectKey)); return mapToObject(record.getContent(), objectType); } throw new IllegalArgumentException("Unsupported object type: " + objectType); }
// Loading a METRIC_SET_LIST with an unknown key must surface as NotFoundException.
@Test public void testLoadObjectWhenMetricSetsNotFound() { var testAccountName = UUID.randomUUID().toString(); var testObjectType = ObjectType.METRIC_SET_LIST; var testObjectKey = UUID.randomUUID().toString(); assertThrows( NotFoundException.class, () -> sqlStorageService.loadObject(testAccountName, testObjectType, testObjectKey)); }
// OAuth2 token-introspection endpoint: authenticates the client via HTTP Basic,
// validates the access token, and returns its details as a response VO.
@PostMapping("/check-token") @PermitAll @Operation(summary = "校验访问令牌") @Parameter(name = "token", required = true, description = "访问令牌", example = "biu") public CommonResult<OAuth2OpenCheckTokenRespVO> checkToken(HttpServletRequest request, @RequestParam("token") String token) { // 校验客户端 String[] clientIdAndSecret = obtainBasicAuthorization(request); oauth2ClientService.validOAuthClientFromCache(clientIdAndSecret[0], clientIdAndSecret[1], null, null, null); // 校验令牌 OAuth2AccessTokenDO accessTokenDO = oauth2TokenService.checkAccessToken(token); Assert.notNull(accessTokenDO, "访问令牌不能为空"); // 防御性检查 return success(OAuth2OpenConvert.INSTANCE.convert2(accessTokenDO)); }
// checkToken returns success with the token data and an `exp` in epoch seconds derived from expiresTime.
@Test public void testCheckToken() { // 准备参数 HttpServletRequest request = mockRequest("demo_client_id", "demo_client_secret"); String token = randomString(); // mock 方法 OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class).setUserType(UserTypeEnum.ADMIN.getValue()).setExpiresTime(LocalDateTimeUtil.of(1653485731195L)); when(oauth2TokenService.checkAccessToken(eq(token))).thenReturn(accessTokenDO); // 调用 CommonResult<OAuth2OpenCheckTokenRespVO> result = oauth2OpenController.checkToken(request, token); // 断言 assertEquals(0, result.getCode()); assertPojoEquals(accessTokenDO, result.getData()); assertEquals(1653485731L, result.getData().getExp()); // 执行过程会过去几毫秒 }
// Parses CLI arguments into AgentBootstrapperArgs via JCommander.
// -help prints usage and exits(0); invalid arguments print the error plus usage and exit(1).
// The trailing `return null` is only reachable if printUsageAndExit does not terminate the JVM (e.g. in tests).
public AgentBootstrapperArgs parse(String... args) { AgentBootstrapperArgs result = new AgentBootstrapperArgs(); try { new JCommander(result).parse(args); if (result.help) { printUsageAndExit(0); } return result; } catch (ParameterException e) { stderr.println(e.getMessage()); printUsageAndExit(1); } return null; }
// An invalid -sslVerificationMode value exits with status 1 and prints the allowed values plus usage to stderr.
@Test public void shouldRaisExceptionWhenInvalidSslModeIsPassed() { assertThatCode(() -> agentCLI.parse("-serverUrl", "https://go.example.com/go", "-sslVerificationMode", "FOOBAR")) .isInstanceOf(ExitException.class) .satisfies(o -> assertThat(((ExitException) o).getStatus()).isEqualTo(1)); assertThat(errorStream.toString()).contains("Invalid value for -sslVerificationMode parameter. Allowed values:[FULL, NONE, NO_VERIFY_HOST]"); assertThat(errorStream.toString()).contains("Usage: java -jar agent-bootstrapper.jar"); }
// Renames a file on the iRODS server. Fails fast with NotfoundException when the source is absent;
// deletes an existing target first when status.isExists(); Jargon errors are mapped to BackgroundException.
@Override public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException { try { final IRODSFileSystemAO fs = session.getClient(); final IRODSFile s = fs.getIRODSFileFactory().instanceIRODSFile(file.getAbsolute()); if(!s.exists()) { throw new NotfoundException(String.format("%s doesn't exist", file.getAbsolute())); } if(status.isExists()) { delete.delete(Collections.singletonMap(renamed, status), connectionCallback, callback); } final IRODSFile d = fs.getIRODSFileFactory().instanceIRODSFile(renamed.getAbsolute()); s.renameTo(d); return renamed; } catch(JargonException e) { throw new IRODSExceptionMappingService().map("Cannot rename {0}", e, file); } }
// Moving a directory that does not exist on the server must raise NotfoundException (integration test against iRODS).
@Test(expected = NotfoundException.class) public void testMoveNotFound() throws Exception { final ProtocolFactory factory = new ProtocolFactory(new HashSet<>(Collections.singleton(new IRODSProtocol()))); final Profile profile = new ProfilePlistReader(factory).read( this.getClass().getResourceAsStream("/iRODS (iPlant Collaborative).cyberduckprofile")); final Host host = new Host(profile, profile.getDefaultHostname(), new Credentials( PROPERTIES.get("irods.key"), PROPERTIES.get("irods.secret") )); final IRODSSession session = new IRODSSession(host); session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()); session.login(new DisabledLoginCallback(), new DisabledCancelCallback()); final Path source = new Path(new IRODSHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); final Path destination = new Path(new IRODSHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); assertFalse(session.getFeature(Find.class).find(source)); assertFalse(session.getFeature(Find.class).find(destination)); new IRODSMoveFeature(session).move(source, destination, new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback()); }
// Hash of the members array (deep), optionally memoized when cacheHash is enabled.
// A cached value of 0 is treated as "not computed yet", so a genuinely-zero hash is recomputed each call.
@Override public int hashCode() { if (this.cacheHash && 0 != this.hashCode) { return this.hashCode; } final int prime = 31; int result = 1; result = prime * result + Arrays.deepHashCode(members); if (this.cacheHash) { this.hashCode = result; } return result; }
/**
 * Equal tuples must be equal AND produce equal hash codes (equals/hashCode contract).
 * The original test only asserted equals() despite its name; the hashCode assertion is added.
 */
@Test
public void hashCodeTest(){
    final Tuple tuple = new Tuple(Locale.getDefault(), TimeZone.getDefault());
    final Tuple tuple2 = new Tuple(Locale.getDefault(), TimeZone.getDefault());
    assertEquals(tuple, tuple2);
    // equal objects are required to have equal hash codes
    assertEquals(tuple.hashCode(), tuple2.hashCode());
}
// Converts a decoded Avro value to a plain Java object according to its schema type:
// primitives pass through, ENUM/STRING are stringified, UNION unwraps to the first non-NULL branch,
// and complex/binary types (ARRAY, BYTES, FIXED, RECORD, MAP) are rejected as unsupported.
@VisibleForTesting static Object convertAvroField(Object avroValue, Schema schema) { if (avroValue == null) { return null; } switch (schema.getType()) { case NULL: case INT: case LONG: case DOUBLE: case FLOAT: case BOOLEAN: return avroValue; case ENUM: case STRING: return avroValue.toString(); // can be a String or org.apache.avro.util.Utf8 case UNION: for (Schema s : schema.getTypes()) { if (s.getType() == Schema.Type.NULL) { continue; } return convertAvroField(avroValue, s); } throw new IllegalArgumentException("Found UNION schema but it doesn't contain any type"); case ARRAY: case BYTES: case FIXED: case RECORD: case MAP: default: throw new UnsupportedOperationException("Unsupported avro schema type=" + schema.getType() + " for value field schema " + schema.getName()); } }
// A BYTES-typed field must be rejected with UnsupportedOperationException.
@Test(expectedExceptions = UnsupportedOperationException.class, expectedExceptionsMessageRegExp = "Unsupported avro schema type.*") public void testNotSupportedAvroTypesBytes() { BaseJdbcAutoSchemaSink.convertAvroField(new Object(), createFieldAndGetSchema((builder) -> builder.name("field").type().bytesType().noDefault())); }
// Caches the lite pull consumer and updates its wrapper after the intercepted call.
// NOTE(review): when a custom handler is present, the method returns early and skips
// disablePullConsumption — confirm this short-circuit is intentional.
@Override public ExecuteContext after(ExecuteContext context) { DefaultLitePullConsumer pullConsumer = (DefaultLitePullConsumer) context.getObject(); RocketMqPullConsumerController.cachePullConsumer(pullConsumer); // Get cached consumer packaging class instances DefaultLitePullConsumerWrapper pullConsumerWrapper = RocketMqPullConsumerController.getPullConsumerWrapper(pullConsumer); updatePushConsumerWrapperInfo(pullConsumerWrapper); if (handler != null) { handler.doAfter(context); return context; } // Consumer activation will execute consumption prohibition on consumers based on the cached consumption // prohibition configuration disablePullConsumption(pullConsumerWrapper); return context; }
// after() records the subscription type and queues for ASSIGN mode, and topics for SUBSCRIBE mode.
@Test public void testAfter() { // the subscription method is assign PullConsumerLocalInfoUtils.setSubscriptionType(SubscriptionType.ASSIGN); PullConsumerLocalInfoUtils.setMessageQueue(messageQueues); interceptor.after(context); Assert.assertEquals(pullConsumerWrapper.getSubscriptionType().name(), "ASSIGN"); Assert.assertEquals(pullConsumerWrapper.getMessageQueues(), messageQueues); PullConsumerLocalInfoUtils.removeMessageQueue(); PullConsumerLocalInfoUtils.removeSubscriptionType(); // subscription method is SUBSCRIBE PullConsumerLocalInfoUtils.setSubscriptionType(SubscriptionType.SUBSCRIBE); interceptor.after(context); Assert.assertTrue(pullConsumerWrapper.getSubscribedTopics().contains("test-topic")); Assert.assertEquals(pullConsumerWrapper.getSubscriptionType().name(), "SUBSCRIBE"); PullConsumerLocalInfoUtils.removeSubscriptionType(); }
// Returns a live RSet view over the values stored under `key` in this multimap.
// Mutating operations on the returned set are rerouted to the multimap (putAsync/removeAsync/…),
// bulk removal runs a Lua script that also deletes the hash entry once the value set becomes empty,
// and expiry/rename operations are deliberately unsupported on the per-key view.
@Override public RSet<V> get(final K key) { String keyHash = keyHash(key); final String setName = getValuesName(keyHash); return new RedissonSet<V>(codec, commandExecutor, setName, null) { @Override public RFuture<Boolean> addAsync(V value) { return RedissonSetMultimap.this.putAsync(key, value); } @Override public RFuture<Boolean> addAllAsync(Collection<? extends V> c) { return RedissonSetMultimap.this.putAllAsync(key, c); } @Override public RFuture<Boolean> removeAsync(Object value) { return RedissonSetMultimap.this.removeAsync(key, value); } @Override public RFuture<Boolean> removeAllAsync(Collection<?> c) { if (c.isEmpty()) { return new CompletableFutureWrapper<>(false); } List<Object> args = new ArrayList<Object>(c.size() + 1); args.add(encodeMapKey(key)); encode(args, c); return commandExecutor.evalWriteAsync(RedissonSetMultimap.this.getRawName(), codec, RedisCommands.EVAL_BOOLEAN_AMOUNT, "local count = 0;" + "for i=2, #ARGV, 5000 do " + "count = count + redis.call('srem', KEYS[2], unpack(ARGV, i, math.min(i+4999, table.getn(ARGV)))) " + "end; " + "if count > 0 then " + "if redis.call('scard', KEYS[2]) == 0 then " + "redis.call('hdel', KEYS[1], ARGV[1]); " + "end; " + "return 1;" + "end;" + "return 0; ", Arrays.<Object>asList(RedissonSetMultimap.this.getRawName(), setName), args.toArray()); } @Override public RFuture<Boolean> deleteAsync() { ByteBuf keyState = encodeMapKey(key); return RedissonSetMultimap.this.fastRemoveAsync(Arrays.asList(keyState), Arrays.asList(RedissonSetMultimap.this.getRawName(), setName), RedisCommands.EVAL_BOOLEAN_AMOUNT); } @Override public RFuture<Boolean> clearExpireAsync() { throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set"); } @Override public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... 
keys) { throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set"); } @Override protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) { throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set"); } @Override public RFuture<Long> remainTimeToLiveAsync() { throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set"); } @Override public RFuture<Void> renameAsync(String newName) { throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set"); } @Override public RFuture<Boolean> renamenxAsync(String newName) { throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set"); } }; }
// Two distributed iterators sharing the same iterator name continue the same scan:
// elements read by the first are not re-read by the second, and together they cover all "one*" matches.
@Test public void testDistributedIterator() { RSetMultimap<String, String> map = redisson.getSetMultimap("set", StringCodec.INSTANCE); // populate set with elements List<String> stringsOne = IntStream.range(0, 128).mapToObj(i -> "one-" + i).collect(Collectors.toList()); List<String> stringsTwo = IntStream.range(0, 128).mapToObj(i -> "two-" + i).collect(Collectors.toList()); map.putAll("someKey", stringsOne); map.putAll("someKey", stringsTwo); Iterator<String> stringIterator = map.get("someKey") .distributedIterator("iterator_{set}", "one*", 10); // read some elements using iterator List<String> strings = new ArrayList<>(); for (int i = 0; i < 64; i++) { if (stringIterator.hasNext()) { strings.add(stringIterator.next()); } } // create another iterator instance using the same name RSetMultimap<String, String> map2 = redisson.getSetMultimap("set", StringCodec.INSTANCE); Iterator<String> stringIterator2 = map2.get("someKey") .distributedIterator("iterator_{set}", "one*", 10); assertTrue(stringIterator2.hasNext()); // read all remaining elements stringIterator2.forEachRemaining(strings::add); stringIterator.forEachRemaining(strings::add); assertThat(strings).containsAll(stringsOne); assertThat(strings).hasSize(stringsOne.size()); }
// Server-side copy on Brick/Files.com: deletes an existing target first when status.isExists(),
// issues the copy via the FileActions API, polls for completion when the server reports an async
// file-migration id, and returns the target carrying the source's attributes.
@Override public Path copy(final Path file, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException { try { final BrickApiClient client = new BrickApiClient(session); if(status.isExists()) { if(log.isWarnEnabled()) { log.warn(String.format("Delete file %s to be replaced with %s", target, file)); } new BrickDeleteFeature(session).delete(Collections.singletonList(target), callback, new Delete.DisabledCallback()); } final FileActionEntity entity = new FileActionsApi(client) .copy(new CopyPathBody().destination(StringUtils.removeStart(target.getAbsolute(), String.valueOf(Path.DELIMITER))), StringUtils.removeStart(file.getAbsolute(), String.valueOf(Path.DELIMITER))); listener.sent(status.getLength()); if(entity.getFileMigrationId() != null) { this.poll(client, entity); } return target.withAttributes(file.attributes()); } catch(ApiException e) { throw new BrickExceptionMappingService().map("Cannot copy {0}", e, file); } }
// Copying onto an existing file with status.exists(true) must overwrite it; both source and copy remain afterwards.
@Test public void testCopyToExistingFile() throws Exception { final Path folder = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); new BrickDirectoryFeature(session).mkdir(folder, new TransferStatus()); final Path test = new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); final Local local = new Local(System.getProperty("java.io.tmpdir"), test.getName()); final byte[] random = RandomUtils.nextBytes(2547); IOUtils.write(random, local.getOutputStream(false)); final TransferStatus status = new TransferStatus().withLength(random.length); new BrickUploadFeature(session, new BrickWriteFeature(session)).upload(test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED), new DisabledStreamListener(), status, new DisabledLoginCallback()); local.delete(); assertTrue(new BrickFindFeature(session).find(test)); final Path copy = new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new BrickTouchFeature(session).touch(copy, new TransferStatus()); new BrickCopyFeature(session).copy(test, copy, new TransferStatus().exists(true), new DisabledConnectionCallback(), new DisabledStreamListener()); final Find find = new DefaultFindFeature(session); assertTrue(find.find(test)); assertTrue(find.find(copy)); new BrickDeleteFeature(session).delete(Arrays.asList(test, copy), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Builds schemas for all table names derivable from the material's rules.
 *
 * @param material schema builder material supplying rules and metadata
 * @return logical-name to schema map
 * @throws SQLException on metadata loading failure
 */
public static Map<String, ShardingSphereSchema> build(final GenericSchemaBuilderMaterial material) throws SQLException {
    final Collection<String> allTableNames = getAllTableNames(material.getRules());
    return build(allTableNames, material);
}
// Building against a table name the loader does not recognize yields a schema with no tables.
@Test void assertLoadWithNotExistedTableName() throws SQLException { Collection<String> tableNames = Collections.singletonList("invalid_table"); when(MetaDataLoader.load(any())).thenReturn(createSchemaMetaDataMap(tableNames, material)); assertTrue(GenericSchemaBuilder.build(tableNames, material).get(DefaultDatabase.LOGIC_NAME).getTables().isEmpty()); }
/**
 * Lists every namespace file whose (relative) path satisfies the given predicate.
 *
 * @param predicate tested against each file's path (with the flag {@code true} passed to path())
 * @return an unmodifiable list of matching files
 * @throws IOException if listing the namespace fails
 */
@Override
public List<NamespaceFile> findAllFilesMatching(final Predicate<Path> predicate) throws IOException {
    return all().stream()
            .filter(file -> predicate.test(file.path(true)))
            .toList();
}
// A brand-new namespace contains no files, so a match-all predicate returns an empty list.
@Test void shouldReturnNoNamespaceFileForEmptyNamespace() throws IOException { // Given final String namespaceId = "io.kestra." + IdUtils.create(); final InternalNamespace namespace = new InternalNamespace(logger, null, namespaceId, storageInterface); List<NamespaceFile> namespaceFiles = namespace.findAllFilesMatching((unused) -> true); assertThat(namespaceFiles.size(), is(0)); }
// Scores every English-language description of the CVE against the known ecosystems
// (accumulating hits per ecosystem in ecosystemMap) and returns the winning ecosystem, if any.
public String getEcosystem(DefCveItem cve) { final int[] ecosystemMap = new int[ECOSYSTEMS.length]; cve.getCve().getDescriptions().stream() .filter((langString) -> (langString.getLang().equals("en"))) .forEachOrdered((langString) -> search(langString.getValue(), ecosystemMap)); return getResult(ecosystemMap); }
// A ".php" appearing only inside a URL must not score toward the PHP ecosystem — result is null.
@Test public void testPhpLinksDoNotCountScoring() throws IOException { DescriptionEcosystemMapper mapper = new DescriptionEcosystemMapper(); String value = "Read more at https://domain/help.php."; assertNull(mapper.getEcosystem(asCve(value))); }
/**
 * Checks whether any JVM proxy property in PROXY_PROPERTIES is set for the given
 * protocol prefix (e.g. "http" checks "http.&lt;property&gt;").
 *
 * @param protocol protocol prefix of the system properties to probe
 * @return true if at least one such system property is set
 */
@VisibleForTesting
static boolean areProxyPropertiesSet(String protocol) {
    for (String property : PROXY_PROPERTIES) {
        if (System.getProperty(protocol + "." + property) != null) {
            return true;
        }
    }
    return false;
}
// Setting only http.nonProxyHosts (not in PROXY_PROPERTIES) must not count as "proxy configured" for either protocol.
@Test public void testAreProxyPropertiesSet_ignoresHttpNonProxyHosts() { System.setProperty("http.nonProxyHosts", "non proxy hosts"); Assert.assertFalse(MavenSettingsProxyProvider.areProxyPropertiesSet("http")); Assert.assertFalse(MavenSettingsProxyProvider.areProxyPropertiesSet("https")); }
// Jackson factory: parses strings like "5s" against DURATION_PATTERN — group(1) is the count,
// group(2) the unit suffix resolved via SUFFIXES. Throws IllegalArgumentException on a
// non-matching string or an unknown unit.
@JsonCreator public static Duration parse(String duration) { final Matcher matcher = DURATION_PATTERN.matcher(duration); if (!matcher.matches()) { throw new IllegalArgumentException("Invalid duration: " + duration); } final long count = Long.parseLong(matcher.group(1)); final TimeUnit unit = SUFFIXES.get(matcher.group(2)); if (unit == null) { throw new IllegalArgumentException("Invalid duration: " + duration + ". Wrong time unit"); } return new Duration(count, unit); }
// A non-numeric count ("five seconds") must be rejected with IllegalArgumentException.
@Test void unableParseWrongDurationCount() { assertThatIllegalArgumentException().isThrownBy(() -> Duration.parse("five seconds")); }
// Updates a coupon template after validating existence, that the new total count is not
// below the already-taken count, and that the product scope values are consistent.
@Override public void updateCouponTemplate(CouponTemplateUpdateReqVO updateReqVO) { // 校验存在 CouponTemplateDO couponTemplate = validateCouponTemplateExists(updateReqVO.getId()); // 校验发放数量不能过小 if (updateReqVO.getTotalCount() < couponTemplate.getTakeCount()) { throw exception(COUPON_TEMPLATE_TOTAL_COUNT_TOO_SMALL, couponTemplate.getTakeCount()); } // 校验商品范围 validateProductScope(updateReqVO.getProductScope(), updateReqVO.getProductScopeValues()); // 更新 CouponTemplateDO updateObj = CouponTemplateConvert.INSTANCE.convert(updateReqVO); couponTemplateMapper.updateById(updateObj); }
// Happy path: an existing template updated with a valid request is persisted field-for-field.
@Test public void testUpdateCouponTemplate_success() { // mock 数据 CouponTemplateDO dbCouponTemplate = randomPojo(CouponTemplateDO.class); couponTemplateMapper.insert(dbCouponTemplate);// @Sql: 先插入出一条存在的数据 // 准备参数 CouponTemplateUpdateReqVO reqVO = randomPojo(CouponTemplateUpdateReqVO.class, o -> { o.setId(dbCouponTemplate.getId()); // 设置更新的 ID // 其它通用字段 o.setProductScope(randomEle(PromotionProductScopeEnum.values()).getScope()) .setValidityType(randomEle(CouponTemplateValidityTypeEnum.values()).getType()) .setDiscountType(randomEle(PromotionDiscountTypeEnum.values()).getType()); }); // 调用 couponTemplateService.updateCouponTemplate(reqVO); // 校验是否更新正确 CouponTemplateDO couponTemplate = couponTemplateMapper.selectById(reqVO.getId()); // 获取最新的 assertPojoEquals(reqVO, couponTemplate); }
// Sets all bits present in `mask` into this mask, dispatching on the concrete mask type.
// The instanceof order matters: the common LongBitMask case ORs in place and returns this;
// sentinel types may promote the result to a different mask implementation;
// OpenBitSet delegates back to the other operand. Unknown types fall through as a no-op.
@Override public BitMask setAll(BitMask mask) { if (mask instanceof LongBitMask) { this.mask |= ((LongBitMask) mask).asLong(); } else if (mask instanceof AllSetBitMask) { return AllSetBitMask.get(); } else if (mask instanceof AllSetButLastBitMask) { return isSet(0) ? AllSetBitMask.get() : AllSetButLastBitMask.get(); } else if (mask instanceof OpenBitSet) { return mask.setAll(this); } else if (mask instanceof EmptyButLastBitMask) { return set(0); } return this; }
// Exercises setAll against each BitMask implementation and checks the resulting numeric representation.
@Test public void testSetAll() { assertThat(new LongBitMask().setAll(new LongBitMask()).toString()).isEqualTo("0"); assertThat(new LongBitMask().setAll(AllSetBitMask.get()).toString()).isEqualTo("-1"); assertThat(new LongBitMask().setAll(AllSetButLastBitMask.get()).toString()).isEqualTo("9223372036854775807"); assertThat(new LongBitMask(1).setAll(AllSetButLastBitMask.get()).toString()).isEqualTo("-1"); assertThat(new LongBitMask().setAll(new OpenBitSet()).toString()).isEqualTo("0"); assertThat(new LongBitMask().setAll(EmptyButLastBitMask.get()).toString()).isEqualTo("1"); assertThat(new LongBitMask().setAll(EmptyBitMask.get()).toString()).isEqualTo("0"); }
// Batched events do not carry a single event type; this accessor is intentionally unsupported.
@Override public EntryEventType getEventType() { throw new UnsupportedOperationException(); }
// getEventType on a batch event must throw UnsupportedOperationException.
@Test(expected = UnsupportedOperationException.class) public void testGetEventType() { batchIMapEvent.getEventType(); }
// Attempts to consume one token from the reader. On a regex match (popTo > 0) it emits a Token
// positioned at the pre-match cursor — using the normalization value when configured, otherwise
// the matched text — clears the shared builder for reuse, and reports success.
@Override public boolean consume(CodeReader code, TokenQueue output) { if (code.popTo(matcher, tmpBuilder) > 0) { // see SONAR-2499 Cursor previousCursor = code.getPreviousCursor(); if (normalizationValue != null) { output.add(new Token(normalizationValue, previousCursor.getLine(), previousCursor.getColumn())); } else { output.add(new Token(tmpBuilder.toString(), previousCursor.getLine(), previousCursor.getColumn())); } // Godin: note that other channels use method delete in order to do the same thing tmpBuilder.setLength(0); return true; } return false; }
// Consuming "ABC" from "ABCD" emits one token at line 1 / column 0 and advances the reader to column 3.
@Test public void shouldConsume() { TokenChannel channel = new TokenChannel("ABC"); TokenQueue output = mock(TokenQueue.class); CodeReader codeReader = new CodeReader("ABCD"); assertThat(channel.consume(codeReader, output), is(true)); ArgumentCaptor<Token> token = ArgumentCaptor.forClass(Token.class); verify(output).add(token.capture()); assertThat(token.getValue(), is(new Token("ABC", 1, 0))); verifyNoMoreInteractions(output); assertThat(codeReader.getLinePosition(), is(1)); assertThat(codeReader.getColumnPosition(), is(3)); }
/**
 * Static factory creating a RetryTransformer bound to the given Retry instance.
 *
 * @param retry the retry policy to apply
 * @param <T>   the emitted element type
 * @return a transformer wrapping {@code retry}
 */
public static <T> RetryTransformer<T> of(Retry retry) {
    final RetryTransformer<T> transformer = new RetryTransformer<>(retry);
    return transformer;
}
// First subscription succeeds immediately (no retry); second subscription
// fails twice then succeeds (counted as one success with retry attempts).
// Total of four invocations of the stubbed service.
@Test
public void returnOnCompleteUsingCompletable() throws InterruptedException {
    RetryConfig config = retryConfig();
    Retry retry = Retry.of("testName", config);
    RetryTransformer<Object> retryTransformer = RetryTransformer.of(retry);
    doNothing()
        .doThrow(new HelloWorldException())
        .doThrow(new HelloWorldException())
        .doNothing()
        .when(helloWorldService).sayHelloWorld();
    Completable.fromRunnable(helloWorldService::sayHelloWorld)
        .compose(retryTransformer)
        .test()
        .await()
        .assertNoValues()
        .assertComplete();
    Completable.fromRunnable(helloWorldService::sayHelloWorld)
        .compose(retryTransformer)
        .test()
        .await()
        .assertNoValues()
        .assertComplete();
    then(helloWorldService).should(times(4)).sayHelloWorld();
    // Metrics: one plain success, one success achieved via retries, no failures.
    Retry.Metrics metrics = retry.getMetrics();
    assertThat(metrics.getNumberOfSuccessfulCallsWithoutRetryAttempt()).isEqualTo(1);
    assertThat(metrics.getNumberOfSuccessfulCallsWithRetryAttempt()).isEqualTo(1);
    assertThat(metrics.getNumberOfFailedCallsWithRetryAttempt()).isZero();
    assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isZero();
}
// Executes the criteria query on the current session and returns its unique
// result, or null when the query matched nothing. The query must not be null.
protected @Nullable E uniqueResult(CriteriaQuery<E> criteriaQuery) throws HibernateException {
    return uniqueElement(
        currentSession()
            .createQuery(requireNonNull(criteriaQuery))
            .getResultList()
    );
}
// uniqueResult should pass the query's single result straight through.
@Test
void returnsUniqueResultsFromQueries() throws Exception {
    when(query.uniqueResult()).thenReturn("woo");
    assertThat(dao.uniqueResult(query))
        .isEqualTo("woo");
}
/**
 * Splits a command-line style string into tokens, honoring shell-like
 * quoting rules: whitespace separates tokens unless quoted; single and
 * double quotes group text; backslash escapes the next character except
 * inside single quotes, where it is literal. A null input yields an empty
 * list. Adjacent quoted segments join into a single token.
 */
public static List<String> shellSplit(CharSequence string) {
    List<String> result = new ArrayList<>();
    if (string == null) {
        return result;
    }
    boolean escaped = false;
    boolean inQuotes = false;
    char activeQuote = ' ';
    StringBuilder token = new StringBuilder();
    for (int index = 0; index < string.length(); index++) {
        char ch = string.charAt(index);
        if (escaped) {
            // Previous char was an unquoted (or double-quoted) backslash.
            token.append(ch);
            escaped = false;
            continue;
        }
        if (ch == '\\' && !(inQuotes && activeQuote == '\'')) {
            escaped = true;
        } else if (inQuotes && ch == activeQuote) {
            inQuotes = false;
        } else if (!inQuotes && (ch == '\'' || ch == '"')) {
            inQuotes = true;
            activeQuote = ch;
        } else if (!inQuotes && Character.isWhitespace(ch)) {
            // Token boundary; emit the accumulated token if non-empty.
            if (token.length() > 0) {
                result.add(token.toString());
                token = new StringBuilder();
            }
        } else {
            token.append(ch);
        }
    }
    if (token.length() > 0) {
        result.add(token.toString());
    }
    return result;
}
// Adjacent quoted segments with no separating whitespace join into one token.
@Test
public void backToBackQuotedStringsShouldFormSingleToken() {
    assertEquals(List.of("foobarbaz"), StringUtils.shellSplit("\"foo\"'bar'baz"));
    assertEquals(List.of("three four"), StringUtils.shellSplit("\"three\"' 'four"));
}
// Starts the reader: creates the shard-subscriber pool, starts it from the
// initial checkpoint, and immediately attempts to advance to a first record.
// Transient Kinesis failures are surfaced as IOException per the Reader API.
@Override
public boolean start() throws IOException {
    LOG.info("Starting reader using {}", initCheckpoint);
    try {
        shardSubscribersPool = createPool();
        shardSubscribersPool().start(initCheckpoint);
        return advance();
    } catch (TransientKinesisException e) {
        throw new IOException(e);
    }
}
// start() must report false when no record is available at the checkpoint.
@Test
public void startReturnsFalseIfNoDataAtTheBeginning() throws IOException {
    assertThat(reader.start()).isFalse();
}
/**
 * Registers a ServletHealthResponder under the given name (suffixed) and
 * maps it onto every provided health-check URL path. Cache-control behavior
 * comes from this factory's configured fields.
 */
@Override
public void configure(final String name, final Collection<String> healthCheckUrlPaths,
                      final HealthResponseProvider healthResponseProvider,
                      final HealthEnvironment health, final JerseyEnvironment jersey,
                      final ServletEnvironment servlets, final ObjectMapper mapper) {
    final ServletHealthResponder responder =
        new ServletHealthResponder(healthResponseProvider, cacheControlEnabled, cacheControlValue);
    final String[] mappings = healthCheckUrlPaths.toArray(new String[0]);
    servlets.addServlet(name + SERVLET_SUFFIX, responder).addMapping(mappings);
}
// With cache-control disabled in the YAML config, the responder must omit
// the Cache-Control header on both healthy (200) and unhealthy (503) responses.
@Test
void testBuildHealthServletWithCacheControlDisabled() throws Exception {
    // given
    HealthResponderFactory factory = configFactory.build(new ResourceConfigurationSourceProvider(),
        "/yml/servlet-responder-factory-caching-header-disabled.yml");
    setupServletStubbing();
    // when
    // succeed first, fail second
    when(healthResponseProvider.healthResponse(Collections.emptyMap())).thenReturn(SUCCESS, FAIL);
    factory.configure(NAME, Collections.singletonList(HEALTH_CHECK_URI), healthResponseProvider,
        health, jersey, servlets, mapper);
    servletTester.addServlet(new ServletHolder(servletCaptor.getValue()), HEALTH_CHECK_URI);
    servletTester.start();
    HttpTester.Response healthyResponse = executeRequest(request);
    HttpTester.Response unhealthyResponse = executeRequest(request);
    // then
    assertThat(healthyResponse.getStatus()).isEqualTo(Response.SC_OK);
    assertThat(healthyResponse.get(HttpHeader.CACHE_CONTROL)).isNull();
    assertThat(unhealthyResponse.getStatus()).isEqualTo(Response.SC_SERVICE_UNAVAILABLE);
    assertThat(unhealthyResponse.get(HttpHeader.CACHE_CONTROL)).isNull();
}
// Stores a value (possibly null) under the given field name, replacing any
// previous value for that field.
public void putValue(String fieldName, @Nullable Object value) {
    _fieldToValueMap.put(fieldName, value);
}
// Rows with different key sets and no shared values must not compare equal.
@Test
public void testDifferentNumberOfKeysWithNoSameValueNotEqual() {
    GenericRow first = new GenericRow();
    first.putValue("one", 1);
    first.putValue("two", 2);
    GenericRow second = new GenericRow();
    second.putValue("one", "one");
    Assert.assertNotEquals(first, second);
}
// Appends the given output ids to the stream's outputs using $addToSet with
// $each (so duplicates are not added), then broadcasts a stream-changed
// event on the cluster event bus.
@Override
public void addOutputs(ObjectId streamId, Collection<ObjectId> outputIds) {
    final BasicDBList outputs = new BasicDBList();
    outputs.addAll(outputIds);
    collection(StreamImpl.class).update(
        db(StreamImpl.FIELD_ID, streamId),
        db("$addToSet", new BasicDBObject(StreamImpl.FIELD_OUTPUTS, new BasicDBObject("$each", outputs)))
    );
    clusterEventBus.post(StreamsChangedEvent.create(streamId.toHexString()));
}
// After addOutputs, reloading the stream must expose both output ids.
@Test
@MongoDBFixtures("someStreamsWithoutAlertConditions.json")
public void addOutputs() throws NotFoundException {
    final ObjectId streamId = new ObjectId("5628f4503b0c5756a8eebc4d");
    final ObjectId output1Id = new ObjectId("5628f4503b00deadbeef0001");
    final ObjectId output2Id = new ObjectId("5628f4503b00deadbeef0002");
    final Output output1 = mock(Output.class);
    final Output output2 = mock(Output.class);
    when(output1.getId()).thenReturn(output1Id.toHexString());
    when(output2.getId()).thenReturn(output2Id.toHexString());
    when(outputService.load(output1Id.toHexString())).thenReturn(output1);
    when(outputService.load(output2Id.toHexString())).thenReturn(output2);
    streamService.addOutputs(streamId, ImmutableSet.of(output1Id, output2Id));
    final Stream stream = streamService.load(streamId.toHexString());
    assertThat(stream.getOutputs())
        .anySatisfy(output -> assertThat(output.getId()).isEqualTo(output1Id.toHexString()))
        .anySatisfy(output -> assertThat(output.getId()).isEqualTo(output2Id.toHexString()));
}
// Convenience overload: parses the partial XML string (as UTF-8) into the
// requested type via the stream-based variant.
public <T> T fromXmlPartial(String partial, Class<T> o) throws Exception {
    return fromXmlPartial(toInputStream(partial, UTF_8), o);
}
// The autoUpdate="true" XML attribute must map to isAutoUpdate() == true.
@Test
void autoUpdateShouldUnderstandTrue() throws Exception {
    MaterialConfig hgMaterial = xmlLoader.fromXmlPartial(
        "<hg url=\"file:///tmp/testSvnRepo/project1/trunk\" autoUpdate=\"true\"/>", HgMaterialConfig.class);
    assertThat(hgMaterial.isAutoUpdate()).isTrue();
}
// Removes all cached rule data for every selector.
public void cleanRuleData() {
    RULE_MAP.clear();
}
// cleanRuleData must empty the internal rule map for all selector ids.
@Test
public void testCleanRuleData() throws NoSuchFieldException, IllegalAccessException {
    RuleData firstCachedRuleData = RuleData.builder().id("1").selectorId(mockSelectorId1).build();
    RuleData secondCachedRuleData = RuleData.builder().id("2").selectorId(mockSelectorId2).build();
    ConcurrentHashMap<String, List<RuleData>> ruleMap = getFieldByName(ruleMapStr);
    ruleMap.put(mockSelectorId1, Lists.newArrayList(firstCachedRuleData));
    ruleMap.put(mockSelectorId2, Lists.newArrayList(secondCachedRuleData));
    BaseDataCache.getInstance().cleanRuleData();
    assertNull(ruleMap.get(mockSelectorId1));
    assertNull(ruleMap.get(mockSelectorId2));
}
// Adds a private getCreatedKiePMMLMiningFields() method (returning
// List<KiePMMLMiningField>) to the model template; body population is
// delegated to the common helper.
public static void addGetCreatedKiePMMLMiningFieldsMethod(final ClassOrInterfaceDeclaration modelTemplate,
                                                          final List<org.dmg.pmml.MiningField> miningFields,
                                                          final List<org.dmg.pmml.Field<?>> fields) {
    final MethodDeclaration methodDeclaration = modelTemplate.addMethod(GET_CREATED_KIEPMMLMININGFIELDS, Modifier.Keyword.PRIVATE);
    final ClassOrInterfaceType returnedType = getTypedClassOrInterfaceTypeByTypeNames(List.class.getSimpleName(),
        Collections.singletonList(KiePMMLMiningField.class.getSimpleName()));
    methodDeclaration.setType(returnedType);
    commonPopulateGetCreatedKiePMMLMiningFieldsMethod(methodDeclaration, miningFields, fields);
}
// The generated method body must match the expected source in TEST_12_SOURCE.
@Test
void addGetCreatedKiePMMLMiningFieldsMethod() throws IOException {
    final CompilationDTO compilationDTO = CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
        pmmlModel, model, new PMMLCompilationContextMock(), SOURCE_BASE);
    ClassOrInterfaceDeclaration modelTemplate = new ClassOrInterfaceDeclaration();
    org.kie.pmml.compiler.commons.codegenfactories.KiePMMLModelFactoryUtils.addGetCreatedKiePMMLMiningFieldsMethod(
        modelTemplate, compilationDTO.getMiningSchema().getMiningFields(), compilationDTO.getFields());
    final MethodDeclaration retrieved = modelTemplate.getMethodsByName(GET_CREATED_KIEPMMLMININGFIELDS).get(0);
    String text = getFileContent(TEST_12_SOURCE);
    BlockStmt expected = JavaParserUtils.parseBlock(text);
    assertThat(JavaParserUtils.equalsNode(expected, retrieved.getBody().get())).isTrue();
}
/**
 * Returns the origin for the distribution. Website distributions use the
 * bucket's website hostname over plain HTTP; every other method delegates
 * to the parent implementation.
 */
@Override
protected URI getOrigin(final Path container, final Distribution.Method method) throws BackgroundException {
    if(!Distribution.WEBSITE_CDN.equals(method)) {
        return super.getOrigin(container, method);
    }
    return URI.create(String.format("http://%s", this.getWebsiteHostname(container)));
}
// DOWNLOAD/WEBSITE methods use the plain bucket host; WEBSITE_CDN uses the
// region-specific s3-website endpoint.
@Test
public void testGetOrigin() throws Exception {
    final WebsiteCloudFrontDistributionConfiguration configuration = new WebsiteCloudFrontDistributionConfiguration(
        session, new S3LocationFeature(session), new DisabledX509TrustManager(), new DefaultX509KeyManager()
    );
    final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    assertEquals("test-eu-central-1-cyberduck.s3.amazonaws.com",
        configuration.getOrigin(container, Distribution.DOWNLOAD).getHost());
    assertEquals("test-eu-central-1-cyberduck.s3.amazonaws.com",
        configuration.getOrigin(container, Distribution.WEBSITE).getHost());
    assertEquals("test-eu-central-1-cyberduck.s3-website-eu-central-1.amazonaws.com",
        configuration.getOrigin(container, Distribution.WEBSITE_CDN).getHost());
}
/**
 * Delegates reservation placement to the wrapped planner, logging the
 * outcome (success, failure, or failure-by-exception) in a uniform format.
 * Rethrows any PlanningException after logging.
 */
@Override
public boolean createReservation(ReservationId reservationId, String user,
    Plan plan, ReservationDefinition contract) throws PlanningException {
  LOG.info("placing the following ReservationRequest: " + contract);
  try {
    boolean res = planner.createReservation(reservationId, user, plan, contract);
    logOutcome(res ? "SUCCESS" : "FAILURE", reservationId, contract);
    return res;
  } catch (PlanningException e) {
    logOutcome("FAILURE", reservationId, contract);
    throw e;
  }
}

// Single place for the OUTCOME log line, replacing three duplicated
// string-building statements in the original implementation.
private void logOutcome(String outcome, ReservationId reservationId,
    ReservationDefinition contract) {
  LOG.info("OUTCOME: " + outcome + ", Reservation ID: "
      + reservationId.toString() + ", Contract: " + contract.toString());
}
// Places a single atomic reservation request and verifies the agent
// allocates the expected resources over a 10-step window; the window is
// left- or right-aligned depending on the allocateLeft flag.
@SuppressWarnings("javadoc")
@Test
public void testSimple() throws PlanningException {
    prepareBasicPlan();
    // create a request with a single atomic ask
    ReservationDefinition rr = new ReservationDefinitionPBImpl();
    rr.setArrival(5 * step);
    rr.setDeadline(20 * step);
    rr.setRecurrenceExpression(recurrenceExpression);
    ReservationRequest r = ReservationRequest.newInstance(
        Resource.newInstance(2048, 2), 10, 5, 10 * step);
    ReservationRequests reqs = new ReservationRequestsPBImpl();
    reqs.setReservationResources(Collections.singletonList(r));
    rr.setReservationRequests(reqs);
    ReservationId reservationID = ReservationSystemTestUtil.getNewReservationId();
    agent.createReservation(reservationID, "u1", plan, rr);
    assertTrue("Agent-based allocation failed", reservationID != null);
    assertTrue("Agent-based allocation failed", plan.getAllReservations().size() == 3);
    ReservationAllocation cs = plan.getReservationById(reservationID);
    System.out.println("--------AFTER SIMPLE ALLOCATION (queue: " + reservationID + ")----------");
    System.out.println(plan.toString());
    System.out.println(plan.toCumulativeString());
    if(allocateLeft){
        // Left-aligned: allocation occupies [5*step, 15*step).
        for (long i = 5 * step; i < 15 * step; i++) {
            assertTrue(
                "Agent-based allocation unexpected",
                Resources.equals(cs.getResourcesAtTime(i),
                    Resource.newInstance(2048 * 10, 2 * 10)));
        }
    } else {
        // Right-aligned: allocation occupies [10*step, 20*step).
        for (long i = 10 * step; i < 20 * step; i++) {
            assertTrue(
                "Agent-based allocation unexpected",
                Resources.equals(cs.getResourcesAtTime(i),
                    Resource.newInstance(2048 * 10, 2 * 10)));
        }
    }
}
/**
 * Reports whether this meta is dirty: true when the explicit changed flag
 * is set, or any connection changed, or any note changed.
 */
@Override
public boolean hasChanged() {
  // Short-circuit OR replaces the original chain of three if-return blocks.
  return changedFlag.hasChanged() || haveConnectionsChanged() || haveNotesChanged();
}
// hasChanged must reflect the explicit changed flag set via setChanged().
@Test
public void testHasChanged() {
    meta.clear();
    assertFalse( meta.hasChanged() );
    meta.setChanged( true );
    assertTrue( meta.hasChanged() );
}
// Handles a failed commit attempt: logs the failure, determines the remote
// file size (from a TieredStoreException's recorded position when available,
// otherwise by querying the backend), then either treats the commit as
// recovered (position corrected, buffered stream dropped) or rewinds the
// stream for a retry. Returns true when the commit can be considered done.
private boolean handleCommitException(Throwable e) {
    log.warn("FileSegment commit exception, filePath={}", this.filePath, e);
    // Get root cause here
    Throwable rootCause = e.getCause() != null ? e.getCause() : e;
    long fileSize = rootCause instanceof TieredStoreException ?
        ((TieredStoreException) rootCause).getPosition() : this.getSize();
    long expectPosition = commitPosition + fileSegmentInputStream.getContentLength();
    if (fileSize == GET_FILE_SIZE_ERROR) {
        // Size unknown: cannot reconcile positions, report failure.
        log.error("Get file size error after commit, FileName: {}, Commit: {}, Content: {}, Expect: {}, Append: {}",
            this.getPath(), commitPosition, fileSegmentInputStream.getContentLength(), expectPosition, appendPosition);
        return false;
    }
    if (correctPosition(fileSize)) {
        // Remote already holds the data at the expected position; succeed.
        fileSegmentInputStream = null;
        return true;
    } else {
        // Rewind so the same buffered content can be re-committed on retry.
        fileSegmentInputStream.rewind();
        return false;
    }
}
// Three commit-failure scenarios:
// 1) TieredStoreException without a recorded position -> commit fails;
// 2) TieredStoreException whose position equals the expected size -> commit
//    is treated as already applied and succeeds;
// 3) plain RuntimeException with reported size 0 -> commit fails.
@Test
public void handleCommitExceptionTest() {
    MetadataStore metadataStore = new DefaultMetadataStore(storeConfig);
    FileSegmentFactory factory = new FileSegmentFactory(metadataStore, storeConfig);
    {
        FileSegment fileSegment = factory.createCommitLogFileSegment(MessageStoreUtil.toFilePath(mq), baseOffset);
        FileSegment fileSpySegment = Mockito.spy(fileSegment);
        fileSpySegment.append(MessageFormatUtilTest.buildMockedMessageBuffer(), 0L);
        fileSpySegment.append(MessageFormatUtilTest.buildMockedMessageBuffer(), 0L);
        Mockito.when(fileSpySegment.commit0(any(), anyLong(), anyInt(), anyBoolean()))
            .thenReturn(CompletableFuture.supplyAsync(() -> {
                throw new TieredStoreException(TieredStoreErrorCode.IO_ERROR, "Test");
            }));
        Assert.assertFalse(fileSpySegment.commitAsync().join());
        fileSegment.destroyFile();
    }
    {
        FileSegment fileSegment = factory.createCommitLogFileSegment(MessageStoreUtil.toFilePath(mq), baseOffset);
        FileSegment fileSpySegment = Mockito.spy(fileSegment);
        fileSpySegment.append(MessageFormatUtilTest.buildMockedMessageBuffer(), 0L);
        fileSpySegment.append(MessageFormatUtilTest.buildMockedMessageBuffer(), 0L);
        Mockito.when(fileSpySegment.commit0(any(), anyLong(), anyInt(), anyBoolean()))
            .thenReturn(CompletableFuture.supplyAsync(() -> {
                long size = MessageFormatUtilTest.buildMockedMessageBuffer().remaining();
                TieredStoreException exception = new TieredStoreException(TieredStoreErrorCode.IO_ERROR, "Test");
                // Position matches the two appended buffers -> recoverable.
                exception.setPosition(size * 2L);
                throw exception;
            }));
        Assert.assertTrue(fileSpySegment.commitAsync().join());
        fileSegment.destroyFile();
    }
    {
        FileSegment fileSegment = factory.createCommitLogFileSegment(MessageStoreUtil.toFilePath(mq), baseOffset);
        FileSegment fileSpySegment = Mockito.spy(fileSegment);
        fileSpySegment.append(MessageFormatUtilTest.buildMockedMessageBuffer(), 0L);
        fileSpySegment.append(MessageFormatUtilTest.buildMockedMessageBuffer(), 0L);
        Mockito.when(fileSpySegment.commit0(any(), anyLong(), anyInt(), anyBoolean()))
            .thenReturn(CompletableFuture.supplyAsync(() -> {
                throw new RuntimeException("Runtime Error for Test");
            }));
        Mockito.when(fileSpySegment.getSize()).thenReturn(0L);
        Assert.assertFalse(fileSpySegment.commitAsync().join());
    }
}
/**
 * Expands any nested anyOf environments and returns the first environment
 * matching the earliest preferred URN; when no preferred type matches,
 * falls back to the first expanded environment.
 */
public static Environment resolveAnyOfEnvironment(
    Environment environment, String... preferredEnvironmentTypes) {
  List<Environment> candidates = expandAnyOfEnvironments(environment);
  for (String preferredUrn : preferredEnvironmentTypes) {
    for (Environment candidate : candidates) {
      if (preferredUrn.equals(candidate.getUrn())) {
        return candidate;
      }
    }
  }
  // No preferred type present; use the first expanded environment.
  return candidates.iterator().next();
}
// Resolution prefers the requested URN across nested anyOf environments and
// still yields a non-null fallback for an absent URN.
@Test
public void testResolveAnyOfEnvironment() {
    Environment dockerEnv = Environments.createDockerEnvironment("A");
    Environment processEnv = Environments.createProcessEnvironment("os", "arch", "cmd", new HashMap<>());
    Environment env = Environments.createAnyOfEnvironment(
        dockerEnv, Environments.createAnyOfEnvironment(processEnv));
    assertThat(
        Environments.resolveAnyOfEnvironment(
            env, BeamUrns.getUrn(StandardEnvironments.Environments.DOCKER)),
        equalTo(dockerEnv));
    assertThat(
        Environments.resolveAnyOfEnvironment(
            env, BeamUrns.getUrn(StandardEnvironments.Environments.PROCESS)),
        equalTo(processEnv));
    assertThat(
        Environments.resolveAnyOfEnvironment(
            env, BeamUrns.getUrn(StandardEnvironments.Environments.EXTERNAL)),
        notNullValue());
}
// Parses a URI into a Path. A blank default path maps to the root directory;
// otherwise the container-path-kind detector can promote the path to a volume
// directory, and the delimiter-based detector decides file vs. directory for
// everything else.
public Path parse(final String uri) throws HostParserException {
    final Host host = new HostParser(factory).get(uri);
    if(StringUtils.isBlank(host.getDefaultPath())) {
        return new Path(String.valueOf(Path.DELIMITER), EnumSet.of((Path.Type.directory)));
    }
    switch(new ContainerPathKindDetector(host.getProtocol().getFeature(PathContainerService.class)).detect(host.getDefaultPath())) {
        case directory:
            // Container (e.g. bucket) paths are volume directories.
            return new Path(PathNormalizer.normalize(host.getDefaultPath()), EnumSet.of(Path.Type.directory, Path.Type.volume));
    }
    return new Path(PathNormalizer.normalize(host.getDefaultPath()), EnumSet.of(
        new DelimiterPathKindDetector().detect(host.getDefaultPath())));
}
// Exercises URI-to-Path parsing across FTPS and S3 profiles: trailing slash
// means directory, no slash means file, bare bucket path means volume.
@Test
public void testParse() throws Exception {
    final CommandLineParser parser = new PosixParser();
    final CommandLine input = parser.parse(new Options(), new String[]{});
    final ProtocolFactory factory = new ProtocolFactory(new HashSet<>(Arrays.asList(new FTPTLSProtocol(), new S3Protocol())));
    factory.register(new ProfilePlistReader(factory).read(this.getClass().getResourceAsStream("/FTP.cyberduckprofile")));
    factory.register(new ProfilePlistReader(factory).read(this.getClass().getResourceAsStream("/FTPS.cyberduckprofile")));
    factory.register(new ProfilePlistReader(factory).read(this.getClass().getResourceAsStream("/S3 (HTTPS).cyberduckprofile")));
    assertEquals(new Path("/", EnumSet.of(Path.Type.directory)),
        new CommandLinePathParser(input, factory).parse("ftps://u@test.cyberduck.ch/"));
    assertEquals(new Path("/d", EnumSet.of(Path.Type.directory)),
        new CommandLinePathParser(input, factory).parse("ftps://u@test.cyberduck.ch/d/"));
    assertEquals(new Path("/d", EnumSet.of(Path.Type.file)),
        new CommandLinePathParser(input, factory).parse("ftps://u@test.cyberduck.ch/d"));
    assertEquals(new Path("/", EnumSet.of(Path.Type.directory)),
        new CommandLinePathParser(input, factory).parse("ftps://u@test.cyberduck.ch/"));
    assertEquals(new Path("/", EnumSet.of(Path.Type.directory)),
        new CommandLinePathParser(input, factory).parse("ftps://u@test.cyberduck.ch"));
    assertEquals(new Path("/test.cyberduck.ch", EnumSet.of(Path.Type.directory)),
        new CommandLinePathParser(input, factory).parse("s3:u@test.cyberduck.ch/"));
    assertEquals(new Path("/test.cyberduck.ch", EnumSet.of(Path.Type.directory)),
        new CommandLinePathParser(input, factory).parse("s3:u@test.cyberduck.ch/"));
    assertEquals(new Path("/test.cyberduck.ch/d", EnumSet.of(Path.Type.directory)),
        new CommandLinePathParser(input, factory).parse("s3:u@test.cyberduck.ch/d/"));
    assertEquals(new Path("/test.cyberduck.ch/d", EnumSet.of(Path.Type.directory)),
        new CommandLinePathParser(input, factory).parse("s3://u@/test.cyberduck.ch/d/"));
    assertEquals(new Path("/test.cyberduck.ch/d", EnumSet.of(Path.Type.file)),
        new CommandLinePathParser(input, factory).parse("s3://u@/test.cyberduck.ch/d"));
    assertEquals(new Path("/test.cyberduck.ch/d", EnumSet.of(Path.Type.file)),
        new CommandLinePathParser(input, factory).parse("s3://u@/test.cyberduck.ch/d"));
    // Test bucket
    assertEquals(new Path("/test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume)),
        new CommandLinePathParser(input, factory).parse("s3:/test.cyberduck.ch"));
}
// Database-backed extension search: loads all active extensions, applies
// namespace-exclusion / target-platform / category / free-text filters in
// order, maps each survivor to an ExtensionSearch entry, sorts by the
// requested criterion, optionally reverses for descending order, and pages
// the result. Results are cached per the class-level annotations.
@Transactional
@Cacheable(CACHE_DATABASE_SEARCH)
@CacheEvict(value = CACHE_AVERAGE_REVIEW_RATING, allEntries = true)
public SearchHits<ExtensionSearch> search(ISearchService.Options options) {
    // grab all extensions
    var matchingExtensions = repositories.findAllActiveExtensions();
    // no extensions in the database
    if (matchingExtensions.isEmpty()) {
        return new SearchHitsImpl<>(0,TotalHitsRelation.OFF, 0f, null, null, Collections.emptyList(), null, null);
    }
    // exclude namespaces requested by the caller
    if(options.namespacesToExclude != null) {
        for(var namespaceToExclude : options.namespacesToExclude) {
            matchingExtensions = matchingExtensions.filter(extension -> !extension.getNamespace().getName().equals(namespaceToExclude));
        }
    }
    // filter target platform: at least one version must match it
    if(TargetPlatform.isValid(options.targetPlatform)) {
        matchingExtensions = matchingExtensions.filter(extension -> extension.getVersions().stream().anyMatch(ev -> ev.getTargetPlatform().equals(options.targetPlatform)));
    }
    // filter category (case-insensitive, against the latest version's categories)
    if (options.category != null) {
        matchingExtensions = matchingExtensions.filter(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            return latest.getCategories().stream().anyMatch(category -> category.equalsIgnoreCase(options.category));
        });
    }
    // filter text: substring match on name, namespace, description, or display name
    if (options.queryString != null) {
        matchingExtensions = matchingExtensions.filter(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            return extension.getName().toLowerCase().contains(options.queryString.toLowerCase())
                || extension.getNamespace().getName().contains(options.queryString.toLowerCase())
                || (latest.getDescription() != null && latest.getDescription()
                    .toLowerCase().contains(options.queryString.toLowerCase()))
                || (latest.getDisplayName() != null && latest.getDisplayName()
                    .toLowerCase().contains(options.queryString.toLowerCase()));
        });
    }
    // need to perform the sortBy ()
    // 'relevance' | 'timestamp' | 'rating' | 'downloadCount';
    Stream<ExtensionSearch> searchEntries;
    if("relevance".equals(options.sortBy) || "rating".equals(options.sortBy)) {
        // relevance/rating need precomputed search statistics
        var searchStats = new SearchStats(repositories);
        searchEntries = matchingExtensions.stream().map(extension -> relevanceService.toSearchEntry(extension, searchStats));
    } else {
        searchEntries = matchingExtensions.stream().map(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            var targetPlatforms = repositories.findExtensionTargetPlatforms(extension);
            return extension.toSearch(latest, targetPlatforms);
        });
    }
    var comparators = new HashMap<>(Map.of(
        "relevance", new RelevanceComparator(),
        "timestamp", new TimestampComparator(),
        "rating", new RatingComparator(),
        "downloadCount", new DownloadedCountComparator()
    ));
    var comparator = comparators.get(options.sortBy);
    if(comparator != null) {
        searchEntries = searchEntries.sorted(comparator);
    }
    var sortedExtensions = searchEntries.collect(Collectors.toList());
    // need to do sortOrder
    // 'asc' | 'desc';
    if ("desc".equals(options.sortOrder)) {
        // reverse the order
        Collections.reverse(sortedExtensions);
    }
    // Paging: clamp offset/size to the available range
    var totalHits = sortedExtensions.size();
    var endIndex = Math.min(sortedExtensions.size(), options.requestedOffset + options.requestedSize);
    var startIndex = Math.min(endIndex, options.requestedOffset);
    sortedExtensions = sortedExtensions.subList(startIndex, endIndex);
    List<SearchHit<ExtensionSearch>> searchHits;
    if (sortedExtensions.isEmpty()) {
        searchHits = Collections.emptyList();
    } else {
        // client is interested only in the extension IDs
        searchHits = sortedExtensions.stream().map(extensionSearch ->
            new SearchHit<>(null, null, null, 0.0f, null, null, null, null, null, null, extensionSearch)).collect(Collectors.toList());
    }
    return new SearchHitsImpl<>(totalHits, TotalHitsRelation.OFF, 0f, null, null, searchHits, null, null);
}
// Sorting by rating (ascending) must order extensions from lowest to highest.
@Test
public void testSortByRating() {
    var ext1 = mockExtension("yaml", 4.0, 1, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext2 = mockExtension("java", 5.0, 1, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext3 = mockExtension("openshift", 2.0, 1, 0, "redhat", List.of("Snippets", "Other"));
    var ext4 = mockExtension("foo", 1.0, 1, 0, "bar", List.of("Other"));
    Mockito.when(repositories.findAllActiveExtensions()).thenReturn(Streamable.of(List.of(ext1, ext2, ext3, ext4)));
    var searchOptions = new ISearchService.Options(null, null, TargetPlatform.NAME_UNIVERSAL, 50, 0, null, "rating", false);
    var result = search.search(searchOptions);
    // all extensions should be there
    assertThat(result.getTotalHits()).isEqualTo(4);
    // test now the order
    var hits = result.getSearchHits();
    assertThat(getIdFromExtensionHits(hits, 0)).isEqualTo(getIdFromExtensionName("foo"));
    assertThat(getIdFromExtensionHits(hits, 1)).isEqualTo(getIdFromExtensionName("openshift"));
    assertThat(getIdFromExtensionHits(hits, 2)).isEqualTo(getIdFromExtensionName("yaml"));
    assertThat(getIdFromExtensionHits(hits, 3)).isEqualTo(getIdFromExtensionName("java"));
}
// Renames a file or directory on the SMB share. The source is opened with
// DELETE access mask (needed for SMB rename); overwrite is requested when the
// transfer status reports the target exists. The share is always released,
// and the returned target path carries the source's original attributes.
@Override
public Path move(final Path source, final Path target, final TransferStatus status, final Callback delete, final ConnectionCallback prompt) throws BackgroundException {
    final SMBSession.DiskShareWrapper share = session.openShare(source);
    try {
        try (DiskEntry file = share.get().open(new SMBPathContainerService(session).getKey(source),
                Collections.singleton(AccessMask.DELETE),
                Collections.singleton(FileAttributes.FILE_ATTRIBUTE_NORMAL),
                Collections.singleton(SMB2ShareAccess.FILE_SHARE_READ),
                SMB2CreateDisposition.FILE_OPEN,
                Collections.singleton(source.isDirectory() ? SMB2CreateOptions.FILE_DIRECTORY_FILE : SMB2CreateOptions.FILE_NON_DIRECTORY_FILE))) {
            file.rename(new SmbPath(share.get().getSmbPath(), new SMBPathContainerService(session).getKey(target)).getPath(), status.isExists());
        }
    } catch(SMBRuntimeException e) {
        throw new SMBExceptionMappingService().map("Cannot rename {0}", e, source);
    } finally {
        session.releaseShare(share);
    }
    // Copy original file attributes
    return target.withAttributes(source.attributes());
}
// Moving a file that does not exist on the share must raise NotfoundException.
@Test(expected = NotfoundException.class)
public void testMoveNotFound() throws Exception {
    final Path workdir = new DefaultHomeFinderService(session).find();
    final Path test = new Path(workdir, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final Path target = new Path(workdir, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new SMBMoveFeature(session).move(test, target, new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback());
}
@Override protected Upstream doSelect(final List<Upstream> upstreamList, final String ip) { Map<String, Upstream> domainMap = upstreamList.stream() .collect(Collectors.toConcurrentMap(Upstream::buildDomain, upstream -> upstream)); domainMap.keySet().stream() .filter(key -> !countMap.containsKey(key)) .forEach(domain -> countMap.put(domain, Long.MIN_VALUE)); final String domain = countMap.entrySet().stream() // Ensure that the filtered domain is included in the domainMap. .filter(entry -> domainMap.containsKey(entry.getKey())) .min(Comparator.comparingLong(Map.Entry::getValue)) .map(Map.Entry::getKey) .orElse(upstreamList.get(0).buildDomain()); countMap.computeIfPresent(domain, (key, actived) -> Optional.of(actived).orElse(Long.MIN_VALUE) + 1); return domainMap.get(domain); }
// Two consecutive selections must alternate between the two upstreams,
// since each selection increments the chosen domain's active count.
@Test
public void testResponseTimeBalancer() throws Exception {
    buildUpstreamList();
    final LeastActiveLoadBalance leastActiveLoadBalance = new LeastActiveLoadBalance();
    Upstream upstream = leastActiveLoadBalance.doSelect(onlyOneList, "localhost");
    Upstream upstream1 = leastActiveLoadBalance.doSelect(onlyOneList, "localhost");
    Assertions.assertTrue((upstream.getUrl().equals("baidu.com") && upstream1.getUrl().equals("pro.jd.com"))
        || upstream1.getUrl().equals("baidu.com") && upstream.getUrl().equals("pro.jd.com"));
}
// Returns a transform that unwraps each KV's TimestampedValue payload,
// producing KV<K, V> elements. Deprecated; kept for source compatibility.
@Deprecated
public static <K, V> PTransform<PCollection<? extends KV<K, TimestampedValue<V>>>, PCollection<KV<K, V>>> extractFromValues() {
    return new ExtractTimestampsFromValues<>();
}
// After extraction, elements carry the unwrapped values and each element's
// timestamp equals the instant that was embedded in its TimestampedValue.
@Test
@Category(ValidatesRunner.class)
public void extractFromValuesSucceeds() {
    PCollection<KV<String, TimestampedValue<Integer>>> preified =
        pipeline.apply(
            Create.of(
                KV.of("foo", TimestampedValue.of(0, new Instant(0))),
                KV.of("foo", TimestampedValue.of(1, new Instant(1))),
                KV.of("bar", TimestampedValue.of(2, new Instant(2))),
                KV.of("baz", TimestampedValue.of(3, new Instant(3)))));
    PCollection<KV<String, Integer>> timestamped = preified.apply(ReifyTimestamps.extractFromValues());
    PAssert.that(timestamped)
        .containsInAnyOrder(KV.of("foo", 0), KV.of("foo", 1), KV.of("bar", 2), KV.of("baz", 3));
    timestamped.apply(
        "AssertElementTimestamps",
        ParDo.of(
            new DoFn<KV<String, Integer>, Void>() {
                @ProcessElement
                public void verifyTimestampsEqualValue(ProcessContext context) {
                    assertThat(
                        new Instant(context.element().getValue().longValue()),
                        equalTo(context.timestamp()));
                }
            }));
    pipeline.run();
}
// Rewrites the expression tree by applying the OperatorPlugin's process
// rule at each node; no extra context is threaded through (null).
public Expression rewrite(final Expression expression) {
    return new ExpressionTreeRewriter<>(new OperatorPlugin()::process)
        .rewrite(expression, null);
}
// String bounds in a WINDOWEND BETWEEN predicate must be rewritten to their
// numeric timestamp equivalents.
@Test
public void shouldReplaceBetweenWindowEndAndStrings() {
    // Given:
    final Expression predicate = getPredicate(
        "SELECT * FROM orders where WINDOWEND BETWEEN '2017-01-01' AND '2017-02-01';");
    // When:
    final Expression rewritten = rewriter.rewrite(predicate);
    // Then:
    assertThat(
        rewritten.toString(),
        is(String.format("(WINDOWEND BETWEEN %d AND %d)", A_TIMESTAMP, ANOTHER_TIMESTAMP))
    );
}
// Returns this codec's encoding name constant.
@Override
public String getMessageEncoding() {
    return MESSAGE_ENCODING;
}
// The identity codec must report "identity" as its encoding name.
@Test
void getMessageEncoding() {
    Assertions.assertEquals("identity", Identity.IDENTITY.getMessageEncoding());
}
// Hadoop-Configuration overload: wraps the conf and delegates to the
// ParquetConfiguration-based variant.
public static <T> T readObjectFromConfAsBase64(String key, Configuration conf) throws IOException {
    return readObjectFromConfAsBase64(key, new HadoopParquetConfiguration(conf));
}
// Reading an unset configuration key must yield null, not throw.
@Test
public void readObjectFromConfAsBase64UnsetKey() throws Exception {
    assertNull(SerializationUtil.readObjectFromConfAsBase64("non-existant-key", new Configuration()));
}
// Lists schema names from JDBC metadata, skipping the INFORMATION_SCHEMA and
// "system" internal schemas (case-insensitive).
@Override
public Collection<String> listSchemas(Connection connection) {
    try (ResultSet resultSet = connection.getMetaData().getSchemas()) {
        ImmutableSet.Builder<String> schemaNames = ImmutableSet.builder();
        while (resultSet.next()) {
            String schemaName = resultSet.getString("TABLE_SCHEM");
            // skip internal schemas
            if (!schemaName.equalsIgnoreCase("INFORMATION_SCHEMA")
                    && !schemaName.equalsIgnoreCase("system")) {
                schemaNames.add(schemaName);
            }
        }
        return schemaNames.build();
    } catch (SQLException e) {
        // NOTE(review): only the message is propagated — the SQLException cause
        // is dropped; consider passing `e` too if the exception type supports it.
        throw new StarRocksConnectorException(e.getMessage());
    }
}
// listDbNames must surface the schemas returned by JDBC metadata.
@Test
public void testListSchemas() throws SQLException {
    new Expectations() {
        {
            dataSource.getConnection();
            result = connection;
            minTimes = 0;
            connection.getMetaData().getSchemas();
            result = dbResult;
            minTimes = 0;
        }
    };
    JDBCMetadata jdbcMetadata = new JDBCMetadata(properties, "catalog", dataSource);
    List<String> result = jdbcMetadata.listDbNames();
    List<String> expectResult = Lists.newArrayList("clickhouse", "template1", "test");
    Assert.assertEquals(expectResult, result);
}
/**
 * Returns whether the given class (when it is a "normal" class per
 * ClassUtil) declares at least one public zero-argument method whose name
 * starts with "get" or "is", excluding the inherited getClass().
 */
public static boolean hasGetter(Class<?> clazz) {
    if (ClassUtil.isNormalClass(clazz)) {
        for (Method method : clazz.getMethods()) {
            if (method.getParameterCount() == 0) {
                final String name = method.getName();
                // getClass() comes from Object and is not a property getter.
                // (Merged the original nested ifs; replaced the Yoda-style
                // `false == ...` comparison with a plain negation.)
                if ((name.startsWith("get") || name.startsWith("is"))
                        && !"getClass".equals(name)) {
                    return true;
                }
            }
        }
    }
    return false;
}
// Object exposes no property getters (getClass() must be excluded).
// https://gitee.com/dromara/hutool/issues/I6M7Z7
@Test
public void hasGetterTest() {
    final boolean b = BeanUtil.hasGetter(Object.class);
    assertFalse(b);
}
// Creates a temporary file named from a random alphanumeric prefix joined
// with the original file's name.
@Override
public Local create(final Path file) {
    return this.create(String.format("%s-%s", new AlphanumericRandomStringService().random(), file.getName()));
}
// A freshly created temporary file must not exist yet, but its parent
// directories must — for both the name-only and the Path-based overloads.
@Test
public void testExists() {
    {
        final Local f = new FlatTemporaryFileService().create(new AlphanumericRandomStringService().random());
        assertFalse(f.exists());
        assertTrue(f.getParent().exists());
        assertTrue(f.getParent().getParent().exists());
    }
    {
        final Path file = new Path("/p/f", EnumSet.of(Path.Type.file));
        final Local f = new FlatTemporaryFileService().create(new AlphanumericRandomStringService().random(), file);
        assertFalse(f.exists());
        assertTrue(f.getParent().exists());
        assertTrue(f.getParent().getParent().exists());
    }
}
// Derives the effective predicate for the given plan node by visiting it with
// the extraction Visitor (no extra context is threaded through).
public RowExpression extract(PlanNode node) {
    return node.accept(new Visitor(domainTranslator, functionAndTypeManager), null);
}
// For a semi-join, only predicates from the source (left) side are pulled up;
// the filtering-side predicate (AV > 5) must not appear in the result.
@Test public void testSemiJoin() { PlanNode node = new SemiJoinNode( Optional.empty(), newId(), filter(baseTableScan, and(greaterThan(AV, bigintLiteral(10)), lessThan(AV, bigintLiteral(100)))), filter(baseTableScan, greaterThan(AV, bigintLiteral(5))), AV, BV, CV, Optional.empty(), Optional.empty(), Optional.empty(), ImmutableMap.of()); RowExpression effectivePredicate = effectivePredicateExtractor.extract(node); // Currently, only pull predicates through the source plan assertEquals(normalizeConjuncts(effectivePredicate), normalizeConjuncts(and(greaterThan(AV, bigintLiteral(10)), lessThan(AV, bigintLiteral(100))))); }
// Static factory for a wildcard-type template (e.g. `?`, `? extends T`,
// `? super T`), backed by the AutoValue-generated implementation.
public static UWildcardType create(BoundKind boundKind, UType bound) {
    return new AutoValue_UWildcardType(boundKind, bound);
}
// Equality contract: wildcards differing in bound kind or bound type fall into
// distinct equality groups (Guava EqualsTester checks equals/hashCode symmetry).
@Test public void equality() { UType objectType = UClassType.create("java.lang.Object", ImmutableList.<UType>of()); UType setType = UClassType.create("java.util.Set", ImmutableList.<UType>of(objectType)); new EqualsTester() .addEqualityGroup(UWildcardType.create(BoundKind.UNBOUND, objectType)) // ? .addEqualityGroup(UWildcardType.create(BoundKind.EXTENDS, objectType)) // ? extends Object .addEqualityGroup(UWildcardType.create(BoundKind.EXTENDS, setType)) // ? extends Set<Object> .addEqualityGroup(UWildcardType.create(BoundKind.SUPER, setType)) // ? super Set<Object> .testEquals(); }
public static boolean areExceptionsPresentInChain(Throwable error, Class ... types) { while (error != null) { for (Class type : types) { if (type.isInstance(error)) { return true; } } error = error.getCause(); } return false; }
// Matching a wrapped (nested) exception type one level down the cause chain.
@Test public void testAreExceptionsPresentInChain3() { assertTrue(Exceptions.areExceptionsPresentInChain(new IllegalArgumentException(new IllegalStateException()), IllegalStateException.class)); }
@Override
protected Release findLatestActiveRelease(String configAppId, String configClusterName, String configNamespace, ApolloNotificationMessages clientMessages) {
    // Straight delegation to the release service; clientMessages is unused here.
    return releaseService.findLatestActiveRelease(configAppId, configClusterName, configNamespace);
}
// When no active release exists for the namespace, loadConfig returns null.
@Test public void testLoadConfigWithReleaseNotFound() throws Exception { when(releaseService.findLatestActiveRelease(someConfigAppId, someClusterName, defaultNamespaceName)) .thenReturn(null); Release release = configService .loadConfig(someClientAppId, someClientIp, someClientLabel, someConfigAppId, someClusterName, defaultNamespaceName, someDataCenter, someNotificationMessages); assertNull(release); }
@Override
public void getConfig(MetricsNodesConfig.Builder builder) {
    // Populates the metrics-nodes config with one generated entry per container.
    builder.node.addAll(MetricsNodesConfigGenerator.generate(getContainers()));
}
// An admin-only, self-hosted model must not install security or ZooKeeper
// platform bundles.
@Test void unnecessary_bundles_are_not_installed() { VespaModel model = getModel(servicesWithAdminOnly(), self_hosted); PlatformBundlesConfig config = model.getConfig(PlatformBundlesConfig.class, CLUSTER_CONFIG_ID); Set<String> unnecessaryBundles = Stream.concat ( PlatformBundles.VESPA_SECURITY_BUNDLES.stream(), PlatformBundles.VESPA_ZK_BUNDLES.stream() ).map(Path::toString).collect(toSet()); assertTrue(config.bundlePaths().stream() .noneMatch(unnecessaryBundles::contains)); }
/**
 * Parses and percent-encodes the given endpoint URL, validating that it is
 * non-null, non-empty, carries an explicit scheme, and has a usable
 * authority/host. Rejected inputs raise a RuntimeException with a
 * human-readable reason.
 *
 * @param endpointUrl the raw URL string to validate and encode
 * @return the encoded URI
 */
public URI buildEncodedUri(String endpointUrl) {
    if (endpointUrl == null) {
        throw new RuntimeException("Url string cannot be null!");
    }
    if (endpointUrl.isEmpty()) {
        throw new RuntimeException("Url string cannot be empty!");
    }
    final URI uri = UriComponentsBuilder.fromUriString(endpointUrl).build().encode().toUri();
    final String scheme = uri.getScheme();
    if (scheme == null || scheme.isEmpty()) {
        throw new RuntimeException("Transport scheme(protocol) must be provided!");
    }
    final String authority = uri.getAuthority();
    final String host = uri.getHost();
    final boolean authorityNotValid = authority == null || authority.isEmpty();
    final boolean hostNotValid = host == null || host.isEmpty();
    if (authorityNotValid || hostNotValid) {
        throw new RuntimeException("Url string is invalid!");
    }
    return uri;
}
// A schemeless string like "aaa" must be rejected (real method invoked on mock).
@Test public void testBuildInvalidUri() { Mockito.when(client.buildEncodedUri(any())).thenCallRealMethod(); String url = "aaa"; assertThatThrownBy(() -> client.buildEncodedUri(url)); }
@Override
public MergedResult merge(final List<QueryResult> queryResults, final SQLStatementContext sqlStatementContext, final ShardingSphereDatabase database, final ConnectionContext connectionContext) throws SQLException {
    // Fast path: a single shard result that needs no aggregation rewrite can be
    // streamed through unchanged.
    if (1 == queryResults.size() && !isNeedAggregateRewrite(sqlStatementContext)) {
        return new IteratorStreamMergedResult(queryResults);
    }
    // Map column labels to result-set indexes using the first shard's metadata,
    // and expose the mapping to the select statement context.
    Map<String, Integer> columnLabelIndexMap = getColumnLabelIndexMap(queryResults.get(0));
    SelectStatementContext selectStatementContext = (SelectStatementContext) sqlStatementContext;
    selectStatementContext.setIndexes(columnLabelIndexMap);
    // Build the core merged result, then apply decorators (e.g. LIMIT handling).
    MergedResult mergedResult = build(queryResults, selectStatementContext, columnLabelIndexMap, database);
    return decorate(queryResults, selectStatementContext, mergedResult);
}
// A MySQL SELECT with LIMIT must produce a LimitDecoratorMergedResult wrapping
// an iterator-stream merged result.
@Test void assertBuildIteratorStreamMergedResultWithMySQLLimit() throws SQLException { final ShardingDQLResultMerger resultMerger = new ShardingDQLResultMerger(TypedSPILoader.getService(DatabaseType.class, "MySQL")); ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(database.getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(mock(ShardingSphereSchema.class)); MySQLSelectStatement selectStatement = (MySQLSelectStatement) buildSelectStatement(new MySQLSelectStatement()); selectStatement.setProjections(new ProjectionsSegment(0, 0)); selectStatement.setLimit(new LimitSegment(0, 0, new NumberLiteralLimitValueSegment(0, 0, 1L), null)); SelectStatementContext selectStatementContext = new SelectStatementContext(createShardingSphereMetaData(database), Collections.emptyList(), selectStatement, DefaultDatabase.LOGIC_NAME, Collections.emptyList()); MergedResult actual = resultMerger.merge(createQueryResults(), selectStatementContext, createDatabase(), mock(ConnectionContext.class)); assertThat(actual, instanceOf(LimitDecoratorMergedResult.class)); assertThat(((LimitDecoratorMergedResult) actual).getMergedResult(), instanceOf(IteratorStreamMergedResult.class)); }
@Override
public ParSeqBasedCompletionStage<T> whenCompleteAsync(BiConsumer<? super T, ? super Throwable> action, Executor executor) {
    return nextStageByComposingTask(_task.transformWith("whenCompleteAsync", prevTaskResult -> {
        if (prevTaskResult.isFailed()) {
            // Failure path: run the action on the executor (any exception it throws
            // is suppressed), then re-fail with the ORIGINAL error, matching the
            // CompletionStage#whenComplete contract.
            return Task.blocking(() -> {
                try {
                    action.accept(null, prevTaskResult.getError());
                } catch (Exception e) {
                    // no ops
                }
                return null;
            }, executor)
                .flatMap((t) -> Task.failure(prevTaskResult.getError())); // always Complete the stage with original failure
        } else {
            // Success path: run the action on the executor; the stage completes with
            // the original value — unless the action itself throws, which fails it.
            return Task.blocking(() -> {
                action.accept(prevTaskResult.get(), prevTaskResult.getError()); // Complete the stage with original value or new failure
                return prevTaskResult.get();
            }, executor);
        }
    }));
}
// The whenCompleteAsync action must run on the supplied executor's thread; a
// latch confirms it actually executed.
@Test public void testWhenCompleteAsync() throws Exception { CountDownLatch waitLatch = new CountDownLatch(1); CompletionStage<String> stage = createTestStage(TESTVALUE1).whenCompleteAsync((v, t) -> { assertEquals(THREAD_NAME_VALUE, Thread.currentThread().getName()); waitLatch.countDown(); }, _mockExecutor); finish(stage); assertTrue(waitLatch.await(1000, TimeUnit.MILLISECONDS)); }
/**
 * Parses a duration string of the form "&lt;number&gt; &lt;unit&gt;"
 * (exactly two whitespace-separated tokens). Any parsing failure is rethrown
 * as an IllegalArgumentException that includes the offending input.
 *
 * @param text the duration text to parse
 * @return the parsed Duration
 */
public static Duration parse(final String text) {
    try {
        final String[] parts = text.split("\\s");
        if (parts.length != 2) {
            throw new IllegalArgumentException("Expected 2 tokens, got: " + parts.length);
        }
        return buildDuration(parseNumeric(parts[0]), parts[1]);
    } catch (final Exception e) {
        throw new IllegalArgumentException("Invalid duration: '" + text + "'. " + e.getMessage(), e);
    }
}
// Non-numeric size token ("10s") must fail with a message naming the bad token.
@Test public void shouldThrowOnNonNumericDuration() { // Then: // When: final Exception e = assertThrows( IllegalArgumentException.class, () -> parse("10s Seconds") ); // Then: assertThat(e.getMessage(), containsString("Not numeric: '10s'")); }
/**
 * Builds a BigQuery job-id prefix by filling the {TYPE}, {JOB_ID}, {STEP} and
 * {RANDOM} placeholders of {@code BIGQUERY_JOB_TEMPLATE}. Dashes are stripped
 * from the job name; when {@code random} is null, the trailing "_{RANDOM}"
 * segment is removed entirely.
 *
 * Replacement values are passed through Matcher.quoteReplacement so that '$'
 * or '\' characters in the inputs are inserted literally instead of being
 * interpreted as regex replacement sequences (which could corrupt the id or
 * throw).
 *
 * @param jobName  pipeline job name (dashes removed)
 * @param stepUuid unique id of the step
 * @param type     BigQuery job type
 * @param random   optional random suffix; null omits the segment
 */
static String createJobIdPrefix(
    String jobName, String stepUuid, JobType type, @Nullable String random) {
    // Literal replace — no regex needed to drop dashes.
    String sanitizedJobName = jobName.replace("-", "");
    String result = BIGQUERY_JOB_TEMPLATE
        .replaceFirst("\\{TYPE}", java.util.regex.Matcher.quoteReplacement(type.toString()))
        .replaceFirst("\\{JOB_ID}", java.util.regex.Matcher.quoteReplacement(sanitizedJobName))
        .replaceFirst("\\{STEP}", java.util.regex.Matcher.quoteReplacement(stepUuid));
    if (random != null) {
        return result.replaceFirst("\\{RANDOM}", java.util.regex.Matcher.quoteReplacement(random));
    } else {
        return result.replaceFirst("_\\{RANDOM}", "");
    }
}
// Dashes are stripped from the job name and the random suffix is appended.
@Test public void testJobRandomInNames() { assertEquals( "beam_bq_job_EXPORT_beamappjobtest_abcd_RANDOME", BigQueryResourceNaming.createJobIdPrefix( "beamapp-job-test", "abcd", JobType.EXPORT, "RANDOME")); }
public int releaseMessageNotificationBatch() {
    // Reads the configured batch size (default used when unset); checkInt
    // presumably clamps/replaces out-of-range values with the default — see
    // its definition to confirm.
    int batch = getIntProperty("apollo.release-message.notification.batch", DEFAULT_RELEASE_MESSAGE_NOTIFICATION_BATCH);
    return checkInt(batch, 1, Integer.MAX_VALUE, DEFAULT_RELEASE_MESSAGE_NOTIFICATION_BATCH);
}
// A negative configured batch size falls back to the default (100).
@Test public void testReleaseMessageNotificationBatchWithInvalidNumber() throws Exception { int someBatch = -20; int defaultBatch = 100; when(environment.getProperty("apollo.release-message.notification.batch")).thenReturn(String.valueOf(someBatch)); assertEquals(defaultBatch, bizConfig.releaseMessageNotificationBatch()); }
public CatalogueTreeSortStrategy getStrategy(String strategyName) {
    CatalogueTreeSortStrategy catalogueTreeSortStrategy = Safes.of(catalogueTreeSortStrategyMap).get(strategyName);
    // Unknown (or null) strategy name → fall back to the default strategy.
    if (Objects.isNull(catalogueTreeSortStrategy)) {
        log.warn("Strategy {} is not defined. Use DefaultStrategy", strategyName);
        catalogueTreeSortStrategy = Safes.of(catalogueTreeSortStrategyMap).get(CatalogueSortConstant.STRATEGY_DEFAULT);
    }
    // Even the default strategy is missing → configuration error.
    if (Objects.isNull(catalogueTreeSortStrategy)) {
        throw new BusException(StrUtil.format("Strategy {} is not defined.", strategyName));
    }
    return catalogueTreeSortStrategy;
}
// A null strategy name resolves to the default strategy via the fallback path.
@Test public void getStrategyIllegalTest2() { when(catalogueTreeSortStrategyMap.get(anyString())).thenAnswer(invocationOnMock -> { String strategy = invocationOnMock.getArgument(0); return mockCatalogueTreeSortStrategyMap.get(strategy); }); CatalogueTreeSortStrategy strategy = catalogueTreeSortFactoryTest.getStrategy(null); assertEquals(defaultStrategy, strategy); }
// FEEL string(from) conversion: null input yields a null result; otherwise the
// value is rendered via TypeUtil.formatValue (second arg false — presumably the
// non-quoted rendering mode; confirm against TypeUtil).
public FEELFnResult<String> invoke(@ParameterName("from") Object val) {
    if ( val == null ) {
        return FEELFnResult.ofResult( null );
    } else {
        return FEELFnResult.ofResult( TypeUtil.formatValue(val, false) );
    }
}
// The overloaded invoke(mask, params) applies %s-style substitution.
@Test void invokeMaskedFormat() { FunctionTestUtil.assertResult(stringFunction.invoke("%s is here!", new Object[]{"Gorgonzola"}), "Gorgonzola " + "is here!"); }
public static boolean isDirectory(URL resourceURL) throws URISyntaxException { final String protocol = resourceURL.getProtocol(); switch (protocol) { case "jar": try { final JarURLConnection jarConnection = (JarURLConnection) resourceURL.openConnection(); final JarEntry entry = jarConnection.getJarEntry(); if (entry.isDirectory()) { return true; } // WARNING! Heuristics ahead. // It turns out that JarEntry#isDirectory() really just tests whether the filename ends in a '/'. // If you try to open the same URL without a trailing '/', it'll succeed — but the result won't be // what you want. We try to get around this by calling getInputStream() on the file inside the jar. // This seems to return null for directories (though that behavior is undocumented as far as I // can tell). If you have a better idea, please improve this. final String relativeFilePath = entry.getName(); final JarFile jarFile = jarConnection.getJarFile(); final ZipEntry zipEntry = jarFile.getEntry(relativeFilePath); final InputStream inputStream = jarFile.getInputStream(zipEntry); return inputStream == null; } catch (IOException e) { throw new ResourceNotFoundException(e); } case "file": return new File(resourceURL.toURI()).isDirectory(); default: throw new IllegalArgumentException("Unsupported protocol " + resourceURL.getProtocol() + " for resource " + resourceURL); } }
// A freshly created plain file (file:// protocol) must not report as a directory.
@Test void isDirectoryReturnsFalseForPlainFiles(@TempDir Path tempDir) throws Exception { final File tempFile = tempDir.resolve("resource_url_test").toFile(); assumeTrue(tempFile.createNewFile()); final URL url = tempFile.toURI().toURL(); assertThat(url.getProtocol()).isEqualTo("file"); assertThat(ResourceURL.isDirectory(url)).isFalse(); }
@Override
public ApiResult<CoordinatorKey, ConsumerGroupDescription> handleResponse(
        Node coordinator,
        Set<CoordinatorKey> groupIds,
        AbstractResponse abstractResponse
) {
    final Map<CoordinatorKey, ConsumerGroupDescription> completed = new HashMap<>();
    final Map<CoordinatorKey, Throwable> failed = new HashMap<>();
    final Set<CoordinatorKey> groupsToUnmap = new HashSet<>();
    // Dispatch on the concrete response type: classic groups answer with
    // DescribeGroupsResponse, new-protocol consumer groups with
    // ConsumerGroupDescribeResponse. Anything else is a programming error.
    if (abstractResponse instanceof DescribeGroupsResponse) {
        return handledClassicGroupResponse(
            coordinator,
            completed,
            failed,
            groupsToUnmap,
            (DescribeGroupsResponse) abstractResponse
        );
    } else if (abstractResponse instanceof ConsumerGroupDescribeResponse) {
        return handledConsumerGroupResponse(
            coordinator,
            completed,
            failed,
            groupsToUnmap,
            (ConsumerGroupDescribeResponse) abstractResponse
        );
    } else {
        throw new IllegalArgumentException("Received an unexpected response type.");
    }
}
// Full happy-path decode of a ConsumerGroupDescribeResponse into a
// ConsumerGroupDescription, including current and target member assignments.
@Test public void testSuccessfulHandleConsumerGroupResponse() { DescribeConsumerGroupsHandler handler = new DescribeConsumerGroupsHandler(false, logContext); Collection<MemberDescription> members = singletonList(new MemberDescription( "memberId", Optional.of("instanceId"), "clientId", "host", new MemberAssignment(mkSet( new TopicPartition("foo", 0), new TopicPartition("bar", 1)) ), Optional.of(new MemberAssignment(mkSet( new TopicPartition("foo", 1), new TopicPartition("bar", 2) ))) )); ConsumerGroupDescription expected = new ConsumerGroupDescription( groupId1, false, members, "range", GroupType.CONSUMER, ConsumerGroupState.STABLE, coordinator, Collections.emptySet() ); AdminApiHandler.ApiResult<CoordinatorKey, ConsumerGroupDescription> result = handler.handleResponse( coordinator, Collections.singleton(CoordinatorKey.byGroupId(groupId1)), new ConsumerGroupDescribeResponse( new ConsumerGroupDescribeResponseData() .setGroups(Collections.singletonList( new ConsumerGroupDescribeResponseData.DescribedGroup() .setGroupId(groupId1) .setGroupState("Stable") .setGroupEpoch(10) .setAssignmentEpoch(10) .setAssignorName("range") .setAuthorizedOperations(Utils.to32BitField(emptySet())) .setMembers(singletonList( new ConsumerGroupDescribeResponseData.Member() .setMemberId("memberId") .setInstanceId("instanceId") .setClientHost("host") .setClientId("clientId") .setMemberEpoch(10) .setRackId("rackid") .setSubscribedTopicNames(singletonList("foo")) .setSubscribedTopicRegex("regex") .setAssignment(new ConsumerGroupDescribeResponseData.Assignment() .setTopicPartitions(Arrays.asList( new ConsumerGroupDescribeResponseData.TopicPartitions() .setTopicId(Uuid.randomUuid()) .setTopicName("foo") .setPartitions(Collections.singletonList(0)), new ConsumerGroupDescribeResponseData.TopicPartitions() .setTopicId(Uuid.randomUuid()) .setTopicName("bar") .setPartitions(Collections.singletonList(1)) ))) .setTargetAssignment(new ConsumerGroupDescribeResponseData.Assignment()
.setTopicPartitions(Arrays.asList( new ConsumerGroupDescribeResponseData.TopicPartitions() .setTopicId(Uuid.randomUuid()) .setTopicName("foo") .setPartitions(Collections.singletonList(1)), new ConsumerGroupDescribeResponseData.TopicPartitions() .setTopicId(Uuid.randomUuid()) .setTopicName("bar") .setPartitions(Collections.singletonList(2)) ))) )) )) ) ); assertCompleted(result, expected); }
// Returns the `multiple` flag — set during construction when the LHS column
// wraps its cells in a shared pattern/eval (see the builder's constructor).
boolean isMultipleConstraints() {
    return multiple;
}
// Only a null column header yields single-constraint mode; pattern, eval and
// verbatim headers all imply multiple constraints.
@Test public void testIdentifyColumnCorrectly() { builder = new LhsBuilder(9, 1, null); assertThat(builder.isMultipleConstraints()).isFalse(); //will be added to Foo builder = new LhsBuilder(9, 1, "Foo"); assertThat(builder.isMultipleConstraints()).isTrue(); //will be added to eval builder = new LhsBuilder(9, 1, "f:Foo() eval "); assertThat(builder.isMultipleConstraints()).isTrue(); // will just be verbatim builder = new LhsBuilder(9, 1, "f: Foo()"); assertThat(builder.isMultipleConstraints()).isTrue(); }
// Returns the default undo-log parser singleton (lazy holder-class idiom:
// INSTANCE is created on first access of SingletonHolder).
public static UndoLogParser getInstance() {
    return SingletonHolder.INSTANCE;
}
// Unknown parser name ("fst") is rejected; the no-arg default is Jackson-based.
@Test void getInstance() { Assertions.assertThrowsExactly(IllegalArgumentException.class, () -> UndoLogParserFactory.getInstance("fst")); Assertions.assertTrue(UndoLogParserFactory.getInstance() instanceof JacksonUndoLogParser); }
@Override
public final void run() {
    // Nothing collected on the merging side — nothing to do.
    long valueCount = collector.getMergingValueCount();
    if (valueCount == 0) {
        return;
    }
    // Subclass hook that invokes the merge operations and is expected to bump
    // operationCount (presumably one semaphore permit is released per finished
    // operation — confirm in the subclass implementations).
    runInternal();
    assert operationCount > 0 : "No merge operations have been invoked in AbstractContainerMerger";
    try {
        // Wait for all invoked operations; the timeout scales with the number of
        // values but never drops below the minimal timeout.
        long timeoutMillis = Math.max(valueCount * TIMEOUT_FACTOR, MINIMAL_TIMEOUT_MILLIS);
        if (!semaphore.tryAcquire(operationCount, timeoutMillis, TimeUnit.MILLISECONDS)) {
            logger.warning("Split-brain healing for " + getLabel() + " didn't finish within the timeout...");
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag and treat the merge as finished.
        logger.finest("Interrupted while waiting for split-brain healing of " + getLabel() + "...");
        Thread.currentThread().interrupt();
    } finally {
        // Always release the collected containers.
        collector.destroy();
    }
}
// With assertions enabled, a merger whose runInternal() never invokes an
// operation must trip the operationCount assertion.
@Test(expected = AssertionError.class) @RequireAssertEnabled public void testMergerRun_whenMissingOperationInvocation_thenMergerThrowsAssertion() { TestContainerMerger merger = new TestContainerMerger(collector, nodeEngine, null); merger.run(); }
@Override
public List<OptExpression> transform(OptExpression input, OptimizerContext context) {
    LogicalOlapScanOperator logicalOlapScanOperator = (LogicalOlapScanOperator) input.getOp();
    LogicalOlapScanOperator prunedOlapScanOperator = null;
    if (logicalOlapScanOperator.getSelectedPartitionId() == null) {
        // First application: no partitions selected yet, run a fresh prune.
        prunedOlapScanOperator = OptOlapPartitionPruner.prunePartitions(logicalOlapScanOperator);
    } else {
        // do merge pruned partitions with new pruned partitions
        prunedOlapScanOperator = OptOlapPartitionPruner.mergePartitionPrune(logicalOlapScanOperator);
    }
    // Mark the operator so this rule is not applied again.
    Utils.setOpAppliedRule(prunedOlapScanOperator, Operator.OP_PARTITION_PRUNE_BIT);
    return Lists.newArrayList(OptExpression.create(prunedOlapScanOperator, input.getInputs()));
}
// List-partitioned table: the predicate province = 'guangdong' must prune down
// to the single partition (10001) whose value list contains that value.
@Test public void transformForSingleItemListPartition(@Mocked OlapTable olapTable, @Mocked ListPartitionInfo partitionInfo) throws AnalysisException { FeConstants.runningUnitTest = true; ColumnRefFactory columnRefFactory = new ColumnRefFactory(); ColumnRefOperator column = columnRefFactory.create("province", ScalarType.STRING, false); Map<ColumnRefOperator, Column> scanColumnMap = Maps.newHashMap(); scanColumnMap.put(column, new Column("province", Type.STRING, false)); Map<Column, ColumnRefOperator> columnMetaToColRefMap = new HashMap<>(); columnMetaToColRefMap.put(new Column(column.getName(), column.getType()), new ColumnRefOperator(1, column.getType(), column.getName(), false)); BinaryPredicateOperator binaryPredicateOperator = new BinaryPredicateOperator(BinaryType.EQ, column, ConstantOperator.createVarchar("guangdong")); ScalarOperator predicate = Utils.compoundAnd(binaryPredicateOperator); LogicalOlapScanOperator operator = new LogicalOlapScanOperator(olapTable, scanColumnMap, columnMetaToColRefMap, null, -1, predicate); Partition part1 = new Partition(10001L, "p1", null, null); Partition part2 = new Partition(10002L, "p2", null, null); List<LiteralExpr> p1 = Lists.newArrayList( new PartitionValue("guangdong").getValue(Type.STRING), new PartitionValue("shanghai").getValue(Type.STRING)); List<LiteralExpr> p2 = Lists.newArrayList( new PartitionValue("beijing").getValue(Type.STRING), new PartitionValue("chongqing").getValue(Type.STRING)); Map<Long, List<LiteralExpr>> literalExprValues = new HashMap<>(); literalExprValues.put(10001L, p1); literalExprValues.put(10002L, p2); List<ColumnId> partitionColumns = Lists.newArrayList(ColumnId.create("province")); new Expectations() { { olapTable.getPartitionInfo(); result = partitionInfo; partitionInfo.getType(); result = PartitionType.LIST; partitionInfo.getLiteralExprValues(); result = literalExprValues; olapTable.getPartitions(); result = Lists.newArrayList(part1, part2); minTimes = 0;
partitionInfo.getPartitionColumns((Map<ColumnId, Column>) any); result = Lists.newArrayList(new Column("province", Type.STRING, false)); minTimes = 0; partitionInfo.getPartitionIds(false); result = Lists.newArrayList(10001L, 10002L); olapTable.getPartition(10001L); result = part1; minTimes = 0; olapTable.getPartition(10002L); result = part2; minTimes = 0; } }; PartitionPruneRule rule = new PartitionPruneRule(); assertNull(operator.getSelectedPartitionId()); OptExpression optExpression = rule.transform(new OptExpression(operator), new OptimizerContext(new Memo(), columnRefFactory)).get(0); List<Long> selectPartitionIds = ((LogicalOlapScanOperator) optExpression.getOp()).getSelectedPartitionId(); assertEquals(1, selectPartitionIds.size()); long actual = selectPartitionIds.get(0); assertEquals(10001L, actual); }
// FEEL decimal(n, scale): rounds n to the given scale using banker's rounding
// (HALF_EVEN). Null arguments and out-of-range scales produce error results.
public FEELFnResult<BigDecimal> invoke(@ParameterName( "n" ) BigDecimal n, @ParameterName( "scale" ) BigDecimal scale) {
    if ( n == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "n", "cannot be null"));
    }
    if ( scale == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "scale", "cannot be null"));
    }
    // Based on Table 76: Semantics of numeric functions, the scale is in range −6111 .. 6176
    if (scale.compareTo(BigDecimal.valueOf(-6111)) < 0 || scale.compareTo(BigDecimal.valueOf(6176)) > 0) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "scale", "must be in range between -6111 to 6176."));
    }
    return FEELFnResult.ofResult( n.setScale( scale.intValue(), RoundingMode.HALF_EVEN ) );
}
// HALF_EVEN: 10.25 at scale 1 rounds to the even neighbour 10.2 (not 10.3).
@Test void invokeRoundingEven() { FunctionTestUtil.assertResult(decimalFunction.invoke(BigDecimal.valueOf(10.25), BigDecimal.ONE), BigDecimal.valueOf(10.2)); }
/**
 * Records a subscription level change for {@code user}: updates the level, the
 * subscription id, and both the accessed-at and level-changed-at timestamps
 * (both set to the same instant) in a single DynamoDB update.
 */
public CompletableFuture<Void> subscriptionLevelChanged(
    byte[] user, Instant subscriptionLevelChangedAt, long level, String subscriptionId) {
    checkUserLength(user);
    UpdateItemRequest request = UpdateItemRequest.builder()
        .tableName(table)
        .key(Map.of(KEY_USER, b(user)))
        .returnValues(ReturnValue.NONE)
        .updateExpression("SET "
            + "#accessed_at = :accessed_at, "
            + "#subscription_id = :subscription_id, "
            + "#subscription_level = :subscription_level, "
            + "#subscription_level_changed_at = :subscription_level_changed_at")
        .expressionAttributeNames(Map.of(
            "#accessed_at", KEY_ACCESSED_AT,
            "#subscription_id", KEY_SUBSCRIPTION_ID,
            "#subscription_level", KEY_SUBSCRIPTION_LEVEL,
            "#subscription_level_changed_at", KEY_SUBSCRIPTION_LEVEL_CHANGED_AT))
        .expressionAttributeValues(Map.of(
            ":accessed_at", n(subscriptionLevelChangedAt.getEpochSecond()),
            ":subscription_id", s(subscriptionId),
            ":subscription_level", n(level),
            ":subscription_level_changed_at", n(subscriptionLevelChangedAt.getEpochSecond())))
        .build();
    // Completes with null on success; failures surface through the future.
    return client.updateItem(request).thenApply(updateItemResponse -> null);
}
// After a level change, the stored record reflects the new level, the new
// subscription id, and identical accessed-at / level-changed-at timestamps.
@Test void testSubscriptionLevelChanged() { Instant at = Instant.ofEpochSecond(NOW_EPOCH_SECONDS + 500); long level = 1776; String updatedSubscriptionId = "new"; assertThat(subscriptions.create(user, password, created)).succeedsWithin(DEFAULT_TIMEOUT); assertThat(subscriptions.subscriptionCreated(user, "original", created, level - 1)).succeedsWithin( DEFAULT_TIMEOUT); assertThat(subscriptions.subscriptionLevelChanged(user, at, level, updatedSubscriptionId)).succeedsWithin( DEFAULT_TIMEOUT); assertThat(subscriptions.get(user, password)).succeedsWithin(DEFAULT_TIMEOUT).satisfies(getResult -> { assertThat(getResult).isNotNull(); assertThat(getResult.type).isEqualTo(FOUND); assertThat(getResult.record).isNotNull().satisfies(record -> { assertThat(record.accessedAt).isEqualTo(at); assertThat(record.subscriptionLevelChangedAt).isEqualTo(at); assertThat(record.subscriptionLevel).isEqualTo(level); assertThat(record.subscriptionId).isEqualTo(updatedSubscriptionId); }); }); }
/**
 * Publishes the given outgoing messages to {@code topic}.
 *
 * @param topic the Pub/Sub topic to publish to
 * @param outgoingMessages the messages to publish
 * @return an int result — presumably the number of messages published; confirm
 *         against the concrete implementations
 * @throws IOException on transport failure
 */
public abstract int publish(TopicPath topic, List<OutgoingMessage> outgoingMessages) throws IOException;
// Publishing the single expected message through the test client factory must
// succeed (the factory verifies expectations on close).
@Test public void publishOneMessage() throws IOException { OutgoingMessage expectedOutgoingMessage = OutgoingMessage.of( PubsubMessage.newBuilder().setData(ByteString.copyFromUtf8(DATA)).build(), MESSAGE_TIME, MESSAGE_ID, null); try (PubsubTestClientFactory factory = PubsubTestClient.createFactoryForPublish( TOPIC, Sets.newHashSet(expectedOutgoingMessage), ImmutableList.of())) { try (PubsubTestClient client = (PubsubTestClient) factory.newClient(null, null, null)) { client.publish(TOPIC, ImmutableList.of(expectedOutgoingMessage)); } } }
/**
 * Computes the point reached by travelling {@code distance} from {@code start}
 * along the initial {@code bearing} (degrees, clockwise from north), using the
 * spherical-earth great-circle "destination point" formula. The distance is in
 * the same unit as {@code EQUATORIAL_RADIUS} (presumably meters — confirm).
 */
public static LatLong destinationPoint(LatLong start, double distance, float bearing) {
    double theta = Math.toRadians(bearing);
    double delta = distance / EQUATORIAL_RADIUS; // angular distance in radians
    double phi1 = Math.toRadians(start.latitude);
    double lambda1 = Math.toRadians(start.longitude);
    // Spherical trigonometry: destination latitude first, then the longitude
    // offset relative to the start.
    double phi2 = Math.asin(Math.sin(phi1) * Math.cos(delta) + Math.cos(phi1) * Math.sin(delta) * Math.cos(theta));
    double lambda2 = lambda1 + Math.atan2(Math.sin(theta) * Math.sin(delta) * Math.cos(phi1), Math.cos(delta) - Math.sin(phi1) * Math.sin(phi2));
    return new LatLong(Math.toDegrees(phi2), Math.toDegrees(lambda2));
}
// 1000 units north-east of (45, 45), compared at microdegree precision.
@Test public void destinationPointTest() { LatLong start = new LatLong(45, 45); LatLong expected = new LatLong(45.006352, 45.008984); LatLong actual = start.destinationPoint(1000, 45); Assert.assertEquals(LatLongUtils.degreesToMicrodegrees(expected.latitude), LatLongUtils.degreesToMicrodegrees(actual.latitude), 0); Assert.assertEquals(LatLongUtils.degreesToMicrodegrees(expected.longitude), LatLongUtils.degreesToMicrodegrees(actual.longitude), 0); }
// Emits every key in the keyspace — a null pattern means "no filter".
public Flowable<String> getKeys() {
    return getKeysByPattern(null);
}
// After setting two buckets, the unfiltered key stream must contain both names.
@Test public void testGetKeys() { RKeysRx keys = redisson.getKeys(); sync(redisson.getBucket("test1").set(1)); sync(redisson.getBucket("test2").set(1)); Flowable<String> k = keys.getKeys(); assertThat(k.blockingIterable()).contains("test1", "test2"); }
@Override
public List<Intent> compile(LinkCollectionIntent intent, List<Intent> installable) {
    // Per-device ingress/egress ports derived from the intent's links and
    // filtered connect points.
    SetMultimap<DeviceId, PortNumber> inputPorts = HashMultimap.create();
    SetMultimap<DeviceId, PortNumber> outputPorts = HashMultimap.create();
    Map<ConnectPoint, Identifier<?>> labels = ImmutableMap.of();
    Optional<EncapsulationConstraint> encapConstraint = this.getIntentEncapConstraint(intent);
    computePorts(intent, inputPorts, outputPorts);
    // When encapsulation is requested, allocate per-port labels up front.
    if (encapConstraint.isPresent()) {
        labels = labelAllocator.assignLabelToPorts(intent.links(), intent.key(), encapConstraint.get().encapType(), encapConstraint.get().suggestedIdentifier());
    }
    ImmutableList.Builder<Intent> intentList = ImmutableList.builder();
    if (this.isDomainProcessingEnabled(intent)) {
        intentList.addAll(this.getDomainIntents(intent, domainService));
    }
    List<Objective> objectives = new ArrayList<>();
    List<DeviceId> devices = new ArrayList<>();
    for (DeviceId deviceId : outputPorts.keySet()) {
        // add only objectives that are not inside of a domain
        if (LOCAL.equals(domainService.getDomain(deviceId))) {
            List<Objective> deviceObjectives = createRules(intent, deviceId, inputPorts.get(deviceId), outputPorts.get(deviceId), labels);
            // devices gets one entry per objective so the lists stay aligned.
            deviceObjectives.forEach(objective -> {
                objectives.add(objective);
                devices.add(deviceId);
            });
        }
    }
    // if any objectives have been created
    if (!objectives.isEmpty()) {
        intentList.add(new FlowObjectiveIntent(appId, intent.key(), devices, objectives, intent.resources(), intent.resourceGroup()));
    }
    return intentList.build();
}
// Compiling a 3-device link-collection intent yields one FlowObjectiveIntent
// with a (forwarding, next) objective pair per device.
@Test public void testCompile() { LinkCollectionIntent intent = LinkCollectionIntent.builder() .appId(appId) .selector(selector) .treatment(treatment) .links(links) .filteredIngressPoints(ImmutableSet.of(new FilteredConnectPoint(d1p1))) .filteredEgressPoints(ImmutableSet.of(new FilteredConnectPoint(d3p1))) .build(); List<Intent> result = compiler.compile(intent, Collections.emptyList()); assertThat(result, hasSize(1)); assertThat(result.get(0), instanceOf(FlowObjectiveIntent.class)); FlowObjectiveIntent foIntent = (FlowObjectiveIntent) result.get(0); List<Objective> objectives = foIntent.objectives(); assertThat(objectives, hasSize(6)); /* * First set of objective */ forwardingObjective = (ForwardingObjective) objectives.get(0); nextObjective = (NextObjective) objectives.get(1); // expect selector and treatment TrafficSelector expectSelector = DefaultTrafficSelector.builder() .matchInPort(PortNumber.portNumber(1)) .build(); TrafficTreatment expectTreatment = DefaultTrafficTreatment.builder() .setOutput(PortNumber.portNumber(1)) .build(); // test case for first next objective checkNext(nextObjective, SIMPLE, expectTreatment, expectSelector, ADD); // test case for first forwarding objective checkForward(forwardingObjective, ADD, expectSelector, nextObjective.id(), SPECIFIC); /* * Second set of objective */ forwardingObjective = (ForwardingObjective) objectives.get(2); nextObjective = (NextObjective) objectives.get(3); expectSelector = DefaultTrafficSelector.builder() .matchInPort(PortNumber.portNumber(0)) .build(); // test case for second next objective checkNext(nextObjective, SIMPLE, expectTreatment, expectSelector, ADD); // test case for second forwarding objective checkForward(forwardingObjective, ADD, expectSelector, nextObjective.id(), SPECIFIC); /* * 3rd set of objective */ forwardingObjective = (ForwardingObjective) objectives.get(4); nextObjective = (NextObjective) objectives.get(5); expectSelector = DefaultTrafficSelector.builder()
.matchInPort(PortNumber.portNumber(1)) .build(); // test case for 3rd next objective checkNext(nextObjective, SIMPLE, expectTreatment, expectSelector, ADD); // test case for 3rd forwarding objective checkForward(forwardingObjective, ADD, expectSelector, nextObjective.id(), SPECIFIC); }
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) {
    // Look up the cached key-auth handle configured for this rule.
    KeyAuthRuleHandle keyAuthRuleHandle = KeyAuthPluginDataHandler.CACHED_HANDLE.get()
        .obtainHandle(CacheKeyUtils.INST.getKey(rule));
    // Misconfigured rule (no handle, or missing key name/key) → config error response.
    if (Objects.isNull(keyAuthRuleHandle)
        || StringUtils.isBlank(keyAuthRuleHandle.getKeyName())
        || StringUtils.isBlank(keyAuthRuleHandle.getKey())) {
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.KEY_NAME_AND_KEY_MUST_BE_CONFIGURED);
        return WebFluxResultUtils.result(exchange, error);
    }
    // Key matches → continue down the plugin chain; otherwise reject.
    if (checkKey(exchange, keyAuthRuleHandle.getKeyName(), keyAuthRuleHandle.getKey())) {
        return chain.execute(exchange);
    }
    Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.ERROR_KEY);
    return WebFluxResultUtils.result(exchange, error);
}
// A request carrying the wrong key value completes with an error result rather
// than continuing down the chain.
@Test public void testKeyAuthWithIncorrectKey() { ruleData.setHandle("{\"keyName\":\"apiKey\",\"key\":\"key\"," + "\"hideCredentials\":\"false\"}"); keyAuthPluginDataHandler.handlerRule(ruleData); exchange = MockServerWebExchange.from(MockServerHttpRequest .get("localhost") .header("apiKey", "123456") .build()); Mono<Void> mono = keyAuthPlugin.doExecute(exchange, chain, selectorData, ruleData); StepVerifier.create(mono).expectSubscription().verifyComplete(); }
@Override
public String getName() {
    // Localized display name ("Akamai") from the "Mosso" resource bundle.
    return LocaleFactory.localizedString("Akamai", "Mosso");
}
// The distribution configuration reports "Akamai" as its display name.
@Test public void testGetName() throws Exception { final DistributionConfiguration configuration = new SwiftDistributionConfiguration(session); assertEquals("Akamai", configuration.getName()); }
/**
 * Returns the value of the given property, failing fast when the property is
 * absent.
 *
 * @param key the property key to look up
 * @return the non-null property value
 * @throws IllegalArgumentException if the property is missing
 */
public String nonNullValue(String key) {
    final String value = value(key);
    if (value != null) {
        return value;
    }
    throw new IllegalArgumentException("Missing property: " + key);
}
// Missing key → IllegalArgumentException naming the absent property.
@Test public void nonNullValue_throws_IAE_on_non_existing_key() { Props props = new Props(new Properties()); assertThatThrownBy(() -> props.nonNullValue("other")) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Missing property: other"); }
@Override
public void upgrade() {
    // Idempotence guard: skip when this migration has already run on the cluster.
    if (clusterConfigService.get(MigrationCompleted.class) != null) {
        LOG.debug("Migration already completed.");
        return;
    }
    // Blank out non-empty query strings on dashboard searches, then record how
    // many documents were modified.
    final UpdateResult updateResult = searchesCollection
        .updateMany(
            and(
                isDashboard(),
                atLeastOneQueryHasNonEmptyQueryString()
            ),
            makeQueryStringEmpty(),
            forNonEmptyQueryStrings()
        );
    writeMigrationCompleted(updateResult.getModifiedCount());
}
// When the completion marker already exists, the migration touches no collections.
@Test public void doesNotRunIfMigrationHasCompletedBefore() { when(clusterConfigService.get(MigrationCompleted.class)).thenReturn(MigrationCompleted.create(0)); this.migration.upgrade(); verify(viewsCollection, never()).find(any(Bson.class)); verify(searchesCollection, never()).find(any(Bson.class)); }
/**
 * Starts the headless (standalone) KSQL server: loads UDFs, prepares the
 * processing-log topic, executes every statement in the queries file and
 * kicks off the version checker. Any startup failure is logged with the
 * query file path and rethrown.
 */
public void startAsync() {
    try {
        udfLoader.load();
        // Create the processing-log topic if needed; auto-creating the log STREAM
        // is not supported in headless mode, so only warn when it is enabled.
        ProcessingLogServerUtils.maybeCreateProcessingLogTopic(
            serviceContext.getTopicClient(),
            processingLogConfig,
            ksqlConfig);
        if (processingLogConfig.getBoolean(ProcessingLogConfig.STREAM_AUTO_CREATE)) {
            log.warn("processing log auto-create is enabled, but this is not supported "
                + "for headless mode.");
        }
        rocksDBConfigSetterHandler.accept(ksqlConfig);
        // Execute all statements from the supplied queries file.
        processesQueryFile(readQueriesFile(queriesFile));
        showWelcomeMessage();
        // Copy all non-null KSQL configs into Properties for the version checker.
        final Properties properties = new Properties();
        ksqlConfig.originals().forEach((key, value) -> {
            if (nonNull(value)) {
                properties.put(key, value.toString());
            }
        });
        versionChecker.start(KsqlModuleType.SERVER, properties);
    } catch (final Exception e) {
        log.error("Failed to start KSQL Server with query file: " + queriesFile, e);
        throw e;
    }
}
/**
 * A CREATE STREAM statement parsed from the queries file must be executed on
 * the engine with the server's session config.
 */
@Test
public void shouldRunCsStatement() {
    // Given:
    final PreparedStatement<CreateStream> cs = PreparedStatement.of("CS",
        new CreateStream(SOME_NAME, SOME_ELEMENTS, false, false, JSON_PROPS, false));
    givenQueryFileParsesTo(cs);
    // When:
    standaloneExecutor.startAsync();
    // Then:
    verify(ksqlEngine).execute(serviceContext, ConfiguredStatement.of(cs, SessionConfig.of(ksqlConfig, emptyMap())));
}
/**
 * Handles the AUTH_CREATE_ACL admin request: decodes the ACL from the request
 * body and stores it via the authorization metadata manager.
 *
 * @param ctx     the channel context of the caller (unused in this handler)
 * @param request the remoting command carrying the header and ACL body
 * @return the response command; SUCCESS on completion, otherwise the code set
 *         by the exception handler
 * @throws RemotingCommandException when the custom header cannot be decoded
 */
private RemotingCommand createAcl(ChannelHandlerContext ctx, RemotingCommand request) throws RemotingCommandException {
    RemotingCommand response = RemotingCommand.createResponseCommand(null);
    CreateAclRequestHeader requestHeader = request.decodeCommandCustomHeader(CreateAclRequestHeader.class);
    Subject subject = Subject.of(requestHeader.getSubject());
    AclInfo aclInfo = RemotingSerializable.decode(request.getBody(), AclInfo.class);
    // An ACL without policies is rejected up front.
    if (aclInfo == null || CollectionUtils.isEmpty(aclInfo.getPolicies())) {
        throw new AuthorizationException("The body of acl is null");
    }
    Acl acl = AclConverter.convertAcl(aclInfo);
    // Fall back to the subject from the request header when the body did not carry one.
    if (acl != null && acl.getSubject() == null) {
        acl.setSubject(subject);
    }
    // join() makes this handler synchronous: the response code is guaranteed to be
    // set (by thenAccept or exceptionally) before the response is returned.
    this.brokerController.getAuthorizationMetadataManager().createAcl(acl)
        .thenAccept(nil -> response.setCode(ResponseCode.SUCCESS))
        .exceptionally(ex -> {
            LOGGER.error("create acl for {} error", requestHeader.getSubject(), ex);
            return handleAuthException(response, ex);
        })
        .join();
    return response;
}
/**
 * Happy-path createAcl: the metadata manager completes successfully, so the
 * processor must answer with response code SUCCESS.
 */
@Test
public void testCreateAcl() throws RemotingCommandException {
    when(authorizationMetadataManager.createAcl(any(Acl.class)))
        .thenReturn(CompletableFuture.completedFuture(null));
    CreateAclRequestHeader createAclRequestHeader = new CreateAclRequestHeader();
    createAclRequestHeader.setSubject("User:abc");
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.AUTH_CREATE_ACL, createAclRequestHeader);
    request.setVersion(441);
    request.addExtField("AccessKey", "rocketmq");
    request.makeCustomHeaderToNet();
    // Body: one grant policy for topic publishing from a single source IP.
    AclInfo aclInfo = AclInfo.of("User:abc", Arrays.asList("Topic:*"), Arrays.asList("PUB"), Arrays.asList("192.168.0.1"), "Grant");
    request.setBody(JSON.toJSONBytes(aclInfo));
    RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request);
    assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
}
/**
 * FEEL number(from, grouping separator, decimal separator): parses a string
 * into a BigDecimal after removing the grouping separator and normalising the
 * decimal separator to a dot.
 * Valid grouping separators: space, dot, comma. Valid decimal separators:
 * dot, comma — and the decimal separator must differ from the grouping one.
 * Validation errors and unparsable input are reported as
 * {@code InvalidParametersEvent} error results, never as exceptions.
 */
public FEELFnResult<BigDecimal> invoke(@ParameterName("from") String from, @ParameterName("grouping separator") String group, @ParameterName("decimal separator") String decimal) {
    if ( from == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null"));
    }
    if ( group != null && !group.equals( " " ) && !group.equals( "." ) && !group.equals( "," ) ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "group", "not a valid one, can only be one of: dot ('.'), comma (','), space (' ') "));
    }
    if ( decimal != null ) {
        if (!decimal.equals( "." ) && !decimal.equals( "," )) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "not a valid one, can only be one of: dot ('.'), comma (',') "));
        } else if (group != null && decimal.equals( group )) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "cannot be the same as parameter 'group' "));
        }
    }
    // Separators are regex-escaped ("\\" + sep) so '.' is matched literally.
    if ( group != null ) {
        from = from.replaceAll( "\\" + group, "" );
    }
    if ( decimal != null ) {
        from = from.replaceAll( "\\" + decimal, "." );
    }
    BigDecimal result = NumberEvalHelper.getBigDecimalOrNull(from );
    if( from != null && result == null ) {
        // conversion failed
        return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to calculate final number result" ) );
    } else {
        return FEELFnResult.ofResult( result );
    }
}
/**
 * A non-numeric string must yield an InvalidParametersEvent error result.
 */
@Test
void invokeIllegalNumber() {
    FunctionTestUtil.assertResultError(numberFunction.invoke("test", null, null), InvalidParametersEvent.class);
}
/**
 * Caps {@code stats} by {@code cap}: the row count and every per-variable
 * statistic (NDV, value range, null count) are limited to the corresponding
 * values in {@code cap}. If either side has an unknown row count, the whole
 * result is unknown.
 */
public PlanNodeStatsEstimate capStats(PlanNodeStatsEstimate stats, PlanNodeStatsEstimate cap) {
    if (stats.isOutputRowCountUnknown() || cap.isOutputRowCountUnknown()) {
        return PlanNodeStatsEstimate.unknown();
    }
    PlanNodeStatsEstimate.Builder result = PlanNodeStatsEstimate.builder();
    double cappedRowCount = min(stats.getOutputRowCount(), cap.getOutputRowCount());
    result.setOutputRowCount(cappedRowCount);
    stats.getVariablesWithKnownStatistics().forEach(symbol -> {
        VariableStatsEstimate symbolStats = stats.getVariableStatistics(symbol);
        VariableStatsEstimate capSymbolStats = cap.getVariableStatistics(symbol);
        VariableStatsEstimate.Builder newSymbolStats = VariableStatsEstimate.builder();
        // for simplicity keep the average row size the same as in the input
        // in most cases the average row size doesn't change after applying filters
        newSymbolStats.setAverageRowSize(symbolStats.getAverageRowSize());
        // The distinct-value count can only shrink when rows are capped.
        newSymbolStats.setDistinctValuesCount(min(symbolStats.getDistinctValuesCount(), capSymbolStats.getDistinctValuesCount()));
        // The capped value range is the intersection of both ranges.
        double newLow = max(symbolStats.getLowValue(), capSymbolStats.getLowValue());
        double newHigh = min(symbolStats.getHighValue(), capSymbolStats.getHighValue());
        newSymbolStats.setLowValue(newLow);
        newSymbolStats.setHighValue(newHigh);
        // Cap the absolute null count, then convert back into a fraction of the
        // capped row count; with zero rows everything counts as null (fraction 1).
        double numberOfNulls = stats.getOutputRowCount() * symbolStats.getNullsFraction();
        double capNumberOfNulls = cap.getOutputRowCount() * capSymbolStats.getNullsFraction();
        double cappedNumberOfNulls = min(numberOfNulls, capNumberOfNulls);
        double cappedNullsFraction = cappedRowCount == 0 ? 1 : cappedNumberOfNulls / cappedRowCount;
        newSymbolStats.setNullsFraction(cappedNullsFraction);
        if (shouldUseHistograms) {
            // Narrow the histogram to the intersected range (no extra null weight).
            newSymbolStats.setHistogram(symbolStats.getHistogram().map(symbolHistogram -> addConjunction(symbolHistogram, new StatisticRange(newLow, newHigh, 0))));
        }
        result.addVariableStatistics(symbol, newSymbolStats.build());
    });
    return result.build();
}
/**
 * Capping against an unknown row count must yield NaN in every combination;
 * with two known counts the smaller one wins regardless of argument order.
 */
@Test
public void testCapRowCount() {
    PlanNodeStatsEstimate unknownRowCount = statistics(NaN, NaN, NaN, NaN, NON_EMPTY_RANGE);
    PlanNodeStatsEstimate first = statistics(20, NaN, NaN, NaN, NON_EMPTY_RANGE);
    PlanNodeStatsEstimate second = statistics(10, NaN, NaN, NaN, NON_EMPTY_RANGE);
    assertEquals(calculator.capStats(unknownRowCount, unknownRowCount).getOutputRowCount(), NaN);
    assertEquals(calculator.capStats(first, unknownRowCount).getOutputRowCount(), NaN);
    assertEquals(calculator.capStats(unknownRowCount, second).getOutputRowCount(), NaN);
    assertEquals(calculator.capStats(first, second).getOutputRowCount(), 10.0);
    assertEquals(calculator.capStats(second, first).getOutputRowCount(), 10.0);
}
/**
 * Ensures the given path ends with a '/' path separator.
 *
 * @param path the path to normalize; may be {@code null}
 * @return the path with a trailing '/' appended, the path unchanged when it
 *         already ends with '/', or {@code null} when the input is {@code null}
 */
public static String appendPathSeparatorIfMissing(String path) {
    if (path == null || path.endsWith("/")) {
        return path;
    }
    return path + "/";
}
/**
 * A trailing '/' is appended only when missing; null input passes through
 * unchanged as null.
 */
@Test
void appendPathSeparatorIfMissing() {
    String s = PathUtils.appendPathSeparatorIfMissing("a");
    assertThat(s).isEqualTo("a/");
    s = PathUtils.appendPathSeparatorIfMissing("a/");
    assertThat(s).isEqualTo("a/");
    s = PathUtils.appendPathSeparatorIfMissing(null);
    assertThat(s).isEqualTo(null);
}
/**
 * Materializes the current Flume configuration for this agent: loads
 * channels, sources and sinks, prunes channels that end up with no connected
 * components, and wires the remaining runners into a
 * {@code MaterializedConfiguration}. Instantiation failures are logged and
 * swallowed, yielding a (possibly partial) configuration.
 */
public MaterializedConfiguration getConfiguration() {
    MaterializedConfiguration conf = new SimpleMaterializedConfiguration();
    FlumeConfiguration fconfig = getFlumeConfiguration();
    AgentConfiguration agentConf = fconfig.getConfigurationFor(getAgentName());
    if (agentConf != null) {
        Map<String, ChannelComponent> channelComponentMap = Maps.newHashMap();
        Map<String, SourceRunner> sourceRunnerMap = Maps.newHashMap();
        Map<String, SinkRunner> sinkRunnerMap = Maps.newHashMap();
        try {
            // Channels first, then the sources and sinks that attach to them.
            loadChannels(agentConf, channelComponentMap);
            loadSources(agentConf, channelComponentMap, sourceRunnerMap);
            loadSinks(agentConf, channelComponentMap, sinkRunnerMap);
            // Snapshot the key set so entries can be removed while iterating.
            Set<String> channelNames = new HashSet<String>(channelComponentMap.keySet());
            for (String channelName : channelNames) {
                ChannelComponent channelComponent = channelComponentMap.get(channelName);
                if (channelComponent.components.isEmpty()) {
                    // Drop channels nothing connects to, and evict them from the
                    // per-type channel cache so they are not reused later.
                    LOGGER.warn("Channel {} has no components connected"
                        + " and has been removed.", channelName);
                    channelComponentMap.remove(channelName);
                    Map<String, Channel> nameChannelMap = channelCache.get(channelComponent.channel.getClass());
                    if (nameChannelMap != null) {
                        nameChannelMap.remove(channelName);
                    }
                } else {
                    LOGGER.info("Channel {} connected to {}", channelName, channelComponent.components.toString());
                    conf.addChannel(channelName, channelComponent.channel);
                }
            }
            for (Map.Entry<String, SourceRunner> entry : sourceRunnerMap.entrySet()) {
                conf.addSourceRunner(entry.getKey(), entry.getValue());
            }
            for (Map.Entry<String, SinkRunner> entry : sinkRunnerMap.entrySet()) {
                conf.addSinkRunner(entry.getKey(), entry.getValue());
            }
        } catch (InstantiationException ex) {
            LOGGER.error("Failed to instantiate component", ex);
        } finally {
            // The scratch maps are working state only; the result lives in conf.
            channelComponentMap.clear();
            sourceRunnerMap.clear();
            sinkRunnerMap.clear();
        }
    } else {
        LOGGER.warn("No configuration found for this host:{}", getAgentName());
    }
    return conf;
}
/**
 * The provider must cache channel instances: two consecutive
 * getConfiguration() calls have to return the very same channel object.
 */
@Test
public void testUnspecifiedChannel() throws Exception {
    String agentName = "agent1";
    Map<String, String> properties = getPropertiesForChannel(agentName, UnspecifiedChannel.class.getName());
    MemoryConfigurationProvider provider = new MemoryConfigurationProvider(agentName, properties);
    MaterializedConfiguration config1 = provider.getConfiguration();
    Channel channel1 = config1.getChannels().values().iterator().next();
    assertTrue(channel1 instanceof UnspecifiedChannel);
    MaterializedConfiguration config2 = provider.getConfiguration();
    Channel channel2 = config2.getChannels().values().iterator().next();
    assertTrue(channel2 instanceof UnspecifiedChannel);
    // Same instance, not merely an equal one.
    assertSame(channel1, channel2);
}
/**
 * Reports whether this window overlaps {@code other}. Two windows of the same
 * concrete type always overlap.
 *
 * @param other the window to compare against
 * @return always {@code true} for windows of the same concrete type
 * @throws IllegalArgumentException when {@code other} is a different window type
 */
@Override
public boolean overlap(final Window other) {
    if (getClass() == other.getClass()) {
        return true;
    }
    throw new IllegalArgumentException("Cannot compare windows of different type. Other window has type " + other.getClass() + ".");
}
/**
 * An unlimited window must overlap every other unlimited window regardless of
 * whether the other start is earlier, equal or later.
 */
@Test
public void shouldAlwaysOverlap() {
    assertTrue(window.overlap(new UnlimitedWindow(start - 1)));
    assertTrue(window.overlap(new UnlimitedWindow(start)));
    assertTrue(window.overlap(new UnlimitedWindow(start + 1)));
}
/**
 * @return the largest sequence number representable, i.e. the value of
 *         {@code SEQUENCE_NUMBER_MASK}
 */
public static long getMaxSequenceNumber() {
    return SEQUENCE_NUMBER_MASK;
}
/**
 * BlockId must expose the documented maximum sequence number constant.
 */
@Test
public void getMaxSequenceNumber() {
    assertEquals(MAX_SEQUENCE_NUMBER, BlockId.getMaxSequenceNumber());
}
/**
 * Returns a new stamp shifted backwards in time by the given number of
 * decimal seconds. Implemented by adding the negated offset.
 *
 * @param offsetInDecimalSeconds the amount to subtract, in seconds
 * @return a new {@code DateTimeStamp} offset by {@code -offsetInDecimalSeconds}
 */
public DateTimeStamp minus(double offsetInDecimalSeconds) {
    return add(-offsetInDecimalSeconds);
}
/**
 * minus(DateTimeStamp) must return the signed difference in decimal seconds,
 * whether the stamps carry only an uptime, only an ISO timestamp, or both.
 */
@Test
void testMinus() {
    // Uptime-only stamps.
    DateTimeStamp a = new DateTimeStamp(.586);
    DateTimeStamp b = new DateTimeStamp(.587);
    double diff = a.minus(b);
    assertEquals(.586 - .587, diff, .001);
    // Timestamp-only stamps.
    a = new DateTimeStamp("2018-04-04T09:10:00.586-0100");
    b = new DateTimeStamp("2018-04-04T09:10:00.587-0100");
    diff = a.minus(b);
    double expected = -0.001d;
    assertEquals(expected, diff, 0.001);
    // Stamps carrying both timestamp and uptime.
    a = new DateTimeStamp("2018-04-04T09:10:00.586-0100", .18);
    b = new DateTimeStamp("2018-04-04T09:10:00.587-0100", .19);
    diff = a.minus(b);
    assertEquals(.18 - .19, diff, .001);
}
/**
 * Creates a {@link TrustManagerFactory} for the configured algorithm, using
 * the configured security provider when one is set.
 *
 * @return a new trust manager factory instance
 * @throws NoSuchProviderException  when the configured provider is unknown
 * @throws NoSuchAlgorithmException when the configured algorithm is unsupported
 */
public TrustManagerFactory createTrustManagerFactory() throws NoSuchProviderException, NoSuchAlgorithmException {
    if (getProvider() == null) {
        return TrustManagerFactory.getInstance(getAlgorithm());
    }
    return TrustManagerFactory.getInstance(getAlgorithm(), getProvider());
}
/**
 * With default algorithm/provider settings the factory bean must produce a
 * non-null TrustManagerFactory.
 */
@Test
public void testDefaults() throws Exception {
    assertNotNull(factoryBean.createTrustManagerFactory());
}
/**
 * Looks up a data connection by name, verifies it is of the requested type
 * and increments its reference count before handing it out. The caller is
 * responsible for releasing the connection when done.
 *
 * @param name  the data connection name
 * @param clazz the expected concrete data connection type
 * @return the retained data connection, cast to {@code T}
 * @throws HazelcastException when the name is unknown or the type does not match
 */
@Override
@Nonnull
public <T extends DataConnection> T getAndRetainDataConnection(String name, Class<T> clazz) {
    // computeIfPresent performs the type check and retain atomically on the map entry.
    DataConnectionEntry dataConnection = dataConnections.computeIfPresent(name, (k, v) -> {
        if (!clazz.isInstance(v.instance)) {
            throw new HazelcastException("Data connection '" + name + "' must be an instance of " + clazz);
        }
        v.instance.retain();
        return v;
    });
    if (dataConnection == null) {
        throw new HazelcastException("Data connection '" + name + "' not found");
    }
    // Safe: clazz.isInstance was verified inside computeIfPresent above.
    //noinspection unchecked
    return (T) dataConnection.instance;
}
/**
 * A data connection added via dynamic config must be retrievable by name with
 * its configured type and custom properties intact.
 */
@Test
public void should_return_dynamically_added_data_connection() {
    instance.getConfig().addDataConnectionConfig(
        new DataConnectionConfig(TEST_DYNAMIC_CONFIG)
            .setType(DUMMY_DATA_CONNECTION_TYPE)
            .setProperty("customProperty", "value")
    );
    DataConnection dataConnection = dataConnectionService.getAndRetainDataConnection(TEST_DYNAMIC_CONFIG, DummyDataConnection.class);
    assertThat(dataConnection).isInstanceOf(DummyDataConnection.class);
    assertThat(dataConnection.getName()).isEqualTo(TEST_DYNAMIC_CONFIG);
    assertThat(dataConnection.getConfig().getProperties())
        .containsEntry("customProperty", "value");
}
/**
 * Builds the root resource models for the given Rest.li-annotated classes.
 *
 * @param restliAnnotatedClasses classes annotated as Rest.li resources
 * @return map from root resource path to its {@link ResourceModel}
 */
public static Map<String, ResourceModel> buildResourceModels(final Set<Class<?>> restliAnnotatedClasses) {
    final Map<String, ResourceModel> rootResourceModels = new HashMap<>();
    // Tracks every processed class so parent resources are only built once.
    final Map<Class<?>, ResourceModel> processedModels = new HashMap<>();
    restliAnnotatedClasses.forEach(annotatedClass ->
        processResourceInOrder(annotatedClass, processedModels, rootResourceModels));
    return rootResourceModels;
}
/**
 * Covers both a valid resource using @PathKeyParam/@PathKeysParam and an
 * invalid one referencing an unknown path key, which must fail with a
 * ResourceConfigException naming the offending parameter.
 */
@Test
public void testPathKeyParamAnnotations() {
    // Test correct use of both @PathKeyParam and @PathKeysParam
    final Map<String, ResourceModel> resourceModels = new HashMap<>();
    try {
        resourceModels.putAll(RestLiApiBuilder.buildResourceModels(Collections.singleton(SampleResources.PathKeyParamAnnotationsResource.class)));
    } catch (Exception exception) {
        Assert.fail(String.format("Unexpected exception: class: %s, message: \"%s\"", SampleResources.PathKeyParamAnnotationsResource.class.getSimpleName(), exception.getMessage()));
    }
    Assert.assertEquals(1, resourceModels.size());
    Assert.assertTrue(resourceModels.containsKey("/pathKeyParamAnnotations"));
    // Test incorrect usage of @PathKeyParam (unrecognized path key name)
    try {
        RestLiApiBuilder.buildResourceModels(Collections.singleton(SampleResources.BadPathKeyParamAnnotationsResource.class));
        Assert.fail("Expected a ResourceConfigException due to unrecognized path key names.");
    } catch (Exception exception) {
        Assert.assertTrue(exception instanceof ResourceConfigException);
        Assert.assertEquals("Parameter unknownId not found in path keys of class class "
            + "com.linkedin.restli.internal.server.model.SampleResources$BadPathKeyParamAnnotationsResource", exception.getMessage());
    }
}
/**
 * Utility class; the private constructor prevents instantiation.
 */
private QueryParamsDataMap() {
}
/**
 * A repeated query parameter ("ids") must be aggregated into a DataList in
 * original order, while a singular parameter ("q") stays a plain string.
 */
@Test
public void testMultipleSimpleKeys() throws Exception {
    String testQS = "ids=1&ids=2&ids=bla&q=someFinder";
    DataMap queryParamDataMap = queryParamsDataMap(testQS);
    Assert.assertEquals("someFinder", queryParamDataMap.get("q"));
    Object idsObj = queryParamDataMap.get("ids");
    Assert.assertTrue(idsObj instanceof DataList);
    DataList ids = (DataList)idsObj;
    Assert.assertEquals(ids.get(0), "1");
    Assert.assertEquals(ids.get(1), "2");
    Assert.assertEquals(ids.get(2), "bla");
}