focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Replaces a generic-decoded business exception with a RuntimeException when the
 * original throwable class cannot be resolved locally.
 *
 * <p>Returns the argument unchanged when the feature flag is off, the argument is
 * null, it is not a {@code GenericObject}, or it lacks any of the Throwable fields.
 */
public static Object judgeCustomThrowableForGenericObject(Object appObject) {
    if (!GENERIC_THROW_EXCEPTION || appObject == null) {
        return appObject;
    }
    if (!(appObject instanceof GenericObject)) {
        return appObject;
    }
    GenericObject generic = (GenericObject) appObject;
    // Only treat the object as a throwable when it carries every Throwable field.
    for (String field : THROWABLE_FIELDS) {
        if (!generic.hasField(field)) {
            return appObject;
        }
    }
    return new RuntimeException(
        "occur business exception, but type=" + generic.getType()
            + " class is not found, error: " + appObject);
}
/** Exercises judgeCustomThrowableForGenericObject: pass-through for null/plain/partial objects, RuntimeException once all Throwable fields are present. */
@Test public void testJudgeCustomThrowable() throws Exception { setGenericThrowException(true); try { Assert.assertNull(judgeCustomThrowableForGenericObject(null)); Object o = new Object(); Assert.assertEquals(o, judgeCustomThrowableForGenericObject(o)); GenericObject genericObject = new GenericObject(""); Assert.assertEquals(genericObject, judgeCustomThrowableForGenericObject(genericObject)); genericObject.putField("xxx", "yyy"); Assert.assertEquals(genericObject, judgeCustomThrowableForGenericObject(genericObject)); genericObject.putField("cause", "yyy"); genericObject.putField("detailMessage", "yyy"); genericObject.putField("stackTrace", "yyy"); genericObject.putField("suppressedExceptions", "yyy"); Assert.assertTrue(judgeCustomThrowableForGenericObject(genericObject) instanceof RuntimeException); } finally { setGenericThrowException(false); } }
/** Parses the plugin JSON config, extracts the secret key (empty string when absent), and publishes a JwtConfig singleton. */
@Override public void handlerPlugin(final PluginData pluginData) { Map<String, String> configMap = GsonUtils.getInstance().toObjectMap(pluginData.getConfig(), String.class); String secretKey = Optional.ofNullable(configMap.get(Constants.SECRET_KEY)).orElse(""); JwtConfig jwtConfig = new JwtConfig(); jwtConfig.setSecretKey(secretKey); Singleton.INST.single(JwtConfig.class, jwtConfig); }
/** Verifies that handlerPlugin stores the configured secretKey into the JwtConfig singleton. */
@Test public void testHandlerPlugin() { final PluginData pluginData = new PluginData("pluginId", "pluginName", "{\"secretKey\":\"shenyu\"}", "0", false, null); jwtPluginDataHandlerUnderTest.handlerPlugin(pluginData); JwtConfig jwtConfig = Singleton.INST.get(JwtConfig.class); Map<String, String> map = GsonUtils.getInstance().toObjectMap(pluginData.getConfig(), String.class); assertEquals(jwtConfig.getSecretKey(), map.get("secretKey")); }
/**
 * Builds a configuration from the given source path: opens the stream, parses it to a
 * JsonNode, and delegates to build(node, path). Rejects empty configurations and wraps
 * malformed input (JsonParseException) in a ConfigurationParsingException carrying the
 * parse location and detail.
 */
@Override public T build(ConfigurationSourceProvider provider, String path) throws IOException, ConfigurationException { try (InputStream input = provider.open(requireNonNull(path))) { final JsonNode node = mapper.readTree(createParser(input)); if (node == null) { throw ConfigurationParsingException .builder("Configuration at " + path + " must not be empty") .build(path); } return build(node, path); } catch (JsonParseException e) { throw ConfigurationParsingException .builder("Malformed " + formatName) .setCause(e) .setLocation(e.getLocation()) .setDetail(e.getMessage()) .build(path); } }
/** Verifies that a no-argument build() yields the example's default field values untouched. */
@Test void handleDefaultConfigurationWithoutOverriding() throws Exception { final ExampleWithDefaults example = new YamlConfigurationFactory<>(ExampleWithDefaults.class, validator, Jackson.newObjectMapper(), "dw") .build(); assertThat(example) .satisfies(eg -> assertThat(eg.name).isEqualTo("Coda Hale")) .satisfies(eg -> assertThat(eg.type).containsExactly("coder", "wizard")) .satisfies(eg -> assertThat(eg.properties).containsOnly(MapEntry.entry("debug", "true"), MapEntry.entry("settings.enabled", "false"))) .satisfies(eg -> assertThat(eg.servers) .satisfies(servers -> assertThat(servers).element(0).extracting(ExampleServer::getPort).isEqualTo(8080)) .satisfies(servers -> assertThat(servers).element(1).extracting(ExampleServer::getPort).isEqualTo(8081)) .satisfies(servers -> assertThat(servers).element(2).extracting(ExampleServer::getPort).isEqualTo(8082))); }
/**
 * Records the storage holding a striped-block replica. Validates that the reported
 * block is striped and belongs to this block group, then stores the storage at the
 * slot derived from the block index; when that slot is taken by a different storage
 * (over-replication) a free slot is used instead, unless the storage is already
 * recorded. Always returns true.
 */
@Override
boolean addStorage(DatanodeStorageInfo storage, Block reportedBlock) {
    Preconditions.checkArgument(BlockIdManager.isStripedBlockID(
        reportedBlock.getBlockId()), "reportedBlock is not striped");
    Preconditions.checkArgument(BlockIdManager.convertToStripedID(
        reportedBlock.getBlockId()) == this.getBlockId(),
        "reported blk_%s does not belong to the group of stored blk_%s",
        reportedBlock.getBlockId(), this.getBlockId());
    int blockIndex = BlockIdManager.getBlockIndex(reportedBlock);
    int index = blockIndex;
    DatanodeStorageInfo old = getStorageInfo(index);
    if (old != null && !old.equals(storage)) { // over replicated
        // check if the storage has been stored
        int i = findStorageInfo(storage);
        if (i == -1) {
            index = findSlot();
        } else {
            return true;
        }
    }
    addStorage(storage, index, blockIndex);
    return true;
}
/** Verifies that addStorage rejects a contiguous (non-striped) block with IllegalArgumentException. */
@Test(expected=IllegalArgumentException.class) public void testAddStorageWithReplicatedBlock() { DatanodeStorageInfo storage = DFSTestUtil.createDatanodeStorageInfo( "storageID", "127.0.0.1"); BlockInfo replica = new BlockInfoContiguous(new Block(1000L), (short) 3); info.addStorage(storage, replica); }
/**
 * Checks membership of the given address in the loaded IP list.
 * A null address or an unloaded list is treated as "not present".
 */
@Override
public boolean isIn(String ipAddress) {
    return ipAddress != null
            && addressList != null
            && addressList.includes(ipAddress);
}
/** Verifies that a FileBasedIPList built from a null file reports every address as absent. */
@Test public void testFileNotSpecified() { IPList ipl = new FileBasedIPList(null); assertFalse("110.113.221.222 is in the list", ipl.isIn("110.113.221.222")); }
/**
 * Lazily materializes field {@code fieldNum} by serializing the wrapped object's
 * struct field through HCatRecordSerDe. SerDe failures are rethrown as
 * IllegalStateException (cause preserved).
 */
@Override public Object get(int fieldNum) { try { StructField fref = soi.getAllStructFieldRefs().get(fieldNum); return HCatRecordSerDe.serializeField( soi.getStructFieldData(wrappedObject, fref), fref.getFieldObjectInspector()); } catch (SerDeException e) { throw new IllegalStateException("SerDe Exception deserializing",e); } }
/** Verifies that LazyHCatRecord#get returns the expected typed constants per field index. */
@Test public void testGet() throws Exception { HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector()); Assert.assertEquals(INT_CONST, ((Integer) r.get(0)).intValue()); Assert.assertEquals(LONG_CONST, ((Long) r.get(1)).longValue()); Assert.assertEquals(DOUBLE_CONST, ((Double) r.get(2)).doubleValue(), 0); Assert.assertEquals(STRING_CONST, r.get(3)); }
/** Removes the given master from sentinel monitoring via SENTINEL REMOVE (synchronous). */
@Override public void remove(NamedNode master) { connection.sync(RedisCommands.SENTINEL_REMOVE, master.getName()); }
/** Smoke test: removing the first known master must not throw. */
@Test public void testRemove() { Collection<RedisServer> masters = connection.masters(); connection.remove(masters.iterator().next()); }
/** Returns the parsed connection properties (the internal instance, not a copy). */
public Properties getProperties() { return properties; }
/** Verifies that non-alphanumeric clientTags in the URI are rejected with SQLException. */
@Test(expectedExceptions = SQLException.class) public void assertNonAlphanumericClientTags() throws SQLException { String clientTags = "d1,@d2,d3"; PrestoDriverUri parameters = createDriverUri("presto://localhost:8080?clientTags=" + clientTags); Properties properties = parameters.getProperties(); assertEquals(properties.getProperty(CLIENT_TAGS.getKey()), clientTags); }
/**
 * FEEL range(from) builtin: parses a range literal such as "[1..10)" into a Range.
 * Validates the opening/closing boundary characters, splits the literal on the ".."
 * separator, parses and evaluates both endpoint literals, and requires the endpoints
 * to be of equivalent types. Any malformed input yields an InvalidParametersEvent
 * error rather than an exception.
 * NOTE(review): `from.isEmpty() || from.isBlank()` is redundant — isBlank() already
 * covers the empty string. Also `split("\\.\\.")` assumes exactly one ".." occurrence
 * in the literal; confirm that is the intended grammar.
 */
public FEELFnResult<Range> invoke(@ParameterName("from") String from) { if (from == null || from.isEmpty() || from.isBlank()) { return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "cannot be null")); } Range.RangeBoundary startBoundary; if (from.startsWith("(") || from.startsWith("]")) { startBoundary = RangeBoundary.OPEN; } else if (from.startsWith("[")) { startBoundary = RangeBoundary.CLOSED; } else { return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "does not start with a valid character")); } Range.RangeBoundary endBoundary; if (from.endsWith(")") || from.endsWith("[")) { endBoundary = RangeBoundary.OPEN; } else if (from.endsWith("]")) { endBoundary = RangeBoundary.CLOSED; } else { return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "does not end with a valid character")); } String[] split = from.split("\\.\\."); if (split.length != 2) { return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "does not include two literals separated by `..` two dots characters")); } String leftString = split[0].substring(1); String rightString = split[1].substring(0, split[1].length() - 1); if ((leftString.isEmpty() || leftString.isBlank()) && (rightString.isEmpty() || rightString.isBlank())) { return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "at least one endpoint must not be null")); } BaseNode leftNode = parse(leftString); if (!nodeIsAllowed(leftNode)) { return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "left endpoint is not a recognised valid literal")); } BaseNode rightNode = parse(rightString); if (!nodeIsAllowed(rightNode)) { return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "right endpoint is not a recognised valid literal")); } Object left = leftNode.evaluate(getStubbed()); if (!nodeValueIsAllowed(left)) {
return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "left endpoint is not a valid value " + left.getClass())); } Object right = rightNode.evaluate(getStubbed()); if (!nodeValueIsAllowed(right)) { return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "right endpoint is not a valid value " + right.getClass())); } if (!nodesReturnsSameType(left, right)) { return FEELFnResult.ofError(new InvalidParametersEvent(FEELEvent.Severity.ERROR, "from", "endpoints must be of equivalent types")); } return FEELFnResult.ofResult(new RangeImpl(startBoundary, (Comparable) left, (Comparable) right, endBoundary)); }
/** Verifies that disallowed endpoint literals (if-expressions) yield an InvalidParametersEvent error. */
@Test void invokeInvalidTypes() { String from = "[if(false)..if(true)]"; FunctionTestUtil.assertResultError(rangeFunction.invoke(from), InvalidParametersEvent.class, from); }
/** Emits one {@code <flake-id-generator>} XML element per configured generator (prefetch-count, prefetch-validity-millis). */
private static void flakeIdGenerator(XmlGenerator gen, Map<String, ClientFlakeIdGeneratorConfig> flakeIdGenerators) { for (Map.Entry<String, ClientFlakeIdGeneratorConfig> entry : flakeIdGenerators.entrySet()) { ClientFlakeIdGeneratorConfig flakeIdGenerator = entry.getValue(); gen.open("flake-id-generator", "name", entry.getKey()) .node("prefetch-count", flakeIdGenerator.getPrefetchCount()) .node("prefetch-validity-millis", flakeIdGenerator.getPrefetchValidityMillis()) .close(); } }
/** Round-trips a flake-id-generator config through the XML generator and compares the maps. */
@Test public void flakeIdGenerator() { ClientFlakeIdGeneratorConfig expected = new ClientFlakeIdGeneratorConfig(randomString()); expected.setPrefetchCount(randomInt()) .setPrefetchValidityMillis(randomInt()); clientConfig.addFlakeIdGeneratorConfig(expected); Map<String, ClientFlakeIdGeneratorConfig> actual = newConfigViaGenerator().getFlakeIdGeneratorConfigMap(); assertMap(clientConfig.getFlakeIdGeneratorConfigMap(), actual); }
/**
 * Sets the elevation provider used during import. Rejects null (use
 * ElevationProvider.NOOP instead) and rejects any non-NOOP provider when the node
 * access does not store 3D data.
 * NOTE(review): IllegalStateException is used here for argument validation where
 * IllegalArgumentException would be conventional; callers may catch the current
 * type, so it is left unchanged.
 *
 * @return this reader, for chaining
 */
public OSMReader setElevationProvider(ElevationProvider eleProvider) { if (eleProvider == null) throw new IllegalStateException("Use the NOOP elevation provider instead of null or don't call setElevationProvider"); if (!nodeAccess.is3D() && ElevationProvider.NOOP != eleProvider) throw new IllegalStateException("Make sure you graph accepts 3D data"); this.eleProvider = eleProvider; return this; }
/** Verifies that node elevations are taken from the SRTM provider during OSM import. */
@Test
public void testReadEleFromDataProvider() {
    GraphHopper hopper = new GraphHopperFacade("test-osm5.xml");
    // get N10E046.hgt.zip
    ElevationProvider provider = new SRTMProvider(GraphHopperTest.DIR);
    hopper.setElevationProvider(provider);
    hopper.importOrLoad();
    Graph graph = hopper.getBaseGraph();
    int n10 = AbstractGraphStorageTester.getIdOf(graph, 49.501);
    int n30 = AbstractGraphStorageTester.getIdOf(graph, 49.5011);
    int n50 = AbstractGraphStorageTester.getIdOf(graph, 49.5001);
    EdgeIteratorState edge = GHUtility.getEdge(graph, n50, n30);
    // Expected 3D geometry carries elevations read from the provider.
    assertEquals(Helper.createPointList3D(49.5001, 11.501, 426, 49.5002, 11.5015, 441, 49.5011, 11.502, 410.0),
            edge.fetchWayGeometry(FetchMode.ALL));
    edge = GHUtility.getEdge(graph, n10, n50);
    assertEquals(Helper.createPointList3D(49.501, 11.5001, 383.0, 49.5001, 11.501, 426.0),
            edge.fetchWayGeometry(FetchMode.ALL));
}
/**
 * Builds the AuthRequest for the given social platform and user type. Starts from
 * the default configuration and, when an enabled DB client record exists, swaps in
 * a copied AuthConfig carrying the DB clientId/clientSecret (and agentId if set).
 */
@VisibleForTesting
AuthRequest buildAuthRequest(Integer socialType, Integer userType) {
    // 1. Look up the default configuration, read from application-*.yaml
    AuthRequest request = authRequestFactory.get(SocialTypeEnum.valueOfType(socialType).getSource());
    Assert.notNull(request, String.format("社交平台(%d) 不存在", socialType));
    // 2. Query the DB configuration entry; when present and enabled it overrides the default
    SocialClientDO client = socialClientMapper.selectBySocialTypeAndUserType(socialType, userType);
    if (client != null && Objects.equals(client.getStatus(), CommonStatusEnum.ENABLE.getStatus())) {
        // 2.1 Build a new AuthConfig object (copy, so the shared default is untouched)
        AuthConfig authConfig = (AuthConfig) ReflectUtil.getFieldValue(request, "config");
        AuthConfig newAuthConfig = ReflectUtil.newInstance(authConfig.getClass());
        BeanUtil.copyProperties(authConfig, newAuthConfig);
        // 2.2 Override the clientId + clientSecret credentials
        newAuthConfig.setClientId(client.getClientId());
        newAuthConfig.setClientSecret(client.getClientSecret());
        if (client.getAgentId() != null) { // override agentId when one is configured
            newAuthConfig.setAgentId(client.getAgentId());
        }
        // 2.3 Set it back into the request for subsequent use
        ReflectUtil.setFieldValue(request, "config", newAuthConfig);
    }
    return request;
}
/** Verifies that an enabled DB client config replaces the default AuthConfig on the request. */
@Test
public void testBuildAuthRequest_clientEnable() {
    // Prepare parameters
    Integer socialType = SocialTypeEnum.WECHAT_MP.getType();
    Integer userType = randomPojo(SocialTypeEnum.class).getType();
    // mock the corresponding AuthRequest implementation
    AuthConfig authConfig = mock(AuthConfig.class);
    AuthRequest authRequest = mock(AuthDefaultRequest.class);
    ReflectUtil.setFieldValue(authRequest, "config", authConfig);
    when(authRequestFactory.get(eq("WECHAT_MP"))).thenReturn(authRequest);
    // mock data
    SocialClientDO client = randomPojo(SocialClientDO.class, o -> o.setStatus(CommonStatusEnum.ENABLE.getStatus())
            .setUserType(userType).setSocialType(socialType));
    socialClientMapper.insert(client);
    // Invoke
    AuthRequest result = socialClientService.buildAuthRequest(socialType, userType);
    // Assert: same request returned, but its config has been replaced by the copy
    assertSame(authRequest, result);
    assertNotSame(authConfig, ReflectUtil.getFieldValue(authRequest, "config"));
}
/**
 * Reports whether the given path resolves to a field within the schema.
 * A path is contained exactly when field resolution yields a non-null field.
 */
public static boolean containsPath(DataSchema schema, String path) {
    return null != getField(schema, path);
}
/** Data-driven check of containsPath against the validation demo schema. */
@Test(dataProvider = "pathData") public void testContainsPath(String path, boolean expected) throws IOException { DataSchema validationDemoSchema = pdscToDataSchema(DATA_SCHEMA_PATH); Assert.assertEquals(DataSchemaUtil.containsPath(validationDemoSchema, path), expected); }
/**
 * Builds the joined stream for this join node. Non-foreign-key joins first verify
 * that both sides' topics have matching partition counts; the concrete joiner is
 * then selected from the left/right node output types.
 */
@Override public SchemaKStream<?> buildStream(final PlanBuildContext buildContext) { if (!joinKey.isForeignKey()) { ensureMatchingPartitionCounts(buildContext.getServiceContext().getTopicClient()); } final JoinerFactory joinerFactory = new JoinerFactory( buildContext, this, buildContext.buildNodeContext(getId().toString())); return joinerFactory.getJoiner(left.getNodeOutputType(), right.getNodeOutputType()).join(); }
/** Verifies a stream-table LEFT join delegates to SchemaKStream#leftJoin with the expected arguments. */
@Test
public void shouldPerformStreamToTableLeftJoin() {
    // Given:
    setupStream(left, leftSchemaKStream);
    setupTable(right, rightSchemaKTable);
    final JoinNode joinNode = new JoinNode(nodeId, LEFT, joinKey, true, left, right, empty(), "KAFKA");
    // When:
    joinNode.buildStream(planBuildContext);
    // Then:
    verify(leftSchemaKStream).leftJoin(rightSchemaKTable, SYNTH_KEY, VALUE_FORMAT.getFormatInfo(), CONTEXT_STACKER);
}
/**
 * Custom serialization: writes default fields, then the element count followed by
 * each element encoded via the type serializer (wrapped in a DataOutputView).
 * Must mirror the corresponding readObject.
 */
private void writeObject(ObjectOutputStream out) throws IOException { out.defaultWriteObject(); final int size = dataSet.size(); out.writeInt(size); if (size > 0) { DataOutputViewStreamWrapper wrapper = new DataOutputViewStreamWrapper(out); for (T element : dataSet) { serializer.serialize(element, wrapper); } } }
/** Verifies that a failing element serializer propagates TestException out of writeObject. */
@Test
void testSerializationFailure() {
    try (ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            ObjectOutputStream out = new ObjectOutputStream(buffer)) {
        // a mock serializer that fails when writing
        CollectionInputFormat<ElementType> inFormat = new CollectionInputFormat<>(
                Collections.singleton(new ElementType()), new TestSerializer(false, true));
        try {
            out.writeObject(inFormat);
            fail("should throw an exception");
        } catch (TestException e) {
            // expected
        } catch (Exception e) {
            fail("Exception not properly forwarded");
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
/**
 * Lists the given directory by delegating to the prefix-based overload.
 * Containers use an empty prefix; any other directory uses its key plus the
 * path delimiter.
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    final String prefix;
    if (containerService.isContainer(directory)) {
        prefix = StringUtils.EMPTY;
    }
    else {
        prefix = containerService.getKey(directory) + Path.DELIMITER;
    }
    return this.list(directory, listener, prefix);
}
/** Verifies that listing freshly created placeholder directories returns empty lists and fires the chunk callback. */
@Test public void testListPlaceholder() throws Exception { final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume)); container.attributes().setRegion("IAD"); final Path placeholder = new SwiftDirectoryFeature(session).mkdir(new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final AtomicBoolean callback = new AtomicBoolean(); assertTrue(new SwiftObjectListService(session).list(placeholder, new DisabledListProgressListener() { @Override public void chunk(final Path parent, final AttributedList<Path> list) { assertNotSame(AttributedList.EMPTY, list); callback.set(true); } }).isEmpty()); assertTrue(callback.get()); final Path placeholder2 = new SwiftDirectoryFeature(session).mkdir(new Path(placeholder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); assertTrue(new SwiftObjectListService(session).list(placeholder2, new DisabledListProgressListener()).isEmpty()); new SwiftDeleteFeature(session).delete(Arrays.asList(placeholder, placeholder2), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Applies a flow-rule installation context: computes the delta between the rules of
 * the Intents being uninstalled and those being installed, then submits a single
 * batched FlowRuleOperations (add / modify / remove) to the flow rule service.
 * Success or failure is reported asynchronously to the intentInstallCoordinator.
 */
@Override
public void apply(IntentOperationContext<FlowRuleIntent> context) {
    Optional<IntentData> toUninstall = context.toUninstall();
    Optional<IntentData> toInstall = context.toInstall();
    if (toInstall.isPresent() && toUninstall.isPresent()) {
        Intent intentToInstall = toInstall.get().intent();
        // Installed intents that must not be disrupted take the reallocation path.
        if (requireNonDisruptive(intentToInstall) && INSTALLED.equals(toUninstall.get().state())) {
            reallocate(context);
            return;
        }
    }
    if (!toInstall.isPresent() && !toUninstall.isPresent()) {
        // Nothing to do.
        intentInstallCoordinator.intentInstallSuccess(context);
        return;
    }
    List<FlowRuleIntent> uninstallIntents = context.intentsToUninstall();
    List<FlowRuleIntent> installIntents = context.intentsToInstall();
    List<FlowRule> flowRulesToUninstall;
    List<FlowRule> flowRulesToInstall;
    if (toUninstall.isPresent()) {
        // Remove tracked resource from both Intent and installable Intents.
        trackIntentResources(toUninstall.get(), uninstallIntents, REMOVE);
        // Retrieves all flow rules from all flow rule Intents.
        // Only rules that actually exist on the devices need removal.
        flowRulesToUninstall = uninstallIntents.stream()
                .map(FlowRuleIntent::flowRules)
                .flatMap(Collection::stream)
                .filter(flowRule -> flowRuleService.getFlowEntry(flowRule) != null)
                .collect(Collectors.toList());
    } else {
        // No flow rules to be uninstalled.
        flowRulesToUninstall = Collections.emptyList();
    }
    if (toInstall.isPresent()) {
        // Track resource from both Intent and installable Intents.
        trackIntentResources(toInstall.get(), installIntents, ADD);
        // Retrieves all flow rules from all flow rule Intents.
        flowRulesToInstall = installIntents.stream()
                .map(FlowRuleIntent::flowRules)
                .flatMap(Collection::stream)
                .collect(Collectors.toList());
    } else {
        // No flow rules to be installed.
        flowRulesToInstall = Collections.emptyList();
    }
    List<FlowRule> flowRuleToModify;
    List<FlowRule> dontTouch;
    // If both uninstall/install list contained equal (=match conditions are equal) FlowRules,
    // omit it from remove list, since it will/should be overwritten by install
    flowRuleToModify = flowRulesToInstall.stream()
            .filter(flowRule -> flowRulesToUninstall.stream().anyMatch(flowRule::equals))
            .collect(Collectors.toList());
    // If both contained exactMatch-ing FlowRules, remove from both list,
    // since it will result in no-op.
    dontTouch = flowRulesToInstall.stream()
            .filter(flowRule -> flowRulesToUninstall.stream().anyMatch(flowRule::exactMatch))
            .collect(Collectors.toList());
    flowRulesToUninstall.removeAll(flowRuleToModify);
    flowRulesToUninstall.removeAll(dontTouch);
    flowRulesToInstall.removeAll(flowRuleToModify);
    flowRulesToInstall.removeAll(dontTouch);
    flowRuleToModify.removeAll(dontTouch);
    if (flowRulesToInstall.isEmpty() && flowRulesToUninstall.isEmpty() && flowRuleToModify.isEmpty()) {
        // There is no flow rules to install/uninstall
        intentInstallCoordinator.intentInstallSuccess(context);
        return;
    }
    FlowRuleOperations.Builder builder = FlowRuleOperations.builder();
    // Add flows
    flowRulesToInstall.forEach(builder::add);
    // Modify flows
    flowRuleToModify.forEach(builder::modify);
    // Remove flows
    flowRulesToUninstall.forEach(builder::remove);
    FlowRuleOperationsContext flowRuleOperationsContext = new FlowRuleOperationsContext() {
        @Override
        public void onSuccess(FlowRuleOperations ops) {
            intentInstallCoordinator.intentInstallSuccess(context);
        }
        @Override
        public void onError(FlowRuleOperations ops) {
            intentInstallCoordinator.intentInstallFailed(context);
        }
    };
    FlowRuleOperations operations = builder.build(flowRuleOperationsContext);
    log.debug("applying intent {} -> {} with {} rules: {}",
            toUninstall.map(x -> x.key().toString()).orElse("<empty>"),
            toInstall.map(x -> x.key().toString()).orElse("<empty>"),
            operations.stages().stream().mapToLong(Set::size).sum(),
            operations.stages());
    flowRuleService.apply(operations);
}
/** Verifies that equal-match rules across uninstall/install are turned into modify operations (no adds/removes). */
@Test public void testRuleModify() { List<Intent> intentsToInstall = createFlowRuleIntents(); List<Intent> intentsToUninstall = createFlowRuleIntentsWithSameMatch(); IntentData toInstall = new IntentData(createP2PIntent(), IntentState.INSTALLING, new WallClockTimestamp()); toInstall = IntentData.compiled(toInstall, intentsToInstall); IntentData toUninstall = new IntentData(createP2PIntent(), IntentState.INSTALLED, new WallClockTimestamp()); toUninstall = IntentData.compiled(toUninstall, intentsToUninstall); IntentOperationContext<FlowRuleIntent> operationContext; IntentInstallationContext context = new IntentInstallationContext(toUninstall, toInstall); operationContext = new IntentOperationContext(intentsToUninstall, intentsToInstall, context); flowRuleService.load(operationContext.intentsToUninstall()); installer.apply(operationContext); IntentOperationContext successContext = intentInstallCoordinator.successContext; assertEquals(successContext, operationContext); assertEquals(0, flowRuleService.flowRulesRemove.size()); assertEquals(0, flowRuleService.flowRulesAdd.size()); assertEquals(1, flowRuleService.flowRulesModify.size()); FlowRuleIntent installedIntent = (FlowRuleIntent) intentsToInstall.get(0); assertEquals(flowRuleService.flowRulesModify.size(), installedIntent.flowRules().size()); assertTrue(flowRuleService.flowRulesModify.containsAll(installedIntent.flowRules())); }
/**
 * Canonicalizes a value for hash-based lookup: Numbers are normalized via
 * Numbers.canonicalizeForHashLookup so numerically-equal values hash alike;
 * all other values (including null) are returned as-is.
 */
public static Comparable canonicalizeForHashLookup(Comparable value) { if (value instanceof Number) { return Numbers.canonicalizeForHashLookup(value); } return value; }
/** Verifies canonicalization: non-numbers and null pass through; ints normalize to an equivalent long value. */
@Test public void testCanonicalization() { assertSame("foo", canonicalizeForHashLookup("foo")); assertSame(null, canonicalizeForHashLookup(null)); assertEquals(1234L, ((Number) canonicalizeForHashLookup(1234)).longValue()); }
/**
 * Sorts the issues according to the query's sort field and direction.
 * When either the sort field or the direction is missing, the issues are
 * returned in their original order.
 */
public List<IssueDto> sort() {
    final String sortField = query.sort();
    final Boolean ascending = query.asc();
    if (sortField == null || ascending == null) {
        // Incomplete sort specification: keep the original order.
        return issues;
    }
    return getIssueProcessor(sortField).sort(issues, ascending);
}
/** Verifies that a null sort field leaves the issue list in insertion order. */
@Test public void should_not_sort_with_null_sort() { IssueDto issue1 = new IssueDto().setKee("A").setAssigneeUuid("perceval"); IssueDto issue2 = new IssueDto().setKee("B").setAssigneeUuid("arthur"); IssueDto issue3 = new IssueDto().setKee("C").setAssigneeUuid("vincent"); IssueDto issue4 = new IssueDto().setKee("D").setAssigneeUuid(null); List<IssueDto> dtoList = newArrayList(issue1, issue2, issue3, issue4); IssueQuery query = IssueQuery.builder().sort(null).build(); IssuesFinderSort issuesFinderSort = new IssuesFinderSort(dtoList, query); List<IssueDto> result = newArrayList(issuesFinderSort.sort()); assertThat(result).hasSize(4); assertThat(result.get(0).getAssigneeUuid()).isEqualTo("perceval"); assertThat(result.get(1).getAssigneeUuid()).isEqualTo("arthur"); assertThat(result.get(2).getAssigneeUuid()).isEqualTo("vincent"); assertThat(result.get(3).getAssigneeUuid()).isNull(); }
/**
 * Compiles and executes the given code under a freshly generated class name.
 * Returns SUCCESS with the execution output, or ERROR with the exception message
 * when compilation/execution fails.
 */
@Override
public InterpreterResult interpret(String code, InterpreterContext context) {
    // choosing new name to class containing Main method
    String generatedClassName = "C" + UUID.randomUUID().toString().replace("-", "");
    try {
        String res = StaticRepl.execute(generatedClassName, code);
        return new InterpreterResult(InterpreterResult.Code.SUCCESS, res);
    } catch (Exception e) {
        LOGGER.error("Exception in Interpreter while interpret", e);
        return new InterpreterResult(InterpreterResult.Code.ERROR, e.getMessage());
    }
}
/** Verifies that source without a main method yields an ERROR interpreter result. */
@Test void testStaticReplWithoutMain() { StringBuffer sourceCode = new StringBuffer(); sourceCode.append("package org.mdkt;\n"); sourceCode.append("public class HelloClass {\n"); sourceCode.append(" public String hello() { return \"hello\"; }"); sourceCode.append("}"); InterpreterResult res = java.interpret(sourceCode.toString(), context); assertEquals(InterpreterResult.Code.ERROR, res.code()); }
/**
 * Attempts to acquire the write lock without blocking; returns false when the lock
 * is not free.
 * NOTE(review): on successful acquisition the status is set to FREE_STATUS, which
 * looks wrong for a write acquisition — a second tryWriteLock would then see the
 * lock as free again, contradicting the accompanying test's expectation that it
 * fails. Presumably a write-status constant was intended; verify against isFree()
 * and the status constants.
 */
public synchronized boolean tryWriteLock() { if (!isFree()) { return false; } else { status = FREE_STATUS; return true; } }
/** Verifies that a second tryWriteLock on an already write-locked instance fails. */
@Test public void multiTryWriteLockTest() { SimpleReadWriteLock simpleReadWriteLock = new SimpleReadWriteLock(); simpleReadWriteLock.tryWriteLock(); boolean result = simpleReadWriteLock.tryWriteLock(); Assert.isTrue(!result); }
/**
 * Reads the attributes of the given file. Container paths are resolved against the
 * session's root folders (matched case-insensitively by path or name, NotfoundException
 * when absent); all other paths are fetched from the Files API. API failures are mapped
 * to BackgroundException via the Storegate exception mapper.
 */
@Override public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException { try { final PathContainerService service = new DefaultPathContainerService(); if(service.isContainer(file)) { for(RootFolder r : session.roots()) { if(StringUtils.equalsIgnoreCase(file.getName(), PathNormalizer.name(r.getPath())) || StringUtils.equalsIgnoreCase(file.getName(), PathNormalizer.name(r.getName()))) { return this.toAttributes(r); } } throw new NotfoundException(file.getAbsolute()); } final FilesApi files = new FilesApi(session.getClient()); return this.toAttributes(files.filesGet_0(URIEncoder.encode(fileid.getPrefixedPath(file)))); } catch(ApiException e) { throw new StoregateExceptionMappingService(fileid).map("Failure to read attributes of {0}", e, file); } }
/** Verifies that a stale cached node id is refreshed from the server on attribute lookup. */
@Test
public void testChangedNodeId() throws Exception {
    final StoregateIdProvider nodeid = new StoregateIdProvider(session);
    final Path room = new StoregateDirectoryFeature(session, nodeid).mkdir(
            new Path(String.format("/My files/%s", new AlphanumericRandomStringService().random()),
                    EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
    final Path test = new StoregateTouchFeature(session, nodeid).touch(
            new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)),
            new TransferStatus());
    final String latestnodeid = test.attributes().getFileId();
    assertNotNull(latestnodeid);
    // Assume previously seen but changed on server
    nodeid.cache(test, String.valueOf(RandomUtils.nextLong()));
    final StoregateAttributesFinderFeature f = new StoregateAttributesFinderFeature(session, nodeid);
    assertEquals(latestnodeid, f.find(test).getFileId());
    new StoregateDeleteFeature(session, nodeid).delete(Collections.singletonList(room),
            new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Recursively converts the parser's current JSON value into plain Java objects:
 * null, Boolean, String, BigInteger (integers), Double (floats), HashMap for
 * objects, ArrayList for arrays. Throws JsonParseException on any other token.
 * The parser must already be positioned on the value's first token.
 */
public static Object parse(JsonParser parser) throws IOException { var token = parser.currentToken(); if (token == JsonToken.VALUE_NULL) { return null; } if (token.isScalarValue()) { if (token == JsonToken.VALUE_TRUE) { return true; } if (token == JsonToken.VALUE_FALSE) { return false; } if (token == JsonToken.VALUE_STRING) { return parser.getText(); } if (token == JsonToken.VALUE_NUMBER_INT) { return parser.getBigIntegerValue(); } if (token == JsonToken.VALUE_NUMBER_FLOAT) { return parser.getDoubleValue(); } throw new JsonParseException(parser, "Expecting {VALUE_TRUE, VALUE_FALSE, VALUE_STRING, VALUE_NUMBER_INT, VALUE_NUMBER_FLOAT} token, got " + token); } if (token == JsonToken.START_OBJECT) { var object = new HashMap<String, Object>(); String fieldName; while ((fieldName = parser.nextFieldName()) != null) { parser.nextToken(); var value = parse(parser); object.put(fieldName, value); } return object; } if (token == JsonToken.START_ARRAY) { var object = new ArrayList<>(); while ((token = parser.nextToken()) != JsonToken.END_ARRAY) { var value = parse(parser); object.add(value); } return object; } throw new JsonParseException(parser, "Unexpected token " + token); }
/** Verifies parsing of nested JSON objects into nested maps, with integers as BigInteger. */
@Test
void readNestedMap() throws IOException {
    //language=json
    var json = """
            {
              "template": {
                "data": {
                  "url": "https://tinkoff.ru",
                  "number": 1
                }
              }
            }
            """;
    try (var parser = JsonCommonModule.JSON_FACTORY.createParser(json)) {
        parser.nextToken();
        var parseResult = JsonObjectCodec.parse(parser);
        Assertions.assertThat(parseResult)
                .asInstanceOf(InstanceOfAssertFactories.map(String.class, Object.class))
                .hasSize(1)
                .containsKey("template")
                .extractingByKey("template")
                .asInstanceOf(InstanceOfAssertFactories.map(String.class, Object.class))
                .hasSize(1)
                .containsKey("data")
                .extractingByKey("data")
                .asInstanceOf(InstanceOfAssertFactories.map(String.class, Object.class))
                .hasSize(2)
                .containsEntry("url", "https://tinkoff.ru")
                .containsEntry("number", BigInteger.ONE);
    }
}
/**
 * Returns the stage name parsed from the build locator
 * ({@code pipeline/label/stage/...}), or {@code null} when the agent is not
 * building or the locator has fewer than three segments.
 */
public String getStageName() {
    if (isBuilding()) {
        // Check the segment count explicitly rather than relying on
        // ArrayIndexOutOfBoundsException for control flow.
        final String[] segments = buildLocator.split("/");
        return segments.length > 2 ? segments[2] : null;
    }
    return null;
}
/** Verifies that a locator with fewer than three segments yields a null stage name. */
@Test public void shouldReturnNullForStageName() { AgentBuildingInfo agentBuildingInfo = new AgentBuildingInfo("buildInfo", "foo"); assertNull(agentBuildingInfo.getStageName()); }
/** This collection view is read-only: removal is unconditionally unsupported. */
@Override public boolean remove(Object o) { throw new UnsupportedOperationException(); }
/** Verifies that remove on the read-only set throws UnsupportedOperationException. */
@Test(expected = UnsupportedOperationException.class) public void test_remove() { set.remove(1); }
/**
 * Draws a single index from the given discrete probability distribution by
 * delegating to the bulk sampler and unwrapping the one-element result.
 */
public static int random(double[] prob) {
    return random(prob, 1)[0];
}
/** Statistical check: the empirical histogram of 300 draws stays close (KL divergence) to the target distribution. */
@Test public void testRandom() { System.out.println("random"); double[] prob = {0.473646292, 0.206116725, 0.009308497, 0.227844687, 0.083083799}; int[] sample = MathEx.random(prob, 300); double[][] hist = Histogram.of(sample, 5); double[] p = new double[5]; for (int i = 0; i < 5; i++) { p[i] = hist[2][i] / 300.0; } assertTrue(MathEx.KullbackLeiblerDivergence(prob, p) < 0.05); }
/** Registers the executor type and every migration-step class into the container. */
@Override public void populateContainer(MigrationContainer container) { container.add(executorType); populateFromMigrationSteps(container); }
/** Verifies that each registered migration step class becomes resolvable from the container. */
@Test public void populateContainer_adds_classes_of_all_steps_defined_in_MigrationSteps() { when(migrationSteps.readAll()).thenReturn(asList( new RegisteredMigrationStep(1, "foo", MigrationStep1.class), new RegisteredMigrationStep(2, "bar", MigrationStep2.class), new RegisteredMigrationStep(3, "dor", MigrationStep3.class))); migrationContainer.startComponents(); underTest.populateContainer(migrationContainer); assertThat(migrationContainer.getComponentsByType(MigrationStep1.class)).isNotNull(); assertThat(migrationContainer.getComponentsByType(MigrationStep2.class)).isNotNull(); assertThat(migrationContainer.getComponentsByType(MigrationStep3.class)).isNotNull(); }
/**
 * Filters each per-field group of parsed terms independently through the
 * single-list overload, then concatenates the surviving terms into one list.
 */
public List<ParsedTerm> filterElementsContainingUsefulInformation(final Map<String, List<ParsedTerm>> parsedTermsGroupedByField) {
    return parsedTermsGroupedByField.values()
            .stream()
            .flatMap(terms -> filterElementsContainingUsefulInformation(terms).stream())
            .collect(Collectors.toList());
}
/** Verifies that a single-element group passes through the filter unchanged. */
@Test void doesNotLimitOnSingleElement() { final Map<String, List<ParsedTerm>> fieldTerms = Map.of("field", List.of( ParsedTerm.create("field", "oh!") )); assertThat(toTest.filterElementsContainingUsefulInformation(fieldTerms)) .hasSize(1) .contains(ParsedTerm.create("field", "oh!")); }
/** Computes the QR decomposition without overwriting this matrix (delegates to qr(false)). */
public Matrix.QR qr() { return qr(false); }
/** Verifies QR decomposition by solving a vector system and a multi-column system against known solutions. */
@Test public void testQR() { System.out.println("QR"); float[][] A = { {0.9000f, 0.4000f, 0.7000f}, {0.4000f, 0.5000f, 0.3000f}, {0.7000f, 0.3000f, 0.8000f} }; float[] b = {0.5f, 0.5f, 0.5f}; float[] x = {-0.2027027f, 0.8783784f, 0.4729730f}; Matrix a = Matrix.of(A); Matrix.QR qr = a.qr(); float[] x2 = qr.solve(b); assertEquals(x.length, x2.length); for (int i = 0; i < x.length; i++) { assertEquals(x[i], x2[i], 1E-6f); } float[][] B = { {0.5f, 0.2f}, {0.5f, 0.8f}, {0.5f, 0.3f} }; float[][] X = { {-0.2027027f, -1.2837838f}, { 0.8783784f, 2.2297297f}, { 0.4729730f, 0.6621622f} }; Matrix X2 = Matrix.of(B); qr.solve(X2); for (int i = 0; i < X.length; i++) { for (int j = 0; j < X[i].length; j++) { assertEquals(X[i][j], X2.get(i, j), 1E-6f); } } }
/**
 * Attempts to coerce the i-th element of {@code actualParams} from its actual type
 * to the expected parameter type.
 *
 * @param currentIdxActualParameterType the runtime type of the i-th actual parameter
 * @param expectedParameterType         the type expected at position i
 * @param actualParams                  the full actual-parameter array
 * @param i                             index of the parameter to coerce
 * @return the parameter array with the coerced value applied, or empty if no coercion applies
 */
public static Optional<Object[]> coerceParams(Class<?> currentIdxActualParameterType, Class<?> expectedParameterType, Object[] actualParams, int i) {
    return coerceParam(currentIdxActualParameterType, expectedParameterType, actualParams[i])
            .map(coerced -> actualCoerceParams(actualParams, coerced, i));
}
// A LocalDate coerced to ZonedDateTime should land at midnight UTC on the same
// date, while untouched parameters pass through unchanged.
@Test
void coerceParamsToDateTimeConverted() {
    Object value = LocalDate.now();
    Object[] actualParams = {value, "NOT_DATE"};
    Optional<Object[]> retrieved = CoerceUtil.coerceParams(LocalDate.class, ZonedDateTime.class, actualParams, 0);
    assertNotNull(retrieved);
    assertTrue(retrieved.isPresent());
    Object[] retrievedObjects = retrieved.get();
    assertTrue(retrievedObjects[0] instanceof ZonedDateTime);
    ZonedDateTime zdtRetrieved = (ZonedDateTime) retrievedObjects[0];
    // Date part preserved, time defaults to 00:00:00 at UTC.
    assertEquals(value, zdtRetrieved.toLocalDate());
    assertEquals(ZoneOffset.UTC, zdtRetrieved.getOffset());
    assertEquals(0, zdtRetrieved.getHour());
    assertEquals(0, zdtRetrieved.getMinute());
    assertEquals(0, zdtRetrieved.getSecond());
    // The second parameter (index 1) must be untouched.
    assertEquals(actualParams[1], retrievedObjects[1]);
}
/**
 * Registers a service instance under the given namespace/service.
 * Validates the instance, derives a client id from its address and
 * ephemeral flag, lazily creates the backing ip/port client, then delegates
 * the actual registration to the client operation service.
 *
 * @throws NacosException if the instance fails legality checks or registration fails
 */
@Override
public void registerInstance(String namespaceId, String serviceName, Instance instance) throws NacosException {
    NamingUtils.checkInstanceIsLegal(instance);
    boolean ephemeral = instance.isEphemeral();
    // Client identity is keyed by "ip:port" plus the ephemeral flag.
    String clientId = IpPortBasedClient.getClientId(instance.toInetAddr(), ephemeral);
    createIpPortClientIfAbsent(clientId);
    Service service = getService(namespaceId, serviceName, ephemeral);
    clientOperationService.registerInstance(service, instance, clientId);
}
// Registration must be delegated to the client operation service exactly once.
@Test
void testRegisterInstance() throws NacosException {
    instanceOperatorClient.registerInstance("A", "B", new Instance());
    Mockito.verify(clientOperationService).registerInstance(Mockito.any(), Mockito.any(), Mockito.anyString());
}
/**
 * Returns the configured maximum number of idle connections in the pool.
 */
public int getMaxIdle() {
    return maxIdle;
}
// When not explicitly configured, maxIdle must fall back to its documented default.
@Test
public void maxIdleDefaultValueTest() {
    assertEquals(DEFAULT_MAX_IDLE, redisConfigProperties.getMaxIdle());
}
/**
 * Loads the UuidGenerator to use: the one explicitly selected via options when set,
 * otherwise the single discovered implementation (or the default).
 */
UuidGenerator loadUuidGenerator() {
    ClassLoader classLoader = classLoaderSupplier.get();
    ServiceLoader<UuidGenerator> serviceLoader = ServiceLoader.load(UuidGenerator.class, classLoader);
    Class<? extends UuidGenerator> requestedGenerator = options.getUuidGeneratorClass();
    return requestedGenerator == null
            ? loadSingleUuidGeneratorOrDefault(serviceLoader)
            : loadSelectedUuidGenerator(serviceLoader, requestedGenerator);
}
// When a generator class is explicitly selected via Options, it must win even if
// several other implementations are discoverable on the classpath.
@Test
void test_case_8() {
    Options options = () -> IncrementingUuidGenerator.class;
    UuidGeneratorServiceLoader loader = new UuidGeneratorServiceLoader(
        () -> new ServiceLoaderTestClassLoader(UuidGenerator.class,
            RandomUuidGenerator.class,
            IncrementingUuidGenerator.class,
            OtherGenerator.class,
            YetAnotherGenerator.class),
        options);
    assertThat(loader.loadUuidGenerator(), instanceOf(IncrementingUuidGenerator.class));
}
/**
 * Returns the encoder used for map values (the codec's shared encoder instance).
 */
@Override
public Encoder getMapValueEncoder() {
    return encoder;
}
// The map value encoder must serialize the fixture map to the expected JSON text.
@Test
public void shouldSerializeTheMapCorrectly() throws Exception {
    assertThat(mapCodec.getMapValueEncoder().encode(map).toString(CharsetUtil.UTF_8))
            .isEqualTo("{\"foo\":[\"bar\"]}");
}
/**
 * Selects the partitions eligible for compaction right now: the next compaction
 * time has passed, the owning table is not excluded, and the partition is ready
 * for compaction.
 *
 * @param statistics    candidate partition statistics
 * @param excludeTables table ids whose partitions must be skipped
 * @return the eligible partitions, in stream encounter order
 */
@Override
@NotNull
public List<PartitionStatistics> select(Collection<PartitionStatistics> statistics, Set<Long> excludeTables) {
    long now = System.currentTimeMillis();
    return statistics.stream()
            .filter(p -> p.getNextCompactionTime() <= now
                    && !excludeTables.contains(p.getPartition().getTableId())
                    && isReadyForCompaction(p, now))
            .collect(Collectors.toList());
}
// A partition whose nextCompactionTime lies in the future must not be selected;
// once that time has passed it must be selected.
@Test
public void testCompactionTimeNotReached() {
    List<PartitionStatistics> statisticsList = new ArrayList<>();
    final PartitionIdentifier partitionIdentifier = new PartitionIdentifier(1, 2, 4);
    PartitionStatistics statistics = new PartitionStatistics(partitionIdentifier);
    statistics.setCompactionVersion(new PartitionVersion(1, 0));
    statistics.setCurrentVersion(new PartitionVersion(MIN_COMPACTION_VERSIONS + 1, System.currentTimeMillis()));
    statisticsList.add(statistics);

    // Next compaction one minute in the future -> not eligible.
    statistics.setNextCompactionTime(System.currentTimeMillis() + 60 * 1000);
    Assert.assertEquals(0, selector.select(statisticsList, new HashSet<Long>()).size());

    // Next compaction just in the past -> eligible.
    statistics.setNextCompactionTime(System.currentTimeMillis() - 10);
    Assert.assertSame(statistics, selector.select(statisticsList, new HashSet<Long>()).get(0));
}
/**
 * Returns the distinct n-grams of the given length found in {@code input},
 * in first-occurrence order.
 *
 * @param input  the source string
 * @param length the n-gram length
 * @return the unique n-grams, or an empty array when {@code length} is
 *         non-positive or exceeds the input length
 */
@ScalarFunction
public static String[] uniqueNgrams(String input, int length) {
    // Guard against non-positive lengths as well: previously a negative length
    // slipped past the `length == 0` check and substring() threw
    // StringIndexOutOfBoundsException.
    if (length <= 0 || length > input.length()) {
        return new String[0];
    }
    // Linked set preserves first-occurrence order while de-duplicating.
    ObjectSet<String> ngramSet = new ObjectLinkedOpenHashSet<>();
    for (int i = 0; i < input.length() - length + 1; i++) {
        ngramSet.add(input.substring(i, i + length));
    }
    return ngramSet.toArray(new String[0]);
}
// Data-provider driven check of both uniqueNgrams overloads: exact-length
// n-grams and the min..max length range variant.
@Test(dataProvider = "ngramTestCases")
public void testNGram(String input, int minGram, int maxGram, String[] expectedExactNGram, String[] expectedNGram) {
    assertEquals(StringFunctions.uniqueNgrams(input, maxGram), expectedExactNGram);
    assertEquals(StringFunctions.uniqueNgrams(input, minGram, maxGram), expectedNGram);
}
@Udf(description = "Converts a string representation of a date in the given format"
    + " into the number of days since 1970-01-01 00:00:00 UTC/GMT.")
public int stringToDate(
    @UdfParameter(
        description = "The string representation of a date.") final String formattedDate,
    @UdfParameter(
        description = "The format pattern should be in the format expected by"
            + " java.time.format.DateTimeFormatter.") final String formatPattern) {
  // NB: We do not perform a null here preferring to throw an exception as
  // there is no sentinel value for a "null" Date.
  try {
    // `formatters` is a cache keyed by pattern; get() may throw ExecutionException
    // when building the formatter fails.
    final DateTimeFormatter formatter = formatters.get(formatPattern);
    // Epoch-day fits in an int for any representable LocalDate of interest.
    return ((int)LocalDate.parse(formattedDate, formatter).toEpochDay());
  } catch (final ExecutionException | RuntimeException e) {
    // Wrap both cache-build and parse failures in a single KSQL-level error.
    throw new KsqlFunctionException("Failed to parse date '" + formattedDate
        + "' with formatter '" + formatPattern
        + "': " + e.getMessage(), e);
  }
}
// An unparseable format pattern must surface as a KsqlFunctionException whose
// message names both the input and the offending pattern.
@Test
public void shouldThrowIfFormatInvalid() {
    // When:
    final Exception e = assertThrows(
        KsqlFunctionException.class,
        () -> udf.stringToDate("2021-12-01", "invalid")
    );

    // Then:
    assertThat(e.getMessage(), containsString("Failed to parse date '2021-12-01' with formatter 'invalid'"));
}
/**
 * Lists the partition names of the given table via the Hive metastore operations.
 * NOTE(review): the {@code version} argument is ignored here — partition keys are
 * presumably version-independent for this connector; confirm against callers.
 */
@Override
public List<String> listPartitionNames(String dbName, String tblName, TableVersionRange version) {
    return hmsOps.getPartitionKeys(dbName, tblName);
}
// listPartitionNames must return the metastore's partition keys regardless of
// the (empty) version range.
@Test
public void testGetPartitionKeys() {
    Assert.assertEquals(
        Lists.newArrayList("col1"),
        hudiMetadata.listPartitionNames("db1", "tbl1", TableVersionRange.empty()));
}
/**
 * Validates an external issue report. Reports without issues are rejected outright.
 * Reports that carry rules are validated in the current (CCT) format; reports with
 * issues but no rules are accepted in the deprecated format with a warning.
 *
 * @throws IllegalStateException when the report has no issues at all
 */
public void validate(ExternalIssueReport report, Path reportPath) {
    if (report.issues == null) {
        // Neither format applies without issues: the report is structurally invalid.
        throw new IllegalStateException(String.format("Failed to parse report '%s': invalid report detected.", reportPath));
    }
    if (report.rules != null) {
        Set<String> ruleIds = validateRules(report.rules, reportPath);
        validateIssuesCctFormat(report.issues, ruleIds, reportPath);
    } else {
        String documentationLink = documentationLinkGenerator.getDocumentationLink(DOCUMENTATION_SUFFIX);
        LOGGER.warn("External issues were imported with a deprecated format which will be removed soon. " +
            "Please switch to the newest format to fully benefit from Clean Code: {}", documentationLink);
        validateIssuesDeprecatedFormat(report.issues, reportPath);
    }
}
// A report whose first rule lacks the mandatory engineId must be rejected with a
// descriptive IllegalStateException.
@Test
public void validate_whenMissingEngineIdField_shouldThrowException() throws IOException {
    ExternalIssueReport report = read(REPORTS_LOCATION);
    report.rules[0].engineId = null;

    assertThatThrownBy(() -> validator.validate(report, reportPath))
        .isInstanceOf(IllegalStateException.class)
        .hasMessage("Failed to parse report 'report-path': missing mandatory field 'engineId'.");
}
/**
 * Resolves the grant-relevant dependencies of the given entity via the content-pack
 * dependency resolver, returning a descriptor (GRN, title, owners) per dependency.
 * Dependencies configured as ignorable for the entity's type are dropped, stream
 * references are mapped back to their concrete stream, and the entity itself is
 * excluded from its own dependency set.
 */
public ImmutableSet<EntityDescriptor> resolve(GRN entity) {
    // TODO: Replace entity excerpt usage with GRNDescriptors once we implemented GRN descriptors for every entity
    // Titles for all known entities, keyed by a GRN built from the excerpt's type/id.
    final ImmutableMap<GRN, Optional<String>> entityExcerpts = contentPackService.listAllEntityExcerpts().stream()
            // TODO: Use the GRNRegistry instead of manually building a GRN. Requires all entity types to be in the registry.
            .collect(ImmutableMap.toImmutableMap(e -> GRNType.create(e.type().name(), e.type().name() + ":").newGRNBuilder().entity(e.id().id()).build(),
                    v -> Optional.ofNullable(v.title())));

    final Set<org.graylog2.contentpacks.model.entities.EntityDescriptor> descriptors = contentPackService.resolveEntities(Collections.singleton(org.graylog2.contentpacks.model.entities.EntityDescriptor.builder()
            .id(ModelId.of(entity.entity()))
            // TODO: This is a hack! Until we stop using the content-pack dependency resolver, we have to use a different version for dashboards here
            .type(ModelType.of(entity.type(), "dashboard".equals(entity.type()) ? "2" : "1")) // TODO: Any way of NOT hardcoding the version here?
            .build()));

    final ImmutableSet<GRN> dependencies = descriptors.stream()
            .filter(dep -> {
                // Filter dependencies that aren't needed for grants sharing
                // TODO This is another reason why we shouldn't be using the content pack resolver ¯\_(ツ)_/¯
                final Set<ModelType> ignoredDeps = IGNORED_DEPENDENCIES.getOrDefault(entity.grnType(), ImmutableSet.of());
                return !ignoredDeps.contains(dep.type());
            })
            // TODO: Work around from using the content pack dependency resolver:
            //  We've added stream_title content pack entities in https://github.com/Graylog2/graylog2-server/pull/17089,
            //  but in this context we want to return the actual dependent Stream to add additional permissions to.
            .map(descriptor -> ModelTypes.STREAM_REF_V1.equals(descriptor.type()) ?
                    org.graylog2.contentpacks.model.entities.EntityDescriptor.create(descriptor.id(), ModelTypes.STREAM_V1) : descriptor)
            .map(descriptor -> grnRegistry.newGRN(descriptor.type().name(), descriptor.id().id()))
            .filter(dependency -> !entity.equals(dependency)) // Don't include the given entity in dependencies
            .collect(ImmutableSet.toImmutableSet());

    // Resolve current owners for all dependencies in one batch.
    final Map<GRN, Set<GRN>> targetOwners = grantService.getOwnersForTargets(dependencies);

    return dependencies.stream()
            .map(dependency -> {
                // Fall back to explanatory placeholders when no excerpt/title is known.
                String title = entityExcerpts.get(dependency) != null
                        ? entityExcerpts.get(dependency).orElse("unnamed dependency: <" + dependency + ">")
                        : "unknown dependency: <" + dependency + ">";
                return EntityDescriptor.create(
                        dependency,
                        title,
                        getOwners(targetOwners.get(dependency))
                );
            })
            .collect(ImmutableSet.toImmutableSet());
}
// A STREAM_REF dependency must be mapped back to the concrete STREAM entity,
// carrying the stream's title and resolved owners.
@Test
@DisplayName("Try a stream reference dependency resolve")
void resolveStreamReference() {
    final String TEST_TITLE = "Test Stream Title";
    // Two excerpts for the same id: the concrete stream and its reference.
    final EntityExcerpt streamExcerpt = EntityExcerpt.builder()
        .type(ModelTypes.STREAM_V1)
        .id(ModelId.of("54e3deadbeefdeadbeefaffe"))
        .title(TEST_TITLE).build();
    final EntityExcerpt streamRefExcerpt = EntityExcerpt.builder()
        .type(ModelTypes.STREAM_REF_V1)
        .id(ModelId.of("54e3deadbeefdeadbeefaffe"))
        .title(TEST_TITLE).build();
    when(contentPackService.listAllEntityExcerpts()).thenReturn(ImmutableSet.of(streamExcerpt, streamRefExcerpt));

    // The resolver reports the dependency as a stream *reference*.
    final EntityDescriptor streamDescriptor =
        EntityDescriptor.builder().type(ModelTypes.STREAM_REF_V1).id(ModelId.of("54e3deadbeefdeadbeefaffe")).build();
    when(contentPackService.resolveEntities(any())).thenReturn(ImmutableSet.of(streamDescriptor));
    when(grnDescriptorService.getDescriptor(any(GRN.class))).thenAnswer(a -> {
        GRN grnArg = a.getArgument(0);
        return GRNDescriptor.builder().grn(grnArg).title("dummy").build();
    });

    final GRN dashboard = grnRegistry.newGRN("dashboard", "33e3deadbeefdeadbeefaffe");
    final ImmutableSet<org.graylog.security.entities.EntityDescriptor> missingDependencies =
        entityDependencyResolver.resolve(dashboard);

    assertThat(missingDependencies).hasSize(1);
    assertThat(missingDependencies.asList().get(0)).satisfies(descriptor -> {
        // The reference must be resolved to the concrete stream GRN.
        assertThat(descriptor.id().toString()).isEqualTo("grn::::stream:54e3deadbeefdeadbeefaffe");
        assertThat(descriptor.title()).isEqualTo(TEST_TITLE);
        assertThat(descriptor.owners()).hasSize(1);
        assertThat(descriptor.owners().asList().get(0).grn().toString()).isEqualTo("grn::::user:jane");
    });
}
/**
 * Opens the named file under {@code path} without creating it if absent
 * (delegates to the three-argument overload with {@code create = false} —
 * presumably; confirm the flag's meaning against the overload).
 */
public static File openFile(String path, String fileName) {
    return openFile(path, fileName, false);
}
// Opening an existing file must return a File with the same path and name.
@Test
void openFile() {
    File file = DiskUtils.openFile(testFile.getParent(), testFile.getName());
    assertNotNull(file);
    assertEquals(testFile.getPath(), file.getPath());
    assertEquals(testFile.getName(), file.getName());
}
/**
 * Attempts to obtain the lock described by the configuration.
 *
 * @return a {@link StorageLock} handle when the lock was obtained, otherwise empty
 */
@Override
public Optional<SimpleLock> lock(LockConfiguration lockConfiguration) {
    return doLock(lockConfiguration)
            ? Optional.of(new StorageLock(lockConfiguration, storageAccessor))
            : Optional.empty();
}
// When insert fails and update throws, the failure must not be cached: a second
// lock attempt must hit insertRecord again (verified by times(2)).
@Test
void shouldNotCacheRecordIfUpdateFailed() {
    when(storageAccessor.insertRecord(LOCK_CONFIGURATION)).thenReturn(false);
    when(storageAccessor.updateRecord(LOCK_CONFIGURATION)).thenThrow(LOCK_EXCEPTION);

    assertThatThrownBy(() -> lockProvider.lock(LOCK_CONFIGURATION)).isSameAs(LOCK_EXCEPTION);
    assertThatThrownBy(() -> lockProvider.lock(LOCK_CONFIGURATION)).isSameAs(LOCK_EXCEPTION);
    verify(storageAccessor, times(2)).insertRecord(LOCK_CONFIGURATION);
}
public static Method getMostSpecificMethod(Method method, Class<?> targetClass) { if (targetClass != null && targetClass != method.getDeclaringClass() && isOverridable(method, targetClass)) { try { if (Modifier.isPublic(method.getModifiers())) { try { return targetClass.getMethod(method.getName(), method.getParameterTypes()); } catch (NoSuchMethodException ex) { return method; } } else { return method; } } catch (SecurityException ex) { // Security settings are disallowing reflective access; fall back to 'method' below. } } return method; }
// With a null target class the original method must be returned unchanged.
@Test
public void testGetMostSpecificMethodWhenClassIsNull() throws NoSuchMethodException {
    Method method = AbstractMap.class.getDeclaredMethod("clone");
    Method specificMethod = ClassUtils.getMostSpecificMethod(method, null);
    assertEquals(AbstractMap.class, specificMethod.getDeclaringClass());
}
/**
 * Two vector clocks are equal iff they are of exactly the same class and hold
 * equal replica-timestamp maps. Uses getClass() (not instanceof) to keep the
 * equals contract symmetric across subclasses.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (o == null || o.getClass() != getClass()) {
        return false;
    }
    return replicaTimestamps.equals(((VectorClock) o).replicaTimestamps);
}
// Clocks with identical replica timestamps must be equal, as must a copy made
// via the copy constructor.
@Test
public void testEquals() {
    final VectorClock clock = vectorClock(uuidParams[0], 1, uuidParams[1], 2);
    assertEquals(clock, vectorClock(uuidParams[0], 1, uuidParams[1], 2));
    assertEquals(clock, new VectorClock(clock));
}
/**
 * Returns the vendor term for "procedure". Always {@code null}: this metadata
 * implementation does not expose a procedure term.
 */
@Override
public String getProcedureTerm() {
    return null;
}
// getProcedureTerm() is specified to return null for this implementation.
@Test
void assertGetProcedureTerm() {
    assertNull(metaData.getProcedureTerm());
}
/**
 * Iterative depth-first traversal from {@code startNode}. Visits each node at most
 * once; {@link #goFurther} decides whether a node's edges are expanded and
 * {@code checkAdjacent} decides which adjacent nodes are pushed.
 */
@Override
public void start(EdgeExplorer explorer, int startNode) {
    IntArrayDeque pending = new IntArrayDeque();
    GHBitSet visited = createBitSet();
    pending.addLast(startNode);
    while (pending.size() > 0) {
        int node = pending.removeLast();
        // Skip already-visited nodes and nodes the subclass declines to expand.
        if (visited.contains(node) || !goFurther(node))
            continue;
        EdgeIterator edges = explorer.setBaseNode(node);
        while (edges.next()) {
            if (checkAdjacent(edges))
                pending.addLast(edges.getAdjNode());
        }
        visited.add(node);
    }
}
// Verifies DFS visit order on a small directed graph. Fix: JUnit 5's
// assertEquals takes (expected, actual) — the final assertion had them swapped,
// which would produce a misleading failure message.
@Test
public void testDFS1() {
    DepthFirstSearch dfs = new DepthFirstSearch() {
        @Override
        protected GHBitSet createBitSet() {
            return new GHBitSetImpl();
        }

        @Override
        public boolean goFurther(int v) {
            counter++;
            // Each node must be expanded at most once.
            assertFalse(set.contains(v), "v " + v + " is already contained in set. iteration:" + counter);
            set.add(v);
            list.add(v);
            return super.goFurther(v);
        }
    };

    BooleanEncodedValue accessEnc = new SimpleBooleanEncodedValue("access", true);
    EncodedValue.InitializerConfig evConf = new EncodedValue.InitializerConfig();
    accessEnc.init(evConf);
    BaseGraph g = new BaseGraph.Builder(evConf.getRequiredBytes()).create();
    // Directed edges only (forward access).
    g.edge(1, 2).setDistance(1).set(accessEnc, true, false);
    g.edge(1, 5).setDistance(1).set(accessEnc, true, false);
    g.edge(1, 4).setDistance(1).set(accessEnc, true, false);
    g.edge(2, 3).setDistance(1).set(accessEnc, true, false);
    g.edge(3, 4).setDistance(1).set(accessEnc, true, false);
    g.edge(5, 6).setDistance(1).set(accessEnc, true, false);
    g.edge(6, 4).setDistance(1).set(accessEnc, true, false);
    dfs.start(g.createEdgeExplorer(AccessFilter.outEdges(accessEnc)), 1);
    assertTrue(counter > 0);
    assertEquals("[1, 2, 3, 4, 5, 6]", list.toString());
}
@Override public void execute(Runnable command) { if (command == null) { throw new NullPointerException(); } try { super.execute(command); } catch (RejectedExecutionException rx) { // retry to offer the task into queue. final TaskQueue queue = (TaskQueue) super.getQueue(); try { if (!queue.retryOffer(command, 0, TimeUnit.MILLISECONDS)) { throw new RejectedExecutionException("Queue capacity is full.", rx); } } catch (InterruptedException x) { throw new RejectedExecutionException(x); } } }
// Timing-based smoke test of the eager pool: submits 15 one-second tasks at
// 50 ms intervals, then checks only the core threads survive the keep-alive.
// Disabled in favour of a faster variant; kept for reference.
@Disabled("replaced to testEagerThreadPoolFast for performance")
@Test
void testEagerThreadPool() throws Exception {
    String name = "eager-tf";
    int queues = 5;
    int cores = 5;
    int threads = 10;
    // alive 1 second
    long alive = 1000;

    // init queue and executor
    TaskQueue<Runnable> taskQueue = new TaskQueue<Runnable>(queues);
    final EagerThreadPoolExecutor executor = new EagerThreadPoolExecutor(
            cores,
            threads,
            alive,
            TimeUnit.MILLISECONDS,
            taskQueue,
            new NamedThreadFactory(name, true),
            new AbortPolicyWithReport(name, URL));
    taskQueue.setExecutor(executor);

    for (int i = 0; i < 15; i++) {
        Thread.sleep(50);
        executor.execute(() -> {
            System.out.println(
                    "thread number in current pool:" + executor.getPoolSize() + ",  task number in task queue:"
                            + executor.getQueue().size() + " executor size: " + executor.getPoolSize());
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        });
    }
    Thread.sleep(5000);
    // cores theads are all alive.
    Assertions.assertEquals(executor.getPoolSize(), cores, "more than cores threads alive!");
}
/**
 * Builds an IPv6 prefix from a raw address byte array and a prefix length,
 * delegating address parsing to {@link Ip6Address#valueOf(byte[])}.
 */
public static Ip6Prefix valueOf(byte[] address, int prefixLength) {
    return new Ip6Prefix(Ip6Address.valueOf(address), prefixLength);
}
// Prefixes built from an address + length must mask host bits in their
// canonical string form (e.g. /120 zeroes the last byte, /0 collapses to ::/0).
@Test
public void testValueOfAddressIPv6() {
    Ip6Address ipAddress;
    Ip6Prefix ipPrefix;
    ipAddress = Ip6Address.valueOf("1111:2222:3333:4444:5555:6666:7777:8888");
    ipPrefix = Ip6Prefix.valueOf(ipAddress, 120);
    assertThat(ipPrefix.toString(), is("1111:2222:3333:4444:5555:6666:7777:8800/120"));

    ipPrefix = Ip6Prefix.valueOf(ipAddress, 128);
    assertThat(ipPrefix.toString(), is("1111:2222:3333:4444:5555:6666:7777:8888/128"));

    ipAddress = Ip6Address.valueOf("::");
    ipPrefix = Ip6Prefix.valueOf(ipAddress, 0);
    assertThat(ipPrefix.toString(), is("::/0"));

    ipPrefix = Ip6Prefix.valueOf(ipAddress, 128);
    assertThat(ipPrefix.toString(), is("::/128"));

    ipAddress = Ip6Address.valueOf("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
    ipPrefix = Ip6Prefix.valueOf(ipAddress, 0);
    assertThat(ipPrefix.toString(), is("::/0"));

    ipPrefix = Ip6Prefix.valueOf(ipAddress, 64);
    assertThat(ipPrefix.toString(), is("ffff:ffff:ffff:ffff::/64"));

    ipPrefix = Ip6Prefix.valueOf(ipAddress, 128);
    assertThat(ipPrefix.toString(), is("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128"));
}
/**
 * Finds the storage-node service instance of the application running on the given
 * host, if any.
 *
 * @return the single storage service on that host, or empty when there is none
 * @throws RuntimeException when more than one storage service is found on the host
 */
public static Optional<ServiceInstance> getStorageNodeAtHost(ApplicationInstance application,
                                                             HostName hostName) {
    Set<ServiceInstance> storageNodes = application.serviceClusters().stream()
            .filter(VespaModelUtil::isStorage)
            .flatMap(cluster -> cluster.serviceInstances().stream())
            .filter(service -> service.hostName().equals(hostName))
            .collect(Collectors.toSet());

    if (storageNodes.size() > 1) {
        throw new RuntimeException("Expected application " + application.applicationInstanceId() +
                " to have exactly one storage node service on host " + hostName + " but got " +
                storageNodes.size() + ": " + storageNodes);
    }

    // Empty set yields Optional.empty(); a singleton yields its only element.
    return storageNodes.stream().findFirst();
}
// The known storage service on storage0Host must be found and returned.
@Test
public void testGetStorageNodeAtHost() {
    Optional<ServiceInstance> service = VespaModelUtil.getStorageNodeAtHost(application, storage0Host);
    assertTrue(service.isPresent());
    assertEquals(storage0, service.get());
}
/**
 * Builds a {@link DateTimeFormatter} from an Oracle/Hive-style format string
 * (tokens like YYYY, MM, DD, HH24, HH, MI, SS). Unknown tokens raise a
 * {@code PrestoException}. Missing time fields default to zero so a bare date
 * parses as a datetime; when the 12-hour 'HH' token is present, AM is assumed
 * (there is no AM/PM token in the format language).
 *
 * @param format the pattern to translate
 * @param mode   controls the minimum width of two-position fields (parser vs printer)
 */
public static DateTimeFormatter createDateTimeFormatter(String format, Mode mode) {
    DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
    boolean formatContainsHourOfAMPM = false;
    for (Token token : tokenize(format)) {
        switch (token.getType()) {
            case DateFormat.TEXT:
                builder.appendLiteral(token.getText());
                break;
            case DateFormat.DD:
                builder.appendValue(DAY_OF_MONTH, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                break;
            case DateFormat.HH24:
                builder.appendValue(HOUR_OF_DAY, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                break;
            case DateFormat.HH:
                builder.appendValue(HOUR_OF_AMPM, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                formatContainsHourOfAMPM = true;
                break;
            case DateFormat.MI:
                builder.appendValue(MINUTE_OF_HOUR, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                break;
            case DateFormat.MM:
                builder.appendValue(MONTH_OF_YEAR, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                break;
            case DateFormat.SS:
                builder.appendValue(SECOND_OF_MINUTE, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                break;
            case DateFormat.YY:
                // Two-digit years are interpreted in the range 2000-2099.
                builder.appendValueReduced(YEAR, 2, 2, 2000);
                break;
            case DateFormat.YYYY:
                builder.appendValue(YEAR, 4);
                break;
            case DateFormat.UNRECOGNIZED:
            default:
                throw new PrestoException(
                        StandardErrorCode.INVALID_FUNCTION_ARGUMENT,
                        String.format("Failed to tokenize string [%s] at offset [%d]", token.getText(), token.getCharPositionInLine()));
        }
    }

    try {
        // Append default values(0) for time fields(HH24, HH, MI, SS) because JSR-310 does not accept bare Date value as DateTime
        if (formatContainsHourOfAMPM) {
            // At the moment format does not allow to include AM/PM token, thus it was never possible to specify PM hours using 'HH' token in format
            // Keep existing behaviour by defaulting to 0(AM) for AMPM_OF_DAY if format string contains 'HH'
            builder.parseDefaulting(HOUR_OF_AMPM, 0)
                    .parseDefaulting(AMPM_OF_DAY, 0);
        }
        else {
            builder.parseDefaulting(HOUR_OF_DAY, 0);
        }
        return builder.parseDefaulting(MINUTE_OF_HOUR, 0)
                .parseDefaulting(SECOND_OF_MINUTE, 0)
                .toFormatter();
    }
    catch (UnsupportedOperationException e) {
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e);
    }
}
// An unrecognized token ("yyym") in the format string must raise a PrestoException.
@Test(expectedExceptions = PrestoException.class)
public void testParserInvalidTokenCreate2() {
    DateFormatParser.createDateTimeFormatter("yyym/mm/dd", PARSER);
}
/**
 * Builds a {@link ConfigurationProperty} from the given key and value inputs.
 * Supplying both {@code value} and {@code encryptedValue} is an error recorded
 * on the property. For secure properties a plain value is encrypted; for
 * unsecured properties an encrypted value is an error (but still recorded).
 * The final re-wrap of the encrypted value normalizes it — presumably
 * idempotent for already-encrypted content; confirm against
 * {@code EncryptedConfigurationValue}.
 */
public ConfigurationProperty create(String key, String value, String encryptedValue, Boolean isSecure) {
    ConfigurationProperty configurationProperty = new ConfigurationProperty();
    configurationProperty.setConfigurationKey(new ConfigurationKey(key));

    // Both inputs present: record errors on both fields and keep both values
    // so the caller can render the conflicting state.
    if (isNotBlank(value) && isNotBlank(encryptedValue)) {
        configurationProperty.addError("configurationValue", "You may only specify `value` or `encrypted_value`, not both!");
        configurationProperty.addError("encryptedValue", "You may only specify `value` or `encrypted_value`, not both!");
        configurationProperty.setConfigurationValue(new ConfigurationValue(value));
        configurationProperty.setEncryptedValue(new EncryptedConfigurationValue(encryptedValue));
        return configurationProperty;
    }

    if (isSecure) {
        if (isNotBlank(encryptedValue)) {
            configurationProperty.setEncryptedValue(new EncryptedConfigurationValue(encryptedValue));
        }

        if (isNotBlank(value)) {
            // Plain text on a secure property is encrypted before storage.
            configurationProperty.setEncryptedValue(new EncryptedConfigurationValue(encrypt(value)));
        }
    } else {
        if (isNotBlank(encryptedValue)) {
            configurationProperty.addError("encryptedValue", "encrypted_value cannot be specified to a unsecured property.");
            configurationProperty.setEncryptedValue(new EncryptedConfigurationValue(encryptedValue));
        }

        if (value != null) {
            configurationProperty.setConfigurationValue(new ConfigurationValue(value));
        }
    }

    if (isNotBlank(configurationProperty.getEncryptedValue())) {
        configurationProperty.setEncryptedValue(new EncryptedConfigurationValue(configurationProperty.getEncryptedValue()));
    }
    return configurationProperty;
}
// Supplying both plain and encrypted values must record the "not both" error on
// both fields while still retaining both supplied values.
@Test
public void shouldCreateWithErrorsIfBothPlainAndEncryptedTextInputAreSpecifiedForUnSecuredProperty() {
    Property key = new Property("key");
    key.with(Property.SECURE, false);

    ConfigurationProperty property = new ConfigurationPropertyBuilder().create("key", "value", "enc_value", false);

    assertThat(property.errors().get("configurationValue").get(0), is("You may only specify `value` or `encrypted_value`, not both!"));
    assertThat(property.errors().get("encryptedValue").get(0), is("You may only specify `value` or `encrypted_value`, not both!"));
    assertThat(property.getConfigurationValue().getValue(), is("value"));
    assertThat(property.getEncryptedValue(), is("enc_value"));
}
/**
 * Creates a hash ring for the given point distribution by delegating to the
 * configured underlying ring factory.
 */
@Override
public Ring<T> createRing(Map<T, Integer> pointsMap) {
    return _ringFactory.createRing(pointsMap);
}
// Configuring the "multiProbe" hash method must yield an MPConsistentHashRing.
@Test(groups = { "small", "back-end" })
public void testFactoryWithMultiProbe() {
    RingFactory<String> factory = new DelegatingRingFactory<>(configBuilder("multiProbe", null));
    Ring<String> ring = factory.createRing(buildPointsMap(10));
    assertTrue(ring instanceof MPConsistentHashRing);
}
/**
 * Splits plain-text lines into paragraphs of at most {@code maxTokensPerParagraph}
 * tokens, delegating line splitting to {@code internalSplitLines} with the
 * plain-text split options (no trimming of whitespace — the {@code false} flag;
 * presumably; confirm against the helper's signature).
 */
public static List<String> splitPlainTextParagraphs(
        List<String> lines, int maxTokensPerParagraph) {
    return internalSplitTextParagraphs(
            lines,
            maxTokensPerParagraph,
            (text) -> internalSplitLines(
                    text, maxTokensPerParagraph, false, s_plaintextSplitOptions));
}
// Closing brackets ( ) ] } ) must act as split points when paragraphs exceed
// the token budget.
@Test
public void canSplitTextParagraphsOnClosingBrackets() {
    List<String> input = Arrays.asList(
        "This is a test of the emergency broadcast system) This is only a test",
        "We repeat) this is only a test) A unit test",
        "A small note] And another) And once again] Seriously this is the end}"
            + " We're finished} All set} Bye.",
        "Done.");

    List<String> expected = Arrays.asList(
        "This is a test of the emergency broadcast system)",
        "This is only a test",
        "We repeat) this is only a test) A unit test",
        "A small note] And another) And once again]",
        "Seriously this is the end} We're finished} All set} Bye. Done.");

    List<String> result = TextChunker.splitPlainTextParagraphs(input, 15);

    Assertions.assertEquals(expected, result);
}
/**
 * Unbinds a social account from the given user.
 *
 * @throws ServiceException SOCIAL_USER_NOT_FOUND when no social user matches the openid
 */
@Override
public void unbindSocialUser(Long userId, Integer userType, Integer socialType, String openid) {
    // Look up the SocialUserDO bound to this openid.
    SocialUserDO socialUser = socialUserMapper.selectByTypeAndOpenid(socialType, openid);
    if (socialUser == null) {
        throw exception(SOCIAL_USER_NOT_FOUND);
    }
    // Delete the corresponding social binding record.
    // NOTE(review): deletes by socialUser.getType() rather than the socialType
    // parameter — presumably always equal after the lookup above; confirm.
    socialUserBindMapper.deleteByUserTypeAndUserIdAndSocialType(userType, userId, socialUser.getType());
}
// Unbinding an openid with no matching social user must fail with SOCIAL_USER_NOT_FOUND.
@Test
public void testUnbindSocialUser_notFound() {
    // Invoke and assert the expected service exception.
    assertServiceException(
        () -> socialUserService.unbindSocialUser(randomLong(), UserTypeEnum.ADMIN.getValue(),
            SocialTypeEnum.GITEE.getType(), "test_openid"),
        SOCIAL_USER_NOT_FOUND);
}
/**
 * Polls the next request, preferring positive-priority requests, then
 * no-priority requests, and finally negative-priority requests.
 * Synchronized to keep the three-queue drain atomic.
 */
@Override
public synchronized Request poll(Task task) {
    Request next = priorityQueuePlus.poll();
    if (next == null) {
        next = noPriorityQueue.poll();
    }
    if (next == null) {
        next = priorityQueueMinus.poll();
    }
    return next;
}
// Requests must be polled in priority order: 900, 100, default (0), -900.
@Test
public void testDifferentPriority() {
    Request request = new Request("a");
    request.setPriority(100);
    priorityScheduler.push(request, task);

    request = new Request("b");
    request.setPriority(900);
    priorityScheduler.push(request, task);

    request = new Request("c");
    priorityScheduler.push(request, task);

    request = new Request("d");
    request.setPriority(-900);
    priorityScheduler.push(request, task);

    Request poll = priorityScheduler.poll(task);
    Assert.assertEquals("b", poll.getUrl());

    poll = priorityScheduler.poll(task);
    Assert.assertEquals("a", poll.getUrl());

    poll = priorityScheduler.poll(task);
    Assert.assertEquals("c", poll.getUrl());

    poll = priorityScheduler.poll(task);
    Assert.assertEquals("d", poll.getUrl());
}
/**
 * Private constructor: this is a utility class and must not be instantiated.
 */
private Main() {
    // Utility Class.
}
// Integration test (requires network): running Main against a fresh workdir must
// download the logstash sources, a jruby installation, and logstash's Gemfile.
@Test
public void downloadsDependenciesForGithub() throws Exception {
    final File pwd = temp.newFolder();
    Main.main(String.format("--workdir=%s", pwd.getAbsolutePath()));
    final Path logstash = pwd.toPath().resolve("logstash").resolve("logstash-main");
    assertThat(logstash.toFile().exists(), is(true));
    final File jruby = pwd.toPath().resolve("jruby").toFile();
    assertThat(jruby.exists(), is(true));
    assertThat(jruby.isDirectory(), is(true));
    assertThat(logstash.resolve("Gemfile").toFile().exists(), is(true));
}
/**
 * Runs a federated FSCK: looks up every ACTIVE NameNode registered in the state
 * store and remotely triggers fsck against each, streaming progress and results
 * to {@code out}. Per-NameNode IO failures are reported inline and do not abort
 * the run; any other failure is logged and summarized. Always closes {@code out}.
 */
public void fsck() {
    final long startTime = Time.monotonicNow();
    try {
        // Feature is explicitly flagged as unstable; warn both log and caller.
        String warnMsg = "Now FSCK to DFSRouter is unstable feature. "
            + "There may be incompatible changes between releases.";
        LOG.warn(warnMsg);
        out.println(warnMsg);
        String msg = "Federated FSCK started by " + UserGroupInformation.getCurrentUser()
            + " from " + remoteAddress + " at " + new Date();
        LOG.info(msg);
        out.println(msg);

        // Check each Namenode in the federation
        StateStoreService stateStore = router.getStateStore();
        MembershipStore membership = stateStore.getRegisteredRecordStore(MembershipStore.class);
        GetNamenodeRegistrationsRequest request = GetNamenodeRegistrationsRequest.newInstance();
        GetNamenodeRegistrationsResponse response = membership.getNamenodeRegistrations(request);
        List<MembershipState> memberships = response.getNamenodeMemberships();
        Collections.sort(memberships);
        for (MembershipState nn : memberships) {
            // Only ACTIVE NameNodes are checked; standby/observer nodes are skipped.
            if (nn.getState() == FederationNamenodeServiceState.ACTIVE) {
                try {
                    String webAddress = nn.getWebAddress();
                    out.write("Checking " + nn + " at " + webAddress + "\n");
                    remoteFsck(nn);
                } catch (IOException ioe) {
                    // Best effort: report and continue with the remaining NameNodes.
                    out.println("Cannot query " + nn + ": " + ioe.getMessage() + "\n");
                }
            }
        }
        out.println("Federated FSCK ended at " + new Date() + " in "
            + (Time.monotonicNow() - startTime + " milliseconds"));
    } catch (Exception e) {
        String errMsg = "Fsck " + e.getMessage();
        LOG.warn(errMsg, e);
        out.println("Federated FSCK ended at " + new Date() + " in "
            + (Time.monotonicNow() - startTime + " milliseconds"));
        out.println(e.getMessage());
        out.print("\n\n" + errMsg);
    } finally {
        out.close();
    }
}
// End-to-end test of the router /fsck endpoint against two mounted namespaces:
// an unscoped fsck must visit both ACTIVE NameNodes and report file counts from
// both clusters; a path-scoped fsck must only report the matching namespace.
@Test
public void testFsck() throws Exception {
    MountTable addEntry = MountTable.newInstance("/testdir",
        Collections.singletonMap("ns0", "/testdir"));
    assertTrue(addMountTable(addEntry));
    addEntry = MountTable.newInstance("/testdir2",
        Collections.singletonMap("ns1", "/testdir2"));
    assertTrue(addMountTable(addEntry));

    // create 1 file on ns0
    routerFs.createNewFile(new Path("/testdir/testfile"));
    // create 3 files on ns1
    routerFs.createNewFile(new Path("/testdir2/testfile2"));
    routerFs.createNewFile(new Path("/testdir2/testfile3"));
    routerFs.createNewFile(new Path("/testdir2/testfile4"));

    try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
        // TODO: support https
        HttpGet httpGet = new HttpGet("http://" + webAddress.getHostName() + ":"
            + webAddress.getPort() + "/fsck");
        try (CloseableHttpResponse httpResponse = httpClient.execute(httpGet)) {
            assertEquals(HttpStatus.SC_OK,
                httpResponse.getStatusLine().getStatusCode());
            String out = EntityUtils.toString(
                httpResponse.getEntity(), StandardCharsets.UTF_8);
            LOG.info(out);
            assertTrue(out.contains("Federated FSCK started"));
            // assert 1 file exists in a cluster and 3 files exist
            // in another cluster
            assertTrue(out.contains("Total files:\t1"));
            assertTrue(out.contains("Total files:\t3"));
            assertTrue(out.contains("Federated FSCK ended"));
            int nnCount = 0;
            for (MembershipState nn : memberships) {
                if (nn.getState() == FederationNamenodeServiceState.ACTIVE) {
                    assertTrue(out.contains(
                        "Checking " + nn + " at " + nn.getWebAddress() + "\n"));
                    nnCount++;
                }
            }
            // Exactly the two ACTIVE NameNodes must have been checked.
            assertEquals(2, nnCount);
        }

        // check if the argument is passed correctly
        httpGet = new HttpGet("http://" + webAddress.getHostName() + ":"
            + webAddress.getPort() + "/fsck?path=/testdir");
        try (CloseableHttpResponse httpResponse = httpClient.execute(httpGet)) {
            assertEquals(HttpStatus.SC_OK,
                httpResponse.getStatusLine().getStatusCode());
            String out = EntityUtils.toString(
                httpResponse.getEntity(), StandardCharsets.UTF_8);
            LOG.info(out);
            assertTrue(out.contains("Federated FSCK started"));
            assertTrue(out.contains("Total files:\t1"));
            // ns1 does not have files under /testdir
            assertFalse(out.contains("Total files:\t3"));
            assertTrue(out.contains("Federated FSCK ended"));
            int nnCount = 0;
            for (MembershipState nn : memberships) {
                if (nn.getState() == FederationNamenodeServiceState.ACTIVE) {
                    assertTrue(out.contains(
                        "Checking " + nn + " at " + nn.getWebAddress() + "\n"));
                    nnCount++;
                }
            }
            assertEquals(2, nnCount);
        }
    }
}
/**
 * Returns this plugin's execution order: the RESPONSE plugin's position code.
 */
@Override
public int getOrder() {
    return PluginEnum.RESPONSE.getCode();
}
// The plugin order must equal the RESPONSE plugin enum code.
@Test
public void testGetOrder() {
    assertEquals(responsePlugin.getOrder(), PluginEnum.RESPONSE.getCode());
}
/**
 * Closes this instance, delegating to the timed overload with the
 * default close timeout.
 */
@Override
public void close() {
    final Duration timeout = Duration.ofMillis(DEFAULT_CLOSE_TIMEOUT_MS);
    close(timeout);
}
@Test
public void testSuccessfulStartupShutdown() {
    // close() must complete without throwing once the background acknowledge
    // and unsubscribe events are stubbed to succeed.
    consumer = newConsumer();
    completeShareAcknowledgeOnCloseApplicationEventSuccessfully();
    completeShareUnsubscribeApplicationEventSuccessfully();
    assertDoesNotThrow(() -> consumer.close());
}
/**
 * Inserts the element at the tail of the deque.
 *
 * @return always {@code true} — the deque is unbounded, so insertion succeeds
 */
@Override
public boolean offerLast(T element) {
    addLastNode(element);
    return true;
}
@Test
public void testOfferLastNull() {
    // offerLast(null) must reject null elements with a NullPointerException.
    LinkedDeque<Object> q = new LinkedDeque<>();
    try {
        q.offerLast(null);
        Assert.fail("offerLast null should have failed");
    } catch (NullPointerException e) {
        // expected
    }
}
/**
 * Exposes the traffic counter's byte counts as four named gauges:
 * bytes read/written over the last interval and cumulative totals.
 */
public Map<String, Gauge<Long>> gauges() {
    Map<String, Gauge<Long>> gauges = new HashMap<>();
    final TrafficCounter tc = trafficCounter();
    // Throughput over the most recent counting interval.
    gauges.put(READ_BYTES_1_SEC, new Gauge<Long>() {
        @Override
        public Long getValue() {
            return tc.lastReadBytes();
        }
    });
    gauges.put(WRITTEN_BYTES_1_SEC, new Gauge<Long>() {
        @Override
        public Long getValue() {
            return tc.lastWrittenBytes();
        }
    });
    // Totals accumulated since the counter started.
    gauges.put(READ_BYTES_TOTAL, new Gauge<Long>() {
        @Override
        public Long getValue() {
            return tc.cumulativeReadBytes();
        }
    });
    gauges.put(WRITTEN_BYTES_TOTAL, new Gauge<Long>() {
        @Override
        public Long getValue() {
            return tc.cumulativeWrittenBytes();
        }
    });
    return gauges;
}
@Test
public void counterReturns4Gauges() {
    // One gauge per metric: read/written last-interval plus cumulative totals.
    assertThat(throughputCounter.gauges()).hasSize(4);
}
@Override
public void updateNotice(NoticeSaveReqVO updateReqVO) {
    // Validate that the notice exists
    validateNoticeExists(updateReqVO.getId());
    // Update the notice record
    NoticeDO updateObj = BeanUtils.toBean(updateReqVO, NoticeDO.class);
    noticeMapper.updateById(updateObj);
}
@Test
public void testUpdateNotice_success() {
    // Insert the precondition record
    NoticeDO dbNoticeDO = randomPojo(NoticeDO.class);
    noticeMapper.insert(dbNoticeDO);
    // Prepare the update request, reusing the inserted record's id
    NoticeSaveReqVO reqVO = randomPojo(NoticeSaveReqVO.class, o -> o.setId(dbNoticeDO.getId()));
    // Perform the update
    noticeService.updateNotice(reqVO);
    // Verify the update succeeded
    NoticeDO notice = noticeMapper.selectById(reqVO.getId());
    assertPojoEquals(reqVO, notice);
}
/**
 * Returns the highest query-rate growth observed across the snapshot history,
 * expressed per minute as a fraction of the current query rate.
 * Falls back to 0.1 when there is too little data to decide, and to 0.0 when
 * load has been stable for at least 24 hours.
 */
public double maxQueryGrowthRate(Duration window, Instant now) {
    if (snapshots.isEmpty()) return 0.1;
    // Find the period having the highest growth rate, where total growth exceeds 30% increase
    double maxGrowthRate = 0; // In query rate growth per second (to get good resolution)
    for (int start = 0; start < snapshots.size(); start++) {
        if (start > 0) { // Optimization: Skip this point when starting from the previous is better relative to the best rate so far
            Duration duration = durationBetween(start - 1, start);
            if (duration.toSeconds() != 0) {
                double growthRate = (queryRateAt(start - 1) - queryRateAt(start)) / duration.toSeconds();
                if (growthRate >= maxGrowthRate)
                    continue;
            }
        }
        // Find a subsequent snapshot where the query rate has increased significantly
        for (int end = start + 1; end < snapshots.size(); end++) {
            Duration duration = durationBetween(start, end);
            if (duration.toSeconds() == 0) continue;
            if (duration.compareTo(GROWTH_RATE_MIN_INTERVAL) < 0) continue; // Too short period to be considered
            if (significantGrowthBetween(start, end)) {
                double growthRate = (queryRateAt(end) - queryRateAt(start)) / duration.toSeconds();
                if (growthRate > maxGrowthRate)
                    maxGrowthRate = growthRate;
            }
        }
    }
    if (maxGrowthRate == 0) { // No periods of significant growth
        if (durationBetween(0, snapshots.size() - 1).toHours() < 24)
            return 0.1; // ... because not much data
        else
            return 0.0; // ... because load is stable
    }
    OptionalDouble queryRate = queryRate(window, now);
    if (queryRate.orElse(0) == 0) return 0.1; // Growth not expressible as a fraction of the current rate
    // Convert per-second growth into a per-minute fraction of the current rate.
    return maxGrowthRate * 60 / queryRate.getAsDouble();
}
@Test
public void test_real_data() {
    // Here we use real data from a production deployment, where significant query rate growth is measured over
    // a short period (ts0). This should not cause a significant difference in max growth rate compared to a node
    // measuring approximately the same growth
    Instant scaleAt = Instant.parse("2024-07-03T06:06:57Z");
    ClusterTimeseries ts0 = new ClusterTimeseries(cluster, readSnapshots("real-traffic-fast-growth"));
    double maxGrowthRate0 = ts0.maxQueryGrowthRate(Duration.ofMinutes(5), scaleAt);
    assertEquals(0.0896, maxGrowthRate0, delta);
    ClusterTimeseries ts1 = new ClusterTimeseries(cluster, readSnapshots("real-traffic-ordinary-growth"));
    double maxGrowthRate1 = ts1.maxQueryGrowthRate(Duration.ofMinutes(5), scaleAt);
    assertEquals(0.0733, maxGrowthRate1, delta);
    // The fast-growth node must not report a rate wildly above the ordinary one.
    assertTrue(maxGrowthRate0 - maxGrowthRate1 < 0.1);
}
/**
 * Looks up a persistent query by id.
 *
 * @return the query metadata, or empty if no query with that id is registered
 */
@Override
public Optional<PersistentQueryMetadata> getPersistentQuery(final QueryId queryId) {
    final PersistentQueryMetadata query = persistentQueries.get(queryId);
    return Optional.ofNullable(query);
}
@Test
public void shouldCallListenerOnStateChange() {
    // Given:
    final QueryMetadata.Listener queryListener = givenCreateGetListener(registry, "foo");
    final QueryMetadata query = registry.getPersistentQuery(new QueryId("foo")).get();
    // When:
    queryListener.onStateChange(query, State.CREATED, State.RUNNING);
    // Then: both registered listeners observe the transition.
    verify(listener1).onStateChange(query, State.CREATED, State.RUNNING);
    verify(listener2).onStateChange(query, State.CREATED, State.RUNNING);
}
/**
 * Creates or updates a link description. The update is applied locally only
 * when this node is the master for the link's destination device; otherwise
 * the request is forwarded to the actual master (for a limited set of
 * providers only).
 */
@Override
public LinkEvent createOrUpdateLink(ProviderId providerId, LinkDescription linkDescription) {
    final DeviceId dstDeviceId = linkDescription.dst().deviceId();
    final NodeId dstNodeId = mastershipService.getMasterFor(dstDeviceId);
    // Process link update only if we're the master of the destination node,
    // otherwise signal the actual master.
    if (clusterService.getLocalNode().id().equals(dstNodeId)) {
        LinkKey linkKey = linkKey(linkDescription.src(), linkDescription.dst());
        Provided<LinkKey> internalLinkKey = getProvided(linkKey, providerId);
        if (internalLinkKey == null) {
            return null;
        }
        linkDescriptions.compute(internalLinkKey, (k, v) -> createOrUpdateLinkInternal(v, linkDescription));
        return refreshLinkCache(linkKey);
    } else {
        // Only forward for ConfigProvider or NullProvider
        // Forwarding was added as a workaround for ONOS-490
        if (!"cfg".equals(providerId.scheme()) && !"null".equals(providerId.scheme())) {
            return null;
        }
        // Temporary hack for NPE (ONOS-1171).
        // Proper fix is to implement forwarding to master on ConfigProvider
        if (dstNodeId == null) {
            return null;
        }
        // Blocks until the destination's master has processed the injection.
        return Futures.getUnchecked(clusterCommunicator.sendAndReceive(new Provided<>(linkDescription, providerId),
                LINK_INJECT_MESSAGE,
                SERIALIZER::encode,
                SERIALIZER::decode,
                dstNodeId));
    }
}
@Test
public final void testCreateOrUpdateLinkAncillary() {
    // Verifies that ancillary-provider updates contribute annotations only,
    // while primary-provider updates control the link type.
    ConnectPoint src = new ConnectPoint(DID1, P1);
    ConnectPoint dst = new ConnectPoint(DID2, P2);

    // add Ancillary link
    LinkEvent event = linkStore.createOrUpdateLink(PIDA,
            new DefaultLinkDescription(src, dst, INDIRECT, A1));
    assertNotNull("Ancillary only link is ignored", event);

    // add Primary link
    LinkEvent event2 = linkStore.createOrUpdateLink(PID,
            new DefaultLinkDescription(src, dst, INDIRECT, A2));
    assertLink(DID1, P1, DID2, P2, INDIRECT, event2.subject());
    assertAnnotationsEquals(event2.subject().annotations(), A2, A1);
    assertEquals(LINK_UPDATED, event2.type());

    // update link type
    LinkEvent event3 = linkStore.createOrUpdateLink(PID,
            new DefaultLinkDescription(src, dst, DIRECT, A2));
    assertLink(DID1, P1, DID2, P2, DIRECT, event3.subject());
    assertAnnotationsEquals(event3.subject().annotations(), A2, A1);
    assertEquals(LINK_UPDATED, event3.type());

    // no change
    LinkEvent event4 = linkStore.createOrUpdateLink(PID,
            new DefaultLinkDescription(src, dst, DIRECT));
    assertNull("No change event expected", event4);

    // update link annotation (Primary)
    LinkEvent event5 = linkStore.createOrUpdateLink(PID,
            new DefaultLinkDescription(src, dst, DIRECT, A2_2));
    assertLink(DID1, P1, DID2, P2, DIRECT, event5.subject());
    assertAnnotationsEquals(event5.subject().annotations(), A2, A2_2, A1);
    assertEquals(LINK_UPDATED, event5.type());

    // update link annotation (Ancillary)
    LinkEvent event6 = linkStore.createOrUpdateLink(PIDA,
            new DefaultLinkDescription(src, dst, DIRECT, A1_2));
    assertLink(DID1, P1, DID2, P2, DIRECT, event6.subject());
    assertAnnotationsEquals(event6.subject().annotations(), A2, A2_2, A1, A1_2);
    assertEquals(LINK_UPDATED, event6.type());

    // update link type (Ancillary) : ignored
    LinkEvent event7 = linkStore.createOrUpdateLink(PIDA,
            new DefaultLinkDescription(src, dst, EDGE));
    assertNull("Ancillary change other than annotation is ignored", event7);
}
/**
 * Parses the token stream into a node tree.
 *
 * @return the root node, or {@code null} when there are no tokens
 * @throws ScanException if the tokens do not form a valid expression
 */
public Node parse() throws ScanException {
    if (tokenList != null && !tokenList.isEmpty()) {
        return E();
    }
    return null;
}
@Test
public void withNoClosingBraces() throws ScanException {
    // A '${' with no matching '}' must be rejected by the parser.
    Tokenizer tokenizer = new Tokenizer("a${b");
    Parser parser = new Parser(tokenizer.tokenize());
    try {
        parser.parse();
    } catch (IllegalArgumentException e) {
        assertEquals("All tokens consumed but was expecting \"}\"", e.getMessage());
        return;
    }
    fail();
}
/**
 * Evaluates this deny directive against a request.
 *
 * @return {@link Result#DENY} when the directive matches either the resource
 *     directly, or an elastic agent profile within a matching enclosing
 *     resource; {@link Result#SKIP} otherwise
 */
@Override
public Result apply(String action, Class<? extends Validatable> aClass, String resource, String resourceToOperateWithin) {
    // Direct match on action, entity type and resource pattern.
    boolean directMatch = matchesAction(action) && matchesType(aClass) && matchesResource(resource);
    // Elastic agent profiles are also denied via their enclosing resource.
    boolean elasticProfileMatch = isRequestForElasticAgentProfiles(aClass)
            && matchesAction(action)
            && matchesResource(resourceToOperateWithin);
    return (directMatch || elasticProfileMatch) ? Result.DENY : Result.SKIP;
}
@Test
void forAdministerOfAllClusterProfiles() {
    // The deny directive on administering all cluster profiles is expected to
    // also deny view/administer on elastic agent profiles within them.
    Deny directive = new Deny("administer", "cluster_profile", "*");

    Result viewAllElasticAgentProfiles = directive.apply("view", ElasticProfile.class, "*", null);
    Result viewAllClusterProfiles = directive.apply("view", ClusterProfile.class, "*", null);
    Result administerAllElasticAgentProfiles = directive.apply("administer", ElasticProfile.class, "*", null);
    Result administerAllClusterProfiles = directive.apply("administer", ClusterProfile.class, "*", null);

    assertThat(viewAllElasticAgentProfiles).isEqualTo(Result.DENY);
    assertThat(viewAllClusterProfiles).isEqualTo(Result.DENY);
    assertThat(administerAllElasticAgentProfiles).isEqualTo(Result.DENY);
    assertThat(administerAllClusterProfiles).isEqualTo(Result.DENY);
}
/** Returns the state cell for the given namespace/tag, using a null state context. */
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
    return workItemState.get(namespace, address, StateContexts.nullContext());
}
@Test
public void testMapPutIfAbsentNoReadFails() throws Exception {
    StateTag<MapState<String, Integer>> addr =
        StateTags.map("map", StringUtf8Coder.of(), VarIntCoder.of());
    MapState<String, Integer> mapState = underTest.state(NAMESPACE, addr);

    // Local path: putIfAbsent on a locally-written key keeps the existing value.
    final String tag1 = "tag1";
    mapState.put(tag1, 1);
    ReadableState<Integer> readableState = mapState.putIfAbsent(tag1, 42);
    assertEquals(1, (int) mapState.get(tag1).read());
    assertEquals(1, (int) readableState.read());

    // Backend path: the persisted value arrives asynchronously; putIfAbsent
    // must observe it and keep it rather than inserting the default.
    final String tag2 = "tag2";
    SettableFuture<Integer> future = SettableFuture.create();
    when(mockReader.valueFuture(
        protoKeyFromUserKey(tag2, StringUtf8Coder.of()), STATE_FAMILY, VarIntCoder.of()))
        .thenReturn(future);
    waitAndSet(future, 2, 50);
    readableState = mapState.putIfAbsent(tag2, 42);
    assertEquals(2, (int) mapState.get(tag2).read());
    assertEquals(2, (int) readableState.read());
}
/** Returns the names of this node's children, brokers first. */
@Override
public Collection<String> childNames() {
    return Arrays.asList(ClusterImageBrokersNode.NAME, ClusterImageControllersNode.NAME);
}
@Test
public void testChildNames() {
    // Child order is fixed: brokers before controllers.
    assertEquals(Arrays.asList("brokers", "controllers"), NODE.childNames());
}
/**
 * FEEL string() conversion: formats any value as its FEEL string
 * representation. A null input yields a null result rather than an error.
 */
public FEELFnResult<String> invoke(@ParameterName("from") Object val) {
    if (val != null) {
        return FEELFnResult.ofResult(TypeUtil.formatValue(val, false));
    }
    return FEELFnResult.ofResult(null);
}
@Test
void invokeDurationNanosMillis() {
    // FEEL string() must render durations in ISO-8601 with exact fractional seconds.
    FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ofNanos(25)), "PT0.000000025S");
    FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ofNanos(10000)), "PT0.00001S");
    FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ofNanos(10025)), "PT0.000010025S");
    FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ofMillis(1500)), "PT1.5S");
    FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ofMillis(90061025)), "P1DT1H1M1.025S");
    FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ofMillis(-90061025)), "-P1DT1H1M1.025S");
}
/**
 * Parses a version range string such as {@code "[1.0,2.0)"}.
 * '[' / ']' mark inclusive endpoints, '(' / ')' exclusive ones; an empty
 * endpoint means unbounded and is always treated as exclusive.
 *
 * @throws IllegalArgumentException if the string is malformed or the min
 *     version is not strictly less than the max version
 */
public static VersionRange parse(String rangeString) {
    validateRangeString(rangeString);
    Inclusiveness minVersionInclusiveness =
        rangeString.startsWith("[") ? Inclusiveness.INCLUSIVE : Inclusiveness.EXCLUSIVE;
    Inclusiveness maxVersionInclusiveness =
        rangeString.endsWith("]") ? Inclusiveness.INCLUSIVE : Inclusiveness.EXCLUSIVE;
    int commaIndex = rangeString.indexOf(',');
    String minVersionString = rangeString.substring(1, commaIndex).trim();
    Version minVersion;
    if (minVersionString.isEmpty()) {
        // Unbounded lower endpoint: forced exclusive regardless of bracket.
        minVersionInclusiveness = Inclusiveness.EXCLUSIVE;
        minVersion = Version.minimum();
    } else {
        minVersion = Version.fromString(minVersionString);
    }
    String maxVersionString = rangeString.substring(commaIndex + 1, rangeString.length() - 1).trim();
    Version maxVersion;
    if (maxVersionString.isEmpty()) {
        // Unbounded upper endpoint: forced exclusive regardless of bracket.
        maxVersionInclusiveness = Inclusiveness.EXCLUSIVE;
        maxVersion = Version.maximum();
    } else {
        maxVersion = Version.fromString(maxVersionString);
    }
    if (!minVersion.isLessThan(maxVersion)) {
        throw new IllegalArgumentException(
            String.format(
                "Min version in range must be less than max version in range, got '%s'",
                rangeString));
    }
    return builder()
        .setMinVersion(minVersion)
        .setMinVersionInclusiveness(minVersionInclusiveness)
        .setMaxVersion(maxVersion)
        .setMaxVersionInclusiveness(maxVersionInclusiveness)
        .build();
}
@Test
public void parse_withTheSameRangeEnds_throwsIllegalArgumentException() {
    // A degenerate range whose endpoints are equal must be rejected.
    IllegalArgumentException exception =
        assertThrows(IllegalArgumentException.class, () -> VersionRange.parse("[1.0,1.0]"));
    assertThat(exception)
        .hasMessageThat()
        .isEqualTo("Min version in range must be less than max version in range, got '[1.0,1.0]'");
}
/**
 * Serializes a metadata update to JSON without pretty-printing.
 */
public static String toJson(MetadataUpdate metadataUpdate) {
    final boolean pretty = false;
    return toJson(metadataUpdate, pretty);
}
@Test
public void testSetLocationToJson() {
    // Verifies MetadataUpdate.SetLocation serializes to the expected JSON.
    String action = MetadataUpdateParser.SET_LOCATION;
    String location = "s3://bucket/warehouse/tbl_location";
    String expected = String.format("{\"action\":\"%s\",\"location\":\"%s\"}", action, location);
    MetadataUpdate update = new MetadataUpdate.SetLocation(location);
    String actual = MetadataUpdateParser.toJson(update);
    assertThat(actual)
        // Fixed copy-pasted description: this test covers SetLocation, not RemoveProperties.
        .as("Set location should serialize to the correct JSON value")
        .isEqualTo(expected);
}
/**
 * Describes the requested config resources. Resources are partitioned by the
 * broker each must be fetched from (a null node meaning any broker can serve
 * the request), one RPC is issued per node, and one future per resource is
 * returned.
 */
@Override
public DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options) {
    // Partition the requested config resources based on which broker they must be sent to with the
    // null broker being used for config resources which can be obtained from any broker
    final Map<Integer, Map<ConfigResource, KafkaFutureImpl<Config>>> nodeFutures = new HashMap<>(configResources.size());
    for (ConfigResource resource : configResources) {
        Integer broker = nodeFor(resource);
        nodeFutures.compute(broker, (key, value) -> {
            if (value == null) {
                value = new HashMap<>();
            }
            value.put(resource, new KafkaFutureImpl<>());
            return value;
        });
    }

    final long now = time.milliseconds();
    for (Map.Entry<Integer, Map<ConfigResource, KafkaFutureImpl<Config>>> entry : nodeFutures.entrySet()) {
        final Integer node = entry.getKey();
        Map<ConfigResource, KafkaFutureImpl<Config>> unified = entry.getValue();

        runnable.call(new Call("describeConfigs", calcDeadlineMs(now, options.timeoutMs()),
            node != null ? new ConstantNodeIdProvider(node, true) : new LeastLoadedBrokerOrActiveKController()) {

            @Override
            DescribeConfigsRequest.Builder createRequest(int timeoutMs) {
                return new DescribeConfigsRequest.Builder(new DescribeConfigsRequestData()
                    .setResources(unified.keySet().stream()
                        .map(config -> new DescribeConfigsRequestData.DescribeConfigsResource()
                            .setResourceName(config.name())
                            .setResourceType(config.type().id())
                            .setConfigurationKeys(null))
                        .collect(Collectors.toList()))
                    .setIncludeSynonyms(options.includeSynonyms())
                    .setIncludeDocumentation(options.includeDocumentation()));
            }

            @Override
            void handleResponse(AbstractResponse abstractResponse) {
                DescribeConfigsResponse response = (DescribeConfigsResponse) abstractResponse;
                for (Map.Entry<ConfigResource, DescribeConfigsResponseData.DescribeConfigsResult> entry : response.resultMap().entrySet()) {
                    ConfigResource configResource = entry.getKey();
                    DescribeConfigsResponseData.DescribeConfigsResult describeConfigsResult = entry.getValue();
                    KafkaFutureImpl<Config> future = unified.get(configResource);
                    if (future == null) {
                        // Response carried a resource we never requested: log and ignore.
                        if (node != null) {
                            log.warn("The config {} in the response from node {} is not in the request", configResource, node);
                        } else {
                            log.warn("The config {} in the response from the least loaded broker is not in the request", configResource);
                        }
                    } else {
                        if (describeConfigsResult.errorCode() != Errors.NONE.code()) {
                            future.completeExceptionally(Errors.forCode(describeConfigsResult.errorCode())
                                .exception(describeConfigsResult.errorMessage()));
                        } else {
                            future.complete(describeConfigResult(describeConfigsResult));
                        }
                    }
                }
                // Fail any future whose resource was missing from the response
                // so callers never hang.
                completeUnrealizedFutures(
                    unified.entrySet().stream(),
                    configResource -> "The node response did not contain a result for config resource " + configResource);
            }

            @Override
            void handleFailure(Throwable throwable) {
                completeAllExceptionally(unified.values(), throwable);
            }
        }, now);
    }

    return new DescribeConfigsResult(
        nodeFutures.entrySet()
            .stream()
            .flatMap(x -> x.getValue().entrySet().stream())
            .collect(Collectors.toMap(
                Map.Entry::getKey,
                Map.Entry::getValue,
                (oldValue, newValue) -> {
                    // Duplicate keys should not occur, throw an exception to signal this issue
                    throw new IllegalStateException(String.format("Duplicate key for values: %s and %s", oldValue, newValue));
                },
                HashMap::new
            ))
    );
}
@Test
public void testDescribeConfigsPartialResponse() {
    // When the broker response omits one of the requested resources, the
    // future for the missing resource must fail rather than hang.
    ConfigResource topic = new ConfigResource(ConfigResource.Type.TOPIC, "topic");
    ConfigResource topic2 = new ConfigResource(ConfigResource.Type.TOPIC, "topic2");
    try (AdminClientUnitTestEnv env = mockClientEnv()) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        env.kafkaClient().prepareResponse(new DescribeConfigsResponse(
            new DescribeConfigsResponseData().setResults(singletonList(new DescribeConfigsResponseData.DescribeConfigsResult()
                .setResourceName(topic.name()).setResourceType(topic.type().id()).setErrorCode(Errors.NONE.code())
                .setConfigs(emptyList())))));
        Map<ConfigResource, KafkaFuture<Config>> result = env.adminClient().describeConfigs(asList(
            topic,
            topic2)).values();
        assertEquals(new HashSet<>(asList(topic, topic2)), result.keySet());
        result.get(topic);
        TestUtils.assertFutureThrows(result.get(topic2), ApiException.class);
    }
}
/** Supplies a fresh processor instance per task, as the supplier contract requires. */
@Override
public Processor<K, Change<V>, KO, SubscriptionWrapper<K>> get() {
    return new UnbindChangeProcessor();
}
@Test
public void leftJoinShouldPropagateChangeFromNullFKToNonNullFKValue() {
    // A record whose FK changes from null to fk1 must emit exactly one
    // subscription keyed by fk1 that propagates null if no FK value exists.
    final MockInternalNewProcessorContext<String, SubscriptionWrapper<String>> context = new MockInternalNewProcessorContext<>();
    leftJoinProcessor.init(context);
    context.setRecordMetadata("topic", 0, 0);
    final LeftValue leftRecordValue = new LeftValue(fk1);
    leftJoinProcessor.process(new Record<>(pk, new Change<>(leftRecordValue, new LeftValue(null)), 0));
    assertThat(context.forwarded().size(), is(1));
    assertThat(
        context.forwarded().get(0).record(),
        is(new Record<>(fk1, new SubscriptionWrapper<>(hash(leftRecordValue), PROPAGATE_NULL_IF_NO_FK_VAL_AVAILABLE, pk, 0), 0))
    );
}
/**
 * Assembles the complete Elasticsearch settings map by applying each
 * configuration concern in turn, then logs the resulting listen addresses.
 */
public Map<String, String> build() {
    Map<String, String> settings = new HashMap<>();
    configureFileSystem(settings);
    configureNetwork(settings);
    configureCluster(settings);
    configureSecurity(settings);
    configureOthers(settings);
    LOGGER.info("Elasticsearch listening on [HTTP: {}:{}, TCP: {}:{}]",
        settings.get(ES_HTTP_HOST_KEY), settings.get(ES_HTTP_PORT_KEY),
        settings.get(ES_TRANSPORT_HOST_KEY), settings.get(ES_TRANSPORT_PORT_KEY));
    return settings;
}
@Test public void set_discovery_settings_if_cluster_is_enabled() throws Exception { Props props = minProps(CLUSTER_ENABLED); props.set(CLUSTER_ES_HOSTS.getKey(), "1.2.3.4:9000,1.2.3.5:8080"); Map<String, String> settings = new EsSettings(props, new EsInstallation(props), system).build(); assertThat(settings) .containsEntry("discovery.seed_hosts", "1.2.3.4:9000,1.2.3.5:8080") .containsEntry("discovery.initial_state_timeout", "120s"); }
/**
 * Deletes the connection identified by the given id. The connection is
 * resolved first so a missing id surfaces before any deletion is attempted.
 */
@Operation(summary = "remove the connection")
@DeleteMapping(value = "{id}")
public void remove(@PathVariable("id") Long id) {
    final Connection existing = connectionService.getConnectionById(id);
    connectionService.deleteConnectionById(existing);
}
@Test
public void removeConnection() {
    // remove() must resolve the connection by id and then delete it, each exactly once.
    when(connectionServiceMock.getConnectionById(anyLong())).thenReturn(getNewConnection());
    controllerMock.remove(1L);
    verify(connectionServiceMock, times(1)).deleteConnectionById(any(Connection.class));
    verify(connectionServiceMock, times(1)).getConnectionById(anyLong());
}
/**
 * Three-valued AND: an unknown (null) left operand defers to the right one,
 * otherwise plain boolean AND applies.
 *
 * @return the combined value; may be null when both operands are null
 */
static Boolean andOperator(Boolean aBoolean, Boolean aBoolean2) {
    logger.trace("andOperator {} {}", aBoolean, aBoolean2);
    // Returning the operand directly avoids the auto-unboxing NPE the original
    // mixed boolean/Boolean ternary caused when both operands were null
    // (JLS 15.25 types such a conditional as primitive boolean).
    if (aBoolean == null) {
        return aBoolean2;
    }
    return aBoolean && aBoolean2;
}
@Test
void andOperator() {
    // Exercises the three-valued AND table: a null left operand defers to the
    // right operand; otherwise plain boolean AND applies.
    Boolean aBoolean = null;
    boolean aBoolean2 = true;
    assertThat(KiePMMLCompoundPredicate.andOperator(aBoolean, aBoolean2)).isTrue();
    aBoolean2 = false;
    assertThat(KiePMMLCompoundPredicate.andOperator(aBoolean, aBoolean2)).isFalse();
    aBoolean = false;
    aBoolean2 = false;
    assertThat(KiePMMLCompoundPredicate.andOperator(aBoolean, aBoolean2)).isFalse();
    aBoolean = true;
    aBoolean2 = false;
    assertThat(KiePMMLCompoundPredicate.andOperator(aBoolean, aBoolean2)).isFalse();
    aBoolean = false;
    aBoolean2 = true;
    assertThat(KiePMMLCompoundPredicate.andOperator(aBoolean, aBoolean2)).isFalse();
    aBoolean = true;
    aBoolean2 = true;
    assertThat(KiePMMLCompoundPredicate.andOperator(aBoolean, aBoolean2)).isTrue();
}
/** Returns whether this feature is enabled. */
public boolean isEnabled() {
    return enabled;
}
@Test
public void test_constructor_enabledDefaultBehavior() {
    // Default: disabled when the system property is absent.
    ClientTpcConfig config = new ClientTpcConfig();
    assertFalse(config.isEnabled());
    try {
        // The constructor reads the system property at construction time.
        System.setProperty("hazelcast.client.tpc.enabled", "true");
        config = new ClientTpcConfig();
        assertTrue(config.isEnabled());
        System.setProperty("hazelcast.client.tpc.enabled", "false");
        config = new ClientTpcConfig();
        assertFalse(config.isEnabled());
    } finally {
        // Clear the property so this test does not pollute later tests
        // (the original left "false" set process-wide).
        System.clearProperty("hazelcast.client.tpc.enabled");
    }
}
/**
 * Parses job IDs from the underlying status/err source.
 *
 * @return the list of job IDs found
 * @throws IOException if the source cannot be read
 */
abstract List<String> parseJobID() throws IOException;
@Test
public void testParseJar() throws IOException {
    // Parses the sample 'jar' status file and expects exactly one job id.
    String errFileName = "src/test/data/status/jar";
    JarJobIDParser jarJobIDParser = new JarJobIDParser(errFileName, new Configuration());
    List<String> jobs = jarJobIDParser.parseJobID();
    // assertEquals takes (expected, actual); the original call had them swapped.
    Assert.assertEquals(1, jobs.size());
}
/**
 * Casts a REAL (a float stored in the low 32 bits of a long) to INTEGER,
 * rounding half-up.
 *
 * @throws PrestoException with INVALID_CAST_ARGUMENT when the value (NaN,
 *     infinity, or out of int range) cannot be represented as an integer
 */
@ScalarOperator(CAST)
@SqlType(StandardTypes.INTEGER)
public static long castToInteger(@SqlType(StandardTypes.REAL) long value) {
    try {
        return DoubleMath.roundToInt(intBitsToFloat((int) value), HALF_UP);
    } catch (ArithmeticException e) {
        throw new PrestoException(INVALID_CAST_ARGUMENT, format("Unable to cast %s to integer", intBitsToFloat((int) value)), e);
    }
}
@Test
public void testCastToInteger() {
    // Rounding is half-up; boundary values around +/-2^31 and non-finite
    // inputs are exercised explicitly.
    assertFunction("CAST(REAL'754.2008' AS INTEGER)", INTEGER, 754);
    assertFunction("CAST(REAL'-754.1985' AS INTEGER)", INTEGER, -754);
    assertFunction("CAST(REAL'9.99' AS INTEGER)", INTEGER, 10);
    assertFunction("CAST(REAL'-0.0' AS INTEGER)", INTEGER, 0);
    assertFunction("cast(REAL '" + Math.nextDown(0x1.0p31f) + "' as integer)", INTEGER, (int) Math.nextDown(0x1.0p31f));
    assertInvalidFunction("cast(REAL '" + 0x1.0p31 + "' as integer)", INVALID_CAST_ARGUMENT);
    assertInvalidFunction("cast(REAL '" + Math.nextUp(0x1.0p31f) + "' as integer)", INVALID_CAST_ARGUMENT);
    assertInvalidFunction("cast(REAL '" + Math.nextDown(-0x1.0p31f) + "' as integer)", INVALID_CAST_ARGUMENT);
    assertFunction("cast(REAL '" + -0x1.0p31 + "' as integer)", INTEGER, (int) -0x1.0p31);
    assertFunction("cast(REAL '" + Math.nextUp(-0x1.0p31f) + "' as integer)", INTEGER, (int) Math.nextUp(-0x1.0p31f));
    assertInvalidFunction("cast(9.3E9 as integer)", INVALID_CAST_ARGUMENT, "Unable to cast 9.3E9 to integer");
    assertInvalidFunction("cast(-9.3E9 as integer)", INVALID_CAST_ARGUMENT, "Unable to cast -9.3E9 to integer");
    assertInvalidFunction("CAST(cast(nan() AS REAL) as INTEGER)", INVALID_CAST_ARGUMENT, "Unable to cast NaN to integer");
    assertInvalidFunction("CAST(cast(infinity() AS REAL) as INTEGER)", INVALID_CAST_ARGUMENT, "Unable to cast Infinity to integer");
    assertInvalidFunction("CAST(cast(-infinity() AS REAL) as INTEGER)", INVALID_CAST_ARGUMENT, "Unable to cast -Infinity to integer");
    assertInvalidFunction("CAST(REAL '" + (Integer.MAX_VALUE + 0.6) + "' as INTEGER)", INVALID_CAST_ARGUMENT, "Unable to cast 2.14748365E9 to integer");
}
@POST
@Path("/{connector}/tasks/{task}/restart")
@Operation(summary = "Restart the specified task for the specified connector")
public void restartTask(final @PathParam("connector") String connector,
                        final @PathParam("task") Integer task,
                        final @Context HttpHeaders headers,
                        final @Parameter(hidden = true) @QueryParam("forward") Boolean forward) throws Throwable {
    FutureCallback<Void> cb = new FutureCallback<>();
    ConnectorTaskId taskId = new ConnectorTaskId(connector, task);
    herder.restartTask(taskId, cb);
    // Complete locally, or forward the request to the leader when this worker
    // does not own the connector.
    requestHandler.completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", headers, null, new TypeReference<Void>() {
    }, forward);
}
@Test
public void testRestartTaskLeaderRedirect() throws Throwable {
    // A non-leader worker must forward the restart request to the leader URL.
    ConnectorTaskId taskId = new ConnectorTaskId(CONNECTOR_NAME, 0);
    final ArgumentCaptor<Callback<Void>> cb = ArgumentCaptor.forClass(Callback.class);
    expectAndCallbackNotLeaderException(cb).when(herder)
        .restartTask(eq(taskId), cb.capture());
    when(restClient.httpRequest(eq(LEADER_URL + "connectors/" + CONNECTOR_NAME + "/tasks/0/restart?forward=true"),
        eq("POST"), isNull(), isNull(), any()))
        .thenReturn(new RestClient.HttpResponse<>(202, new HashMap<>(), null));
    connectorsResource.restartTask(CONNECTOR_NAME, 0, NULL_HEADERS, null);
}
/**
 * FEEL number() conversion: parses a string into a BigDecimal after stripping
 * the grouping separator and normalizing the decimal separator to '.'.
 * Returns an InvalidParametersEvent error for a null input, invalid
 * separators, or an unparseable result.
 */
public FEELFnResult<BigDecimal> invoke(@ParameterName("from") String from, @ParameterName("grouping separator") String group,
                                       @ParameterName("decimal separator") String decimal) {
    if ( from == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null"));
    }
    // Only space, dot and comma are legal grouping separators.
    if ( group != null && !group.equals( " " ) && !group.equals( "." ) && !group.equals( "," ) ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "group", "not a valid one, can only be one of: dot ('.'), comma (','), space (' ') "));
    }
    if ( decimal != null ) {
        if (!decimal.equals( "." ) && !decimal.equals( "," )) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "not a valid one, can only be one of: dot ('.'), comma (',') "));
        } else if (group != null && decimal.equals( group )) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "decimal", "cannot be the same as parameter 'group' "));
        }
    }
    // Separators reach replaceAll as regex; the prepended backslash makes '.'
    // and ',' literal (a backslash-escaped space is also a literal space).
    if ( group != null ) {
        from = from.replaceAll( "\\" + group, "" );
    }
    if ( decimal != null ) {
        from = from.replaceAll( "\\" + decimal, "." );
    }
    BigDecimal result = NumberEvalHelper.getBigDecimalOrNull(from );
    if( from != null && result == null ) {
        // conversion failed
        return FEELFnResult.ofError( new InvalidParametersEvent(Severity.ERROR, "unable to calculate final number result" ) );
    } else {
        return FEELFnResult.ofResult( result );
    }
}
@Test
void invokeNumberWithGroupCharDot() {
    // With '.' as the grouping separator it is stripped, not treated as a decimal point.
    FunctionTestUtil.assertResult(numberFunction.invoke("9.876", ".", null), BigDecimal.valueOf(9876));
    FunctionTestUtil.assertResult(numberFunction.invoke("9.876.000", ".", null), BigDecimal.valueOf(9876000));
}
/**
 * Inverse CDF. Starting from an initial guess, exponentially widens a bracket
 * [nl, nu] around the target probability, then delegates to the bracketed
 * quantile search.
 *
 * @throws IllegalArgumentException if p is outside [0, 1]
 */
@Override
public double quantile(double p) {
    if (p < 0.0 || p > 1.0) {
        throw new IllegalArgumentException("Invalid p: " + p);
    }
    // Initial guess scaled to the distribution's success probability.
    int n = (int) Math.max(Math.sqrt(1 / this.p), 5.0);
    int nl, nu, inc = 1;
    if (p < cdf(n)) {
        // Walk down with doubling steps until cdf(n) <= p.
        do {
            n = Math.max(n - inc, 0);
            inc *= 2;
        } while (p < cdf(n));
        nl = n;
        nu = n + inc / 2;
    } else {
        // Walk up with doubling steps until cdf(n) >= p.
        do {
            n += inc;
            inc *= 2;
        } while (p > cdf(n));
        nu = n;
        nl = n - inc / 2;
    }
    return quantile(p, nl, nu);
}
@Test
public void testQuantile() {
    System.out.println("quantile");
    // Quantiles of the shifted geometric distribution with p = 0.3.
    ShiftedGeometricDistribution instance = new ShiftedGeometricDistribution(0.3);
    instance.rand();
    assertEquals(1, instance.quantile(0.01), 1E-6);
    assertEquals(1, instance.quantile(0.1), 1E-6);
    assertEquals(1, instance.quantile(0.2), 1E-6);
    assertEquals(1, instance.quantile(0.3), 1E-6);
    assertEquals(2, instance.quantile(0.4), 1E-6);
    assertEquals(3, instance.quantile(0.6), 1E-6);
    assertEquals(5, instance.quantile(0.8), 1E-6);
    assertEquals(7, instance.quantile(0.9), 1E-6);
    assertEquals(13, instance.quantile(0.99), 1E-6);
}
/**
 * Creates a folder. A top-level path becomes a new bucket using the region
 * and storage class from the transfer status; any deeper path is represented
 * by a zero-byte placeholder object.
 */
@Override
public Path mkdir(final Path folder, final TransferStatus status) throws BackgroundException {
    try {
        if(containerService.isContainer(folder)) {
            final Storage.Buckets.Insert request = session.getClient().buckets().insert(session.getHost().getCredentials().getUsername(), new Bucket()
                .setLocation(status.getRegion())
                .setStorageClass(status.getStorageClass())
                .setName(containerService.getContainer(folder).getName()));
            final Bucket bucket = request.execute();
            final EnumSet<Path.Type> type = EnumSet.copyOf(folder.getType());
            type.add(Path.Type.volume);
            return folder.withType(type).withAttributes(new GoogleStorageAttributesFinderFeature(session).toAttributes(bucket));
        }
        else {
            final EnumSet<Path.Type> type = EnumSet.copyOf(folder.getType());
            type.add(Path.Type.placeholder);
            // Add placeholder object
            return new GoogleStorageTouchFeature(session).withWriter(writer).touch(folder.withType(type), status.withMime(MIMETYPE));
        }
    }
    catch(IOException e) {
        throw new GoogleStorageExceptionMappingService().map("Cannot create folder {0}", e, folder);
    }
}
@Test
public void testCreatePlaceholderVersioningDeleteWithMarker() throws Exception {
    // In a versioned bucket, deleting a placeholder folder adds a delete marker:
    // lookups against the latest version (empty attributes) fail, while lookups
    // with the original version attributes still succeed.
    final Path bucket = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path test = new GoogleStorageDirectoryFeature(session).mkdir(new Path(bucket,
        new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    assertTrue(test.getType().contains(Path.Type.placeholder));
    assertTrue(new GoogleStorageFindFeature(session).find(test));
    assertTrue(new GoogleStorageObjectListService(session).list(bucket, new DisabledListProgressListener()).contains(test));
    // Add delete marker
    new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(new Path(test).withAttributes(PathAttributes.EMPTY)),
        new DisabledLoginCallback(), new Delete.DisabledCallback());
    assertFalse(new DefaultFindFeature(session).find(new Path(test).withAttributes(PathAttributes.EMPTY)));
    assertFalse(new GoogleStorageFindFeature(session).find(new Path(test).withAttributes(PathAttributes.EMPTY)));
    assertTrue(new DefaultFindFeature(session).find(test));
    assertTrue(new GoogleStorageFindFeature(session).find(test));
}
/**
 * (Re)initializes this reusable entry with the given key/value and expiry
 * settings, clearing the modification flag.
 *
 * @param ttl new time-to-live to apply when the entry is stored
 * @param changeExpiryOnUpdate whether an update should reset the entry's expiry
 * @return this instance, for chaining
 */
public LazyMapEntry init(InternalSerializationService serializationService, Object key, Object value,
                         Extractors extractors, long ttl, boolean changeExpiryOnUpdate) {
    super.init(serializationService, key, value, extractors);
    this.modified = false;
    this.newTtl = ttl;
    this.changeExpiryOnUpdate = changeExpiryOnUpdate;
    return this;
}
@Test
public void test_init() {
    Data keyData = serializationService.toData("keyData");
    Data valueData = serializationService.toData("valueData");
    entry.init(serializationService, keyData, valueData, null);
    Object valueObject = entry.getValue();
    // Re-initializing with the deserialized object must drop the stale Data.
    entry.init(serializationService, keyData, valueObject, null);
    // Intentional identity comparison: the old Data instance must be gone.
    assertTrue("Old valueData should not be here", valueData != entry.getValueData());
}
@Override protected File getFile(HandlerRequest<EmptyRequestBody> handlerRequest) { if (logDir == null) { return null; } // wrapping around another File instantiation is a simple way to remove any path information // - we're // solely interested in the filename String filename = new File(handlerRequest.getPathParameter(LogFileNamePathParameter.class)).getName(); return new File(logDir, filename); }
@Test
void testGetJobManagerCustomLogsNotExistingFile() throws Exception {
    // A request for a non-existing file still yields a non-null File handle
    // pointing at a path that does not exist.
    File actualFile = testInstance.getFile(createHandlerRequest("not-existing"));
    assertThat(actualFile).isNotNull().doesNotExist();
}
/** Stores a measure by delegating to the component-aware save path. */
@Override
public void store(Measure newMeasure) {
    saveMeasure(newMeasure.inputComponent(), (DefaultMeasure<?>) newMeasure);
}
/**
 * A measure taken on a module (as opposed to a file or the project) must be
 * silently dropped: nothing is forwarded to the report publisher.
 */
@Test
public void shouldIgnoreMeasuresOnModules() throws IOException {
    ProjectDefinition module = ProjectDefinition.create().setBaseDir(temp.newFolder()).setWorkDir(temp.newFolder());
    ProjectDefinition root = ProjectDefinition.create().addSubProject(module);
    underTest.store(new DefaultMeasure()
            .on(new DefaultInputModule(module))
            .forMetric(CoreMetrics.LINES)
            .withValue(10));
    // No interaction expected: module-level measures are ignored
    verifyNoMoreInteractions(reportPublisher);
}
/**
 * Encodes the hgv access restriction of an OSM way into the edge flags.
 * <p>
 * If an {@code hgv:conditional} tag of the form {@code <value> @ <condition>}
 * is present and the condition resolves to a 3.5 t weight limit, the value
 * before the {@code @} wins; otherwise the plain {@code hgv} tag is used.
 */
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
    final String conditional = way.getTag("hgv:conditional", "");
    final int atPos = conditional.indexOf('@');
    final Hgv hgv;
    if (atPos > 0 && conditionalWeightToTons(conditional) == 3.5) {
        // e.g. "destination @ (weight > 3.5)" -> take the part before the '@'
        hgv = Hgv.find(conditional.substring(0, atPos).trim());
    } else {
        hgv = Hgv.find(way.getTag("hgv"));
    }
    hgvEnc.setEnum(false, edgeId, edgeIntAccess, hgv);
}
/**
 * A plain {@code hgv=destination} tag (no conditional) must be encoded as
 * {@code Hgv.DESTINATION} on the edge.
 */
@Test
public void testSimpleTags() {
    EdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1);
    ReaderWay readerWay = new ReaderWay(1);
    readerWay.setTag("highway", "primary");
    readerWay.setTag("hgv", "destination");
    parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
    assertEquals(Hgv.DESTINATION, hgvEnc.getEnum(false, edgeId, edgeIntAccess));
}
/**
 * Looks up the earliest available offset for each given partition, using the
 * consumer's configured default API timeout. Delegates to the timeout-taking
 * overload.
 */
@Override
public Map<TopicPartition, Long> beginningOffsets(Collection<TopicPartition> partitions) {
    return beginningOffsets(partitions, Duration.ofMillis(defaultApiTimeoutMs));
}
/**
 * When the underlying application event expires, beginningOffsets must
 * surface a TimeoutException whose message reports the caller-supplied
 * timeout, not the internal reaper message.
 */
@Test
public void testBeginningOffsetsTimeoutException() {
    consumer = newConsumer();
    long timeout = 100;
    // Simulate the event reaper expiring the lookup before completion
    doThrow(new TimeoutException("Event did not complete in time and was expired by the reaper"))
            .when(applicationEventHandler).addAndGet(any());
    Throwable t = assertThrows(
            TimeoutException.class,
            () -> consumer.beginningOffsets(Collections.singleton(new TopicPartition("topic", 5)), Duration.ofMillis(timeout)));
    assertEquals("Failed to get offsets by times in " + timeout + "ms", t.getMessage());
}
/**
 * Creates a lazily-evaluated issue input for the given component; issues are
 * read from the raw report only when the Input is first queried.
 */
public Input<DefaultIssue> create(Component component) {
    return new RawLazyInput(component);
}
/**
 * An external issue that omits severity and type must inherit both from its
 * rule definition.
 */
@Test
void create_whenSeverityAndTypeNotProvided_shouldTakeFromTheRule() {
    // Rule carries the defaults the issue is expected to fall back to
    registerRule(RuleKey.of("external_eslint", "S001"), "rule", r -> {
        r.setType(RuleType.BUG);
        r.setSeverity(Severity.MAJOR);
    });
    // Issue deliberately created with null type and severity
    ScannerReport.ExternalIssue reportIssue = createIssue(null, null);
    reportReader.putExternalIssues(FILE.getReportAttributes().getRef(), asList(reportIssue));
    Input<DefaultIssue> input = underTest.create(FILE);
    Collection<DefaultIssue> issues = input.getIssues();
    assertThat(issues).hasSize(1);
    DefaultIssue issue = Iterators.getOnlyElement(issues.iterator());
    assertThat(issue.type()).isEqualTo(RuleType.BUG);
    assertThat(issue.severity()).isEqualTo(Severity.MAJOR);
}
/**
 * Adds the object with the given score, blocking until the async operation
 * completes.
 *
 * @return true if the element was newly added (per the async counterpart's
 *         contract), false if it was already present
 */
@Override
public boolean add(double score, V object) {
    return get(addAsync(score, object));
}
/**
 * Elements inserted in arbitrary order must iterate in ascending score order,
 * regardless of insertion order.
 */
@Test
public void testSort() {
    RScoredSortedSet<Integer> set = redisson.getScoredSortedSet("simple");
    // Insert out of order on purpose; scores determine the final ordering
    Assertions.assertTrue(set.add(4, 2));
    Assertions.assertTrue(set.add(5, 3));
    Assertions.assertTrue(set.add(3, 1));
    Assertions.assertTrue(set.add(6, 4));
    Assertions.assertTrue(set.add(1000, 10));
    Assertions.assertTrue(set.add(1, -1));
    Assertions.assertTrue(set.add(2, 0));
    // Values ordered by score: 1,2,3,4,5,6,1000
    assertThat(set).containsExactly(-1, 0, 1, 2, 3, 4, 10);
}
/**
 * Appends a new loss-report record to the underlying buffer, if it fits.
 * <p>
 * Layout per record: fixed fields (bytes lost, first/last observation
 * timestamps, session id, stream id, observation count) followed by the
 * channel and source strings, each length-prefixed ASCII with the channel
 * aligned to an int boundary.
 *
 * @param initialBytesLost bytes lost in the first observation
 * @param timestampMs      timestamp used for both first and last observation
 * @param sessionId        session the loss was observed on
 * @param streamId         stream the loss was observed on
 * @param channel          channel URI
 * @param source           source address string
 * @return a handle to the new entry, or {@code null} if the buffer lacks
 *         capacity for the record
 */
public ReportEntry createEntry(
    final long initialBytesLost,
    final long timestampMs,
    final int sessionId,
    final int streamId,
    final String channel,
    final String source)
{
    ReportEntry reportEntry = null;

    // Fixed header + int-aligned length-prefixed channel + length-prefixed source
    final int requiredCapacity =
        CHANNEL_OFFSET + BitUtil.align(SIZE_OF_INT + channel.length(), SIZE_OF_INT) +
        SIZE_OF_INT + source.length();

    if (requiredCapacity <= (buffer.capacity() - nextRecordOffset))
    {
        final int offset = nextRecordOffset;

        buffer.putLong(offset + TOTAL_BYTES_LOST_OFFSET, initialBytesLost);
        buffer.putLong(offset + FIRST_OBSERVATION_OFFSET, timestampMs);
        buffer.putLong(offset + LAST_OBSERVATION_OFFSET, timestampMs);
        buffer.putInt(offset + SESSION_ID_OFFSET, sessionId);
        buffer.putInt(offset + STREAM_ID_OFFSET, streamId);

        final int encodedChannelLength = buffer.putStringAscii(offset + CHANNEL_OFFSET, channel);
        buffer.putStringAscii(
            offset + CHANNEL_OFFSET + BitUtil.align(encodedChannelLength, SIZE_OF_INT), source);

        // Ordered store of the observation count is the last write — presumably this
        // publishes the fully-written record to concurrent readers; TODO confirm
        buffer.putLongOrdered(offset + OBSERVATION_COUNT_OFFSET, 1);

        reportEntry = new ReportEntry(buffer, offset);
        // Next record starts on an entry-aligned boundary
        nextRecordOffset += BitUtil.align(requiredCapacity, ENTRY_ALIGNMENT);
    }

    return reportEntry;
}
/**
 * Creating an entry must write all record fields in the documented order,
 * finishing with the ordered store of the observation count.
 */
@Test
void shouldCreateEntry() {
    final long initialBytesLost = 32;
    final int timestampMs = 7;
    final int sessionId = 3;
    final int streamId = 1;
    final String channel = "aeron:udp://stuff";
    final String source = "127.0.0.1:8888";
    assertNotNull(lossReport.createEntry(initialBytesLost, timestampMs, sessionId, streamId, channel, source));
    // Verify the exact write sequence against the mocked buffer
    final InOrder inOrder = inOrder(buffer);
    inOrder.verify(buffer).putLong(TOTAL_BYTES_LOST_OFFSET, initialBytesLost);
    inOrder.verify(buffer).putLong(FIRST_OBSERVATION_OFFSET, timestampMs);
    inOrder.verify(buffer).putLong(LAST_OBSERVATION_OFFSET, timestampMs);
    inOrder.verify(buffer).putInt(SESSION_ID_OFFSET, sessionId);
    inOrder.verify(buffer).putInt(STREAM_ID_OFFSET, streamId);
    inOrder.verify(buffer).putStringAscii(CHANNEL_OFFSET, channel);
    // Source string lands just past the int-aligned, length-prefixed channel
    inOrder.verify(buffer).putStringAscii(
            CHANNEL_OFFSET + BitUtil.align(SIZE_OF_INT + channel.length(), SIZE_OF_INT), source);
    // Observation count is the final, ordered write
    inOrder.verify(buffer).putLongOrdered(OBSERVATION_COUNT_OFFSET, 1L);
}
/**
 * Returns the request URL without any query string.
 * <p>
 * Defensive: everything from the first {@code '?'} (if present) is stripped
 * from the URL reported by the underlying request.
 */
@Override
public String getRequestURL() {
    final String url = request.getRequestURL().toString();
    final int queryStart = url.indexOf('?');
    return queryStart == -1 ? url : url.substring(0, queryStart);
}
/**
 * getRequestURL() must strip the query string from the URL reported by the
 * underlying servlet request.
 */
@Test
public void testGetRequestUrl() throws Exception {
    // Mocked request deliberately returns a URL that still contains a query string
    when(request.getRequestURL()).thenReturn(new StringBuffer("https://pac4j.org?name=value&name2=value2"));
    WebContext context = new JEEContext(request, response);
    assertEquals("https://pac4j.org", context.getRequestURL());
}
/**
 * Merges {@code paramsToMerge} into {@code params} in place.
 * <p>
 * Merge rules per parameter name:
 * <ul>
 *   <li>literal MAP values: merged recursively, with the merge context's
 *       parent mode taken from the existing (or incoming) definition;</li>
 *   <li>literal STRING_MAP values: shallow merge — incoming keys overwrite
 *       existing ones;</li>
 *   <li>everything else: the incoming value replaces the existing one,
 *       subject to {@code buildMergedParamDefinition}'s mode/validation
 *       rules.</li>
 * </ul>
 * Names present only in {@code params} are left untouched.
 *
 * @param params        base parameter map, mutated in place
 * @param paramsToMerge parameters to fold in; may be null (no-op)
 * @param context       merge context carrying caller/mode information
 */
public static void mergeParams(
    Map<String, ParamDefinition> params,
    Map<String, ParamDefinition> paramsToMerge,
    MergeContext context) {
  if (paramsToMerge == null) {
    return;
  }
  // Iterate the union of names; entries without an incoming definition are skipped
  Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream())
      .forEach(
          name -> {
            ParamDefinition paramToMerge = paramsToMerge.get(name);
            if (paramToMerge == null) {
              return;
            }
            if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) {
              // Recursive merge of nested literal maps
              Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name);
              Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name);
              mergeParams(
                  baseMap,
                  toMergeMap,
                  MergeContext.copyWithParentMode(
                      context, params.getOrDefault(name, paramToMerge).getMode()));
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, baseMap));
            } else if (paramToMerge.getType() == ParamType.STRING_MAP
                && paramToMerge.isLiteral()) {
              // Shallow merge: incoming string entries overwrite base entries
              Map<String, String> baseMap = stringMapValueOrEmpty(params, name);
              Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name);
              baseMap.putAll(toMergeMap);
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, baseMap));
            } else {
              // Scalar or non-literal: incoming value wins
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, paramToMerge.getValue()));
            }
          });
}
/**
 * Merging a STRING param into a BOOLEAN param must cast the string value
 * (case-insensitively, e.g. 'trUe') into the boolean target type.
 */
@Test
public void testAllowedTypeCastingIntoBoolean() throws JsonProcessingException {
    Map<String, ParamDefinition> allParams =
        ParamsMergeHelperTest.this.parseParamDefMap(
            "{'tomerge': {'type': 'BOOLEAN','value': false, 'name': 'tomerge'}}");
    Map<String, ParamDefinition> paramsToMerge =
        ParamsMergeHelperTest.this.parseParamDefMap(
            "{'tomerge': {'type': 'STRING', 'value': 'trUe', 'name': 'tomerge'}}");
    ParamsMergeHelper.mergeParams(allParams, paramsToMerge, definitionContext);
    assertEquals(1, allParams.size());
    // Mixed-case string 'trUe' must coerce to boolean true
    assertEquals(Boolean.TRUE, allParams.get("tomerge").asBooleanParamDef().getValue());
}
/**
 * Constant folding for bitShiftRight(INT, BIGINT): arithmetic (sign-extending)
 * right shift of the int value.
 * <p>
 * NOTE(review): Java's {@code >>} on an int uses only the low 5 bits of the
 * shift distance (JLS 15.19), so a bigint shift count of e.g. 33 behaves like
 * 1 — confirm this matches the engine's runtime shift semantics.
 */
@ConstantFunction(name = "bitShiftRight", argTypes = {INT, BIGINT}, returnType = INT)
public static ConstantOperator bitShiftRightInt(ConstantOperator first, ConstantOperator second) {
    return ConstantOperator.createInt(first.getInt() >> second.getBigint());
}
/**
 * 10 >> 3 == 1: constant folding of bitShiftRight on INT/BIGINT operands.
 */
@Test
public void bitShiftRightInt() {
    assertEquals(1, ScalarOperatorFunctions.bitShiftRightInt(O_INT_10, O_BI_3).getInt());
}
/**
 * Builds a Getter that extracts a value by invoking the given method,
 * optionally post-processed by the modifier (e.g. a collection reducer
 * suffix such as "[any]").
 * <p>
 * Delegates to the generic newGetter: the method's return type drives type
 * inference, {@code method::invoke} supplies the extraction, and the final
 * factory lambda constructs the MethodGetter with the resolved (element)
 * type.
 *
 * @param object   sample target object used for type inference
 * @param parent   parent getter in a getter chain, or null
 * @param method   method to invoke for extraction
 * @param modifier reducer/modifier suffix applied to the extracted value
 * @return a getter backed by the method
 * @throws Exception if type resolution or a probe invocation fails
 */
public static Getter newMethodGetter(Object object, Getter parent, Method method, String modifier) throws Exception {
    return newGetter(object, parent, modifier, method.getReturnType(), method::invoke,
            (t, et) -> new MethodGetter(parent, method, modifier, t, et));
}
/**
 * With a non-empty collection and an "[any]" reducer suffix, the getter's
 * return type must be inferred from the collection's item type rather than
 * the raw collection type.
 */
@Test
public void newMethodGetter_whenExtractingFromNonEmpty_Collection_AndReducerSuffixInNotEmpty_thenInferTypeFromCollectionItem() throws Exception {
    OuterObject object = new OuterObject("name", new InnerObject("inner"));
    Getter getter = GetterFactory.newMethodGetter(object, null, innersCollectionMethod, "[any]");
    Class<?> returnType = getter.getReturnType();
    // Element type, not the collection type, is expected
    assertEquals(InnerObject.class, returnType);
}
/**
 * Fails every pending write with the given cause and empties the queue.
 * <p>
 * Failing a promise may trigger listeners that enqueue new writes, which
 * "revive" the queue — hence the outer loop re-reads {@code head} and keeps
 * draining until the queue stays empty. Queue state is reset before the
 * promises are failed so re-entrant writes see a consistent empty queue.
 * Messages are released and nodes recycled before each promise is failed.
 *
 * @param cause failure cause applied to every pending promise; must not be null
 */
public void removeAndFailAll(Throwable cause) {
    assert executor.inEventLoop();
    ObjectUtil.checkNotNull(cause, "cause");
    // It is possible for some of the failed promises to trigger more writes. The new writes
    // will "revive" the queue, so we need to clean them up until the queue is empty.
    for (PendingWrite write = head; write != null; write = head) {
        // Detach the current chain and reset counters before failing anything,
        // so re-entrant writes start from a clean queue
        head = tail = null;
        size = 0;
        bytes = 0;
        while (write != null) {
            PendingWrite next = write.next;
            // Release the message and recycle the node before failing the promise
            ReferenceCountUtil.safeRelease(write.msg);
            ChannelPromise promise = write.promise;
            recycle(write, false);
            safeFail(promise, cause);
            write = next;
        }
    }
    assertEmpty();
}
/**
 * Calling removeAndFailAll during flush must fail all queued writes with the
 * injected exception; assertWriteFails checks the expected failure count (3).
 */
@Test
public void testRemoveAndFailAll() {
    assertWriteFails(new TestHandler() {
        @Override
        public void flush(ChannelHandlerContext ctx) throws Exception {
            queue.removeAndFailAll(new TestException());
            super.flush(ctx);
        }
    }, 3);
}
/**
 * Validates an ALTER ENCRYPT RULE statement before it is applied:
 * the altered rules must exist, column names must be valid, and the
 * referenced encryptor algorithm types must be resolvable.
 *
 * @param sqlStatement the ALTER ENCRYPT RULE statement to validate
 */
@Override
public void checkBeforeUpdate(final AlterEncryptRuleStatement sqlStatement) {
    checkToBeAlteredRules(sqlStatement);
    checkColumnNames(sqlStatement);
    checkToBeAlteredEncryptors(sqlStatement);
}
/**
 * An ALTER ENCRYPT RULE statement referencing an unknown encryptor type must
 * fail the pre-update check with ServiceProviderNotFoundException.
 */
@Test
void assertCheckSQLStatementWithoutToBeAlteredEncryptors() {
    EncryptRule rule = mock(EncryptRule.class);
    // Table exists, so the failure can only come from the encryptor lookup
    when(rule.getAllTableNames()).thenReturn(Collections.singleton("t_encrypt"));
    executor.setRule(rule);
    assertThrows(ServiceProviderNotFoundException.class, () -> executor.checkBeforeUpdate(createSQLStatement("INVALID_TYPE")));
}
/**
 * Unregisters a previously registered provider, invalidating and removing its
 * service and scheme binding.
 * <p>
 * Silently a no-op when no AbstractProviderService is registered under the
 * provider's id. Ancillary providers do not own a scheme binding, so the
 * scheme map is only cleaned up for primary providers.
 *
 * @param provider provider to unregister; must not be null
 */
@Override
public synchronized void unregister(P provider) {
    checkNotNull(provider, "Provider cannot be null");
    S service = services.get(provider.id());
    // instanceof also filters out a null (unregistered) service
    if (service instanceof AbstractProviderService) {
        ((AbstractProviderService) service).invalidate();
        services.remove(provider.id());
        providers.remove(provider.id());
        if (!provider.id().isAncillary()) {
            providersByScheme.remove(provider.id().scheme());
        }
    }
}
/**
 * Unregistering a provider that was never registered must be a silent no-op
 * (no exception thrown).
 */
@Test
public void voidUnregistration() {
    TestProviderRegistry registry = new TestProviderRegistry();
    registry.unregister(new TestProvider(new ProviderId("of", "foo")));
}
/**
 * Builds an EndpointConfig from the cluster-level socket properties:
 * keep-alive flag, keep-alive idle/interval/count tuning, and the receive
 * buffer size (KB).
 *
 * @param properties resolved Hazelcast cluster properties
 * @return a new endpoint configuration populated from the socket properties
 */
static EndpointConfig endpointConfigFromProperties(HazelcastProperties properties) {
    final EndpointConfig config = new EndpointConfig();
    config.setSocketRcvBufferSizeKb(properties.getInteger(ClusterProperty.SOCKET_RECEIVE_BUFFER_SIZE));
    config.setSocketKeepAlive(properties.getBoolean(ClusterProperty.SOCKET_KEEP_ALIVE));
    config.setSocketKeepCount(properties.getInteger(ClusterProperty.SOCKET_KEEP_COUNT));
    config.setSocketKeepIdleSeconds(properties.getInteger(ClusterProperty.SOCKET_KEEP_IDLE));
    config.setSocketKeepIntervalSeconds(properties.getInteger(ClusterProperty.SOCKET_KEEP_INTERVAL));
    return config;
}
/**
 * Every socket-related cluster property set on the Config must be reflected
 * in the EndpointConfig built by endpointConfigFromProperties.
 */
@Test
public void testEndpointConfigFromClusterProperty() {
    Config config = new Config();
    config.setProperty(ClusterProperty.SOCKET_KEEP_ALIVE.getName(), "false");
    config.setProperty(ClusterProperty.SOCKET_KEEP_IDLE.getName(), "30");
    config.setProperty(ClusterProperty.SOCKET_KEEP_COUNT.getName(), "5");
    config.setProperty(ClusterProperty.SOCKET_KEEP_INTERVAL.getName(), "6");
    config.setProperty(ClusterProperty.SOCKET_RECEIVE_BUFFER_SIZE.getName(), "512");
    HazelcastProperties properties = new HazelcastProperties(config);
    EndpointConfig endpointConfig = DefaultAddressPicker.endpointConfigFromProperties(properties);
    // Each property must round-trip into the endpoint config
    assertFalse(endpointConfig.isSocketKeepAlive());
    assertEquals(30, endpointConfig.getSocketKeepIdleSeconds());
    assertEquals(5, endpointConfig.getSocketKeepCount());
    assertEquals(6, endpointConfig.getSocketKeepIntervalSeconds());
    assertEquals(512, endpointConfig.getSocketRcvBufferSizeKb());
}