focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Sends {@code request} asynchronously, retrying until either the configured
 * retry budget ({@code rpcClientConfig.retryTimes()}) or the callback's
 * timeout window is exhausted.
 *
 * <p>On total failure the client is marked {@code UNHEALTHY} (triggering an
 * async server switch) and the last captured failure is rethrown, wrapped as
 * a {@link NacosException} if necessary.
 *
 * @param request  the request to send
 * @param callback completion callback; its timeout also bounds the retry loop
 * @throws NacosException if every attempt fails
 */
public void asyncRequest(Request request, RequestCallBack callback) throws NacosException {
    int retryTimes = 0;
    Throwable exceptionToThrow = null;
    long start = System.currentTimeMillis();
    // Retry while both the retry budget and the callback's timeout window allow it.
    while (retryTimes <= rpcClientConfig.retryTimes()
            && System.currentTimeMillis() < start + callback.getTimeout()) {
        boolean waitReconnect = false;
        try {
            if (this.currentConnection == null || !isRunning()) {
                waitReconnect = true;
                throw new NacosException(NacosException.CLIENT_DISCONNECT, "Client not connected.");
            }
            this.currentConnection.asyncRequest(request, callback);
            return;
        } catch (Throwable e) {
            if (waitReconnect) {
                try {
                    // Wait briefly for the client to reconnect before retrying;
                    // capped at 100 ms or a third of the timeout, whichever is smaller.
                    Thread.sleep(Math.min(100, callback.getTimeout() / 3));
                } catch (Exception exception) {
                    // Sleep interruption is ignored: the loop re-checks the deadline anyway.
                }
            }
            LoggerUtils.printIfErrorEnabled(LOGGER,
                    "[{}] Send request fail, request = {}, retryTimes = {}, errorMessage = {}",
                    rpcClientConfig.name(), request, retryTimes, e.getMessage());
            // Remember the most recent failure so it can be surfaced after retries run out.
            exceptionToThrow = e;
        }
        retryTimes++;
    }
    // All attempts failed: flip RUNNING -> UNHEALTHY once and kick off a server switch.
    if (rpcClientStatus.compareAndSet(RpcClientStatus.RUNNING, RpcClientStatus.UNHEALTHY)) {
        switchServerAsyncOnRequestFail();
    }
    if (exceptionToThrow != null) {
        throw (exceptionToThrow instanceof NacosException) ? (NacosException) exceptionToThrow
                : new NacosException(SERVER_ERROR, exceptionToThrow);
    } else {
        throw new NacosException(SERVER_ERROR, "AsyncRequest fail, unknown error");
    }
}
@Test void testAsyncRequestWithoutAnyTry() throws NacosException { assertThrows(NacosException.class, () -> { when(rpcClientConfig.retryTimes()).thenReturn(-1); rpcClient.asyncRequest(null, null); }); }
/**
 * Fetches metrics for the Dataflow job, caching the result once the job has
 * reached a terminal state (the metrics can no longer change after that).
 *
 * @return the job metrics, possibly served from the cache
 * @throws IOException if the Dataflow service call fails
 */
private JobMetrics getJobMetrics() throws IOException {
    if (cachedMetricResults != null) {
        // Metric results have been cached after the job ran.
        return cachedMetricResults;
    }
    JobMetrics result = dataflowClient.getJobMetrics(dataflowPipelineJob.getJobId());
    if (dataflowPipelineJob.getState().isTerminal()) {
        // Job is finished; this query result is final, so cache it for subsequent calls.
        cachedMetricResults = result;
    }
    return result;
}
@Test public void testDistributionUpdatesStreaming() throws IOException { AppliedPTransform<?, ?, ?> myStep2 = mock(AppliedPTransform.class); when(myStep2.getFullName()).thenReturn("myStepName"); BiMap<AppliedPTransform<?, ?, ?>, String> transformStepNames = HashBiMap.create(); transformStepNames.put(myStep2, "s2"); JobMetrics jobMetrics = new JobMetrics(); DataflowClient dataflowClient = mock(DataflowClient.class); when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics); DataflowPipelineJob job = mock(DataflowPipelineJob.class); DataflowPipelineOptions options = mock(DataflowPipelineOptions.class); when(options.isStreaming()).thenReturn(true); when(job.getDataflowOptions()).thenReturn(options); when(job.getState()).thenReturn(State.RUNNING); when(job.getJobId()).thenReturn(JOB_ID); when(job.getTransformStepNames()).thenReturn(transformStepNames); // The parser relies on the fact that one tentative and one committed metric update exist in // the job metrics results. jobMetrics.setMetrics( ImmutableList.of( makeDistributionMetricUpdate( "distributionName", "distributionNamespace", "s2", 18L, 2L, 2L, 16L, false), makeDistributionMetricUpdate( "distributionName", "distributionNamespace", "s2", 18L, 2L, 2L, 16L, true))); DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient); MetricQueryResults result = dataflowMetrics.allMetrics(); try { result.getDistributions().iterator().next().getCommitted(); fail("Expected UnsupportedOperationException"); } catch (UnsupportedOperationException expected) { assertThat( expected.getMessage(), containsString( "This runner does not currently support committed" + " metrics results. Please use 'attempted' instead.")); } assertThat( result.getDistributions(), contains( attemptedMetricsResult( "distributionNamespace", "distributionName", "myStepName", DistributionResult.create(18, 2, 2, 16)))); }
/**
 * Validates and sanitizes a path, rejecting anything outside {@code AGENT_PATH}
 * and stripping every traversal symbol from {@code INVALID_SYMBOL}.
 *
 * <p>Fix: the original made a single {@code replace} pass per symbol, which a
 * nested payload defeats — e.g. {@code "....//"} still collapses to
 * {@code "../"} after one removal, and removing one symbol can reassemble
 * another (e.g. {@code "....\\/"}). The sanitization now iterates until no
 * symbol remains anywhere in the path.
 *
 * @param path the candidate path
 * @return the sanitized path, or the empty string if it is not under AGENT_PATH
 */
public static String validatePath(String path) {
    if (!path.startsWith(AGENT_PATH)) {
        return "";
    }
    String fixPath = path;
    // Repeat until a fixpoint: removing one symbol may reassemble another,
    // so a single pass is not enough to close the traversal hole.
    boolean changed = true;
    while (changed) {
        changed = false;
        for (String symbol : INVALID_SYMBOL) {
            String stripped = fixPath.replace(symbol, "");
            if (!stripped.equals(fixPath)) {
                fixPath = stripped;
                changed = true;
            }
        }
    }
    return fixPath;
}
@Test public void testPath() { String pathValid = FileUtils.class.getProtectionDomain().getCodeSource().getLocation().getPath(); Assert.assertEquals(pathValid, FileUtils.validatePath(pathValid)); String pathInvalid = "/test/path"; Assert.assertEquals("", FileUtils.validatePath(pathInvalid)); String pathInvalidSymbolA = pathValid + "../" + "/test"; Assert.assertEquals(pathValid + "/test", FileUtils.validatePath(pathInvalidSymbolA)); String pathInvalidSymbolB = pathValid + "..\\" + "/test"; Assert.assertEquals(pathValid + "/test", FileUtils.validatePath(pathInvalidSymbolB)); }
/**
 * Static factory returning a fresh {@link Builder} with default settings.
 *
 * @return a new, independent builder instance
 */
public static Builder custom() {
    return new Builder();
}
@Test(expected = IllegalArgumentException.class) public void zeroMaxFailuresShouldFail() { custom().failureRateThreshold(0).build(); }
/**
 * Handles an AUTH_DELETE_USER command: deletes the named user, refusing to
 * delete a SUPER user unless the caller is itself a super user.
 *
 * <p>The async chain is joined before returning, so the response code is
 * fully populated: SUCCESS on deletion (or when the user does not exist —
 * idempotent delete), or the mapped auth error otherwise.
 *
 * @param ctx     netty channel context (unused here, required by the handler signature)
 * @param request the incoming command carrying a {@link DeleteUserRequestHeader}
 * @return the populated response command
 * @throws RemotingCommandException if the request header cannot be decoded
 */
private RemotingCommand deleteUser(ChannelHandlerContext ctx, RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    DeleteUserRequestHeader requestHeader = request.decodeCommandCustomHeader(DeleteUserRequestHeader.class);
    this.brokerController.getAuthenticationMetadataManager().getUser(requestHeader.getUsername())
            .thenCompose(user -> {
                if (user == null) {
                    // Deleting a non-existent user is treated as success (idempotent delete).
                    return CompletableFuture.completedFuture(null);
                }
                if (user.getUserType() == UserType.SUPER && isNotSuperUserLogin(request)) {
                    // Only a super user may delete a super user.
                    throw new AuthenticationException("The super user can only be update by super user");
                }
                return this.brokerController.getAuthenticationMetadataManager().deleteUser(requestHeader.getUsername());
            }).thenAccept(nil -> response.setCode(ResponseCode.SUCCESS))
            .exceptionally(ex -> {
                LOGGER.error("delete user {} error", requestHeader.getUsername(), ex);
                // Maps auth exceptions to the proper response code (e.g. NO_PERMISSION).
                return handleAuthException(response, ex);
            })
            .join();
    return response;
}
@Test public void testDeleteUser() throws RemotingCommandException { when(authenticationMetadataManager.deleteUser(any(String.class))) .thenReturn(CompletableFuture.completedFuture(null)); when(authenticationMetadataManager.getUser(eq("abc"))).thenReturn(CompletableFuture.completedFuture(User.of("abc", "123", UserType.NORMAL))); when(authenticationMetadataManager.getUser(eq("super"))).thenReturn(CompletableFuture.completedFuture(User.of("super", "123", UserType.SUPER))); DeleteUserRequestHeader deleteUserRequestHeader = new DeleteUserRequestHeader(); deleteUserRequestHeader.setUsername("abc"); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.AUTH_DELETE_USER, deleteUserRequestHeader); request.setVersion(441); request.addExtField("AccessKey", "rocketmq"); request.makeCustomHeaderToNet(); RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS); when(authenticationMetadataManager.isSuperUser(eq("rocketmq"))).thenReturn(CompletableFuture.completedFuture(true)); deleteUserRequestHeader = new DeleteUserRequestHeader(); deleteUserRequestHeader.setUsername("super"); request = RemotingCommand.createRequestCommand(RequestCode.AUTH_DELETE_USER, deleteUserRequestHeader); request.setVersion(441); request.addExtField("AccessKey", "rocketmq"); request.makeCustomHeaderToNet(); response = adminBrokerProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS); when(authenticationMetadataManager.isSuperUser(eq("rocketmq"))).thenReturn(CompletableFuture.completedFuture(false)); response = adminBrokerProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.NO_PERMISSION); }
/**
 * Builds a {@link CliExecutor} from a parsed command line: the first
 * unparsed argument is the pipeline definition file; global pipeline config,
 * the Flink environment, savepoint restore settings and any additional JARs
 * are derived from the remaining options.
 *
 * @param commandLine the parsed CLI arguments
 * @return a fully configured executor
 * @throws IllegalArgumentException if no pipeline definition path was given
 * @throws Exception if loading any of the configurations fails
 */
@VisibleForTesting
static CliExecutor createExecutor(CommandLine commandLine) throws Exception {
    // The pipeline definition file would remain unparsed
    List<String> unparsedArgs = commandLine.getArgList();
    if (unparsedArgs.isEmpty()) {
        throw new IllegalArgumentException("Missing pipeline definition file path in arguments. ");
    }
    Path pipelineDefPath = Paths.get(unparsedArgs.get(0));
    // Take the first unparsed argument as the pipeline definition file
    LOG.info("Real Path pipelineDefPath {}", pipelineDefPath);
    // Global pipeline configuration
    Configuration globalPipelineConfig = getGlobalConfig(commandLine);
    // Load Flink environment
    Path flinkHome = getFlinkHome(commandLine);
    Configuration flinkConfig = FlinkEnvironmentUtils.loadFlinkConfiguration(flinkHome);
    // Savepoint restore settings (e.g. -s / -cm / -n flags).
    SavepointRestoreSettings savepointSettings = createSavepointRestoreSettings(commandLine);
    // Additional JARs to ship with the job (repeatable --jar option); empty if absent.
    List<Path> additionalJars = Arrays.stream(
                    Optional.ofNullable(commandLine.getOptionValues(CliFrontendOptions.JAR))
                            .orElse(new String[0]))
            .map(Paths::get)
            .collect(Collectors.toList());
    // Build executor
    return new CliExecutor(
            commandLine,
            pipelineDefPath,
            flinkConfig,
            globalPipelineConfig,
            commandLine.hasOption(CliFrontendOptions.USE_MINI_CLUSTER),
            additionalJars,
            savepointSettings);
}
@Test void testSavePointConfiguration() throws Exception { CliExecutor executor = createExecutor( pipelineDef(), "--flink-home", flinkHome(), "-s", flinkHome() + "/savepoints/savepoint-1", "-cm", "no_claim", "-n"); assertThat(executor.getSavepointSettings().getRestorePath()) .isEqualTo(flinkHome() + "/savepoints/savepoint-1"); assertThat(executor.getSavepointSettings().getRestoreMode()) .isEqualTo(RestoreMode.NO_CLAIM); assertThat(executor.getSavepointSettings().allowNonRestoredState()).isTrue(); }
/**
 * Resolves the logical tables behind the given index segments.
 *
 * <p>For each index, the schema is taken from the index owner when present,
 * otherwise the protocol type's default schema for this database. Indexes
 * that cannot be resolved from metadata are silently skipped.
 *
 * @param database     the sharding database to look up metadata in
 * @param protocolType database protocol type, used for the default schema name
 * @param indexes      index segments extracted from the statement
 * @return qualified tables for every resolvable index, in input order
 */
public static Collection<QualifiedTable> getTableNames(final ShardingSphereDatabase database, final DatabaseType protocolType, final Collection<IndexSegment> indexes) {
    String defaultSchema = new DatabaseTypeRegistry(protocolType).getDefaultSchemaName(database.getName());
    Collection<QualifiedTable> tables = new LinkedList<>();
    for (IndexSegment index : indexes) {
        String schema = index.getOwner().map(owner -> owner.getIdentifier().getValue()).orElse(defaultSchema);
        String indexName = index.getIndexName().getIdentifier().getValue();
        findLogicTableNameFromMetaData(database.getSchema(schema), indexName)
                .ifPresent(table -> tables.add(new QualifiedTable(schema, table)));
    }
    return tables;
}
@Test void assertGetTableNames() { IndexSegment indexSegment = new IndexSegment(0, 0, new IndexNameSegment(0, 0, new IdentifierValue(INDEX_NAME))); Collection<QualifiedTable> actual = IndexMetaDataUtils.getTableNames(buildDatabase(), TypedSPILoader.getService(DatabaseType.class, "FIXTURE"), Collections.singleton(indexSegment)); assertThat(actual.size(), is(1)); assertThat(actual.iterator().next().getSchemaName(), is(DefaultDatabase.LOGIC_NAME)); assertThat(actual.iterator().next().getTableName(), is(TABLE_NAME)); }
public static Builder builder() { return new Builder(ImmutableList.of()); }
@Test public void shouldThrowOnDuplicateHeaderColumnName() { // Given: final Builder builder = LogicalSchema.builder() .headerColumn(H0, Optional.of("key0")); // When: final Exception e = assertThrows( KsqlException.class, () -> builder.headerColumn(H0, Optional.of("key1")) ); // Then: assertThat(e.getMessage(), containsString("Duplicate headers columns found in schema: `h0` BYTES")); }
/**
 * Converts an Avro {@link GenericRecord} into a Beam {@link Row}, following
 * the Beam {@code schema}'s field order.
 *
 * <p>Fields present in the Beam schema but absent from the record's Avro
 * schema are converted from a {@code null} value, so the Beam schema may be a
 * superset of the Avro one.
 *
 * @param record  source Avro record
 * @param schema  target Beam schema
 * @param options conversion options forwarded to {@code convertAvroFormat}
 * @return the converted row
 * @throws IllegalArgumentException if any field fails to convert, with the
 *         original failure preserved as the cause
 */
public static Row toBeamRow(GenericRecord record, Schema schema, ConversionOptions options) {
    List<Object> valuesInOrder =
        schema.getFields().stream()
            .map(field -> {
                try {
                    org.apache.avro.Schema.Field avroField = record.getSchema().getField(field.getName());
                    // Missing Avro field -> convert a null value for that column.
                    Object value = avroField != null ? record.get(avroField.pos()) : null;
                    return convertAvroFormat(field.getType(), value, options);
                } catch (Exception cause) {
                    throw new IllegalArgumentException(
                        "Error converting field " + field + ": " + cause.getMessage(), cause);
                }
            })
            .collect(toList());
    return Row.withSchema(schema).addValues(valuesInOrder).build();
}
@Test public void testToBeamRow_null() { Row beamRow = BigQueryUtils.toBeamRow(FLAT_TYPE, BQ_NULL_FLAT_ROW); assertEquals(NULL_FLAT_ROW, beamRow); }
/**
 * Creates a success result with an empty message and no data payload.
 *
 * @return a successful {@link ShenyuAdminResult}
 */
public static ShenyuAdminResult success() {
    return success("");
}
@Test public void testSuccessWithMsgAndData() { final ShenyuAdminResult result = ShenyuAdminResult.success("msg", "data"); assertEquals(CommonErrorCode.SUCCESSFUL, result.getCode().intValue()); assertEquals("msg", result.getMessage()); assertEquals("data", result.getData()); assertEquals(6658928, result.hashCode()); assertEquals("ShenyuAdminResult{code=200, message='msg', data=data}", result.toString()); }
/**
 * Parses a YAML document into an instance of {@code type}.
 *
 * <p>Synchronized on the class: the shared {@code loader} is reused across
 * calls. NOTE(review): this assumes the loader is not thread-safe — confirm
 * against the YAML engine's documentation.
 *
 * @param value the YAML text to parse
 * @param type  the expected result type
 * @return the parsed object cast to {@code type}
 * @throws ClassCastException if the parsed object is not of {@code type}
 */
public static synchronized <T> T convertToObject(String value, Class<T> type) {
    try {
        return type.cast(loader.loadFromString(value));
    } catch (MarkedYamlEngineException exception) {
        // Re-wrap so error output does not leak sensitive YAML content.
        throw wrapExceptionToHiddenSensitiveData(exception);
    }
}
@SuppressWarnings("unchecked") @Test void testYaml12Features() { // In YAML 1.2, only true and false strings are parsed as booleans (including True and // TRUE); y, yes, on, and their negative counterparts are parsed as strings. String booleanRepresentation = "key1: Yes\n" + "key2: y\n" + "key3: on"; Map<String, String> expectedBooleanRepresentation = new HashMap<>(); expectedBooleanRepresentation.put( "key1", "Yes"); // the value is expected to Boolean#True in YAML 1.1 expectedBooleanRepresentation.put( "key2", "y"); // the value is expected to Boolean#True in YAML 1.1 expectedBooleanRepresentation.put( "key3", "on"); // the value is expected to Boolean#True in YAML 1.1 assertThat(YamlParserUtils.convertToObject(booleanRepresentation, Map.class)) .containsAllEntriesOf(expectedBooleanRepresentation); // In YAML 1.2, underlines '_' cannot be used within numerical values. String underlineInNumber = "key1: 1_000"; assertThat(YamlParserUtils.convertToObject(underlineInNumber, Map.class)) .containsEntry( "key1", "1_000"); // In YAML 1.1, the expected value is number 1000 not a string. // In YAML 1.2, Octal values need a 0o prefix; e.g. 010 is now parsed with the value 10 // rather than 8. String octalNumber1 = "octal: 010"; assertThat(YamlParserUtils.convertToObject(octalNumber1, Map.class)) .containsEntry("octal", 10); // In YAML 1.1, the expected value is number 8. String octalNumber2 = "octal: 0o10"; assertThat(YamlParserUtils.convertToObject(octalNumber2, Map.class)) .containsEntry("octal", 8); // In YAML 1.2, the binary and sexagesimal integer formats have been dropped. String binaryNumber = "binary: 0b101"; assertThat(YamlParserUtils.convertToObject(binaryNumber, Map.class)) .containsEntry( "binary", "0b101"); // In YAML 1.1, the expected value is number 5 not a string. 
String sexagesimalNumber = "sexagesimal: 1:00"; assertThat(YamlParserUtils.convertToObject(sexagesimalNumber, Map.class)) .containsEntry( "sexagesimal", "1:00"); // In YAML 1.1, the expected value is number 60 not a string. // In YAML 1.2, the !!pairs, !!omap, !!set, !!timestamp and !!binary types have been // dropped. String timestamp = "!!timestamp 2001-12-15T02:59:43.1Z"; assertThatThrownBy(() -> YamlParserUtils.convertToObject(timestamp, Object.class)) .isInstanceOf(YamlEngineException.class); }
public static NotificationDispatcherMetadata newMetadata() { return METADATA; }
@Test public void myNewIssues_notification_is_disabled_at_global_level() { NotificationDispatcherMetadata metadata = NewIssuesNotificationHandler.newMetadata(); assertThat(metadata.getProperty(GLOBAL_NOTIFICATION)).isEqualTo("false"); }
/**
 * Returns the sample value at the given quantile.
 *
 * @param quantile a value in [0, 1]
 * @return the value at that quantile, or 0 for an empty snapshot
 * @throws IllegalArgumentException if {@code quantile} is outside [0, 1] or NaN
 */
@Override
public double getValue(double quantile) {
    if (quantile < 0.0 || quantile > 1.0 || Double.isNaN(quantile)) {
        throw new IllegalArgumentException(quantile + " is not in [0..1]");
    }
    if (values.length == 0) {
        return 0.0;
    }
    int posx = Arrays.binarySearch(quantiles, quantile);
    // No exact match: binarySearch returns (-(insertion point) - 1), so this
    // recovers the index of the largest stored quantile below the target.
    if (posx < 0) posx = ((-posx) - 1) - 1;
    // Clamp the index to the valid range of samples.
    if (posx < 1) {
        return values[0];
    }
    if (posx >= values.length) {
        return values[values.length - 1];
    }
    return values[posx];
}
@Test(expected = IllegalArgumentException.class) public void disallowsNegativeQuantile() { snapshot.getValue( -0.5 ); }
public static <N, E> Set<N> reachableNodes( Network<N, E> network, Set<N> startNodes, Set<N> endNodes) { Set<N> visitedNodes = new HashSet<>(); Queue<N> queuedNodes = new ArrayDeque<>(); queuedNodes.addAll(startNodes); // Perform a breadth-first traversal rooted at the input node. while (!queuedNodes.isEmpty()) { N currentNode = queuedNodes.remove(); // If we have already visited this node or it is a terminal node than do not add any // successors. if (!visitedNodes.add(currentNode) || endNodes.contains(currentNode)) { continue; } queuedNodes.addAll(network.successors(currentNode)); } return visitedNodes; }
@Test public void testReachableNodesFromAllRoots() { assertEquals( createNetwork().nodes(), Networks.reachableNodes( createNetwork(), ImmutableSet.of("A", "D", "I", "M", "O"), Collections.<String>emptySet())); }
/**
 * Builds a {@link PlantUmlDiagram} from raw diagram lines: comment lines are
 * stripped first, then components and the dependencies between them are
 * parsed from the remaining lines.
 *
 * @param rawDiagramLines the diagram source, including comment lines
 * @return the assembled diagram
 */
private PlantUmlDiagram createDiagram(List<String> rawDiagramLines) {
    List<String> linesWithoutComments = filterOutComments(rawDiagramLines);
    // Components must be known before dependencies can be resolved against them.
    PlantUmlComponents componentHolder = new PlantUmlComponents(parseComponents(linesWithoutComments));
    List<ParsedDependency> parsedDependencies = parseDependencies(componentHolder, linesWithoutComments);
    return new PlantUmlDiagram.Builder(componentHolder)
            .withDependencies(parsedDependencies)
            .build();
}
@Test public void does_not_include_commented_out_lines() { PlantUmlDiagram diagram = createDiagram(TestDiagram.in(temporaryFolder) .component("uncommentedComponent").withAlias("uncommentedAlias").withStereoTypes("..uncommentedPackage..") .rawLine(" ' [commentedComponent] <<..commentedPackage..>> as commentedAlias") .rawLine("") .rawLine(" ' [uncommentedComponent] --> [commentedComponent]") .write()); PlantUmlComponent uncommentedComponent = getComponentWithName("uncommentedComponent", diagram); assertThat(getOnlyElement(diagram.getAllComponents())).isEqualTo(uncommentedComponent); assertThat(uncommentedComponent.getDependencies().isEmpty()).isTrue(); }
public static Read read() { return Read.create(); }
@Test public void testReadWithRuntimeParametersValidationDisabled() { ReadOptions options = PipelineOptionsFactory.fromArgs().withValidation().as(ReadOptions.class); BigtableIO.Read read = BigtableIO.read() .withoutValidation() .withProjectId(options.getBigtableProject()) .withInstanceId(options.getBigtableInstanceId()) .withTableId(options.getBigtableTableId()); // Not running a pipeline therefore this is expected. thrown.expect(PipelineRunMissingException.class); p.apply(read); }
/**
 * Returns the named child of {@code parent} as an {@link ObjectNode}, or
 * {@code null} when the child is absent or not a JSON object.
 *
 * <p>Fix: the original also tested {@code !node.isNull()}, but that check was
 * dead — {@code path()} never returns Java {@code null} (missing children
 * yield a MissingNode) and {@code isObject()} is already {@code false} for
 * both NullNode and MissingNode, so {@code isObject()} alone suffices.
 *
 * @param parent    the containing object node
 * @param childName name of the child to fetch
 * @return the child as an ObjectNode, or null if absent / not an object
 */
private static ObjectNode get(ObjectNode parent, String childName) {
    JsonNode node = parent.path(childName);
    return node.isObject() ? (ObjectNode) node : null;
}
@Test public void testAddressDecode() throws IOException { List<ExtensionMappingAddress> addresses = getLispExtensionMappingAddresses("LispExtensionMappingAddress.json"); new EqualsTester() .addEqualityGroup(addresses.get(0), listExtAddress) .addEqualityGroup(addresses.get(1), segmentExtAddress) .addEqualityGroup(addresses.get(2), asExtAddress) .addEqualityGroup(addresses.get(3), appDataExtAddress) .addEqualityGroup(addresses.get(4), gcExtAddress) .addEqualityGroup(addresses.get(5), natExtAddress) .addEqualityGroup(addresses.get(6), nonceExtAddress) .addEqualityGroup(addresses.get(7), multicastExtAddress) .addEqualityGroup(addresses.get(8), teExtAddress) .addEqualityGroup(addresses.get(9), srcDstExtAddress) .testEquals(); }
/**
 * Tests whether {@code input} matches this instance's compiled glob pattern.
 *
 * @param input the string to test
 * @return true if the input matches the pattern
 */
public boolean matches(String input) {
    return MATCHER.matches(input, pattern);
}
@Test public void testMatchesOnExactString() throws Exception { GlobMatcher matcher = new GlobMatcher("AABBCC"); assertTrue(matcher.matches("AABBCC")); assertFalse(matcher.matches("FFFF")); }
@Override public KTable<K, VOut> aggregate(final Initializer<VOut> initializer, final Materialized<K, VOut, KeyValueStore<Bytes, byte[]>> materialized) { return aggregate(initializer, NamedInternal.empty(), materialized); }
@Test public void shouldNotHaveNullInitializerOnAggregateWitNamedAndMaterialized() { assertThrows(NullPointerException.class, () -> cogroupedStream.aggregate(null, Named.as("name"), Materialized.as("store"))); }
@PublicEvolving public static <IN, OUT> TypeInformation<OUT> getFlatMapReturnTypes( FlatMapFunction<IN, OUT> flatMapInterface, TypeInformation<IN> inType) { return getFlatMapReturnTypes(flatMapInterface, inType, null, false); }
@SuppressWarnings({"unchecked", "rawtypes"}) @Test void testTuple0() { // use getFlatMapReturnTypes() RichFlatMapFunction<?, ?> function = new RichFlatMapFunction<Tuple0, Tuple0>() { private static final long serialVersionUID = 1L; @Override public void flatMap(Tuple0 value, Collector<Tuple0> out) throws Exception { // nothing to do } }; TypeInformation<?> ti = TypeExtractor.getFlatMapReturnTypes( function, (TypeInformation) TypeInformation.of(new TypeHint<Tuple0>() {})); assertThat(ti.isTupleType()).isTrue(); assertThat(ti.getArity()).isZero(); assertThat(ti).isInstanceOf(TupleTypeInfo.class); }
/**
 * Computes the output directory for {@code inputFile} so that its location
 * relative to {@code baseInputDir} is mirrored under {@code outputDir}
 * (e.g. {@code base/subdir/file} maps to {@code out/subdir/}).
 *
 * @param baseInputDir root input directory (must be a prefix of inputFile)
 * @param inputFile    the input file URI
 * @param outputDir    root output directory
 * @return the mirrored output directory URI (trailing file segment removed)
 * @throws IllegalStateException if inputFile cannot be relativized against baseInputDir
 */
public static URI getRelativeOutputPath(URI baseInputDir, URI inputFile, URI outputDir) {
    URI relativePath = baseInputDir.relativize(inputFile);
    // relativize() returns the input unchanged when it cannot relativize;
    // treat that (or an empty result) as a configuration error.
    Preconditions.checkState(relativePath.getPath().length() > 0 && !relativePath.equals(inputFile),
        "Unable to extract out the relative path for input file '" + inputFile + "', based on base input path: " + baseInputDir);
    String outputDirStr = sanitizeURIString(outputDir.toString());
    // resolve() only treats the base URI as a directory when it ends with '/'.
    outputDir = !outputDirStr.endsWith("/") ? URI.create(outputDirStr.concat("/")) : outputDir;
    // Resolving "." drops the trailing file segment, leaving the directory URI.
    URI relativeOutputURI = outputDir.resolve(relativePath).resolve(".");
    return relativeOutputURI;
}
@Test public void testRelativeURIs() throws URISyntaxException { URI inputDirURI = new URI("hdfs://namenode1:9999/path/to/"); URI inputFileURI = new URI("hdfs://namenode1:9999/path/to/subdir/file"); URI outputDirURI = new URI("hdfs://namenode2/output/dir/"); URI segmentTarFileName = new URI("file.tar.gz"); URI outputSegmentTarURI = SegmentGenerationUtils.getRelativeOutputPath(inputDirURI, inputFileURI, outputDirURI) .resolve(segmentTarFileName); Assert.assertEquals(outputSegmentTarURI.toString(), "hdfs://namenode2/output/dir/subdir/file.tar.gz"); }
/**
 * Retrieves the field-name-to-mapping table of the given index from the
 * search backend's {@code _mapping} API.
 *
 * <p>Fields of type {@code alias} are resolved to the type of the field their
 * {@code path} points at. NOTE(review): the alias target is looked up as a
 * direct sibling under {@code properties} and only one level deep — confirm
 * behavior for nested or chained aliases.
 *
 * @param index the index name
 * @return map of field name to {@link FieldMapping} (type + fielddata flag)
 */
public Map<String, FieldMapping> fieldTypes(final String index) {
    final JsonNode result = client.executeRequest(request(index), "Unable to retrieve field types of index " + index);
    final JsonNode fields = result.path(index).path("mappings").path("properties");
    //noinspection UnstableApiUsage
    return Streams.stream(fields.fields())
        .collect(Collectors.toMap(Map.Entry::getKey, entry -> {
            final JsonNode entryValue = entry.getValue();
            String type = entryValue.path("type").asText();
            if ("alias".equals(type)) {
                // Follow the alias to the concrete field and report its type instead.
                String aliasPath = entryValue.path("path").asText();
                type = fields.path(aliasPath).path("type").asText();
            }
            return FieldMapping.create(
                type,
                // asBoolean() is false for missing or non-boolean "fielddata" values.
                entryValue.path("fielddata").asBoolean()
            );
        }));
}
@Test void testParsesMappingsCorrectly() throws Exception { String mappingResponse = """ { "graylog_42": { "mappings": { "properties": { "action": { "type": "keyword" }, "text": { "type": "text", "analyzer": "analyzer_keyword", "fielddata": true }, "date": { "type": "date" }, "number": { "type": "long", "fielddata": "false" } } } } } """; doReturn(objectMapper.readTree(mappingResponse)) .when(client) .executeRequest(eq(new Request("GET", "/graylog_42/_mapping")), anyString()); final Map<String, FieldMappingApi.FieldMapping> expectedResult = Map.of( "text", FieldMappingApi.FieldMapping.create("text", true), "action", FieldMappingApi.FieldMapping.create("keyword", false), "date", FieldMappingApi.FieldMapping.create("date", false), "number", FieldMappingApi.FieldMapping.create("long", false) ); final Map<String, FieldMappingApi.FieldMapping> result = toTest.fieldTypes("graylog_42"); assertEquals(expectedResult, result); }
/**
 * Scores a single position: one if it closed at break-even, zero otherwise.
 *
 * @param series   the bar series providing the numeric one/zero values
 * @param position the position to evaluate
 * @return {@code series.one()} for a break-even position, else {@code series.zero()}
 */
@Override
public Num calculate(BarSeries series, Position position) {
    if (isBreakEvenPosition(position)) {
        return series.one();
    }
    return series.zero();
}
@Test public void calculateWithTwoLongPositions() { MockBarSeries series = new MockBarSeries(numFunction, 100, 105, 110, 100, 95, 105); TradingRecord tradingRecord = new BaseTradingRecord(Trade.buyAt(0, series), Trade.sellAt(3, series), Trade.buyAt(1, series), Trade.sellAt(5, series)); assertNumEquals(2, getCriterion().calculate(series, tradingRecord)); }
/**
 * Computes the total page count for an int item count.
 *
 * @param totalCount total number of items
 * @param pageSize   items per page
 * @return number of pages needed to hold all items
 */
public static int totalPage(int totalCount, int pageSize) {
    // Widen to long and delegate to the overload that does the real math.
    final long count = totalCount;
    return totalPage(count, pageSize);
}
@Test public void totalPage() { final int totalPage = PageUtil.totalPage(20, 3); assertEquals(7, totalPage); }
@Override public Column convert(BasicTypeDefine typeDefine) { try { return super.convert(typeDefine); } catch (SeaTunnelRuntimeException e) { PhysicalColumn.PhysicalColumnBuilder builder = PhysicalColumn.builder() .name(typeDefine.getName()) .sourceType(typeDefine.getColumnType()) .nullable(typeDefine.isNullable()) .defaultValue(typeDefine.getDefaultValue()) .comment(typeDefine.getComment()); String kingbaseDataType = typeDefine.getDataType().toUpperCase(); switch (kingbaseDataType) { case KB_TINYINT: builder.dataType(BasicType.BYTE_TYPE); break; case KB_MONEY: builder.dataType(new DecimalType(38, 18)); builder.columnLength(38L); builder.scale(18); break; case KB_BLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength((long) (1024 * 1024 * 1024)); break; case KB_CLOB: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(typeDefine.getLength()); builder.columnLength((long) (1024 * 1024 * 1024)); break; case KB_BIT: builder.dataType(PrimitiveByteArrayType.INSTANCE); // BIT(M) -> BYTE(M/8) long byteLength = typeDefine.getLength() / 8; byteLength += typeDefine.getLength() % 8 > 0 ? 1 : 0; builder.columnLength(byteLength); break; default: throw CommonError.convertToSeaTunnelTypeError( DatabaseIdentifier.KINGBASE, typeDefine.getDataType(), typeDefine.getName()); } return builder.build(); } }
@Test public void testConvertTime() { BasicTypeDefine<Object> typeDefine = BasicTypeDefine.builder().name("test").columnType("time").dataType("time").build(); Column column = KingbaseTypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(LocalTimeType.LOCAL_TIME_TYPE, column.getDataType()); Assertions.assertEquals(typeDefine.getScale(), column.getScale()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("time(3)") .dataType("time") .length(3L) .build(); column = KingbaseTypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(LocalTimeType.LOCAL_TIME_TYPE, column.getDataType()); Assertions.assertEquals(typeDefine.getScale(), column.getScale()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("timetz") .dataType("timetz") .build(); column = KingbaseTypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(LocalTimeType.LOCAL_TIME_TYPE, column.getDataType()); Assertions.assertEquals(typeDefine.getScale(), column.getScale()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("timetz(3)") .dataType("timetz") .length(3L) .build(); column = KingbaseTypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(LocalTimeType.LOCAL_TIME_TYPE, column.getDataType()); Assertions.assertEquals(typeDefine.getScale(), column.getScale()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType().toLowerCase()); }
/**
 * Re-homes the backing file under a new parent directory, keeping its name.
 *
 * @param parentDir the new parent directory
 */
public void updateParentDir(File parentDir) {
    String fileName = file.getName();
    this.file = new File(parentDir, fileName);
}
@Test public void testUpdateParentDir() { File tmpParentDir = new File(TestUtils.tempDirectory(), "parent"); tmpParentDir.mkdir(); assertNotEquals(tmpParentDir, index.file().getParentFile()); index.updateParentDir(tmpParentDir); assertEquals(tmpParentDir, index.file().getParentFile()); }
public static <I> Builder<I> foreach(Iterable<I> items) { return new Builder<>(requireNonNull(items, "items")); }
@Test public void testFailFastCallRevertSuppressed() throws Throwable { assertFailed(builder() .stopOnFailure() .revertWith(reverter) .abortWith(aborter) .suppressExceptions() .onFailure(failures), failingTask); failingTask.assertInvokedAtLeast("success", FAILPOINT); if (!isParallel()) { aborter.assertInvokedAtLeast("abort", 1); // all uncommitted items were aborted items.stream().filter(i -> !i.committed) .filter(i -> !i.failed) .forEach(Item::assertAborted); } // all committed were reverted items.stream().filter(i -> i.committed && !i.failed) .forEach(Item::assertReverted); // all reverted items are committed items.stream().filter(i -> i.reverted) .forEach(Item::assertCommitted); // only one failure was triggered failures.assertInvoked("failure event", 1); }
/**
 * Tests whether the script is a pay-to-pubkey output script, i.e. exactly
 * {@code <pubkey-push> OP_CHECKSIG} where the first chunk is a data push of
 * more than one byte.
 *
 * @param script the script to inspect
 * @return true if the script has the P2PK shape
 */
public static boolean isP2PK(Script script) {
    List<ScriptChunk> chunks = script.chunks();
    if (chunks.size() != 2) {
        return false;
    }
    ScriptChunk pubkeyChunk = chunks.get(0);
    // First chunk must be a data push, not an opcode.
    if (pubkeyChunk.isOpCode()) {
        return false;
    }
    byte[] pubkey = pubkeyChunk.data;
    if (pubkey == null || pubkey.length <= 1) {
        return false;
    }
    // Second chunk must be OP_CHECKSIG.
    return chunks.get(1).equalsOpCode(OP_CHECKSIG);
}
@Test public void testCreateP2PKOutputScript() { assertTrue(ScriptPattern.isP2PK( ScriptBuilder.createP2PKOutputScript(keys.get(0)) )); }
@Override public List<String> getAttributes() { return attributes; }
@Test public void testGetAttributes() throws Exception { final ListField list = new ListField("list", "The List", Collections.emptyList(), "Hello, this is a list", ConfigurationField.Optional.NOT_OPTIONAL); assertThat(list.getAttributes().size()).isEqualTo(0); final ListField list1 = new ListField("list", "The List", Collections.emptyList(), Collections.emptyMap(), "Hello, this is a list", ConfigurationField.Optional.NOT_OPTIONAL, ListField.Attribute.ALLOW_CREATE); assertThat(list1.getAttributes().size()).isEqualTo(1); assertThat(list1.getAttributes()).contains("allow_create"); }
/**
 * Composes {@code supplier} with a bi-function that receives either the
 * supplier's result (with a {@code null} throwable) or the thrown exception
 * (with a {@code null} result).
 *
 * <p>NOTE(review): if {@code handler} itself throws while processing a
 * successful result, the catch block invokes the handler a second time with
 * that throwable — confirm this double invocation is intended.
 *
 * @param supplier the underlying supplier
 * @param handler  result/exception handler producing the final value
 * @return a supplier returning the handler's output
 */
public static <T, R> CheckedSupplier<R> andThen(CheckedSupplier<T> supplier, CheckedBiFunction<T, Throwable, R> handler) {
    return () -> {
        try {
            return handler.apply(supplier.get(), null);
        } catch (Throwable throwable) {
            return handler.apply(null, throwable);
        }
    };
}
@Test public void shouldRecoverFromException2() throws Throwable { CheckedSupplier<String> callable = () -> { throw new IllegalArgumentException("BAM!"); }; CheckedSupplier<String> callableWithRecovery = CheckedFunctionUtils.andThen(callable, (result, ex) -> { if(ex instanceof IllegalArgumentException){ return "Bla"; } return result; }); String result = callableWithRecovery.get(); assertThat(result).isEqualTo("Bla"); }
/**
 * Converts a scalar operator tree into Kudu predicates via the visitor.
 *
 * @param operator the operator to convert; may be null
 * @return the resulting predicates, or null when {@code operator} is null
 */
public List<KuduPredicate> convert(ScalarOperator operator) {
    // Null in, null out; otherwise dispatch through this visitor.
    return operator == null ? null : operator.accept(this, null);
}
@Test public void testGt() { ConstantOperator value = ConstantOperator.createInt(5); ScalarOperator op = new BinaryPredicateOperator(BinaryType.GT, F0, value); List<KuduPredicate> result = CONVERTER.convert(op); Assert.assertEquals(result.get(0).toString(), "`f0` >= 6"); }
public boolean isSupportJraft() { return supportJraft; }
@Test void testDeserializeServerNamingAbilityForNonExistItem() throws JsonProcessingException { String nonExistItemJson = "{\"exampleAbility\":false}"; ServerNamingAbility actual = jacksonMapper.readValue(nonExistItemJson, ServerNamingAbility.class); assertFalse(actual.isSupportJraft()); }
List<Condition> run(boolean useKRaft) { List<Condition> warnings = new ArrayList<>(); checkKafkaReplicationConfig(warnings); checkKafkaBrokersStorage(warnings); if (useKRaft) { // Additional checks done for KRaft clusters checkKRaftControllerStorage(warnings); checkKRaftControllerCount(warnings); checkKafkaMetadataVersion(warnings); checkInterBrokerProtocolVersionInKRaft(warnings); checkLogMessageFormatVersionInKRaft(warnings); } else { // Additional checks done for ZooKeeper-based clusters checkKafkaLogMessageFormatVersion(warnings); checkKafkaInterBrokerProtocolVersion(warnings); checkKRaftMetadataStorageConfiguredForZooBasedCLuster(warnings); } return warnings; }
// Verifies that replication factor 1 with min.insync.replicas 1 is a consistent
// configuration and produces no warnings in KRaft mode.
@Test
public void checkReplicationFactorAndMinInSyncReplicasSetToOne() {
    Kafka kafka = new KafkaBuilder(KAFKA)
            .editSpec()
                .editKafka()
                    .withConfig(Map.of(
                            KafkaConfiguration.DEFAULT_REPLICATION_FACTOR, 1,
                            KafkaConfiguration.MIN_INSYNC_REPLICAS, 1
                    ))
                .endKafka()
            .endSpec()
            .build();
    KafkaSpecChecker checker = generateChecker(kafka, List.of(CONTROLLERS, POOL_A), KafkaVersionTestUtils.DEFAULT_KRAFT_VERSION_CHANGE);
    List<Condition> warnings = checker.run(true);
    assertThat(warnings, hasSize(0));
}
/**
 * Resolves a select expression by index: returns the mapped expression when a column
 * mapping exists for {@code idx}, otherwise the expression supplied by the caller.
 */
@Override
public Expression resolveSelect(final int idx, final Expression expression) {
    final Expression mapped = columnMappings.get(idx);
    if (mapped == null) {
        // No mapping registered for this position — the select resolves to itself.
        return expression;
    }
    return mapped;
}
// Verifies that an expression with no registered column mapping resolves to itself.
@Test
public void shouldResolveNoneUdtfSelectExpressionToSelf() {
    // Given:
    final Expression exp = mock(Expression.class);
    // When:
    final Expression result = flatMapNode.resolveSelect(0, exp);
    // Then:
    assertThat(result, is(exp));
}
/**
 * Parses the arguments of the 'remote' sub-command into shell options of the form
 * {@code [remote, -m, host:port]}.
 *
 * <p>Expects {@code args = [remote, hostname, port, extraOptions...]}. Exits the JVM
 * (status 0) after printing help when arguments are missing or {@code -h} is given.
 *
 * @param args raw command-line arguments, starting with the sub-command name
 * @return translated option list for the downstream shell
 */
static List<String> parseRemote(String[] args) {
    if (args.length < 3) {
        // Not enough positional arguments — print usage and exit.
        System.err.println("Specifies the <hostname> <portnumber> in 'remote' mode");
        printRemoteHelp();
        System.exit(0);
    }
    // Everything after [command, host, port] is parsed as regular options.
    String[] params = new String[args.length - 3];
    System.arraycopy(args, 3, params, 0, params.length);
    CommandLine commandLine = parse(REMOTE_OPTIONS, params);
    if (commandLine.hasOption(OPTION_HELP.getOpt())) {
        printRemoteHelp();
        System.exit(0);
    }
    String host = args[1];
    String port = args[2];
    List<String> options = new ArrayList<>();
    options.add(args[0]);
    // "-m host:port" selects the remote JobManager address.
    options.add("-m");
    options.add(host + ":" + port);
    return options;
}
// Verifies that "remote localhost 8081" is translated into the [remote, -m, localhost:8081]
// option list when no extra options are supplied.
@Test
void testParseRemoteWithoutOptions() {
    String[] args = {"remote", "localhost", "8081"};
    List<String> commandOptions = PythonShellParser.parseRemote(args);
    String[] expectedCommandOptions = {"remote", "-m", "localhost:8081"};
    assertThat(commandOptions.toArray()).isEqualTo(expectedCommandOptions);
}
/**
 * Removes the security group with the given id.
 *
 * <p>NOTE(review): when this node is not the HA-active node and the active IP is not the
 * default, the delete is forwarded to the active node via {@code syncDelete} — confirm the
 * combined negated condition matches the intended HA routing.
 *
 * @param id security group identifier
 * @return 204 NO CONTENT on success (or the sync-forwarded response)
 */
@DELETE
@Path("{id}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response removeSecurityGroup(@PathParam("id") String id) {
    log.trace(String.format(MESSAGE, "REMOVE " + id));
    if (!haService.isActive() && !DEFAULT_ACTIVE_IP_ADDRESS.equals(haService.getActiveIp())) {
        return syncDelete(haService, SECURITY_GROUPS, id);
    }
    adminService.removeSecurityGroup(id);
    return noContent().build();
}
// Verifies that DELETE on an existing security group returns 204 and delegates the
// removal to the admin service when this node is HA-active.
@Test
public void testDeleteSecurityGroupWithDeletionOperation() {
    expect(mockOpenstackHaService.isActive()).andReturn(true).anyTimes();
    replay(mockOpenstackHaService);
    mockOpenstackSecurityGroupAdminService.removeSecurityGroup(anyString());
    replay(mockOpenstackSecurityGroupAdminService);
    final WebTarget wt = target();
    Response response = wt.path(PATH + "/2076db17-a522-4506-91de-c6dd8e837028")
            .request(MediaType.APPLICATION_JSON_TYPE)
            .delete();
    final int status = response.getStatus();
    assertThat(status, is(204));
    verify(mockOpenstackSecurityGroupAdminService);
}
/**
 * @return a read-only view of the domain-name-to-value mapping; mutation attempts
 *         throw {@link UnsupportedOperationException}
 */
public Map<String, V> asMap() {
    return unmodifiableMap;
}
// Verifies that asMap() exposes exactly the entries that were added to the mapping.
@Test
public void testAsMap() {
    DomainNameMapping<String> mapping = new DomainNameMapping<String>("NotFound")
            .add("netty.io", "Netty")
            .add("downloads.netty.io", "Netty-Downloads");
    Map<String, String> entries = mapping.asMap();
    assertEquals(2, entries.size());
    assertEquals("Netty", entries.get("netty.io"));
    assertEquals("Netty-Downloads", entries.get("downloads.netty.io"));
}
/**
 * Convenience overload that computes task-to-partition groups with no prior task state,
 * delegating to the full variant with empty history maps.
 *
 * @param topicGroups subtopology to source-topic-names mapping
 * @param metadata    current cluster metadata used to enumerate partitions
 * @return mapping from task id to the topic partitions it should consume
 */
public Map<TaskId, Set<TopicPartition>> partitionGroups(final Map<Subtopology, Set<String>> topicGroups, final Cluster metadata) {
    return partitionGroups(topicGroups, new HashMap<>(), new HashMap<>(), metadata);
}
// Verifies that two subtopologies consuming different topics each get one task per
// partition of their own topic, and that tasks are keyed by (group, partition).
@Test
public void shouldComputeGroupingForTwoGroups() {
    final PartitionGrouper grouper = new PartitionGrouper();
    final Map<TaskId, Set<TopicPartition>> expectedPartitionsForTask = new HashMap<>();
    final Map<Subtopology, Set<String>> topicGroups = new HashMap<>();
    // Subtopology 0 consumes topic1 (3 partitions).
    topicGroups.put(SUBTOPOLOGY_0, mkSet("topic1"));
    expectedPartitionsForTask.put(new TaskId(SUBTOPOLOGY_0.nodeGroupId, 0, SUBTOPOLOGY_0.namedTopology), mkSet(new TopicPartition("topic1", 0)));
    expectedPartitionsForTask.put(new TaskId(SUBTOPOLOGY_0.nodeGroupId, 1, SUBTOPOLOGY_0.namedTopology), mkSet(new TopicPartition("topic1", 1)));
    expectedPartitionsForTask.put(new TaskId(SUBTOPOLOGY_0.nodeGroupId, 2, SUBTOPOLOGY_0.namedTopology), mkSet(new TopicPartition("topic1", 2)));
    // Subtopology 1 consumes topic2 (2 partitions).
    topicGroups.put(SUBTOPOLOGY_1, mkSet("topic2"));
    expectedPartitionsForTask.put(new TaskId(SUBTOPOLOGY_1.nodeGroupId, 0, SUBTOPOLOGY_1.namedTopology), mkSet(new TopicPartition("topic2", 0)));
    expectedPartitionsForTask.put(new TaskId(SUBTOPOLOGY_1.nodeGroupId, 1, SUBTOPOLOGY_1.namedTopology), mkSet(new TopicPartition("topic2", 1)));
    assertEquals(expectedPartitionsForTask, grouper.partitionGroups(topicGroups, metadata));
}
/**
 * Asserts that the subject under test is not empty; fails with the standard
 * "expected not to be empty" fact otherwise. The subject must be non-null.
 */
public void isNotEmpty() {
    if (checkNotNull(actual).isEmpty()) {
        failWithoutActual(simpleFact("expected not to be empty"));
    }
}
// Verifies that isNotEmpty() on an empty table fails with the expected failure fact.
@Test
public void tableIsNotEmptyWithFailure() {
    ImmutableTable<Integer, Integer, Integer> table = ImmutableTable.of();
    expectFailureWhenTestingThat(table).isNotEmpty();
    assertFailureKeys("expected not to be empty");
}
/**
 * Parses a Windows-style path string into its root and name components.
 *
 * <p>Forward slashes are first normalized to backslashes. Two Windows syntaxes are
 * deliberately unsupported and rejected: a drive-relative path ("C:foo\bar") and a
 * current-drive absolute path ("\foo\bar"). UNC roots ("\\host\share") and drive
 * roots ("C:\") are recognized; reserved characters and trailing spaces are rejected.
 *
 * @param path the raw path string
 * @return the parsed root (possibly null for relative paths) and name elements
 * @throws InvalidPathException on unsupported syntax or illegal characters
 */
@Override
public ParseResult parsePath(String path) {
    String original = path;
    path = path.replace('/', '\\');
    if (WORKING_DIR_WITH_DRIVE.matcher(path).matches()) {
        throw new InvalidPathException(
            original,
            "Jimfs does not currently support the Windows syntax for a relative path "
                + "on a specific drive (e.g. \"C:foo\\bar\")");
    }
    String root;
    if (path.startsWith("\\\\")) {
        // UNC path: \\host\share\...
        root = parseUncRoot(path, original);
    } else if (path.startsWith("\\")) {
        throw new InvalidPathException(
            original,
            "Jimfs does not currently support the Windows syntax for an absolute path "
                + "on the current drive (e.g. \"\\foo\\bar\")");
    } else {
        root = parseDriveRoot(path);
    }
    // check for root.length() > 3 because only "C:\" type roots are allowed to have :
    int startIndex = root == null || root.length() > 3 ? 0 : root.length();
    for (int i = startIndex; i < path.length(); i++) {
        char c = path.charAt(i);
        if (isReserved(c)) {
            throw new InvalidPathException(original, "Illegal char <" + c + ">", i);
        }
    }
    Matcher trailingSpaceMatcher = TRAILING_SPACES.matcher(path);
    if (trailingSpaceMatcher.find()) {
        throw new InvalidPathException(original, "Trailing char < >", trailingSpaceMatcher.start());
    }
    if (root != null) {
        // Strip the root from the remainder and make sure it is backslash-terminated.
        path = path.substring(root.length());
        if (!root.endsWith("\\")) {
            root = root + "\\";
        }
    }
    return new ParseResult(root, splitter().split(path));
}
// Verifies that the unsupported current-drive absolute path syntax ("\foo\bar" and the
// bare "\") is rejected with InvalidPathException.
@Test
public void testWindows_absolutePathOnCurrentDrive_unsupported() {
    try {
        windows().parsePath("\\foo\\bar");
        fail();
    } catch (InvalidPathException expected) {
    }
    try {
        windows().parsePath("\\");
        fail();
    } catch (InvalidPathException expected) {
    }
}
/**
 * Looks up a fairy ring clue by its clue text, ignoring case.
 *
 * @param text the clue text to match
 * @return the matching clue, or {@code null} when no clue has this text
 */
public static FairyRingClue forText(String text) {
    for (FairyRingClue candidate : CLUES) {
        if (candidate.text.equalsIgnoreCase(text)) {
            return candidate;
        }
    }
    // Nothing matched.
    return null;
}
// Verifies that an empty string matches no clue and yields null.
@Test
public void forTextEmptyString() {
    assertNull(FairyRingClue.forText(""));
}
/**
 * Builds the service definition (and its operations) from the imported Postman collection.
 *
 * <p>Only Postman v2 collections (those carrying an "info" node) are supported; anything
 * else is rejected. Any runtime failure during operation extraction is wrapped into a
 * {@link MockRepositoryImportException} carrying the service name for context.
 *
 * @return a single-element list containing the built service
 * @throws MockRepositoryImportException on unsupported collection version or extraction failure
 */
@Override
public List<Service> getServiceDefinitions() throws MockRepositoryImportException {
    List<Service> result = new ArrayList<>();
    // Build a new service.
    Service service = new Service();
    // Collection V2 as an info node.
    if (collection.has("info")) {
        isV2Collection = true;
        fillServiceDefinition(service);
    } else {
        throw new MockRepositoryImportException("Only Postman v2 Collection are supported.");
    }
    // Then build its operations.
    try {
        service.setOperations(extractOperations());
    } catch (Throwable t) {
        log.error("Runtime exception while extracting Operations for {}", service.getName());
        throw new MockRepositoryImportException("Runtime exception for " + service.getName() + ": " + t.getMessage());
    }
    result.add(service);
    return result;
}
// Verifies that importing a collection without a version property fails with a
// MockRepositoryImportException whose message mentions the missing version property.
@Test
void testTestAPINoVersionImport() {
    PostmanCollectionImporter importer = null;
    try {
        importer = new PostmanCollectionImporter(
            "target/test-classes/io/github/microcks/util/postman/Test API no version.postman_collection.json");
    } catch (IOException ioe) {
        fail("Exception should not be thrown");
    }
    // Check that basic service properties import fail because of missing version.
    boolean failure = false;
    List<Service> services = null;
    try {
        services = importer.getServiceDefinitions();
    } catch (MockRepositoryImportException e) {
        failure = true;
        assertNotEquals(-1, e.getMessage().indexOf("Version property"));
    }
    assertTrue(failure);
}
/**
 * Returns an iterator over the containers owned by the given partition.
 *
 * @param partitionId the partition whose containers should be iterated
 * @return an iterator over that partition's containers; implementations decide
 *         whether element removal through the iterator is supported
 */
protected abstract Iterator<C> containerIterator(int partitionId);
// Verifies that a collector with no containers yields the shared EmptyIterator, which
// reports no elements, throws NoSuchElementException on next() and
// UnsupportedOperationException on remove().
@Test
public void testEmptyIterator() {
    TestContainerCollector collector = new TestContainerCollector(nodeEngine, false, false);
    Iterator<Object> iterator = collector.containerIterator(0);
    assertInstanceOf(AbstractContainerCollector.EmptyIterator.class, iterator);
    assertFalse("Expected no next elements in iterator", iterator.hasNext());
    try {
        iterator.next();
        fail("Expected EmptyIterator.next() to throw NoSuchElementException");
    } catch (NoSuchElementException expected) {
        ignore(expected);
    }
    try {
        iterator.remove();
        fail("Expected EmptyIterator.remove() to throw UnsupportedOperationException");
    } catch (UnsupportedOperationException expected) {
        ignore(expected);
    }
}
/**
 * Fetches the job's metrics from the Dataflow service, serving a cached copy once the
 * job has terminated (metrics are final at that point).
 *
 * <p>NOTE(review): the terminal-state check happens after the fetch, so a result obtained
 * just before termination may be cached as final — confirm this window is acceptable.
 *
 * @return the current (or cached final) job metrics
 * @throws IOException if the service call fails
 */
private JobMetrics getJobMetrics() throws IOException {
    if (cachedMetricResults != null) {
        // Metric results have been cached after the job ran.
        return cachedMetricResults;
    }
    JobMetrics result = dataflowClient.getJobMetrics(dataflowPipelineJob.getJobId());
    if (dataflowPipelineJob.getState().isTerminal()) {
        // Add current query result to the cache.
        cachedMetricResults = result;
    }
    return result;
}
// Verifies that a distribution metric reported by the service (one tentative and one
// committed update) is surfaced as both attempted and committed DistributionResults
// mapped back to the user-visible step name.
@Test
public void testDistributionUpdates() throws IOException {
    AppliedPTransform<?, ?, ?> myStep2 = mock(AppliedPTransform.class);
    when(myStep2.getFullName()).thenReturn("myStepName");
    BiMap<AppliedPTransform<?, ?, ?>, String> transformStepNames = HashBiMap.create();
    transformStepNames.put(myStep2, "s2");
    JobMetrics jobMetrics = new JobMetrics();
    DataflowClient dataflowClient = mock(DataflowClient.class);
    when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);
    DataflowPipelineJob job = mock(DataflowPipelineJob.class);
    DataflowPipelineOptions options = mock(DataflowPipelineOptions.class);
    when(options.isStreaming()).thenReturn(false);
    when(job.getDataflowOptions()).thenReturn(options);
    when(job.getState()).thenReturn(State.RUNNING);
    when(job.getJobId()).thenReturn(JOB_ID);
    when(job.getTransformStepNames()).thenReturn(transformStepNames);
    // The parser relies on the fact that one tentative and one committed metric update exist in
    // the job metrics results.
    jobMetrics.setMetrics(
        ImmutableList.of(
            makeDistributionMetricUpdate(
                "distributionName", "distributionNamespace", "s2", 18L, 2L, 2L, 16L, false),
            makeDistributionMetricUpdate(
                "distributionName", "distributionNamespace", "s2", 18L, 2L, 2L, 16L, true)));
    DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
    MetricQueryResults result = dataflowMetrics.allMetrics();
    assertThat(
        result.getDistributions(),
        contains(
            attemptedMetricsResult(
                "distributionNamespace",
                "distributionName",
                "myStepName",
                DistributionResult.create(18, 2, 2, 16))));
    assertThat(
        result.getDistributions(),
        contains(
            committedMetricsResult(
                "distributionNamespace",
                "distributionName",
                "myStepName",
                DistributionResult.create(18, 2, 2, 16))));
}
/**
 * Parses Juniper LLDP neighbor information into link abstractions.
 *
 * <p>For each neighbor entry the remote port is captured either as a textual id (when the
 * port-id subtype is a MAC address or interface name) or as a numeric index (all other
 * subtypes); the unused representation stays at its sentinel (null / -1).
 *
 * @param info hierarchical configuration extracted from the LLDP neighbors RPC reply
 * @return the set of discovered neighbor links
 */
public static Set<LinkAbstraction> parseJuniperLldp(HierarchicalConfiguration info) {
    Set<LinkAbstraction> neighbour = new HashSet<>();
    List<HierarchicalConfiguration> subtrees = info.configurationsAt(LLDP_LIST_NBR_INFO);
    for (HierarchicalConfiguration neighborsInfo : subtrees) {
        List<HierarchicalConfiguration> neighbors = neighborsInfo.configurationsAt(LLDP_NBR_INFO);
        for (HierarchicalConfiguration neighbor : neighbors) {
            String localPortName = neighbor.getString(LLDP_LO_PORT);
            MacAddress mac = MacAddress.valueOf(neighbor.getString(LLDP_REM_CHASS));
            String remotePortId = null;
            long remotePortIndex = -1;
            String remotePortIdSubtype = neighbor.getString(LLDP_REM_PORT_SUBTYPE, null);
            if (remotePortIdSubtype != null) {
                // Textual subtypes carry the port as a string; others carry a numeric index.
                if (remotePortIdSubtype.equals(LLDP_SUBTYPE_MAC)
                        || remotePortIdSubtype.equals(LLDP_SUBTYPE_INTERFACE_NAME)) {
                    remotePortId = neighbor.getString(LLDP_REM_PORT, null);
                } else {
                    remotePortIndex = neighbor.getLong(LLDP_REM_PORT, -1);
                }
            }
            String remotePortDescription = neighbor.getString(LLDP_REM_PORT_DES, null);
            LinkAbstraction link = new LinkAbstraction(
                localPortName,
                mac.toLong(),
                remotePortIndex,
                remotePortId,
                remotePortDescription);
            neighbour.add(link);
        }
    }
    return neighbour;
}
// Verifies LLDP neighbor parsing against a canned Junos 18.4 RPC reply: two neighbors
// with numeric remote port indices and no textual port ids.
@Test
public void testLldpNeighborsInformationParsedFromJunos18() {
    HierarchicalConfiguration reply = XmlConfigParser.loadXml(
        getClass().getResourceAsStream("/Junos_get-lldp-neighbors-information_response_18.4.xml"));
    final Set<JuniperUtils.LinkAbstraction> expected = new HashSet<>();
    expected.add(new JuniperUtils.LinkAbstraction("ge-0/0/1", 0x2c6bf509b0c0L, 527L, null, null));
    expected.add(new JuniperUtils.LinkAbstraction("ge-0/0/2", 0x2c6bf5bde7c0L, 528L, null, null));
    assertEquals(expected, JuniperUtils.parseJuniperLldp(reply));
}
/**
 * Captures the Spring application context into the shared {@code SpringBeanUtils}
 * singleton so other components can look up beans statically.
 *
 * @param applicationContext the context injected by Spring
 * @throws BeansException never thrown directly here; declared by the interface contract
 */
@Override
public void setApplicationContext(final ApplicationContext applicationContext) throws BeansException {
    SpringBeanUtils.getInstance().setApplicationContext(applicationContext);
}
// Verifies that setting the application context wires it into the SpringBeanUtils
// singleton (checked via reflection on the holder field).
@Test
public void testSetApplicationContext() throws NoSuchFieldException {
    final ApplicationContext applicationContext = mock(ConfigurableApplicationContext.class);
    shenyuApplicationContextAwareUnderTest.setApplicationContext(applicationContext);
    assertNotNull(SpringBeanUtils.getInstance().getClass().getDeclaredField("applicationContext"));
}
/**
 * @return the configured login URL
 * @throws IllegalArgumentException when the LOGIN_URL property is not configured
 */
String getLoginUrl() {
    return configuration.get(LOGIN_URL).orElseThrow(() -> new IllegalArgumentException("Login URL is missing"));
}
// Verifies that a missing login URL configuration raises IllegalArgumentException
// with the expected message.
@Test
public void fail_to_get_login_url_when_null() {
    assertThatThrownBy(() -> underTest.getLoginUrl())
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Login URL is missing");
}
/**
 * Triggers asynchronous offset resets for all partitions currently awaiting a reset.
 * A no-op when no partition needs resetting.
 */
public void resetPositionsIfNeeded() {
    final Map<TopicPartition, Long> resetTimestamps = offsetFetcherUtils.getOffsetResetTimestamp();
    // Only kick off the async reset when there is actual work to do.
    if (!resetTimestamps.isEmpty()) {
        resetPositionsAsync(resetTimestamps);
    }
}
// Verifies that a paused partition requiring an offset reset still gets a valid position
// from the reset, while remaining non-fetchable because it is paused.
@Test
public void testUpdateFetchPositionOfPausedPartitionsRequiringOffsetReset() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.pause(tp0); // paused partition does not have a valid position
    subscriptions.requestOffsetReset(tp0, OffsetResetStrategy.LATEST);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP, validLeaderEpoch),
        listOffsetResponse(Errors.NONE, 1L, 10L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(subscriptions.isFetchable(tp0)); // because tp is paused
    assertTrue(subscriptions.hasValidPosition(tp0));
    assertEquals(10, subscriptions.position(tp0).offset);
}
/**
 * Creates a pagination descriptor from an absolute offset and a page size.
 *
 * @param offset   zero-based index of the first element; must be {@code >= 0}
 * @param pageSize number of elements per page; must be {@code >= 1}
 * @return the pagination instance
 * @throws IllegalArgumentException when either argument is out of range
 */
public static OffsetBasedPagination forOffset(int offset, int pageSize) {
    if (offset < 0) {
        throw new IllegalArgumentException("offset must be >= 0");
    }
    if (pageSize < 1) {
        throw new IllegalArgumentException("page size must be >= 1");
    }
    return new OffsetBasedPagination(offset, pageSize);
}
// Verifies that two paginations with the same offset but different page sizes
// produce different hash codes.
@Test
void hashcode_whenDifferentPageSize_shouldBeNotEquals() {
    Assertions.assertThat(OffsetBasedPagination.forOffset(0, 20))
        .doesNotHaveSameHashCodeAs(OffsetBasedPagination.forOffset(0, 40));
}
/**
 * @return the event type this subscriber listens to: {@link InstancesChangeEvent}
 */
@Override
public Class<? extends Event> subscribeType() {
    return InstancesChangeEvent.class;
}
// Verifies that the notifier subscribes to InstancesChangeEvent.
@Test
void testSubscribeType() {
    assertEquals(InstancesChangeEvent.class, instancesChangeNotifier.subscribeType());
}
/**
 * Updates a discount activity and its associated products in one transaction.
 *
 * <p>Rejects updates to closed activities and to activities whose products would
 * conflict with another activity. The activity status is recomputed from the new
 * end time before persisting.
 *
 * @param updateReqVO the update request
 */
@Override
@Transactional(rollbackFor = Exception.class)
public void updateDiscountActivity(DiscountActivityUpdateReqVO updateReqVO) {
    // Validate the activity exists.
    DiscountActivityDO discountActivity = validateDiscountActivityExists(updateReqVO.getId());
    if (discountActivity.getStatus().equals(CommonStatusEnum.DISABLE.getStatus())) {
        // A closed activity must not be modified.
        throw exception(DISCOUNT_ACTIVITY_UPDATE_FAIL_STATUS_CLOSED);
    }
    // Validate the products do not conflict with other activities.
    validateDiscountActivityProductConflicts(updateReqVO.getId(), updateReqVO.getProducts());
    // Update the activity, recomputing its status from the new end time.
    DiscountActivityDO updateObj = DiscountActivityConvert.INSTANCE.convert(updateReqVO)
        .setStatus(PromotionUtils.calculateActivityStatus(updateReqVO.getEndTime()));
    discountActivityMapper.updateById(updateObj);
    // Update the associated products.
    updateDiscountProduct(updateReqVO);
}
// Verifies that updating a non-existent discount activity raises the
// DISCOUNT_ACTIVITY_NOT_EXISTS service exception.
@Test
public void testUpdateDiscountActivity_notExists() {
    // Prepare the request.
    DiscountActivityUpdateReqVO reqVO = randomPojo(DiscountActivityUpdateReqVO.class);
    // Invoke and assert the expected exception.
    assertServiceException(() -> discountActivityService.updateDiscountActivity(reqVO), DISCOUNT_ACTIVITY_NOT_EXISTS);
}
/**
 * Returns the value for (specId, struct), computing and storing it from
 * {@code valueSupplier} when absent. The per-spec inner map is created lazily.
 */
public V computeIfAbsent(int specId, StructLike struct, Supplier<V> valueSupplier) {
    // Resolve (or lazily create) the map for this partition spec.
    final Map<StructLike, V> byStruct = partitionMaps.computeIfAbsent(specId, this::newPartitionMap);
    // Delegate to Map#computeIfAbsent so the supplier only runs on a miss.
    return byStruct.computeIfAbsent(struct, ignored -> valueSupplier.get());
}
// Verifies that computeIfAbsent stores the supplied value on a miss and does not
// overwrite an existing entry on a subsequent call with the same key.
@Test
public void testComputeIfAbsent() {
    PartitionMap<String> map = PartitionMap.create(SPECS);
    String result1 = map.computeIfAbsent(BY_DATA_SPEC.specId(), Row.of("a"), () -> "v1");
    assertThat(result1).isEqualTo("v1");
    assertThat(map.get(BY_DATA_SPEC.specId(), CustomRow.of("a"))).isEqualTo("v1");
    // verify existing key is not affected
    String result2 = map.computeIfAbsent(BY_DATA_SPEC.specId(), CustomRow.of("a"), () -> "v2");
    assertThat(result2).isEqualTo("v1");
    assertThat(map.get(BY_DATA_SPEC.specId(), CustomRow.of("a"))).isEqualTo("v1");
}
/**
 * Dissects an archive control-protocol request event into a human-readable line.
 *
 * <p>The wire layout is: log header, SBE message header, then an event-specific SBE
 * message. Each case wraps the matching flyweight decoder over the message body and
 * appends its decoded fields to {@code builder}; unknown event codes are flagged.
 *
 * @param eventCode the archive event code selecting the request decoder
 * @param buffer    buffer containing the encoded event
 * @param offset    offset of the event within {@code buffer}
 * @param builder   destination for the dissected text
 */
@SuppressWarnings("MethodLength")
static void dissectControlRequest(
    final ArchiveEventCode eventCode,
    final MutableDirectBuffer buffer,
    final int offset,
    final StringBuilder builder)
{
    // Log header first, then the SBE message header preceding the request body.
    int encodedLength = dissectLogHeader(CONTEXT, eventCode, buffer, offset, builder);
    HEADER_DECODER.wrap(buffer, offset + encodedLength);
    encodedLength += MessageHeaderDecoder.ENCODED_LENGTH;
    switch (eventCode)
    {
        case CMD_IN_CONNECT:
            CONNECT_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendConnect(builder);
            break;
        case CMD_IN_CLOSE_SESSION:
            CLOSE_SESSION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendCloseSession(builder);
            break;
        case CMD_IN_START_RECORDING:
            START_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartRecording(builder);
            break;
        case CMD_IN_STOP_RECORDING:
            STOP_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopRecording(builder);
            break;
        case CMD_IN_REPLAY:
            REPLAY_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplay(builder);
            break;
        case CMD_IN_STOP_REPLAY:
            STOP_REPLAY_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopReplay(builder);
            break;
        case CMD_IN_LIST_RECORDINGS:
            LIST_RECORDINGS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecordings(builder);
            break;
        case CMD_IN_LIST_RECORDINGS_FOR_URI:
            LIST_RECORDINGS_FOR_URI_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecordingsForUri(builder);
            break;
        case CMD_IN_LIST_RECORDING:
            LIST_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecording(builder);
            break;
        case CMD_IN_EXTEND_RECORDING:
            EXTEND_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendExtendRecording(builder);
            break;
        case CMD_IN_RECORDING_POSITION:
            RECORDING_POSITION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendRecordingPosition(builder);
            break;
        case CMD_IN_TRUNCATE_RECORDING:
            TRUNCATE_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendTruncateRecording(builder);
            break;
        case CMD_IN_STOP_RECORDING_SUBSCRIPTION:
            STOP_RECORDING_SUBSCRIPTION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopRecordingSubscription(builder);
            break;
        case CMD_IN_STOP_POSITION:
            STOP_POSITION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopPosition(builder);
            break;
        case CMD_IN_FIND_LAST_MATCHING_RECORD:
            FIND_LAST_MATCHING_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendFindLastMatchingRecord(builder);
            break;
        case CMD_IN_LIST_RECORDING_SUBSCRIPTIONS:
            LIST_RECORDING_SUBSCRIPTIONS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendListRecordingSubscriptions(builder);
            break;
        case CMD_IN_START_BOUNDED_REPLAY:
            BOUNDED_REPLAY_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartBoundedReplay(builder);
            break;
        case CMD_IN_STOP_ALL_REPLAYS:
            STOP_ALL_REPLAYS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopAllReplays(builder);
            break;
        case CMD_IN_REPLICATE:
            REPLICATE_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplicate(builder);
            break;
        case CMD_IN_STOP_REPLICATION:
            STOP_REPLICATION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopReplication(builder);
            break;
        case CMD_IN_START_POSITION:
            START_POSITION_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartPosition(builder);
            break;
        case CMD_IN_DETACH_SEGMENTS:
            DETACH_SEGMENTS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendDetachSegments(builder);
            break;
        case CMD_IN_DELETE_DETACHED_SEGMENTS:
            DELETE_DETACHED_SEGMENTS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendDeleteDetachedSegments(builder);
            break;
        case CMD_IN_PURGE_SEGMENTS:
            PURGE_SEGMENTS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendPurgeSegments(builder);
            break;
        case CMD_IN_ATTACH_SEGMENTS:
            ATTACH_SEGMENTS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendAttachSegments(builder);
            break;
        case CMD_IN_MIGRATE_SEGMENTS:
            MIGRATE_SEGMENTS_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendMigrateSegments(builder);
            break;
        case CMD_IN_AUTH_CONNECT:
            AUTH_CONNECT_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendAuthConnect(builder);
            break;
        case CMD_IN_KEEP_ALIVE:
            KEEP_ALIVE_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendKeepAlive(builder);
            break;
        case CMD_IN_TAGGED_REPLICATE:
            TAGGED_REPLICATE_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendTaggedReplicate(builder);
            break;
        case CMD_IN_START_RECORDING2:
            START_RECORDING_REQUEST2_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStartRecording2(builder);
            break;
        case CMD_IN_EXTEND_RECORDING2:
            EXTEND_RECORDING_REQUEST2_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendExtendRecording2(builder);
            break;
        case CMD_IN_STOP_RECORDING_BY_IDENTITY:
            STOP_RECORDING_BY_IDENTITY_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendStopRecordingByIdentity(builder);
            break;
        case CMD_IN_PURGE_RECORDING:
            PURGE_RECORDING_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendPurgeRecording(builder);
            break;
        case CMD_IN_REPLICATE2:
            REPLICATE_REQUEST2_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplicate2(builder);
            break;
        case CMD_IN_REQUEST_REPLAY_TOKEN:
            REPLAY_TOKEN_REQUEST_DECODER.wrap(buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version());
            appendReplayToken(builder);
            break;
        default:
            // Event code not understood by this dissector version.
            builder.append(": unknown command");
    }
}
// Verifies dissection of a TRUNCATE_RECORDING control request: the log header,
// event name, capture/total lengths and all request fields appear in the output.
@Test
void controlRequestTruncateRecording() {
    internalEncodeLogHeader(buffer, 0, 12, 32, () -> 10_000_000_000L);
    final TruncateRecordingRequestEncoder requestEncoder = new TruncateRecordingRequestEncoder();
    requestEncoder.wrapAndApplyHeader(buffer, LOG_HEADER_LENGTH, headerEncoder)
        .controlSessionId(2)
        .correlationId(3)
        .recordingId(8)
        .position(1_000_000);
    dissectControlRequest(CMD_IN_TRUNCATE_RECORDING, buffer, 0, builder);
    assertEquals("[10.000000000] " + CONTEXT + ": " + CMD_IN_TRUNCATE_RECORDING.name() + " [12/32]:" +
        " controlSessionId=2" +
        " correlationId=3" +
        " recordingId=8" +
        " position=1000000", builder.toString());
}
/**
 * Imports the classes reachable from a single URL by delegating to {@code importUrls}
 * with a singleton list.
 *
 * @param url location to import classes from
 * @return the imported classes
 */
@PublicAPI(usage = ACCESS)
public JavaClasses importUrl(URL url) {
    return importUrls(singletonList(url));
}
// Verifies that importing the class-hierarchy examples wires up direct and transitive
// subclass relations for interfaces, including across external types (Collection).
@Test
public void creates_relations_between_interfaces_and_subclasses() {
    JavaClasses classes = new ClassFileImporter().importUrl(getClass().getResource("testexamples/classhierarchyimport"));
    JavaClass baseClass = classes.get(BaseClass.class);
    JavaClass otherInterface = classes.get(OtherInterface.class);
    JavaClass subclass = classes.get(Subclass.class);
    JavaClass subSubclass = classes.get(SubSubclass.class);
    JavaClass subSubSubclass = classes.get(SubSubSubclass.class);
    JavaClass subSubSubSubclass = classes.get(SubSubSubSubclass.class);
    JavaClass subinterface = classes.get(Subinterface.class);
    JavaClass otherSubclass = classes.get(OtherSubclass.class);
    JavaClass parentInterface = classes.get(ParentInterface.class);
    JavaClass grandParentInterface = classes.get(GrandParentInterface.class);
    JavaClass someCollection = classes.get(SomeCollection.class);
    JavaClass collectionInterface = classes.get(CollectionInterface.class);
    // Direct vs. transitive subclasses of the top-level interface.
    assertThat(grandParentInterface.getSubclasses()).containsOnly(parentInterface, otherInterface);
    assertThat(grandParentInterface.getAllSubclasses()).containsOnly(
        parentInterface, subinterface, otherInterface,
        baseClass, subclass, otherSubclass, subSubclass, subSubSubclass, subSubSubSubclass, someCollection
    );
    assertThat(parentInterface.getSubclasses()).containsOnly(subinterface, otherSubclass);
    assertThat(parentInterface.getAllSubclasses()).containsOnly(
        subinterface, subclass, subSubclass, subSubSubclass, subSubSubSubclass, someCollection, otherSubclass);
    // The external Collection interface also participates in the subclass graph.
    JavaClass collection = getOnlyElement(collectionInterface.getRawInterfaces());
    assertThat(collection.getAllSubclasses()).containsOnly(collectionInterface, someCollection);
}
/**
 * Returns the OAuth2 authorization info (client details plus the scopes the current
 * user has already approved) for the SSO authorization page. The user must already be
 * authenticated (enforced by Spring Security).
 *
 * @param clientId the OAuth2 client id
 * @return client info combined with the user's prior scope approvals
 */
@GetMapping("/authorize")
@Operation(summary = "获得授权信息", description = "适合 code 授权码模式,或者 implicit 简化模式;在 sso.vue 单点登录界面被【获取】调用")
@Parameter(name = "clientId", required = true, description = "客户端编号", example = "tudou")
public CommonResult<OAuth2OpenAuthorizeInfoRespVO> authorize(@RequestParam("clientId") String clientId) {
    // 0. User login is validated upstream by Spring Security.
    // 1. Load the client details (from cache, with validation).
    OAuth2ClientDO client = oauth2ClientService.validOAuthClientFromCache(clientId);
    // 2. Load the scopes the user already approved for this client.
    List<OAuth2ApproveDO> approves = oauth2ApproveService.getApproveList(getLoginUserId(), getUserType(), clientId);
    // Assemble the response.
    return success(OAuth2OpenConvert.INSTANCE.convert(client, approves));
}
// Verifies that authorize() merges client scopes with prior approvals: approved scopes
// come back true, rejected ones false, and never-decided ones default to false.
@Test
public void testAuthorize() {
    // Prepare parameters.
    String clientId = randomString();
    // Mock the client lookup.
    OAuth2ClientDO client = randomPojo(OAuth2ClientDO.class).setClientId("demo_client_id").setScopes(ListUtil.toList("read", "write", "all"));
    when(oauth2ClientService.validOAuthClientFromCache(eq(clientId))).thenReturn(client);
    // Mock the prior approvals.
    List<OAuth2ApproveDO> approves = asList(
        randomPojo(OAuth2ApproveDO.class).setScope("read").setApproved(true),
        randomPojo(OAuth2ApproveDO.class).setScope("write").setApproved(false));
    when(oauth2ApproveService.getApproveList(isNull(), eq(UserTypeEnum.ADMIN.getValue()), eq(clientId))).thenReturn(approves);
    // Invoke.
    CommonResult<OAuth2OpenAuthorizeInfoRespVO> result = oauth2OpenController.authorize(clientId);
    // Assert.
    assertEquals(0, result.getCode());
    assertPojoEquals(client, result.getData().getClient());
    assertEquals(new KeyValue<>("read", true), result.getData().getScopes().get(0));
    assertEquals(new KeyValue<>("write", false), result.getData().getScopes().get(1));
    assertEquals(new KeyValue<>("all", false), result.getData().getScopes().get(2));
}
/**
 * Applies the script's GSUB features to the glyph sequence in the required order.
 *
 * <p>Reph adjustment and glyph repositioning run first; then each feature in
 * {@code FEATURES_IN_ORDER} is applied if present. When the font lacks an rkrf
 * feature but has vatu, an rkrf substitution is synthesized from the vatu feature.
 *
 * @param originalGlyphIds the input glyph ids
 * @return an unmodifiable list of transformed glyph ids
 */
@Override
public List<Integer> applyTransforms(List<Integer> originalGlyphIds) {
    List<Integer> intermediateGlyphsFromGsub = adjustRephPosition(originalGlyphIds);
    intermediateGlyphsFromGsub = repositionGlyphs(intermediateGlyphsFromGsub);
    for (String feature : FEATURES_IN_ORDER) {
        if (!gsubData.isFeatureSupported(feature)) {
            if (feature.equals(RKRF_FEATURE) && gsubData.isFeatureSupported(VATU_FEATURE)) {
                // Create your own rkrf feature from vatu feature
                intermediateGlyphsFromGsub = applyRKRFFeature(
                    gsubData.getFeature(VATU_FEATURE),
                    intermediateGlyphsFromGsub);
            }
            LOG.debug("the feature {} was not found", feature);
            continue;
        }
        LOG.debug("applying the feature {}", feature);
        ScriptFeature scriptFeature = gsubData.getFeature(feature);
        intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature, intermediateGlyphsFromGsub);
    }
    return Collections.unmodifiableList(intermediateGlyphsFromGsub);
}
// Verifies half-form substitution for a Gujarati conjunct sequence.
@Test
void testApplyTransforms_half() {
    // given
    List<Integer> glyphsAfterGsub = Arrays.asList(205, 195, 206);
    // when
    List<Integer> result = gsubWorkerForGujarati.applyTransforms(getGlyphIds("ત્ચ્થ્"));
    // then
    assertEquals(glyphsAfterGsub, result);
}
/**
 * @return a new builder for constructing instances of this response type
 */
public static Builder builder() {
    return new Builder();
}
// Verifies that the builder rejects null namespaces, null namespace lists, and lists
// containing a null element, each with the expected exception type and message.
@Test
public void testBuilderDoesNotCreateInvalidObjects() {
    assertThatThrownBy(() -> ListNamespacesResponse.builder().add(null).build())
        .isInstanceOf(NullPointerException.class)
        .hasMessage("Invalid namespace: null");
    assertThatThrownBy(() -> ListNamespacesResponse.builder().addAll(null).build())
        .isInstanceOf(NullPointerException.class)
        .hasMessage("Invalid namespace list: null");
    List<Namespace> listWithNullElement = Lists.newArrayList(Namespace.of("a"), null);
    assertThatThrownBy(() -> ListNamespacesResponse.builder().addAll(listWithNullElement).build())
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Invalid namespace: null");
}
/**
 * Deprecated flatTransform overload without an explicit {@link Named}; generates a
 * processor name and delegates to the named variant.
 *
 * @throws NullPointerException when {@code transformerSupplier} is null
 */
@Override
@Deprecated
public <K1, V1> KStream<K1, V1> flatTransform(final org.apache.kafka.streams.kstream.TransformerSupplier<? super K, ? super V, Iterable<KeyValue<K1, V1>>> transformerSupplier,
                                              final String... stateStoreNames) {
    Objects.requireNonNull(transformerSupplier, "transformerSupplier can't be null");
    final String name = builder.newProcessorName(TRANSFORM_NAME);
    return flatTransform(transformerSupplier, Named.as(name), stateStoreNames);
}
// Verifies that passing a null store name (as an element of the varargs) to
// flatTransform is rejected with a descriptive NullPointerException.
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowNullStoreNameOnFlatTransform() {
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.flatTransform(flatTransformerSupplier, (String) null));
    assertThat(exception.getMessage(), equalTo("stateStoreNames can't contain `null` as store name"));
}
/**
 * Splits a glyph-id sequence at compound-character boundaries.
 *
 * <p>The ids are rendered as text, tokenized by the compound-character tokenizer,
 * and each token is converted back into a glyph-id list.
 */
@Override
public List<List<Integer>> split(List<Integer> glyphIds) {
    final List<String> tokens = compoundCharacterTokenizer.tokenize(convertGlyphIdsToString(glyphIds));
    final List<List<Integer>> result = new ArrayList<>(tokens.size());
    for (String token : tokens) {
        result.add(convertGlyphIdsToList(token));
    }
    return result;
}
// Verifies that the splitter cuts the glyph sequence at the configured matcher
// boundaries and keeps all glyphs, in order, across the resulting segments.
@Test
void testSplit_1() {
    // given
    Set<List<Integer>> matchers = new HashSet<>(Arrays.asList(Arrays.asList(84, 93),
        Arrays.asList(102, 82), Arrays.asList(104, 87)));
    GlyphArraySplitter testClass = new GlyphArraySplitterRegexImpl(matchers);
    List<Integer> glyphIds = Arrays.asList(84, 112, 93, 104, 82, 61, 96, 102, 93, 104, 87, 110);
    // when
    List<List<Integer>> tokens = testClass.split(glyphIds);
    // then
    assertEquals(Arrays.asList(Arrays.asList(84, 112, 93, 104, 82, 61, 96, 102, 93),
        Arrays.asList(104, 87), Arrays.asList(110)), tokens);
}
// Standalone (non-HA) leader election: a single contender may register, and it
// is granted leadership immediately with the fixed session ID.
@Override
public void startLeaderElection(LeaderContender contender) throws Exception {
    synchronized (lock) {
        Preconditions.checkState(
            leaderContender == null,
            "No LeaderContender should have been registered with this LeaderElection, yet.");
        this.leaderContender = contender;
        // There is no real election in standalone mode; grant leadership right away.
        this.leaderContender.grantLeadership(sessionID);
    }
}
// End-to-end standalone election: the contender becomes leader immediately and
// the retrieval listener observes the same address and session ID.
@Test
void testStandaloneLeaderElectionRetrieval() throws Exception {
    final UUID expectedSessionID = UUID.randomUUID();
    StandaloneLeaderRetrievalService leaderRetrievalService = new StandaloneLeaderRetrievalService(TEST_URL, expectedSessionID);
    TestingListener testingListener = new TestingListener();
    try (LeaderElection leaderElection = new StandaloneLeaderElection(expectedSessionID)) {
        TestingContender contender = new TestingContender(TEST_URL, leaderElection);
        contender.startLeaderElection();
        leaderRetrievalService.start(testingListener);
        contender.waitForLeader();
        assertThat(contender.isLeader()).isTrue();
        assertThat(contender.getLeaderSessionID()).isEqualTo(expectedSessionID);
        testingListener.waitForNewLeader();
        assertThat(testingListener.getAddress()).isEqualTo(TEST_URL);
        assertThat(testingListener.getLeaderSessionID()).isEqualTo(expectedSessionID);
    } finally {
        leaderRetrievalService.stop();
    }
}
// Reactive RENAME that also works across Redis Cluster slots: same-slot renames
// delegate to the native command; cross-slot renames are emulated via
// DUMP + RESTORE (carrying the TTL over) followed by deleting the source key.
@Override
public Flux<BooleanResponse<RenameCommand>> rename(Publisher<RenameCommand> commands) {
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getNewKey(), "New name must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        byte[] newKeyBuf = toByteArray(command.getNewKey());
        // Fast path: both keys hash to the same slot, so a plain RENAME is valid.
        if (executorService.getConnectionManager().calcSlot(keyBuf) == executorService.getConnectionManager().calcSlot(newKeyBuf)) {
            return super.rename(commands);
        }
        return read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.DUMP, keyBuf)
                .filter(Objects::nonNull)
                .zipWith(
                    Mono.defer(() -> pTtl(command.getKey())
                        .filter(Objects::nonNull)
                        // Negative TTL (no expiry) is normalized to 0 for RESTORE.
                        .map(ttl -> Math.max(0, ttl))
                        .switchIfEmpty(Mono.just(0L))
                    )
                )
                .flatMap(valueAndTtl -> {
                    return write(newKeyBuf, StringCodec.INSTANCE, RedisCommands.RESTORE, newKeyBuf, valueAndTtl.getT2(), valueAndTtl.getT1());
                })
                .thenReturn(new BooleanResponse<>(command, true))
                // Remove the old key only after the copy succeeded.
                .doOnSuccess((ignored) -> del(command.getKey()));
    });
}
// Renames a key (same- or cross-slot, depending on the fixture) and verifies
// the value and, when set, the TTL carried over to the new key.
@Test
public void testRename() {
    connection.stringCommands().set(originalKey, value).block();
    if (hasTtl) {
        connection.keyCommands().expire(originalKey, Duration.ofSeconds(1000)).block();
    }
    Integer originalSlot = getSlotForKey(originalKey);
    newKey = getNewKeyForSlot(new String(originalKey.array()), getTargetSlot(originalSlot));
    Boolean response = connection.keyCommands().rename(originalKey, newKey).block();
    assertThat(response).isTrue();
    final ByteBuffer newKeyValue = connection.stringCommands().get(newKey).block();
    assertThat(newKeyValue).isEqualTo(value);
    if (hasTtl) {
        assertThat(connection.keyCommands().ttl(newKey).block()).isGreaterThan(0);
    } else {
        assertThat(connection.keyCommands().ttl(newKey).block()).isEqualTo(-1);
    }
}
/**
 * Null-safe emptiness check for a byte array.
 *
 * @param data the array to inspect; may be {@code null}
 * @return {@code true} if the array is {@code null} or has zero length
 */
public static boolean isEmpty(byte[] data) {
    // A missing array is treated the same as a zero-length one.
    if (data == null) {
        return true;
    }
    return data.length == 0;
}
// Covers zero-length, non-empty, and null arrays.
@Test
void isEmpty() {
    byte[] bytes = ByteUtils.toBytes("");
    assertTrue(ByteUtils.isEmpty(bytes));
    byte[] byte2 = new byte[1024];
    assertFalse(ByteUtils.isEmpty(byte2));
    byte[] byte3 = null;
    assertTrue(ByteUtils.isEmpty(byte3));
}
// Returns a grayscale copy of the image, produced via Swing's disabled-image filter.
public static BufferedImage grayscaleImage(final BufferedImage image) {
    final Image grayImage = GrayFilter.createDisabledImage(image);
    return ImageUtil.bufferedImageFromImage(grayImage);
}
// Grayscaling must leave already-gray pixels gray and turn colored pixels gray.
@Test
public void grayscaleImage() {
    final BufferedImage[] grayscaleColors = new BufferedImage[] {
        oneByOne(WHITE), oneByOne(GRAY), oneByOne(BLACK), oneByOne(BLACK_HALF_TRANSPARENT), oneByOne(BLACK_TRANSPARENT),
    };
    final BufferedImage[] nonGrayscaleColors = new BufferedImage[] {
        oneByOne(RED), oneByOne(GREEN), oneByOne(BLUE),
    };
    for (BufferedImage image : grayscaleColors) {
        assertTrue(isGrayscale(image));
    }
    for (BufferedImage image : nonGrayscaleColors) {
        assertFalse(isGrayscale(image));
    }
    // Every input, gray or not, must be grayscale after conversion.
    for (BufferedImage image : ArrayUtils.addAll(grayscaleColors, nonGrayscaleColors)) {
        assertTrue(isGrayscale(ImageUtil.grayscaleImage(image)));
    }
}
// Delegates interpretation of the statement to the shared helper, bound to this
// interpreter's session.
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
    return helper.interpret(session, st, context);
}
// An insert issued with an explicit past @timestamp must not overwrite the row
// written earlier with the current timestamp.
@Test
void should_execute_statement_with_timestamp_option() throws Exception {
    // Given
    String statement1 = "INSERT INTO zeppelin.ts(key,val) VALUES('k','v1');";
    String statement2 = "@timestamp=15\n" + "INSERT INTO zeppelin.ts(key,val) VALUES('k','v2');";
    CqlSession session = EmbeddedCassandraServerHelper.getSession();
    // Insert v1 with current timestamp
    interpreter.interpret(statement1, intrContext);
    System.out.println("going to read data from zeppelin.ts;");
    session.execute("SELECT val FROM zeppelin.ts LIMIT 1")
        .forEach(x -> System.out.println("row " + x));
    Thread.sleep(1);
    // When
    // Insert v2 with past timestamp
    interpreter.interpret(statement2, intrContext);
    System.out.println("going to read data from zeppelin.ts;");
    session.execute("SELECT val FROM zeppelin.ts LIMIT 1")
        .forEach(x -> System.out.println("row " + x));
    final String actual = session.execute("SELECT val FROM zeppelin.ts LIMIT 1").one()
        .getString("val");
    // Then
    assertEquals("v1", actual);
}
// Validates the device token, then requests the OTA package of the given type
// for the authenticated device; the deferred HTTP response is completed by
// GetOtaPackageCallback.
private DeferredResult<ResponseEntity> getOtaPackageCallback(String deviceToken, String title, String version, int size, int chunk, OtaPackageType firmwareType) {
    DeferredResult<ResponseEntity> responseWriter = new DeferredResult<>();
    transportContext.getTransportService().process(DeviceTransportType.DEFAULT,
        ValidateDeviceTokenRequestMsg.newBuilder().setToken(deviceToken).build(),
        new DeviceAuthCallback(transportContext, responseWriter, sessionInfo -> {
            // Tenant/device ids come from the validated session, not from the request.
            TransportProtos.GetOtaPackageRequestMsg requestMsg = TransportProtos.GetOtaPackageRequestMsg.newBuilder()
                .setTenantIdMSB(sessionInfo.getTenantIdMSB())
                .setTenantIdLSB(sessionInfo.getTenantIdLSB())
                .setDeviceIdMSB(sessionInfo.getDeviceIdMSB())
                .setDeviceIdLSB(sessionInfo.getDeviceIdLSB())
                .setType(firmwareType.name()).build();
            transportContext.getTransportService().process(sessionInfo, requestMsg, new GetOtaPackageCallback(transportContext, responseWriter, title, version, size, chunk));
        }));
    return responseWriter;
}
// Exercises GetOtaPackageCallback.onError with a variety of exception types;
// the test passes as long as no exception escapes the callback.
@Test
void getOtaPackageCallback() {
    TransportContext transportContext = Mockito.mock(TransportContext.class);
    DeferredResult<ResponseEntity> responseWriter = Mockito.mock(DeferredResult.class);
    String title = "Title";
    String version = "version";
    int chunkSize = 11;
    int chunk = 3;
    var callback = new DeviceApiController.GetOtaPackageCallback(transportContext, responseWriter, title, version, chunkSize, chunk);
    callback.onError(new HttpMessageNotReadableException("JSON incorrect syntax"));
    callback.onError(new JsonParseException("Json ; expected"));
    callback.onError(new IOException("not found"));
    callback.onError(new RuntimeException("oops it is run time error"));
}
/**
 * Removes the named configuration from the registry.
 *
 * @param configName the configuration to remove; must not be {@code null}
 * @return the removed configuration, or {@code null} if none was registered
 * @throws NullPointerException     if {@code configName} is {@code null}
 * @throws IllegalArgumentException if {@code configName} is the default configuration
 */
@Override
public C removeConfiguration(String configName) {
    // Original code threw a bare NPE via configName.equals(...) on null input;
    // keep the exception type but make the failure self-explanatory.
    if (configName == null) {
        throw new NullPointerException("configName must not be null");
    }
    if (configName.equals(DEFAULT_CONFIG)) {
        throw new IllegalArgumentException(
            "You cannot remove the default configuration");
    }
    return this.configurations.remove(configName);
}
// Removing the reserved "default" configuration must fail.
@Test
public void shouldNotAllowToRemoveDefaultConfiguration() {
    TestRegistry testRegistry = new TestRegistry();
    assertThatThrownBy(() -> testRegistry.removeConfiguration("default"))
        .isInstanceOf(IllegalArgumentException.class);
}
// Minimum consuming freshness time in millis from the broker response,
// or -1 when the field is absent.
public long getMinConsumingFreshnessTimeMs() {
    return _brokerResponse.has(MIN_CONSUMING_FRESHNESS_TIME_MS) ? _brokerResponse.get(MIN_CONSUMING_FRESHNESS_TIME_MS)
        .asLong() : -1L;
}
// The fixture response carries 10 ms; the getter must surface it unchanged.
@Test
public void testGetMinConsumingFreshnessTimeMs() {
    // Run the test
    final long result = _executionStatsUnderTest.getMinConsumingFreshnessTimeMs();
    // Verify the results
    assertEquals(10L, result);
}
// Returns the package-info annotations holder, if one exists for this package.
@PublicAPI(usage = ACCESS)
public Optional<? extends HasAnnotations<?>> tryGetPackageInfo() {
    return packageInfo;
}
// Presence of package-info determines whether package annotations are available.
@Test
public void test_tryGetPackageInfo() {
    JavaPackage annotatedPackage = importPackage("packageexamples.annotated");
    JavaPackage nonAnnotatedPackage = importPackage("packageexamples");
    assertThat(annotatedPackage.tryGetPackageInfo()).isPresent();
    assertThat(nonAnnotatedPackage.tryGetPackageInfo()).isEmpty();
}
/**
 * Reorders the migration list in place so that copy and shift-up migrations are
 * prioritized (see {@code prioritize}), then logs the resulting order at the
 * finest level.
 *
 * @param migrations the migration plan to reorder in place
 */
void prioritizeCopiesAndShiftUps(List<MigrationInfo> migrations) {
    for (int i = 0; i < migrations.size(); i++) {
        prioritize(migrations, i);
    }
    if (logger.isFinestEnabled()) {
        StringBuilder s = new StringBuilder("Migration order after prioritization: [");
        int ix = 0;
        for (MigrationInfo migration : migrations) {
            s.append("\n\t").append(ix++).append("- ").append(migration).append(",");
        }
        // BUG FIX: only strip the trailing comma when something was appended;
        // previously an empty migration list deleted the opening '[' instead.
        if (!migrations.isEmpty()) {
            s.deleteCharAt(s.length() - 1);
        }
        s.append("]");
        logger.finest(s.toString());
    }
}
// The shift-up migration (migration4) must be ordered ahead of the plain move
// (migration3) but after the two copies.
@Test
public void testShiftUpPrioritizationAgainstMove() throws UnknownHostException {
    List<MigrationInfo> migrations = new ArrayList<>();
    final MigrationInfo migration1 = new MigrationInfo(0, null, new PartitionReplica(new Address("localhost", 5701), uuids[0]), -1, -1, -1, 0);
    final MigrationInfo migration2 = new MigrationInfo(0, null, new PartitionReplica(new Address("localhost", 5702), uuids[1]), -1, -1, -1, 1);
    final MigrationInfo migration3 = new MigrationInfo(0, new PartitionReplica(new Address("localhost", 5705), uuids[4]), new PartitionReplica(new Address("localhost", 5706), uuids[5]), 2, -1, -1, 3);
    final MigrationInfo migration4 = new MigrationInfo(0, null, new PartitionReplica(new Address("localhost", 5707), uuids[6]), -1, -1, 4, 2);
    migrations.add(migration1);
    migrations.add(migration2);
    migrations.add(migration3);
    migrations.add(migration4);
    migrationPlanner.prioritizeCopiesAndShiftUps(migrations);
    assertEquals(asList(migration1, migration2, migration4, migration3), migrations);
}
// Untars the segment tarball under tempRootDir, then moves the extracted
// directory into its final segment location; returns the final directory.
@VisibleForTesting
File untarAndMoveSegment(String segmentName, File segmentTarFile, File tempRootDir) throws IOException {
    return moveSegment(segmentName, untarSegment(segmentName, segmentTarFile, tempRootDir));
}
// Untars a segment tarball into tempRootDir and verifies the content ends up in
// the segment data dir; untarring a non-existent file must throw.
@Test
public void testUntarAndMoveSegment() throws IOException {
    BaseTableDataManager tableDataManager = createTableManager();
    File tempRootDir = tableDataManager.getTmpSegmentDataDir("test-untar-move");
    // All input and intermediate files are put in the tempRootDir.
    File tempTar = new File(tempRootDir, SEGMENT_NAME + TarCompressionUtils.TAR_GZ_FILE_EXTENSION);
    File tempInputDir = new File(tempRootDir, "input");
    FileUtils.write(new File(tempInputDir, "tmp.txt"), "this is in segment dir");
    TarCompressionUtils.createCompressedTarFile(tempInputDir, tempTar);
    FileUtils.deleteQuietly(tempInputDir);
    // The destination is the segment directory at the same level of tempRootDir.
    File untarredFile = tableDataManager.untarAndMoveSegment(SEGMENT_NAME, tempTar, tempRootDir);
    assertEquals(untarredFile, tableDataManager.getSegmentDataDir(SEGMENT_NAME));
    assertEquals(FileUtils.readFileToString(new File(untarredFile, "tmp.txt")), "this is in segment dir");
    try {
        tableDataManager.untarAndMoveSegment(SEGMENT_NAME, new File(tempRootDir, "unknown.txt"), TEMP_DIR);
        fail();
    } catch (Exception e) {
        // expected.
    }
}
// Casts a nullable Integer to a BigDecimal with the given precision/scale by
// delegating to the long overload; null maps to null.
public static BigDecimal cast(final Integer value, final int precision, final int scale) {
    if (value == null) {
        return null;
    }
    return cast(value.longValue(), precision, scale);
}
// Casting 1.19 to DECIMAL(2,1) must round to 1.2.
@Test
public void shouldCastDecimalRoundingUp() {
    // When:
    final BigDecimal decimal = DecimalUtil.cast(new BigDecimal("1.19"), 2, 1);
    // Then:
    assertThat(decimal, is(new BigDecimal("1.2")));
}
// Resolves the configured UuidGenerator implementation class from the
// configuration parameters, or null when the property is not set.
@Override
public Class<? extends UuidGenerator> getUuidGeneratorClass() {
    return configurationParameters
        .get(UUID_GENERATOR_PROPERTY_NAME, UuidGeneratorParser::parseUuidGenerator)
        .orElse(null);
}
// The UUID-generator configuration property selects the generator class.
@Test
void uuidGenerator() {
    ConfigurationParameters configurationParameters = new MapConfigurationParameters(
        Constants.UUID_GENERATOR_PROPERTY_NAME, IncrementingUuidGenerator.class.getName());
    assertThat(new CucumberEngineOptions(configurationParameters).getUuidGeneratorClass(),
        is(IncrementingUuidGenerator.class));
}
// Creates the JMS destinations described by the client's destination name.
// A separator-delimited name yields either one composite destination (when
// destComposite is set and destCount == 1) or one destination per part;
// otherwise addDestinations handles creation (and any suffixing).
public Destination[] createDestinations(int destCount) throws JMSException {
    final String destName = getClient().getDestName();
    ArrayList<Destination> destinations = new ArrayList<>();
    if (destName.contains(DESTINATION_SEPARATOR)) {
        if (getClient().isDestComposite() && (destCount == 1)) {
            // user was explicit about which destinations to make composite
            String[] simpleNames = mapToSimpleNames(destName.split(DESTINATION_SEPARATOR));
            String joinedSimpleNames = join(simpleNames, DESTINATION_SEPARATOR);
            // use the type of the 1st destination for the Destination instance
            byte destinationType = getDestinationType(destName);
            destinations.add(createCompositeDestination(destinationType, joinedSimpleNames, 1));
        } else {
            LOG.info("User requested multiple destinations, splitting: {}", destName);
            // either composite with multiple destinations to be suffixed
            // or multiple non-composite destinations
            String[] destinationNames = destName.split(DESTINATION_SEPARATOR);
            for (String splitDestName : destinationNames) {
                addDestinations(destinations, splitDestName, destCount);
            }
        }
    } else {
        addDestinations(destinations, destName, destCount);
    }
    return destinations.toArray(new Destination[] {});
}
// With destComposite=true and destCount=2, one composite queue is created whose
// name joins the index-suffixed simple destinations.
@Test
public void testCreateDestinations_compositeQueue() throws JMSException {
    clientProperties.setDestComposite(true);
    clientProperties.setDestName("queue://" + DEFAULT_DEST);
    Destination[] destinations = jmsClient.createDestinations(2);
    assertEquals(1, destinations.length);
    // suffixes should be added
    String expectedDestName = DEFAULT_DEST + ".0," + DEFAULT_DEST + ".1";
    assertDestinationNameType(expectedDestName, QUEUE_TYPE, asAmqDest(destinations[0]));
}
// Closes the service: marks it closed, cancels all tracked unfinished futures,
// and shuts the executor down when this instance owns it.
@Override
public void close() {
    isOpen.set(false);
    Iterator<Future<?>> iterator = futures.iterator();
    while (iterator.hasNext()) {
        Future<?> future = iterator.next();
        // Remove from tracking before attempting cancellation.
        iterator.remove();
        if (!future.isDone() && !future.isCancelled() && !future.cancel(true)) {
            log.warn("Could not cancel " + future);
        }
    }
    if (shutdownOnClose) {
        executorService.shutdownNow();
    }
}
// All submitted runnables start; close() then interrupts them and each latch
// counts down within the timeout.
@Test
public void testBasicRunnable() throws InterruptedException {
    try {
        CloseableExecutorService service = new CloseableExecutorService(executorService);
        CountDownLatch startLatch = new CountDownLatch(QTY);
        CountDownLatch latch = new CountDownLatch(QTY);
        for (int i = 0; i < QTY; ++i) {
            submitRunnable(service, startLatch, latch);
        }
        assertTrue(startLatch.await(3, TimeUnit.SECONDS));
        service.close();
        assertTrue(latch.await(3, TimeUnit.SECONDS));
    } catch (AssertionError e) {
        throw e;
    } catch (Throwable e) {
        e.printStackTrace();
    }
}
// Runs the full contraction-hierarchies preparation exactly once on a frozen
// graph with an empty CH store, and returns the shortcut store plus timings.
public Result doWork() {
    if (prepared)
        throw new IllegalStateException("Call doWork only once!");
    prepared = true;
    if (!graph.isFrozen()) {
        throw new IllegalStateException("Given BaseGraph has not been frozen yet");
    }
    if (chStore.getShortcuts() > 0) {
        throw new IllegalStateException("Given CHStore already contains shortcuts");
    }
    allSW.start();
    initFromGraph();
    runGraphContraction();
    allSW.stop();
    logFinalGraphStats();
    return new Result(
        chConfig, chStore,
        nodeContractor.getAddedShortcutsCount(),
        lazyUpdateSW.getCurrentSeconds(),
        periodicUpdateSW.getCurrentSeconds(),
        neighborUpdateSW.getCurrentSeconds(),
        allSW.getMillis()
    );
}
// Graph with a parallel edge (0-1 twice) and two dead-end branches: CH
// preparation must not create any shortcuts.
@Test
public void testCircleBug() {
    // /--1
    // -0--/
    // |
    g.edge(0, 1).setDistance(10).set(speedEnc, 60, 60);
    g.edge(0, 1).setDistance(4).set(speedEnc, 60, 60);
    g.edge(0, 2).setDistance(10).set(speedEnc, 60, 60);
    g.edge(0, 3).setDistance(10).set(speedEnc, 60, 60);
    PrepareContractionHierarchies prepare = createPrepareContractionHierarchies(g);
    PrepareContractionHierarchies.Result result = prepare.doWork();
    assertEquals(0, result.getShortcuts());
}
// Cancels any pending delayed recalculations and recomputes partition
// assignments from the currently known set of other servers.
synchronized void recalculatePartitions() {
    delayedTasks.values().forEach(future -> future.cancel(false));
    delayedTasks.clear();
    partitionService.recalculatePartitions(serviceInfoProvider.getServiceInfo(), getOtherServers());
}
// A node joining while another node's removal is still pending must cancel the
// delayed recalculation and trigger an immediate one.
@Test
public void startAnotherNodeDuringRestartTest() throws Exception {
    var anotherInfo = TransportProtos.ServiceInfo.newBuilder().setServiceId("tb-transport").build();
    var anotherData = new ChildData("/thingsboard/nodes/0000000030", null, anotherInfo.toByteArray());
    startNode(childData);
    verify(partitionService, times(1)).recalculatePartitions(eq(currentInfo), eq(List.of(childInfo)));
    reset(partitionService);
    stopNode(childData);
    // Node removal only schedules a delayed recalculation.
    assertEquals(1, zkDiscoveryService.delayedTasks.size());
    startNode(anotherData);
    // The join cancels the delayed task and recalculates immediately.
    assertTrue(zkDiscoveryService.delayedTasks.isEmpty());
    verify(partitionService, times(1)).recalculatePartitions(eq(currentInfo), eq(List.of(anotherInfo)));
    reset(partitionService);
    Thread.sleep(RECALCULATE_DELAY * 2);
    verify(partitionService, never()).recalculatePartitions(any(), any());
    startNode(childData);
    verify(partitionService, times(1)).recalculatePartitions(eq(currentInfo), eq(List.of(anotherInfo, childInfo)));
}
// Evaluates the configured rate-limiting script in Redis and reports whether
// the request identified by id is allowed plus the tokens remaining. Redis
// errors fail open (allowed=1, tokens=-1) and trigger the algorithm callback.
@SuppressWarnings("unchecked")
public Mono<RateLimiterResponse> isAllowed(final String id, final RateLimiterHandle limiterHandle) {
    double replenishRate = limiterHandle.getReplenishRate();
    double burstCapacity = limiterHandle.getBurstCapacity();
    double requestCount = limiterHandle.getRequestCount();
    RateLimiterAlgorithm<?> rateLimiterAlgorithm = RateLimiterAlgorithmFactory.newInstance(limiterHandle.getAlgorithmName());
    RedisScript<?> script = rateLimiterAlgorithm.getScript();
    List<String> keys = rateLimiterAlgorithm.getKeys(id);
    List<String> scriptArgs = Stream.of(replenishRate, burstCapacity, Instant.now().getEpochSecond(), requestCount).map(String::valueOf).collect(Collectors.toList());
    Flux<List<Long>> resultFlux = Singleton.INST.get(ReactiveRedisTemplate.class).execute(script, keys, scriptArgs);
    return resultFlux.onErrorResume(throwable -> Flux.just(Arrays.asList(1L, -1L)))
        // Flatten the emitted lists into a single [allowed, tokensLeft] result.
        .reduce(new ArrayList<Long>(), (longs, l) -> {
            longs.addAll(l);
            return longs;
        }).map(results -> {
            boolean allowed = results.get(0) == 1L;
            Long tokensLeft = results.get(1);
            return new RateLimiterResponse(allowed, tokensLeft, keys);
        })
        .doOnError(throwable -> {
            rateLimiterAlgorithm.callback(rateLimiterAlgorithm.getScript(), keys, scriptArgs);
            LOG.error("Error occurred while judging if user is allowed by RedisRateLimiter:{}", throwable.getMessage());
        });
}
// Sliding-window algorithm: the request is allowed and the remaining token
// count reflects the pre-seeded consumption.
@Test
public void slidingWindowAllowedTest() {
    slidingWindowPreInit(1L, 200L);
    rateLimiterHandle.setAlgorithmName("slidingWindow");
    Mono<RateLimiterResponse> responseMono = redisRateLimiter.isAllowed(DEFAULT_TEST_ID, rateLimiterHandle);
    StepVerifier.create(responseMono).assertNext(r -> {
        assertThat(r.getTokensRemaining(), is((long) DEFAULT_TEST_BURST_CAPACITY - 100L));
        assertTrue(r.isAllowed());
    }).verifyComplete();
}
// Computes the next fire time after startDate for a cron-based trigger, shifted
// by a delay derived from uniqueId (see getDelayInSeconds). Non-cron trigger
// types are unsupported and raise UnsupportedOperationException.
@SneakyThrows
public static Optional<Date> nextExecutionDate(
    TimeTrigger trigger, Date startDate, String uniqueId) {
    CronTimeTrigger cronTimeTrigger = getCronTimeTrigger(trigger);
    if (cronTimeTrigger != null) {
        CronExpression cronExpression = TriggerHelper.buildCron(cronTimeTrigger.getCron(), cronTimeTrigger.getTimezone());
        Date nextTime = cronExpression.getNextValidTimeAfter(startDate);
        if (nextTime != null) {
            // Apply the id-derived delay, converted from seconds to millis.
            nextTime.setTime(
                nextTime.getTime() + getDelayInSeconds(cronTimeTrigger, uniqueId) * TimeTrigger.MS_IN_SECONDS);
        }
        return Optional.ofNullable(nextTime);
    }
    throw new UnsupportedOperationException(
        "TimeTrigger nextExecutionDate is not implemented for type: " + trigger.getType());
}
// INTERVAL triggers are not supported by nextExecutionDate and must throw.
@Test
public void testNextExecutionDateForInterval() throws Exception {
    TimeTrigger trigger = loadObject("fixtures/time_triggers/sample-interval-time-trigger.json", TimeTrigger.class);
    AssertHelper.assertThrows(
        "TimeTrigger nextExecutionDate is not implemented",
        UnsupportedOperationException.class,
        "TimeTrigger nextExecutionDate is not implemented for type: INTERVAL",
        () -> TriggerHelper.nextExecutionDate(trigger, Date.from(Instant.EPOCH), "test-id"));
}
// Scans the byte stream for the MBYTES marker. Marker-prefix bytes are withheld
// from the underlying stream until either a full match (onMarkFound() fires and
// the marker is suppressed) or a mismatch (the held prefix is flushed and the
// current byte re-examined from scratch).
@Override
public synchronized void write(int b) throws IOException {
    if (MBYTES[match] == b) {
        // another byte matched. Good. Keep going...
        match++;
        if (match == MBYTES.length) {
            // don't send MARK to the output, but instead notify the callback
            onMarkFound();
            match = 0;
        }
    } else {
        if (match > 0) {
            // only matched partially. send the partial match that we held off down the pipe
            base.write(MBYTES, 0, match);
            match = 0;
            // this might match the first byte in MARK, so retry.
            write(b);
        } else {
            base.write(b);
        }
    }
}
@Test public void oneByOne() throws IOException { m.write('1'); writeOneByOne(mark); m.write('2'); assertCount(1); assertOutput("12"); }
// Dispatches a list-protocol command: reads the one-character sub-command,
// routes it to the matching list operation (falling back to the generic
// Collections-method call), and writes the result back to the client.
@Override
public void execute(String commandName, BufferedReader reader, BufferedWriter writer) throws Py4JException, IOException {
    char subCommand = safeReadLine(reader).charAt(0);
    String returnCommand = null;
    if (subCommand == LIST_SLICE_SUB_COMMAND_NAME) {
        returnCommand = slice_list(reader);
    } else if (subCommand == LIST_CONCAT_SUB_COMMAND_NAME) {
        returnCommand = concat_list(reader);
    } else if (subCommand == LIST_MULT_SUB_COMMAND_NAME) {
        returnCommand = mult_list(reader);
    } else if (subCommand == LIST_IMULT_SUB_COMMAND_NAME) {
        returnCommand = imult_list(reader);
    } else if (subCommand == LIST_COUNT_SUB_COMMAND_NAME) {
        returnCommand = count_list(reader);
    } else {
        returnCommand = call_collections_method(reader, subCommand);
    }
    logger.finest("Returning command: " + returnCommand);
    writer.write(returnCommand);
    writer.flush();
}
// A sort sub-command that fails on the target must produce the error marker "!x\n".
@Test
public void testSortException() {
    String inputCommand = ListCommand.LIST_SORT_SUB_COMMAND_NAME + "\n" + target2 + "\ne\n";
    try {
        command.execute("l", new BufferedReader(new StringReader(inputCommand)), writer);
        assertEquals("!x\n", sWriter.toString());
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
// Converts a proto field name (snake_case, possibly containing digits or '#')
// to a Java property name (UpperCamelCase). Non-alphanumeric characters are
// dropped and act as word separators; a digit also capitalizes the following
// letter; a trailing '#' appends '_'.
@VisibleForTesting
static String convertProtoPropertyNameToJavaPropertyName(String input) {
    boolean capitalizeNextLetter = true;
    Preconditions.checkArgument(!Strings.isNullOrEmpty(input));
    StringBuilder result = new StringBuilder(input.length());
    for (int i = 0; i < input.length(); i++) {
        final char c = input.charAt(i);
        if (Character.isLowerCase(c)) {
            if (capitalizeNextLetter) {
                result.append(Character.toUpperCase(c));
            } else {
                result.append(c);
            }
            capitalizeNextLetter = false;
        } else if (Character.isUpperCase(c)) {
            // NOTE(review): capitalizeNextLetter starts true, so this branch can
            // never fire at i == 0 — likely inherited from protobuf generator code
            // where the initial capitalization was a parameter.
            if (i == 0 && !capitalizeNextLetter) {
                // Force first letter to lower-case unless explicitly told to
                // capitalize it.
                result.append(Character.toLowerCase(c));
            } else {
                // Capital letters after the first are left as-is.
                result.append(c);
            }
            capitalizeNextLetter = false;
        } else if ('0' <= c && c <= '9') {
            result.append(c);
            capitalizeNextLetter = true;
        } else {
            capitalizeNextLetter = true;
        }
    }
    // Add a trailing "_" if the name should be altered.
    if (input.charAt(input.length() - 1) == '#') {
        result.append('_');
    }
    return result.toString();
}
// A proto property name containing digits maps to the expected Java property name.
@Test
public void testGetterNameCreationForProtoPropertyWithNumber() {
    Assert.assertEquals(
        JAVA_PROPERTY_FOR_PROTO_PROPERTY_WITH_NUMBER,
        ProtoByteBuddyUtils.convertProtoPropertyNameToJavaPropertyName(PROTO_PROPERTY_WITH_NUMBER));
}
// Prints broker runtime stats for a single broker (-b) or for every master in a
// cluster (-c); per-broker failures in cluster mode are printed and skipped.
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
    DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
    // Unique instance name avoids client-id clashes between admin invocations.
    defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
    try {
        defaultMQAdminExt.start();
        String brokerAddr = commandLine.hasOption('b') ? commandLine.getOptionValue('b').trim() : null;
        String clusterName = commandLine.hasOption('c') ? commandLine.getOptionValue('c').trim() : null;
        if (brokerAddr != null) {
            printBrokerRuntimeStats(defaultMQAdminExt, brokerAddr, false);
        } else if (clusterName != null) {
            Set<String> masterSet = CommandUtil.fetchMasterAndSlaveAddrByClusterName(defaultMQAdminExt, clusterName);
            for (String ba : masterSet) {
                try {
                    printBrokerRuntimeStats(defaultMQAdminExt, ba, true);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    } catch (Exception e) {
        throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
    } finally {
        defaultMQAdminExt.shutdown();
    }
}
// Smoke test: broker status via -b against a local broker completes without
// throwing SubCommandException.
@Test
public void testExecute() throws SubCommandException {
    BrokerStatusSubCommand cmd = new BrokerStatusSubCommand();
    Options options = ServerUtil.buildCommandlineOptions(new Options());
    String[] subargs = new String[] {"-b 127.0.0.1:" + listenPort()};
    final CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new DefaultParser());
    cmd.execute(commandLine, options, null);
}
// Parses a Cycling Speed and Cadence measurement packet. Flag bit 0 signals
// wheel revolution data (6 bytes), bit 1 crank revolution data (4 bytes);
// packets shorter than the flags require are reported as invalid.
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);
    if (data.size() < 1) {
        onInvalidDataReceived(device, data);
        return;
    }
    // Decode the new data
    int offset = 0;
    final int flags = data.getByte(offset);
    offset += 1;
    final boolean wheelRevPresent = (flags & 0x01) != 0;
    final boolean crankRevPreset = (flags & 0x02) != 0;
    if (data.size() < 1 + (wheelRevPresent ? 6 : 0) + (crankRevPreset ? 4 : 0)) {
        onInvalidDataReceived(device, data);
        return;
    }
    if (wheelRevPresent) {
        // Mask so the 32-bit field is treated as unsigned.
        final long wheelRevolutions = data.getIntValue(Data.FORMAT_UINT32_LE, offset) & 0xFFFFFFFFL;
        offset += 4;
        final int lastWheelEventTime = data.getIntValue(Data.FORMAT_UINT16_LE, offset); // 1/1024 s
        offset += 2;
        if (mInitialWheelRevolutions < 0)
            mInitialWheelRevolutions = wheelRevolutions;
        // Notify listener about the new measurement
        onWheelMeasurementReceived(device, wheelRevolutions, lastWheelEventTime);
    }
    if (crankRevPreset) {
        final int crankRevolutions = data.getIntValue(Data.FORMAT_UINT16_LE, offset);
        offset += 2;
        final int lastCrankEventTime = data.getIntValue(Data.FORMAT_UINT16_LE, offset);
        // offset += 2;
        // Notify listener about the new measurement
        onCrankMeasurementReceived(device, crankRevolutions, lastCrankEventTime);
    }
}
// Packet with only crank data (flag bit 1): the crank callback fires with the
// computed cadence, and the wheel/distance and invalid-data callbacks must not.
@Test
public void onCrankDataChanged_onlyCrankData() {
    final DataReceivedCallback callback = new CyclingSpeedAndCadenceMeasurementDataCallback() {
        @Override
        public void onInvalidDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
            assertEquals("Correct CSC data reported as invalid", 1, 2);
        }
        @Override
        public void onDistanceChanged(@NonNull final BluetoothDevice device, final float totalDistance, final float distance, final float speed) {
            assertEquals("Crank data not available and reported", 1, 2);
        }
        @Override
        public void onCrankDataChanged(@NonNull final BluetoothDevice device, final float crankCadence, final float gearRatio) {
            assertEquals("Crank cadence", 60.0f, crankCadence, 0);
            // Gear ration not available, as no wheel data
            assertEquals("Gear ratio", 0.0f, gearRatio, 0);
        }
    };
    final MutableData data = new MutableData(new byte[5]);
    // Flags
    assertTrue(data.setByte(0x02, 0));
    // Crank revolutions
    assertTrue(data.setValue(10, Data.FORMAT_UINT16_LE, 1));
    assertTrue(data.setValue(0, Data.FORMAT_UINT16_LE, 3));
    callback.onDataReceived(null, data);
    // Update crank revolutions
    assertTrue(data.setValue(11, Data.FORMAT_UINT16_LE, 1));
    // 1 second
    assertTrue(data.setValue(1024, Data.FORMAT_UINT16_LE, 3));
    callback.onDataReceived(null, data);
}
// Current number of rejected tasks recorded by this plugin.
public Long getRejectCountNum() {
    return rejectCount.get();
}
// A fresh plugin starts with a zero reject count.
@Test
public void testGetRejectCountNum() {
    TaskRejectCountRecordPlugin plugin = new TaskRejectCountRecordPlugin();
    Assert.assertEquals((Long) 0L, plugin.getRejectCountNum());
}
// Runs a SQL query over registered Pandas DataFrames via pysqldf and renders
// the result with z.show().
@Override
public InterpreterResult interpret(String st, InterpreterContext context) throws InterpreterException {
    LOGGER.info("Running SQL query: '{}' over Pandas DataFrame", st);
    // NOTE(review): the query is spliced into a single-quoted Python literal, so a
    // query containing a single quote would break the generated code — verify
    // whether upstream sanitization covers this.
    return pythonInterpreter.interpret(
        "z.show(pysqldf('" + st.trim() + "'))", context);
}
// Builds a DataFrame through the Python interpreter, then queries it via the
// pandas-SQL interpreter and checks the tabular output.
@Test
public void testInIPython() throws IOException, InterpreterException {
    InterpreterResult ret = pythonInterpreter.interpret("import pandas as pd\nimport numpy as np", context);
    assertEquals(InterpreterResult.Code.SUCCESS, ret.code(), ret.message().toString());
    // DataFrame df2 \w test data
    ret = pythonInterpreter.interpret("df2 = pd.DataFrame({ 'age' : np.array([33, 51, 51, 34]), " + "'name' : pd.Categorical(['moon','jobs','gates','park'])})", context);
    assertEquals(InterpreterResult.Code.SUCCESS, ret.code(), ret.message().toString());
    // when
    ret = pandasSqlInterpreter.interpret("select name, age from df2 where age < 40", context);
    // then
    assertEquals(InterpreterResult.Code.SUCCESS, ret.code(), context.out.toString());
    assertEquals(Type.TABLE, context.out.toInterpreterResultMessage().get(1).getType(), context.out.toString());
    assertTrue(context.out.toString().indexOf("moon\t33") > 0);
    assertTrue(context.out.toString().indexOf("park\t34") > 0);
    assertEquals(InterpreterResult.Code.SUCCESS, pandasSqlInterpreter.interpret(
        "select case when name==\"aa\" then name else name end from df2", context).code());
}
// Converts the table to the requested type, delegating with a false flag
// (presumably "transposed" — confirm against the three-arg overload).
@Override
public <T> T convert(DataTable dataTable, Type type) {
    return convert(dataTable, type, false);
}
// A registered DataTable-to-DataTable transformer must win over the built-in
// identity transform.
@Test
void convert_to_table__table_transformer_takes_precedence_over_identity_transform() {
    DataTable table = parse("",
        " | | 1 | 2 | 3 |",
        " | A | ♘ | | ♝ |",
        " | B | | | |",
        " | C | | ♝ | |");
    DataTable expected = emptyDataTable();
    registry.defineDataTableType(new DataTableType(DataTable.class, (DataTable raw) -> expected));
    assertSame(expected, converter.convert(table, DataTable.class));
}
// Returns a view of this assertion whose proto-value comparisons ignore the
// order of repeated fields.
public MultimapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderForValues() {
    return usingConfig(config.ignoringRepeatedFieldOrder());
}
// containsExactly() with no arguments matches only an empty multimap, both with
// and without inOrder(); a non-empty multimap must fail.
@Test
public void testFluent_containsExactly_noArgs() {
    expectThat(ImmutableMultimap.<Object, Message>of())
        .ignoringRepeatedFieldOrderForValues()
        .containsExactly();
    expectThat(ImmutableMultimap.<Object, Message>of())
        .ignoringRepeatedFieldOrderForValues()
        .containsExactly()
        .inOrder();
    expectFailureWhenTesting()
        .that(multimapOf(1, message1))
        .ignoringRepeatedFieldOrderForValues()
        .containsExactly();
    expectThatFailure().isNotNull();
}
/**
 * Percent-encodes the given string for use in a URL, using UTF-8.
 *
 * @param raw the text to encode
 * @return the encoded text, or an empty {@link Optional} in the (practically
 *         impossible) case that the JVM does not support UTF-8
 */
public static Optional<String> urlEncode(String raw) {
    try {
        final String encoded = URLEncoder.encode(raw, StandardCharsets.UTF_8.toString());
        return Optional.of(encoded);
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is guaranteed on every JVM, so this branch is effectively dead.
        return Optional.empty();
    }
}
// Multi-byte UTF-8 characters must be percent-encoded per byte.
@Test
public void urlEncode_whenComplexEncoding_encodesCorrectly() {
    assertThat(urlEncode("£")).hasValue("%C2%A3");
    assertThat(urlEncode("つ")).hasValue("%E3%81%A4");
    assertThat(urlEncode("äëïöüÿ")).hasValue("%C3%A4%C3%AB%C3%AF%C3%B6%C3%BC%C3%BF");
    assertThat(urlEncode("ÄËÏÖÜŸ")).hasValue("%C3%84%C3%8B%C3%8F%C3%96%C3%9C%C5%B8");
}
/**
 * Extracts the raw plugin identifier from the node's "type" field.
 *
 * @param node the JSON node to inspect
 * @return the identifier text, or {@code null} when the field is missing,
 *         non-textual, or empty
 */
static String extractPluginRawIdentifier(final JsonNode node) {
    JsonNode type = node.get(TYPE);
    if (type == null) {
        return null;
    }
    // BUG FIX: textValue() returns null for non-textual nodes, so the original
    // unconditional type.textValue().isEmpty() could throw a NullPointerException.
    String rawIdentifier = type.textValue();
    if (rawIdentifier == null || rawIdentifier.isEmpty()) {
        return null;
    }
    return rawIdentifier;
}
// A TextNode wrapping null has no "type" field, so extraction yields null.
@Test
void shouldReturnNullPluginIdentifierGivenNullType() {
    Assertions.assertNull(PluginDeserializer.extractPluginRawIdentifier(new TextNode(null)));
}
// Fetches /_cluster/stats via the low-level REST client and maps the JSON body
// into a ClusterStatsResponse.
public ClusterStatsResponse clusterStats() {
    return execute(() -> {
        Request request = new Request("GET", "/_cluster/stats");
        Response response = restHighLevelClient.getLowLevelClient().performRequest(request);
        return ClusterStatsResponse.toClusterStatsResponse(gson.fromJson(EntityUtils.toString(response.getEntity()), JsonObject.class));
    });
}
// A client configured with a PKCS12 key store can reach the HTTPS mock cluster
// and fetch stats.
@Test
public void newInstance_whenKeyStorePassed_shouldCreateClient() throws GeneralSecurityException, IOException {
    mockWebServer.enqueue(new MockResponse()
        .setResponseCode(200)
        .setBody(EXAMPLE_CLUSTER_STATS_JSON)
        .setHeader("Content-Type", "application/json"));
    Path keyStorePath = temp.newFile("keystore.p12").toPath();
    String password = "password";
    HandshakeCertificates certificate = createCertificate(mockWebServer.getHostName(), keyStorePath, password);
    mockWebServer.useHttps(certificate.sslSocketFactory(), false);
    EsClient underTest = new EsClient(null, keyStorePath.toString(), password, new HttpHost(mockWebServer.getHostName(), mockWebServer.getPort(), "https"));
    assertThat(underTest.clusterStats()).isNotNull();
}
// Builds the immutable ServiceConfigURL: rejects a password without a username,
// clamps the port to >= 0, and strips any leading '/' characters from the path.
public ServiceConfigURL build() {
    if (StringUtils.isEmpty(username) && StringUtils.isNotEmpty(password)) {
        throw new IllegalArgumentException("Invalid url, password without username!");
    }
    port = Math.max(port, 0);
    // trim the leading "/"
    int firstNonSlash = 0;
    if (path != null) {
        while (firstNonSlash < path.length() && path.charAt(firstNonSlash) == '/') {
            firstNonSlash++;
        }
        // All slashes -> empty path; otherwise drop only the leading run.
        if (firstNonSlash >= path.length()) {
            path = "";
        } else if (firstNonSlash > 0) {
            path = path.substring(firstNonSlash);
        }
    }
    return new ServiceConfigURL(protocol, username, password, host, port, path, parameters, attributes);
}
@Test
void testNoArgConstructor() {
    // A builder with no fields set must still build, producing a URL that
    // renders as the empty string.
    URL url = new URLBuilder().build();
    assertThat(url.toString(), equalTo(""));
}
/** Identifies this runner instance by the job name it will submit. */
@Override
public String toString() {
    return String.format("DataflowRunner#%s", options.getJobName());
}
@Test
public void testToString() {
    // The runner renders as "<runner name>#<job name>"; options presumably
    // carry the app/job name "TestAppName" — TODO confirm fixture setup.
    assertEquals(
        "TestDataflowRunner#TestAppName", TestDataflowRunner.fromOptions(options).toString());
}
/**
 * Checks whether {@code className} is a syntactically valid dot-separated
 * Java class reference: every '.'-delimited segment must be a valid Java
 * identifier (keywords such as "package" are intentionally accepted as
 * segments, matching {@link SourceVersion#isIdentifier}).
 *
 * <p>Visible for testing. Uses {@code String.split("\\.", -1)} instead of
 * Guava's Splitter; the negative limit keeps trailing empty segments so
 * inputs like {@code "a."} are still rejected.
 *
 * @param className the candidate fully-qualified class name
 * @return true if every segment is a valid identifier, false otherwise
 */
static boolean isValidJavaClass(String className) {
    for (String segment : className.split("\\.", -1)) {
        if (!SourceVersion.isIdentifier(segment)) {
            return false;
        }
    }
    return true;
}
@Test
public void testValidJavaClassRegex() {
    // Dot-separated identifier segments are accepted (including keywords
    // like "package", since SourceVersion.isIdentifier allows them).
    Assert.assertTrue(MainClassResolver.isValidJavaClass("my.Class"));
    Assert.assertTrue(MainClassResolver.isValidJavaClass("my.java_Class$valid"));
    Assert.assertTrue(MainClassResolver.isValidJavaClass("multiple.package.items"));
    Assert.assertTrue(MainClassResolver.isValidJavaClass("is123.valid"));
    // Placeholders, leading digits, braces and spaces are all rejected.
    Assert.assertFalse(MainClassResolver.isValidJavaClass("${start-class}"));
    Assert.assertFalse(MainClassResolver.isValidJavaClass("123not.Valid"));
    Assert.assertFalse(MainClassResolver.isValidJavaClass("{class}"));
    Assert.assertFalse(MainClassResolver.isValidJavaClass("not valid"));
}
/**
 * Maps an I/O failure into a {@link BackgroundException} with a fixed
 * "Connection failed" title, attaching the directory for context.
 */
public BackgroundException map(final IOException failure, final Path directory) {
    return super.map("Connection failed", failure, directory);
}
@Test
public void testMap() {
    // Both low-level socket failure messages must be classified as a
    // refused connection by the mapping service.
    assertEquals(ConnectionRefusedException.class, new DefaultIOExceptionMappingService().map(new SocketException("Software caused connection abort")).getClass());
    assertEquals(ConnectionRefusedException.class, new DefaultIOExceptionMappingService().map(new SocketException("Socket closed")).getClass());
}
/**
 * Serializes the given protobuf message to its JSON representation.
 *
 * @param message the protobuf message to serialize
 * @return the JSON text produced by {@code write}
 */
public static String toJson(Message message) {
    StringWriter output = new StringWriter();
    // try-with-resources closes the JsonWriter, flushing any buffered
    // content into the underlying StringWriter before toString().
    try (JsonWriter writer = JsonWriter.of(output)) {
        write(message, writer);
    }
    return output.toString();
}
@Test
public void test_primitive_types() {
    // Build a message covering every primitive field kind and check the
    // exact JSON rendering, including the enum serialized by name.
    PrimitiveTypeMsg protobuf = PrimitiveTypeMsg.newBuilder()
        .setStringField("foo")
        .setIntField(10)
        .setLongField(100L)
        .setDoubleField(3.14)
        .setBooleanField(true)
        .setEnumField(org.sonar.core.test.Test.FakeEnum.GREEN)
        .build();
    assertThat(toJson(protobuf)).isEqualTo(
        "{\"stringField\":\"foo\",\"intField\":10,\"longField\":100,\"doubleField\":3.14,\"booleanField\":true,\"enumField\":\"GREEN\"}");
}
/**
 * Determines whether the given private and public keys form a matching pair.
 *
 * <p>Signs a fixed challenge with the private key, then verifies the
 * signature with the public key; verification succeeds only for a pair.
 *
 * @param privateKey the signing key
 * @param publicKey  the verifying key
 * @return true if the keys belong to the same pair
 * @throws GeneralSecurityException if signing or verification fails
 */
public static boolean matchingKeys(final PrivateKey privateKey, final PublicKey publicKey) throws GeneralSecurityException {
    final byte[] challenge = SAMPLE_CHALLANGE.getBytes(StandardCharsets.UTF_8);
    final Signature signer = Signature.getInstance(SIGNING_ALGORITHM);
    signer.initSign(privateKey);
    signer.update(challenge);
    final byte[] signature = signer.sign();
    signer.initVerify(publicKey);
    signer.update(challenge);
    return signer.verify(signature);
}
@Test
void testMatchingPrivatePublicKeysInvalid() throws Exception {
    // Generate two independent key pairs.
    KeyPairGenerator keyGen1 = KeyPairGenerator.getInstance(KEY_GENERATION_ALGORITHM);
    keyGen1.initialize(2048);
    final java.security.KeyPair keyPair1 = keyGen1.genKeyPair();
    KeyPairGenerator keyGen2 = KeyPairGenerator.getInstance(KEY_GENERATION_ALGORITHM);
    keyGen2.initialize(2048);
    final java.security.KeyPair keyPair2 = keyGen2.genKeyPair();
    //mixing keys from different pairs
    Assertions.assertThat(KeystoreUtils.matchingKeys(keyPair1.getPrivate(), keyPair2.getPublic())).isFalse();
}
/**
 * Triggers an asynchronous position reset for every partition that has a
 * pending offset-reset timestamp; does nothing when none are pending.
 */
public void resetPositionsIfNeeded() {
    final Map<TopicPartition, Long> resetTimestamps = offsetFetcherUtils.getOffsetResetTimestamp();
    if (!resetTimestamps.isEmpty()) {
        resetPositionsAsync(resetTimestamps);
    }
}
@Test
public void testGetOffsetsFencedLeaderEpoch() {
    buildFetcher();
    subscriptions.assignFromUser(singleton(tp0));
    client.updateMetadata(initialUpdateResponse);
    // Ask for a LATEST reset, but have the broker answer with FENCED_LEADER_EPOCH.
    subscriptions.requestOffsetReset(tp0, OffsetResetStrategy.LATEST);
    client.prepareResponse(listOffsetResponse(Errors.FENCED_LEADER_EPOCH, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    // The fenced error must leave the reset pending (no valid position) and
    // force a metadata refresh immediately.
    assertTrue(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(subscriptions.isFetchable(tp0));
    assertFalse(subscriptions.hasValidPosition(tp0));
    assertEquals(0L, metadata.timeToNextUpdate(time.milliseconds()));
}
/**
 * Resolves a scope from the given lifecycle provider.
 *
 * <p>Convenience overload delegating to the two-argument variant with the
 * boolean flag enabled.
 *
 * @throws OutsideScopeException propagated from the delegate
 */
public static <@NonNull E> CompletableSource resolveScopeFromLifecycle(
    final LifecycleScopeProvider<E> provider) throws OutsideScopeException {
  return resolveScopeFromLifecycle(provider, true);
}
@Test
public void resolveScopeFromLifecycle_normal_comparable() {
    // The scope ends when the lifecycle emits a value that compares equal
    // to the boundary (3); NegativeComparableInteger inverts comparison,
    // so -3 is the matching event.
    PublishSubject<NegativeComparableInteger> lifecycle = PublishSubject.create();
    TestObserver<?> o = testSource(resolveScopeFromLifecycle(lifecycle, new NegativeComparableInteger(3)));
    lifecycle.onNext(new NegativeComparableInteger(-1));
    o.assertNoErrors().assertNotComplete();
    lifecycle.onNext(new NegativeComparableInteger(-2));
    o.assertNoErrors().assertNotComplete();
    lifecycle.onNext(new NegativeComparableInteger(3));
    o.assertNoErrors().assertNotComplete();
    // Now we end
    lifecycle.onNext(new NegativeComparableInteger(-3));
    o.assertComplete();
}
/**
 * Looks up the content pack with the given id at the exact revision.
 *
 * @param id       the content pack's model id
 * @param revision the required revision number
 * @return the matching pack, or empty if no document matches both criteria
 */
public Optional<ContentPack> findByIdAndRevision(ModelId id, int revision) {
    final DBQuery.Query byIdAndRevision = DBQuery
        .is(Identified.FIELD_META_ID, id)
        .is(Revisioned.FIELD_META_REVISION, revision);
    return Optional.ofNullable(dbCollection.findOne(byIdAndRevision));
}
@Test
@MongoDBFixtures("ContentPackPersistenceServiceTest.json")
public void findByIdAndRevision() {
    // The fixture contains this id at revision 2; the lookup must return it.
    final Optional<ContentPack> contentPack = contentPackPersistenceService.findByIdAndRevision(ModelId.of("dcd74ede-6832-4ef7-9f69-deadbeef0000"), 2);
    assertThat(contentPack)
        .isPresent()
        .get()
        .matches(c -> c.id().equals(ModelId.of("dcd74ede-6832-4ef7-9f69-deadbeef0000")));
}
static ExecutorService getConfiguredExecutorService( CamelContext camelContext, String name, DynamicRouterConfiguration cfg, boolean useDefault) throws IllegalArgumentException { ExecutorServiceManager manager = camelContext.getExecutorServiceManager(); ObjectHelper.notNull(manager, ESM_NAME, camelContext); String exSvcRef = cfg.getExecutorService(); ExecutorService exSvcBean = cfg.getExecutorServiceBean(); String errorMessage = "ExecutorServiceRef '" + exSvcRef + "' not found in registry as an ExecutorService " + "instance or as a thread pool profile"; // The first (preferred) option is to use an explicitly-configured executor if the configuration has it return Optional.ofNullable(exSvcBean) // The second preference is to check for an executor service reference .or(() -> Optional.ofNullable(exSvcRef) // Try to get the referenced executor service .map(r -> lookupExecutorServiceRef(camelContext, name, cfg, r) // But, if the reference is specified in the config, // and could not be obtained, this is an error .orElseThrow(() -> new IllegalArgumentException(errorMessage)))) // The third and final option is to create a new "default" thread pool if the parameter // specifies to that the default thread pool should be used as a fallback .or(() -> useDefault ? Optional.of(manager.newDefaultThreadPool(cfg, name)) : Optional.empty()) // failing the above options, then no executor service is configured .orElse(null); }
@Test
void testGetConfiguredExecutorServiceWithDefault() {
    // No bean and no reference configured: with useDefault=true the helper
    // must fall back to a new default thread pool from the manager.
    when(mockConfig.getExecutorServiceBean()).thenReturn(null);
    when(mockConfig.getExecutorService()).thenReturn(null);
    when(camelContext.getExecutorServiceManager()).thenReturn(manager);
    when(manager.newDefaultThreadPool(mockConfig, "someName")).thenReturn(newThreadPool);
    ExecutorService result = DynamicRouterRecipientListHelper.getConfiguredExecutorService(camelContext, "someName", mockConfig, true);
    assertEquals(newThreadPool, result);
}