focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Private constructor; callers use the alarmId(...) static factory.
// BUG FIX: the original called super(id.toString() + ...) BEFORE the null checks,
// so a null id produced a bare NPE instead of the intended message. Java requires
// super() to be the first statement, so the checks are inlined into the super()
// call — Guava's checkNotNull returns its (non-null) argument.
private AlarmId(DeviceId id, String uniqueIdentifier) {
    super(checkNotNull(id, "device id must not be null").toString() + ":"
            + checkNotNull(uniqueIdentifier, "unique identifier must not be null"));
    checkArgument(!uniqueIdentifier.isEmpty(), "unique identifier must not be empty");
}
// Test: two AlarmIds built from the same device id and unique identifier must be equal.
@Test public void testEquality() { final AlarmId id1 = AlarmId.alarmId(DEVICE_ID, UNIQUE_ID_1); final AlarmId id2 = AlarmId.alarmId(DEVICE_ID, UNIQUE_ID_1); assertThat(id1, is(id2)); }
// Analyzes a predicate tree and assigns interval labels to its nodes.
// The analyzer's reported tree size becomes the interval range end; the interval
// assignment walks the tree starting at Interval.INTERVAL_BEGIN. The result bundles
// the minimum feature count, the interval end, and the three maps accumulated in the
// AnnotatorContext (plain intervals, bounded intervals, feature conjunctions).
public static PredicateTreeAnnotations createPredicateTreeAnnotations(Predicate predicate) { PredicateTreeAnalyzerResult analyzerResult = PredicateTreeAnalyzer.analyzePredicateTree(predicate); // The tree size is used as the interval range. int intervalEnd = analyzerResult.treeSize; AnnotatorContext context = new AnnotatorContext(intervalEnd, analyzerResult.sizeMap); assignIntervalLabels(predicate, Interval.INTERVAL_BEGIN, intervalEnd, false, context); return new PredicateTreeAnnotations( analyzerResult.minFeature, intervalEnd, context.intervals, context.intervalsWithBounds, context.featureConjunctions); }
// Test: a conjunction nested inside an AND/OR tree must be registered as a single
// IndexableFeatureConjunction with exactly one interval (asserted bit pattern 0b1_0000000000000010).
@Test void require_that_featureconjunctions_are_registered_and_given_an_interval() { Predicate p = and( or( range("key", partition("key=10-19"), partition("key=20-29"), edgePartition("key=0", 5, 10, 20), edgePartition("key=30", 0, 0, 3)), conj( not(feature("keyA").inSet("C")), feature("keyB").inSet("D"))), feature("foo").inSet("bar")); PredicateTreeAnnotations r = PredicateTreeAnnotator.createPredicateTreeAnnotations(p); assertEquals(2, r.minFeature); assertEquals(3, r.intervalEnd); assertEquals(3, r.intervalMap.size()); assertEquals(2, r.boundsMap.size()); assertEquals(1, r.featureConjunctions.size()); Map.Entry<IndexableFeatureConjunction, List<Integer>> entry = r.featureConjunctions.entrySet().iterator().next(); assertEquals(1, entry.getValue().size()); assertEquals(0b1_0000000000000010, entry.getValue().get(0).longValue()); }
// Builds metadata for a virtual default-null-value column: cardinality is always 1
// (one distinct value), a dictionary is present, single-value columns are sorted, and
// min == max == the field's default null value, cast per the stored data type.
// Throws IllegalStateException for unrecognized stored types.
@Override public ColumnMetadataImpl buildMetadata(VirtualColumnContext context) { ColumnMetadataImpl.Builder builder = getColumnMetadataBuilder(context).setCardinality(1).setHasDictionary(true); if (context.getFieldSpec().isSingleValueField()) { builder.setSorted(true); } else { // When there is no value for a multi-value column, the maxNumberOfMultiValues and cardinality should be // set as 1 because the MV column bitmap uses 1 to delimit the rows for a MV column. Each MV column will have a // default null value based on column's data type builder.setMaxNumberOfMultiValues(1); } FieldSpec fieldSpec = context.getFieldSpec(); Object defaultNullValue = fieldSpec.getDefaultNullValue(); switch (fieldSpec.getDataType().getStoredType()) { case INT: builder.setMinValue((int) defaultNullValue).setMaxValue((int) defaultNullValue); break; case LONG: builder.setMinValue((long) defaultNullValue).setMaxValue((long) defaultNullValue); break; case FLOAT: builder.setMinValue((float) defaultNullValue).setMaxValue((float) defaultNullValue); break; case DOUBLE: builder.setMinValue((double) defaultNullValue).setMaxValue((double) defaultNullValue); break; case BIG_DECIMAL: builder.setMinValue((BigDecimal) defaultNullValue).setMaxValue((BigDecimal) defaultNullValue); break; case STRING: builder.setMinValue((String) defaultNullValue).setMaxValue((String) defaultNullValue); break; case BYTES: builder.setMinValue(new ByteArray((byte[]) defaultNullValue)) .setMaxValue(new ByteArray((byte[]) defaultNullValue)); break; default: throw new IllegalStateException(); } return builder.build(); }
// Test: buildMetadata output for every stored type (SV and MV variants) must match a
// hand-built ColumnMetadataImpl with cardinality 1, dictionary enabled, sorted only for
// single-value columns, and min/max equal to the field's default null value.
// NOTE(review): this method was split across lines by the dataset extraction; the
// original source is a single method body.
@Test public void testBuildMetadata() { assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(SV_INT, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(SV_INT).setTotalDocs(1).setCardinality(1).setSorted(true) .setHasDictionary(true).setMinValue((int) SV_INT.getDefaultNullValue()) .setMaxValue((int) SV_INT.getDefaultNullValue()).build()); assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(SV_LONG, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(SV_LONG).setTotalDocs(1).setCardinality(1).setSorted(true) .setHasDictionary(true).setMinValue((long) SV_LONG.getDefaultNullValue()) .setMaxValue((long) SV_LONG.getDefaultNullValue()).build()); assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(SV_FLOAT, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(SV_FLOAT).setTotalDocs(1).setCardinality(1).setSorted(true) .setHasDictionary(true).setMinValue((float) SV_FLOAT.getDefaultNullValue()) .setMaxValue((float) SV_FLOAT.getDefaultNullValue()).build()); assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(SV_DOUBLE, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(SV_DOUBLE).setTotalDocs(1).setCardinality(1).setSorted(true) .setHasDictionary(true).setMinValue((double) SV_DOUBLE.getDefaultNullValue()) .setMaxValue((double) SV_DOUBLE.getDefaultNullValue()).build()); assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(SV_STRING, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(SV_STRING).setTotalDocs(1).setCardinality(1).setSorted(true) .setHasDictionary(true).setMinValue((String) SV_STRING.getDefaultNullValue()) .setMaxValue((String) SV_STRING.getDefaultNullValue()).build()); assertEquals( new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(SV_STRING_WITH_DEFAULT, 1)), new 
ColumnMetadataImpl.Builder().setFieldSpec(SV_STRING_WITH_DEFAULT).setTotalDocs(1).setCardinality(1) .setSorted(true).setHasDictionary(true).setMinValue("default").setMaxValue("default").build()); assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(SV_BYTES, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(SV_BYTES).setTotalDocs(1).setCardinality(1).setSorted(true) .setHasDictionary(true).setMinValue(new ByteArray((byte[]) SV_BYTES.getDefaultNullValue())) .setMaxValue(new ByteArray((byte[]) SV_BYTES.getDefaultNullValue())).build()); assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(MV_INT, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(MV_INT).setTotalDocs(1).setCardinality(1).setSorted(false) .setHasDictionary(true).setMaxNumberOfMultiValues(1).setMinValue((int) MV_INT.getDefaultNullValue()) .setMaxValue((int) MV_INT.getDefaultNullValue()).build()); assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(MV_LONG, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(MV_LONG).setTotalDocs(1).setCardinality(1).setSorted(false) .setHasDictionary(true).setMaxNumberOfMultiValues(1).setMinValue((long) MV_LONG.getDefaultNullValue()) .setMaxValue((long) MV_LONG.getDefaultNullValue()).build()); assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(MV_FLOAT, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(MV_FLOAT).setTotalDocs(1).setCardinality(1).setSorted(false) .setHasDictionary(true).setMaxNumberOfMultiValues(1).setMinValue((float) MV_FLOAT.getDefaultNullValue()) .setMaxValue((float) MV_FLOAT.getDefaultNullValue()).build()); assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(MV_DOUBLE, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(MV_DOUBLE).setTotalDocs(1).setCardinality(1).setSorted(false) 
.setHasDictionary(true).setMaxNumberOfMultiValues(1).setMinValue((double) MV_DOUBLE.getDefaultNullValue()) .setMaxValue((double) MV_DOUBLE.getDefaultNullValue()).build()); assertEquals(new DefaultNullValueVirtualColumnProvider().buildMetadata(new VirtualColumnContext(MV_STRING, 1)), new ColumnMetadataImpl.Builder().setFieldSpec(MV_STRING).setTotalDocs(1).setCardinality(1).setSorted(false) .setHasDictionary(true).setMaxNumberOfMultiValues(1).setMinValue((String) MV_STRING.getDefaultNullValue()) .setMaxValue((String) MV_STRING.getDefaultNullValue()).build()); }
// True when a failing quality-gate condition should be ignored for a small changeset:
// the evaluation exists and is not OK, its metric is in METRICS_TO_IGNORE_ON_SMALL_CHANGESETS,
// the ignore-small-changes setting is enabled (defaults to true when unset), and the
// project's changeset is small.
public boolean appliesTo(Component project, @Nullable MetricEvaluationResult metricEvaluationResult) { return metricEvaluationResult != null && metricEvaluationResult.evaluationResult.level() != Measure.Level.OK && METRICS_TO_IGNORE_ON_SMALL_CHANGESETS.contains(metricEvaluationResult.condition.getMetric().getKey()) && config.getConfiguration().getBoolean(CoreProperties.QUALITY_GATE_IGNORE_SMALL_CHANGES).orElse(true) && isSmallChangeset(project); }
// Test: with the ignore-small-changes property explicitly set to false, appliesTo must
// return false even for a small changeset (19 new lines) with a failing new-coverage condition.
@Test public void dont_ignore_errors_about_new_coverage_for_small_changesets_if_disabled() { mapSettings.setProperty(CoreProperties.QUALITY_GATE_IGNORE_SMALL_CHANGES, false); QualityGateMeasuresStep.MetricEvaluationResult metricEvaluationResult = generateEvaluationResult(NEW_COVERAGE_KEY, ERROR); Component project = generateNewRootProject(); measureRepository.addRawMeasure(PROJECT_REF, CoreMetrics.NEW_LINES_KEY, newMeasureBuilder().create(19)); boolean result = underTest.appliesTo(project, metricEvaluationResult); assertThat(result).isFalse(); }
// Loads the SonarQube version from the classpath file at SQ_VERSION_FILE_PATH.
public static Version loadSQVersion(System2 system) { return getVersion(system, SQ_VERSION_FILE_PATH); }
// Test: the version file must be present on the classpath and parse to a major version >= 5.
@Test void load_sq_version_from_file_in_classpath() { Version version = MetadataLoader.loadSQVersion(System2.INSTANCE); assertThat(version).isNotNull(); assertThat(version.major()).isGreaterThanOrEqualTo(5); }
// Convenience overload: converts predicates to partition-column filters without table context.
public static Map<String, PartitionColumnFilter> convertColumnFilter(List<ScalarOperator> predicates) { return convertColumnFilter(predicates, null); }
// Test: without a table the conversion yields no filters; with an OlapTable context the
// same predicate list produces one partition-column filter.
@Test public void convertColumnFilterExpr() { List<ScalarOperator> list = buildOperator("day", BinaryType.EQ); Map<String, PartitionColumnFilter> result = ColumnFilterConverter.convertColumnFilter(list); assertEquals(0, result.size()); OlapTable olapTable = buildOlapTable("day"); Map<String, PartitionColumnFilter> result1 = ColumnFilterConverter.convertColumnFilter(list, olapTable); assertEquals(1, result1.size()); }
// Convenience overload: resolves a Java reflection Type using the default JAVA_TO_ARG_TYPE mapping.
public static ParamType getSchemaFromType(final Type type) { return getSchemaFromType(type, JAVA_TO_ARG_TYPE); }
// Test: a generic function return type must resolve to a LambdaType over the generic
// type variables T (argument) and U (result).
@Test public void shouldGetGenericFunction() throws NoSuchMethodException { // Given: final Type genericType = getClass().getMethod("genericFunctionType").getGenericReturnType(); // When: final ParamType returnType = UdfUtil.getSchemaFromType(genericType); // Then: assertThat(returnType, is(LambdaType.of(ImmutableList.of(GenericType.of("T")), GenericType.of("U")))); }
/**
 * Returns whether the given network service speaks plain (non-TLS) HTTP.
 *
 * <p>A service advertising any SSL/TLS version is never plain HTTP. For service names
 * present in {@code IS_PLAIN_HTTP_BY_KNOWN_WEB_SERVICE_NAME} the map's verdict is used;
 * otherwise the generic {@code isWebService} heuristic applies.
 *
 * <p>Improvement over the original: the lowercased service name is looked up in the map
 * once instead of twice (containsKey followed by getOrDefault), and the generic
 * web-service check is only evaluated when actually needed.
 */
public static boolean isPlainHttp(NetworkService networkService) {
    checkNotNull(networkService);
    // Any advertised SSL/TLS version disqualifies the service from being plain HTTP.
    var doesNotSupportAnySslVersion = networkService.getSupportedSslVersionsCount() == 0;
    var knownPlainHttp =
        IS_PLAIN_HTTP_BY_KNOWN_WEB_SERVICE_NAME.get(Ascii.toLowerCase(networkService.getServiceName()));
    if (knownPlainHttp == null) {
        // Unknown service name: fall back to the generic web-service heuristic.
        return isWebService(networkService) && doesNotSupportAnySslVersion;
    }
    return knownPlainHttp && doesNotSupportAnySslVersion;
}
// Test: an "http"-named service that advertises an SSL version must not be classified as plain HTTP.
@Test public void isPlainHttp_whenHttpServiceWithSslVersions_returnsFalse() { assertThat( NetworkServiceUtils.isPlainHttp( NetworkService.newBuilder() .setServiceName("http") .addSupportedSslVersions("SSLV3") .build())) .isFalse(); }
// Creates a SAXParserFactory hardened against XXE: secure processing on, DOCTYPE
// declarations disallowed, and external DTD/general/parameter entity loading disabled.
// Security-sensitive — the exact feature set is deliberate; do not relax any flag.
public static SAXParserFactory newSecureSAXParserFactory() throws SAXException, ParserConfigurationException { SAXParserFactory spf = SAXParserFactory.newInstance(); spf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); spf.setFeature(DISALLOW_DOCTYPE_DECL, true); spf.setFeature(LOAD_EXTERNAL_DECL, false); spf.setFeature(EXTERNAL_GENERAL_ENTITIES, false); spf.setFeature(EXTERNAL_PARAMETER_ENTITIES, false); return spf; }
// Test: parsing a document that references an external DTD must fail with SAXException
// when using the hardened factory (DOCTYPE declarations are disallowed).
@Test(expected = SAXException.class) public void testExternalDtdWithSecureSAXParserFactory() throws Exception { SAXParser parser = XMLUtils.newSecureSAXParserFactory().newSAXParser(); try (InputStream stream = getResourceStream("/xml/external-dtd.xml")) { parser.parse(stream, new DefaultHandler()); } }
// Returns the total size in bytes of the file tree rooted at path, accumulated by a
// SizeVisitor during Files.walkFileTree. Propagates IOException (e.g. nonexistent path).
public static long sizeOf(Path path) throws IOException { SizeVisitor visitor = new SizeVisitor(); Files.walkFileTree(path, visitor); return visitor.size; }
// Test: sizeOf on a path that was created then deleted must throw IOException.
@Test public void sizeOf_throws_IOE_if_path_does_not_exist() throws IOException { Path path = temporaryFolder.newFile().toPath(); Files.delete(path); assertThatThrownBy(() -> FileUtils2.sizeOf(path)) .isInstanceOf(IOException.class); }
// Constructs the exception with the class's fixed STATUS and NAME constants.
public ForbiddenException() { super(STATUS, NAME); }
// Test: a thrown ForbiddenException must carry status 403 and name "Forbidden".
@Test public void testForbiddenException() throws Exception { try { throw new ForbiddenException(); } catch (ForbiddenException e) { assertEquals(e.getStatus(), 403); assertEquals(e.getName(), "Forbidden"); } }
// Re-instantiates the trainer and dataset from the stored provenance configuration and
// re-trains a fresh model (the original model is not copied).
// NOTE(review): trainedInvocationCount is computed but never passed to train(); the
// inline TODO suggests the invocation-count pass-through is unfinished. The (int) cast
// also acts as an implicit type check on the provenance value, so do not remove the
// local without confirming intent.
public Model<T> reproduceFromProvenance() throws ClassNotFoundException { // Until now the object only holds the configuration for these objects, the following // functions will actually re-instantiate them. Trainer<T> newTrainer = recoverTrainer(); Dataset<T> newDataset = recoverDataset(); // Exposing the configuration manager means there could be an edge case were // the invocation count is changed before the model is trained. // Pass through a desired invocation count to prevent this behavior // TODO: does not apply to inner trainers, figure out how to address this or if it needs to be addressed int trainedInvocationCount = (int) this.modelProvenance .getTrainerProvenance() .getInstanceValues() .get("train-invocation-count") .getValue(); // This function actually re-trains a model rather than copy the original return newTrainer.train(newDataset); }
// Test: a TransformTrainer-wrapped model reproduced from its provenance must evaluate
// identically (same LabelEvaluation string) to the originally trained model.
@Test public void reproduceTransformTrainer() throws URISyntaxException, ClassNotFoundException { CSVDataSource<Label> csvSource = getCSVDataSource(); TrainTestSplitter<Label> splitter = new TrainTestSplitter<>(csvSource); MutableDataset<Label> datasetFromCSV = new MutableDataset<>(splitter.getTrain()); MutableDataset<Label> testData = new MutableDataset<>(splitter.getTest()); LogisticRegressionTrainer trainer = new LogisticRegressionTrainer(); TransformationMap transformations = new TransformationMap(List.of(new LinearScalingTransformation(0,1))); TransformTrainer<Label> transformed = new TransformTrainer<>(trainer, transformations); Model<Label> transformedModel = transformed.train(datasetFromCSV); ReproUtil<Label> reproUtil = new ReproUtil<>(transformedModel.getProvenance(),Label.class); Model<Label> newModel = reproUtil.reproduceFromProvenance(); LabelEvaluator evaluator = new LabelEvaluator(); LabelEvaluation oldEvaluation = evaluator.evaluate(transformedModel, testData); LabelEvaluation newEvaluation = evaluator.evaluate(newModel, testData); assertEquals(oldEvaluation.toString(), newEvaluation.toString()); }
// Static factory: wraps an IPv6 prefix in an IPMappingAddress tagged with Type.IPV6.
public static IPMappingAddress ipv6MappingAddress(IpPrefix ip) { return new IPMappingAddress(ip, MappingAddress.Type.IPV6); }
// Test: the factory must produce an IPV6-typed IPMappingAddress carrying the given prefix.
@Test public void testIpv6MappingAddressMethod() { IpPrefix ipv6 = IpPrefix.valueOf("fe80::1/64"); MappingAddress mappingAddress = MappingAddresses.ipv6MappingAddress(ipv6); IPMappingAddress ipMappingAddress = checkAndConvert(mappingAddress, MappingAddress.Type.IPV6, IPMappingAddress.class); assertThat(ipMappingAddress.ip(), is(equalTo(ipv6))); }
// Attribute lookup with a parent-directory listing cache. Root is always delegated.
// If the parent's listing is cached: return the cached attributes on a hit, or throw
// NotfoundException on a miss (the cached listing is authoritative). Otherwise delegate
// and capture the listing via CachingListProgressListener; the cache is committed both
// on success and on NotfoundException (so the negative result is remembered too).
@Override public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException { if(file.isRoot()) { return delegate.find(file, listener); } if(cache.isValid(file.getParent())) { final AttributedList<Path> list = cache.get(file.getParent()); final Path found = list.find(new ListFilteringFeature.ListFilteringPredicate(sensitivity, file)); if(null != found) { if(log.isDebugEnabled()) { log.debug(String.format("Return cached attributes %s for %s", found.attributes(), file)); } return found.attributes(); } if(log.isDebugEnabled()) { log.debug(String.format("Cached directory listing does not contain %s", file)); } throw new NotfoundException(file.getAbsolute()); } final CachingListProgressListener caching = new CachingListProgressListener(cache); try { final PathAttributes attr = delegate.find(file, new ProxyListProgressListener(listener, caching)); caching.cache(); return attr; } catch(NotfoundException e) { caching.cache(); throw e; } }
// Test: finding the root path must bypass the cache (delegated directly) and leave the
// pre-seeded empty root listing untouched.
@Test public void testFindRoot() throws Exception { final PathCache cache = new PathCache(1); final Path root = new Path("/", EnumSet.of(Path.Type.directory)); cache.put(root, AttributedList.emptyList()); assertTrue(cache.isCached(root)); assertEquals(0, cache.get(root).size()); final CachingAttributesFinderFeature feature = new CachingAttributesFinderFeature(Protocol.Case.sensitive, cache, new AttributesFinder() { @Override public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException { listener.chunk(file.getParent(), new AttributedList<>(Collections.singletonList(file))); return file.attributes(); } }); assertEquals(PathAttributes.EMPTY, feature.find(root, new DisabledListProgressListener())); assertEquals(1, cache.size()); assertTrue(cache.isCached(root)); assertEquals(0, cache.get(root).size()); }
// Validates a PubsubMessage against Pub/Sub quota limits and returns its estimated total
// size in bytes (payload + per-attribute encoding overhead + attribute key/value bytes).
// Throws SizeLimitExceededException when the payload, attribute count, any attribute key
// or value, or the combined total exceeds its respective limit.
// NOTE(review): this method was split across lines (inside a string literal) by the
// dataset extraction; the original source is a single method body.
static int validatePubsubMessageSize(PubsubMessage message, int maxPublishBatchSize) throws SizeLimitExceededException { int payloadSize = message.getPayload().length; if (payloadSize > PUBSUB_MESSAGE_DATA_MAX_BYTES) { throw new SizeLimitExceededException( "Pubsub message data field of length " + payloadSize + " exceeds maximum of " + PUBSUB_MESSAGE_DATA_MAX_BYTES + " bytes. See https://cloud.google.com/pubsub/quotas#resource_limits"); } int totalSize = payloadSize; @Nullable Map<String, String> attributes = message.getAttributeMap(); if (attributes != null) { if (attributes.size() > PUBSUB_MESSAGE_MAX_ATTRIBUTES) { throw new SizeLimitExceededException( "Pubsub message contains " + attributes.size() + " attributes which exceeds the maximum of " + PUBSUB_MESSAGE_MAX_ATTRIBUTES + ". See https://cloud.google.com/pubsub/quotas#resource_limits"); } // Consider attribute encoding overhead, so it doesn't go over the request limits totalSize += attributes.size() * PUBSUB_MESSAGE_ATTRIBUTE_ENCODE_ADDITIONAL_BYTES; for (Map.Entry<String, String> attribute : attributes.entrySet()) { String key = attribute.getKey(); int keySize = key.getBytes(StandardCharsets.UTF_8).length; if (keySize > PUBSUB_MESSAGE_ATTRIBUTE_MAX_KEY_BYTES) { throw new SizeLimitExceededException( "Pubsub message attribute key '" + key + "' exceeds the maximum of " + PUBSUB_MESSAGE_ATTRIBUTE_MAX_KEY_BYTES + " bytes. See https://cloud.google.com/pubsub/quotas#resource_limits"); } totalSize += keySize; String value = attribute.getValue(); int valueSize = value.getBytes(StandardCharsets.UTF_8).length; if (valueSize > PUBSUB_MESSAGE_ATTRIBUTE_MAX_VALUE_BYTES) { throw new SizeLimitExceededException( "Pubsub message attribute value for key '" + key + "' starting with '" + value.substring(0, Math.min(256, value.length())) + "' exceeds the maximum of " + PUBSUB_MESSAGE_ATTRIBUTE_MAX_VALUE_BYTES + " bytes. 
See https://cloud.google.com/pubsub/quotas#resource_limits"); } totalSize += valueSize; } } if (totalSize > maxPublishBatchSize) { throw new SizeLimitExceededException( "Pubsub message of length " + totalSize + " exceeds maximum of " + maxPublishBatchSize + " bytes, when considering the payload and attributes. " + "See https://cloud.google.com/pubsub/quotas#resource_limits"); } return totalSize; }
// Test: a payload of (10 MiB + 1) bytes must trigger SizeLimitExceededException.
@Test public void testValidatePubsubMessageSizePayloadTooLarge() { byte[] data = new byte[(10 << 20) + 1]; PubsubMessage message = new PubsubMessage(data, null); assertThrows( SizeLimitExceededException.class, () -> PreparePubsubWriteDoFn.validatePubsubMessageSize( message, PUBSUB_MESSAGE_MAX_TOTAL_SIZE)); }
// Validates the plugin-server CLI options: local server file paths must exist, local
// port strings must parse to integers in range, path/port/deadline list lengths must
// agree, and the same length/range checks apply to the remote address/port/deadline lists.
// NOTE(review): the error messages say "Expected [0, %s]" but the checks require
// port > 0, i.e. the accepted range is [1, MAX_PORT_NUMBER] — message and check
// disagree; confirm which is intended before changing either.
// NOTE(review): this method was split across lines (inside a string literal) by the
// dataset extraction; the original source is a single method body.
@Override public void validate() { if (!pluginServerFilenames.isEmpty() || !pluginServerPorts.isEmpty()) { if (pluginServerFilenames != null && !pluginServerFilenames.isEmpty()) { for (String pluginServerFilename : pluginServerFilenames) { if (!Files.exists(Paths.get(pluginServerFilename))) { throw new ParameterException( String.format("Language server path %s does not exist", pluginServerFilename)); } } } if (pluginServerPorts != null && !pluginServerPorts.isEmpty()) { for (String pluginServerPort : pluginServerPorts) { try { int port = Integer.parseInt(pluginServerPort); if (!(port <= NetworkEndpointUtils.MAX_PORT_NUMBER && port > 0)) { throw new ParameterException( String.format( "Port out of range. Expected [0, %s], actual %s.", NetworkEndpointUtils.MAX_PORT_NUMBER, pluginServerPort)); } } catch (NumberFormatException e) { throw new ParameterException( String.format("Port number must be an integer. Got %s instead.", pluginServerPort), e); } } } var pathCounts = pluginServerFilenames == null ? 0 : pluginServerFilenames.size(); var portCounts = pluginServerPorts == null ? 0 : pluginServerPorts.size(); if (pathCounts != portCounts) { throw new ParameterException( String.format( "Number of plugin server paths must be equal to number of plugin server ports." + " Paths: %s. Ports: %s.", pathCounts, portCounts)); } if (!pluginServerRpcDeadlineSeconds.isEmpty()) { if (pluginServerRpcDeadlineSeconds.size() != pathCounts) { throw new ParameterException( String.format( "Number of plugin server rpc deadlines must be equal to number of plugin server" + " ports. Paths: %s. Ports: %s. Deadlines: %s", pathCounts, portCounts, pluginServerRpcDeadlineSeconds.size())); } } } if (!remotePluginServerAddress.isEmpty()) { var addrCounts = remotePluginServerAddress.size(); var portCounts = remotePluginServerPort.size(); if (addrCounts != portCounts) { throw new ParameterException( String.format( "Number of remote plugin server paths must be equal to number of plugin server " + "ports. 
Addresses: %s. Ports: %s.", addrCounts, portCounts)); } if (!remotePluginServerRpcDeadlineSeconds.isEmpty()) { if (remotePluginServerRpcDeadlineSeconds.size() != addrCounts) { throw new ParameterException( String.format( "Number of plugin server rpc deadlines must be equal to number of plugin server" + " ports. Paths: %s. Ports: %s. Deadlines: %s", addrCounts, portCounts, pluginServerRpcDeadlineSeconds.size())); } } for (int port : remotePluginServerPort) { if (!(port <= NetworkEndpointUtils.MAX_PORT_NUMBER && port > 0)) { throw new ParameterException( String.format( "Remote plugin server port out of range. Expected [0, %s], actual %s.", NetworkEndpointUtils.MAX_PORT_NUMBER, port)); } } } }
// Test: matching lengths for remote address, port, and deadline lists must pass validation.
@Test public void validate_whenPythonPluginServerValidNumberOfDeadlines_succeeds() { LanguageServerOptions options = new LanguageServerOptions(); options.remotePluginServerAddress = ImmutableList.of("127.0.0.1"); options.remotePluginServerPort = ImmutableList.of(10000); options.remotePluginServerRpcDeadlineSeconds = ImmutableList.of(150); options.validate(); }
// Reports whether a config repo with the given id is registered.
public boolean hasConfigRepo(String configRepoId) {
    final var repo = this.getConfigRepo(configRepoId);
    return repo != null;
}
// Test: an id not present in the collection must report false.
@Test public void shouldReturnFalseIfDoesNotContainTheConfigRepoWithTheSpecifiedId() { assertThat(repos.hasConfigRepo("unknown"), is(false)); }
// Handler: resolves the {name} path variable, looks up the post, and renders it as JSON.
// An empty lookup is mapped to NotFoundException("Post not found").
private Mono<ServerResponse> getPostByName(ServerRequest request) { final var name = request.pathVariable("name"); return postFinder.getByName(name) .switchIfEmpty(Mono.error(() -> new NotFoundException("Post not found"))) .flatMap(post -> ServerResponse.ok().contentType(MediaType.APPLICATION_JSON) .bodyValue(post) ); }
// Test: GET /posts/{name} must return 200 with a JSON body echoing the post's metadata name.
@Test public void getPostByName() { Metadata metadata = new Metadata(); metadata.setName("test"); PostVo post = PostVo.builder() .metadata(metadata) .build(); when(postFinder.getByName(anyString())).thenReturn(Mono.just(post)); webClient.get().uri("/posts/{name}", "test") .exchange() .expectStatus().isOk() .expectHeader().contentType(MediaType.APPLICATION_JSON) .expectBody() .jsonPath("$.metadata.name").isEqualTo("test"); verify(postFinder).getByName(anyString()); }
// Convenience overload: delegates with the trailing boolean flag set to false.
public DdlCommandResult execute( final String sql, final DdlCommand ddlCommand, final boolean withQuery, final Set<SourceName> withQuerySources ) { return execute(sql, ddlCommand, withQuery, withQuerySources, false); }
// Test: re-executing CREATE STREAM for an existing name without REPLACE must succeed
// but carry a warning message about the duplicate.
@Test public void shouldWarnAddDuplicateStreamWithoutReplace() { // Given: givenCreateStream(); cmdExec.execute(SQL_TEXT, createStream, false, NO_QUERY_SOURCES); // When: givenCreateStream(SCHEMA2, false); final DdlCommandResult result = cmdExec.execute(SQL_TEXT, createStream, false, NO_QUERY_SOURCES); // Then: assertThat("Expected successful execution", result.isSuccess()); assertThat(result.getMessage(), containsString("A stream with the same name already exists")); }
// Consumes a balanced bracket pair: if the next token is the left bracket, polls tokens
// (appending each to matchedTokenList) while tracking nesting depth, returning true when
// the depth returns to zero. Returns false without consuming anything if the queue does
// not start with lToken, or with tokens consumed if the queue drains before balancing.
@Override public boolean matchToken(TokenQueue tokenQueue, List<Token> matchedTokenList) { if (!tokenQueue.isNextTokenValue(lToken)) { return false; } int stack = 0; while (tokenQueue.peek() != null) { Token token = tokenQueue.poll(); if (lToken.equals(token.getValue())) { stack++; } else if (rToken.equals(token.getValue())) { stack--; } matchedTokenList.add(token); if (stack == 0) { return true; } } return false; }
// Test: an unclosed "(" must return false after consuming the single token; interaction
// counts on the spied queue and mocked output list are verified exactly.
@Test public void shouldNotMatchWhenNoRight() { Token t1 = new Token("(", 1, 1); TokenQueue tokenQueue = spy(new TokenQueue(Arrays.asList(t1))); List<Token> output = mock(List.class); BridgeTokenMatcher matcher = new BridgeTokenMatcher("(", ")"); assertThat(matcher.matchToken(tokenQueue, output), is(false)); verify(tokenQueue, times(1)).isNextTokenValue("("); verify(tokenQueue, times(1)).poll(); verify(tokenQueue, times(2)).peek(); verifyNoMoreInteractions(tokenQueue); verify(output).add(t1); verifyNoMoreInteractions(output); }
// Debug-level log: formats the message with the varargs then delegates to android.util.Log.d.
@Override public void d(String tag, String message, Object... args) { Log.d(tag, formatString(message, args)); }
// Test: the throwable-accepting d(...) overload must log the formatted message and the
// throwable at DEBUG level. (Exercises a different overload than d(tag, message, args).)
@Test public void debugWithThrowableLoggedCorrectly() { String expectedMessage = "Hello World"; Throwable t = new Throwable("Test Throwable"); logger.d(t, tag, "Hello %s", "World"); assertLogged(DEBUG, tag, expectedMessage, t); }
// Classifies the error: a missing schema-registry subject is a USER error (logged with
// the query id); anything else is UNKNOWN.
@Override
public Type classify(final Throwable e) {
    if (!SchemaRegistryUtil.isSubjectNotFoundErrorCode(e)) {
        return Type.UNKNOWN;
    }
    LOG.info(
        "Classified error as USER error based on missing SR subject. Query ID: {} Exception: {}",
        queryId,
        e);
    return Type.USER;
}
// Test: a RestClientException whose error code is not subject-not-found must classify as UNKNOWN.
@Test public void shouldClassifyNoMissingSubjectAsUnknownErrorCode() { // Given: final Exception e = new RestClientException("foo", 401, 40101); // When: final QueryError.Type type = new MissingSubjectClassifier("").classify(e); // Then: assertThat(type, is(QueryError.Type.UNKNOWN)); }
// Returns the cached TOTAL_SWAP_SPACE constant captured at class initialization.
public static long totalSwapSpace() { return TOTAL_SWAP_SPACE; }
// Test: totalSwapSpace must be >= -1.
// NOTE(review): this assertion is very weak (only rejects values below -1); presumably
// -1 is the sentinel for "unknown" — confirm against the constant's initialization.
@Test public void testTotalSwapSpace() { assertTrue(totalSwapSpace() >= -1); }
// Mock-aware cluster invocation. Reads the per-method "mock" URL parameter:
//  - empty        -> normal invocation, no mocking;
//  - "force:..."  -> skip the real call entirely and return the mock result (logged);
//  - otherwise    -> fail-mock: invoke normally, and on a non-business RpcException
//                    (thrown, or carried in the result per issue #4585) fall back to the
//                    mock result; business exceptions are always rethrown.
@Override public Result invoke(Invocation invocation) throws RpcException { Result result; String value = getUrl().getMethodParameter( RpcUtils.getMethodName(invocation), MOCK_KEY, Boolean.FALSE.toString()) .trim(); if (ConfigUtils.isEmpty(value)) { // no mock result = this.invoker.invoke(invocation); } else if (value.startsWith(FORCE_KEY)) { if (logger.isWarnEnabled()) { logger.warn( CLUSTER_FAILED_MOCK_REQUEST, "force mock", "", "force-mock: " + RpcUtils.getMethodName(invocation) + " force-mock enabled , url : " + getUrl()); } // force:direct mock result = doMockInvoke(invocation, null); } else { // fail-mock try { result = this.invoker.invoke(invocation); // fix:#4585 if (result.getException() != null && result.getException() instanceof RpcException) { RpcException rpcException = (RpcException) result.getException(); if (rpcException.isBiz()) { throw rpcException; } else { result = doMockInvoke(invocation, rpcException); } } } catch (RpcException e) { if (e.isBiz()) { throw e; } if (logger.isWarnEnabled()) { logger.warn( CLUSTER_FAILED_MOCK_REQUEST, "failed to mock invoke", "", "fail-mock: " + RpcUtils.getMethodName(invocation) + " fail-mock enabled , url : " + getUrl(), e); } result = doMockInvoke(invocation, e); } } return result; }
// Test: per-method mock overrides take precedence over the default "mock=fail:return z" —
// getSomething uses its fail-mock ("x"), getSomething2 its force-mock ("y"), and methods
// without an override (getSomething3, sayHello) fall back to the default ("z") because
// the invoker is configured to fail (invoke_return_error=true).
@Test void testMockInvokerFromOverride_Invoke_Fock_WithFailDefault() { URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName()) .addParameter( REFER_KEY, URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=fail:return z" + "&" + "getSomething.mock=fail:return x" + "&" + "getSomething2.mock=force:return y")) .addParameter("invoke_return_error", "true"); Invoker<IHelloService> cluster = getClusterInvoker(url); // Configured with mock RpcInvocation invocation = new RpcInvocation(); invocation.setMethodName("getSomething"); Result ret = cluster.invoke(invocation); Assertions.assertEquals("x", ret.getValue()); // If no mock was configured, return null directly invocation = new RpcInvocation(); invocation.setMethodName("getSomething2"); ret = cluster.invoke(invocation); Assertions.assertEquals("y", ret.getValue()); // If no mock was configured, return null directly invocation = new RpcInvocation(); invocation.setMethodName("getSomething3"); ret = cluster.invoke(invocation); Assertions.assertEquals("z", ret.getValue()); // If no mock was configured, return null directly invocation = new RpcInvocation(); invocation.setMethodName("sayHello"); ret = cluster.invoke(invocation); Assertions.assertEquals("z", ret.getValue()); }
/**
 * Returns the maximum value of each column of the matrix.
 *
 * @param matrix a rectangular matrix; all rows must be at least as long as the first row.
 * @return an array of length {@code matrix[0].length} whose j-th element is the maximum
 *         of column j; an empty array when the matrix has no rows (the original threw
 *         ArrayIndexOutOfBoundsException on an empty matrix).
 */
public static int[] colMax(int[][] matrix) {
    // Robustness fix: no rows means no columns to reduce.
    if (matrix.length == 0) {
        return new int[0];
    }
    int[] max = new int[matrix[0].length];
    Arrays.fill(max, Integer.MIN_VALUE);
    for (int[] row : matrix) {
        for (int j = 0; j < max.length; j++) {
            max[j] = Math.max(max[j], row[j]);
        }
    }
    return max;
}
// Test: column maxima of a 3x3 double matrix must match within 1E-7.
// NOTE(review): this exercises a double[][] overload of MathEx.colMax, not the int[][]
// variant shown elsewhere in this dataset row — presumably both overloads exist.
@Test public void testColMax() { System.out.println("colMax"); double[][] A = { {0.7220180, 0.07121225, 0.6881997}, {-0.2648886, -0.89044952, 0.3700456}, {-0.6391588, 0.44947578, 0.6240573} }; double[] r = {0.7220180, 0.44947578, 0.6881997}; double[] result = MathEx.colMax(A); for (int i = 0; i < r.length; i++) { assertEquals(result[i], r[i], 1E-7); } }
/**
 * Renders the objectTypePairs map as one line per non-null key, each formatted as
 * {@code "key" [value] } and preceded by a platform line separator (Const.CR).
 *
 * <p>Improvement over the original: the raw {@code Map.Entry} loop variable is replaced
 * with the wildcard-typed {@code Map.Entry<?, ?>}, eliminating the raw-type warning
 * without changing the produced text.
 */
public String objectTypePairsToString() {
    StringBuilder result = new StringBuilder();
    for ( Map.Entry<?, ?> entry : objectTypePairs.entrySet() ) {
        if ( entry.getKey() != null ) {
            result.append( Const.CR )
                .append( "\"" ).append( entry.getKey() ).append( "\"" )
                .append( " [" ).append( entry.getValue() ).append( "] " );
        }
    }
    return result.toString();
}
// Test: the rendered string must list both unresolved references in insertion order,
// each on its own line as "path" [type], and the cause exception must be preserved.
@Test public void testObjectTypePairsToString() throws Exception { Exception cause = new NullPointerException(); Map<String, RepositoryObjectType> notFoundedReferences = new LinkedHashMap<String, RepositoryObjectType>(); String pathToTransStub = "/path/Trans.ktr"; String pathToJobStub = "/path/Job.ktr"; notFoundedReferences.put( pathToTransStub, RepositoryObjectType.TRANSFORMATION ); notFoundedReferences.put( pathToJobStub, RepositoryObjectType.JOB ); String expectedOutput = System.lineSeparator() + "\"/path/Trans.ktr\" [transformation] " + System.lineSeparator() + "\"/path/Job.ktr\" [job] "; try { throw new LookupReferencesException( cause, notFoundedReferences ); } catch ( LookupReferencesException testedException ) { String actual = testedException.objectTypePairsToString(); assertEquals( expectedOutput, actual ); //check that cause exception was set assertNotNull( testedException.getCause() ); } }
// Serializes the object to the stream, preferring a per-class ObjectWriter when one is
// registered; otherwise the shared mapper is used.
public <T> void writeTo(T object, OutputStream entityStream) throws IOException {
    final ObjectWriter writer = objectWriterByClass.get(object.getClass());
    if (writer != null) {
        writer.writeValue(entityStream, object);
    } else {
        mapper.writeValue(entityStream, object);
    }
}
// Test: Applications encoded via the Jackson codec must round-trip through the
// XStream-based EntityBodyConverter and compare equal to the original.
@Test public void testApplicationsJacksonEncodeXStreamDecode() throws Exception { // Encode ByteArrayOutputStream captureStream = new ByteArrayOutputStream(); codec.writeTo(APPLICATIONS, captureStream); byte[] encoded = captureStream.toByteArray(); // Decode InputStream source = new ByteArrayInputStream(encoded); Applications decoded = (Applications) new EntityBodyConverter().read(source, Applications.class, MediaType.APPLICATION_JSON_TYPE); assertTrue(EurekaEntityComparators.equal(decoded, APPLICATIONS)); }
// Existence check for B2: a container path is located by listing buckets and comparing
// names; any other path delegates to the attributes feature, mapping NotfoundException
// to false. B2ApiException and IOException are translated to BackgroundException via the
// session's mapping services.
@Override public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException { try { if(containerService.isContainer(file)) { final List<B2BucketResponse> buckets = session.getClient().listBuckets(); for(B2BucketResponse bucket : buckets) { if(StringUtils.equals(containerService.getContainer(file).getName(), bucket.getBucketName())) { return true; } } } else { try { attributes.find(file, listener); return true; } catch(NotfoundException e) { return false; } } return false; } catch(B2ApiException e) { throw new B2ExceptionMappingService(fileid).map("Failure to read attributes of {0}", e, file); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map("Failure to read attributes of {0}", e, file); } }
@Test
public void testFind() throws Exception {
    final Path bucket = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path file = new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final B2VersionIdProvider fileid = new B2VersionIdProvider(session);
    new B2TouchFeature(session, fileid).touch(file, new TransferStatus());
    // The freshly created file must be found; a random non-existent sibling must not.
    assertTrue(new B2FindFeature(session, fileid).find(file));
    assertFalse(new B2FindFeature(session, fileid).find(new Path(bucket, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file))));
    // Clean up the remote fixture.
    new B2DeleteFeature(session, fileid).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
@Override
protected IdentifiedDataSerializable getConfig() {
    // Translates the decoded client-message parameters into a CacheSimpleConfig.
    CacheSimpleConfig config = new CacheSimpleConfig();
    config.setAsyncBackupCount(parameters.asyncBackupCount);
    config.setBackupCount(parameters.backupCount);
    config.setCacheEntryListeners(parameters.cacheEntryListeners);
    config.setCacheLoader(parameters.cacheLoader);
    config.setCacheLoaderFactory(parameters.cacheLoaderFactory);
    config.setCacheWriter(parameters.cacheWriter);
    config.setCacheWriterFactory(parameters.cacheWriterFactory);
    config.setDisablePerEntryInvalidationEvents(parameters.disablePerEntryInvalidationEvents);
    if (parameters.evictionConfig != null) {
        config.setEvictionConfig(parameters.evictionConfig.asEvictionConfig(serializationService));
    }
    // An explicit factory class name takes precedence over a timed expiry-policy factory config.
    if (parameters.expiryPolicyFactoryClassName != null) {
        config.setExpiryPolicyFactory(parameters.expiryPolicyFactoryClassName);
    } else if (parameters.timedExpiryPolicyFactoryConfig != null) {
        ExpiryPolicyFactoryConfig expiryPolicyFactoryConfig = new ExpiryPolicyFactoryConfig(parameters.timedExpiryPolicyFactoryConfig);
        config.setExpiryPolicyFactoryConfig(expiryPolicyFactoryConfig);
    }
    if (parameters.eventJournalConfig != null) {
        config.setEventJournalConfig(parameters.eventJournalConfig);
    }
    if (parameters.hotRestartConfig != null) {
        config.setHotRestartConfig(parameters.hotRestartConfig);
    }
    config.setInMemoryFormat(InMemoryFormat.valueOf(parameters.inMemoryFormat));
    config.setKeyType(parameters.keyType);
    config.setManagementEnabled(parameters.managementEnabled);
    if (parameters.mergePolicy != null) {
        config.setMergePolicyConfig(mergePolicyConfig(parameters.mergePolicy, parameters.mergeBatchSize));
    }
    config.setName(parameters.name);
    if (parameters.partitionLostListenerConfigs != null && !parameters.partitionLostListenerConfigs.isEmpty()) {
        // Unchecked cast: adaptListenerConfigs returns an untyped listener-config list.
        List<CachePartitionLostListenerConfig> listenerConfigs = (List<CachePartitionLostListenerConfig>)
                adaptListenerConfigs(parameters.partitionLostListenerConfigs, parameters.userCodeNamespace);
        config.setPartitionLostListenerConfigs(listenerConfigs);
    } else {
        config.setPartitionLostListenerConfigs(new ArrayList<>());
    }
    config.setSplitBrainProtectionName(parameters.splitBrainProtectionName);
    config.setReadThrough(parameters.readThrough);
    config.setStatisticsEnabled(parameters.statisticsEnabled);
    config.setValueType(parameters.valueType);
    config.setWanReplicationRef(parameters.wanReplicationRef);
    config.setWriteThrough(parameters.writeThrough);
    // NOTE(review): the "...Exists" flags presumably mark fields absent from requests sent by
    // older protocol versions — confirm against the codec definition.
    if (parameters.isMerkleTreeConfigExists && parameters.merkleTreeConfig != null) {
        config.setMerkleTreeConfig(parameters.merkleTreeConfig);
    }
    if (parameters.isDataPersistenceConfigExists) {
        config.setDataPersistenceConfig(parameters.dataPersistenceConfig);
    }
    if (parameters.isUserCodeNamespaceExists) {
        config.setUserCodeNamespace(parameters.userCodeNamespace);
    }
    return config;
}
@Test
public void testDataPersistenceSubConfigTransmittedCorrectly() throws Exception {
    CacheConfig<Object, Object> cacheConfig = new CacheConfig<>("my-cache");
    DataPersistenceConfig dataPersistenceConfig = new DataPersistenceConfig();
    dataPersistenceConfig.setEnabled(true);
    dataPersistenceConfig.setFsync(true);
    cacheConfig.setDataPersistenceConfig(dataPersistenceConfig);
    // Encode an add-cache-config request; the many nulls are optional codec parameters left unset
    // so the round-trip exercises only the data-persistence sub-config.
    ClientMessage addMapConfigClientMessage = DynamicConfigAddCacheConfigCodec.encodeRequest(
        cacheConfig.getName(), null, null, cacheConfig.isStatisticsEnabled(), cacheConfig.isManagementEnabled(), cacheConfig.isReadThrough(), cacheConfig.isWriteThrough(), null, null, null, null, cacheConfig.getBackupCount(), cacheConfig.getAsyncBackupCount(), cacheConfig.getInMemoryFormat().name(), null, null, 0, cacheConfig.isDisablePerEntryInvalidationEvents(), null, null, null, null, null, null, null, null, null, cacheConfig.getDataPersistenceConfig(), cacheConfig.getUserCodeNamespace()
    );
    AddCacheConfigMessageTask addCacheConfigMessageTask = createMessageTask(addMapConfigClientMessage);
    addCacheConfigMessageTask.run();
    // Round-trip: decode the task's resulting config and verify DataPersistenceConfig survived intact.
    CacheConfig<Object, Object> transmittedCacheConfig = new CacheConfig<>((CacheSimpleConfig) addCacheConfigMessageTask.getConfig());
    assertEquals(cacheConfig, transmittedCacheConfig);
}
/**
 * Parses an application/x-www-form-urlencoded body into named form parts.
 * Repeated names accumulate values; a name without '=' yields a part with no values.
 */
public static Map<String, FormUrlEncoded.FormPart> read(String body) {
    var parts = new HashMap<String, FormUrlEncoded.FormPart>();
    for (var s : body.split("&")) {
        if (s.isBlank()) {
            continue;
        }
        // Split into at most two tokens so literal '=' characters inside the value survive.
        // The previous unlimited split silently dropped everything after a second '='
        // (e.g. "k=a=b" lost "b"). With limit 2, "k=" now also yields an empty-string value.
        var pair = s.split("=", 2);
        var name = URLDecoder.decode(pair[0].trim(), StandardCharsets.UTF_8);
        var part = parts.computeIfAbsent(name, n -> new FormUrlEncoded.FormPart(n, new ArrayList<>()));
        if (pair.length > 1) {
            var value = URLDecoder.decode(pair[1].trim(), StandardCharsets.UTF_8);
            part.values().add(value);
        }
    }
    return parts;
}
@Test
void testNoValue() {
    // "val2" appears without '=' and must map to a part with no values.
    var map = FormUrlEncodedServerRequestMapper.read("val1=2112&val1=3232&val2");
    assertThat(map)
        .hasSize(2)
        .hasEntrySatisfying("val1", part -> assertThat(part.values()).containsExactly("2112", "3232"))
        .hasEntrySatisfying("val2", part -> assertThat(part.values()).isEmpty());
}
@Override
public int run(String launcherVersion, String launcherMd5, ServerUrlGenerator urlGenerator, Map<String, String> env, Map<String, String> context) {
    // Downloads the agent binaries from the server, launches the agent process and waits for it
    // to exit, draining its output streams. Returns the agent's exit code, or EXCEPTION_OCCURRED.
    int exitValue = 0;
    LOG.info("Agent launcher is version: {}", CurrentGoCDVersion.getInstance().fullVersion());
    String[] command = new String[]{};
    try {
        AgentBootstrapperArgs bootstrapperArgs = AgentBootstrapperArgs.fromProperties(context);
        // Refresh (if outdated) the agent jar, the plugins zip and the TFS implementation jar.
        ServerBinaryDownloader agentDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs);
        agentDownloader.downloadIfNecessary(DownloadableFile.AGENT);
        ServerBinaryDownloader pluginZipDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs);
        pluginZipDownloader.downloadIfNecessary(DownloadableFile.AGENT_PLUGINS);
        ServerBinaryDownloader tfsImplDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs);
        tfsImplDownloader.downloadIfNecessary(DownloadableFile.TFS_IMPL);
        command = agentInvocationCommand(agentDownloader.getMd5(), launcherMd5, pluginZipDownloader.getMd5(), tfsImplDownloader.getMd5(), env, context, agentDownloader.getExtraProperties());
        LOG.info("Launching Agent with command: {}", join(command, " "));
        Process agent = invoke(command);
        // The next lines prevent the child process from blocking on Windows
        AgentOutputAppender agentOutputAppenderForStdErr = new AgentOutputAppender(GO_AGENT_STDERR_LOG);
        AgentOutputAppender agentOutputAppenderForStdOut = new AgentOutputAppender(GO_AGENT_STDOUT_LOG);
        if (new SystemEnvironment().consoleOutToStdout()) {
            agentOutputAppenderForStdErr.writeTo(AgentOutputAppender.Outstream.STDERR);
            agentOutputAppenderForStdOut.writeTo(AgentOutputAppender.Outstream.STDOUT);
        }
        agent.getOutputStream().close();
        // Drain the child's stderr/stdout on dedicated threads so its pipe buffers never fill up.
        AgentConsoleLogThread stdErrThd = new AgentConsoleLogThread(agent.getErrorStream(), agentOutputAppenderForStdErr);
        stdErrThd.start();
        AgentConsoleLogThread stdOutThd = new AgentConsoleLogThread(agent.getInputStream(), agentOutputAppenderForStdOut);
        stdOutThd.start();
        // NOTE(review): Shutdown presumably destroys the agent process when this JVM exits — confirm.
        Shutdown shutdownHook = new Shutdown(agent);
        Runtime.getRuntime().addShutdownHook(shutdownHook);
        try {
            exitValue = agent.waitFor();
        } catch (InterruptedException ie) {
            LOG.error("Agent was interrupted. Terminating agent and respawning. {}", ie.toString());
            agent.destroy();
        } finally {
            // Always unregister the hook and stop the log-draining threads, even on interrupt.
            removeShutdownHook(shutdownHook);
            stdErrThd.stopAndJoin();
            stdOutThd.stopAndJoin();
        }
    } catch (Exception e) {
        LOG.error("Exception while executing command: {} - {}", join(command, " "), e.toString());
        exitValue = EXCEPTION_OCCURRED;
    }
    return exitValue;
}
@Test
public void shouldLogInterruptOnAgentProcess() throws InterruptedException {
    final List<String> cmd = new ArrayList<>();
    try (LogFixture logFixture = logFixtureFor(AgentProcessParentImpl.class, Level.DEBUG)) {
        // Simulate the agent process being interrupted while the bootstrapper waits on it.
        Process subProcess = mockProcess();
        when(subProcess.waitFor()).thenThrow(new InterruptedException("bang bang!"));
        AgentProcessParentImpl bootstrapper = createBootstrapper(cmd, subProcess);
        int returnCode = bootstrapper.run("bootstrapper_version", "bar", getURLGenerator(), new HashMap<>(), context());
        // An interrupt is not a failure: the bootstrapper logs it, destroys the agent and returns 0.
        assertThat(returnCode, is(0));
        assertThat(logFixture.contains(Level.ERROR, "Agent was interrupted. Terminating agent and respawning. java.lang.InterruptedException: bang bang!"), is(true));
        verify(subProcess).destroy();
    }
}
/**
 * Creates an uninitialized {@link Write} transform; required properties such as the key
 * class are supplied via the builder-style {@code with*} methods before use.
 */
public static <K, V> Write<K, V> write() {
    return new AutoValue_CdapIO_Write.Builder<K, V>().build();
}
@Test
public void testWriteObjectCreationFailsIfKeyClassIsNull() {
    // A null key class must be rejected eagerly by the builder, not at pipeline expansion time.
    assertThrows(
        IllegalArgumentException.class,
        () -> CdapIO.<String, String>write().withKeyClass(null));
}
/**
 * Deserializes and executes a queued command at its recorded offset.
 * Fails fast if this executor has not been configured yet.
 */
void handleStatement(final QueuedCommand queuedCommand) {
    throwIfNotConfigured();
    handleStatementWithTerminatedQueries(
        queuedCommand.getAndDeserializeCommand(commandDeserializer),
        queuedCommand.getAndDeserializeCommandId(),
        queuedCommand.getStatus(),
        Mode.EXECUTE,
        queuedCommand.getOffset(),
        false
    );
}
@Test
public void shouldUpdateStatusOnCompletedPlannedCommand() {
    // Given: a planned query that will execute successfully
    givenMockPlannedQuery();

    // When:
    handleStatement(statementExecutorWithMocks, plannedCommand, COMMAND_ID, Optional.of(status), 0L);

    // Then: the status transitions EXECUTING -> SUCCESS around the engine execution, in order.
    final InOrder inOrder = Mockito.inOrder(status, mockEngine);
    inOrder.verify(status).setStatus(new CommandStatus(Status.EXECUTING, "Executing statement"));
    inOrder.verify(mockEngine).execute(any(), any(ConfiguredKsqlPlan.class), any(Boolean.class));
    inOrder.verify(status).setFinalStatus(new CommandStatus(Status.SUCCESS, "Created query with ID qid", Optional.of(QUERY_ID)));
}
/**
 * Creates the node-level throughput sensor (rate + total) for records emitted from the
 * suppression buffer, recorded at DEBUG level.
 */
public static Sensor suppressionEmitSensor(final String threadId,
                                           final String taskId,
                                           final String processorNodeId,
                                           final StreamsMetricsImpl streamsMetrics) {
    return throughputSensor(
        threadId,
        taskId,
        processorNodeId,
        SUPPRESSION_EMIT,
        SUPPRESSION_EMIT_RATE_DESCRIPTION,
        SUPPRESSION_EMIT_TOTAL_DESCRIPTION,
        RecordingLevel.DEBUG,
        streamsMetrics
    );
}
@Test
public void shouldGetSuppressionEmitSensor() {
    final String metricNamePrefix = "suppression-emit";
    final String descriptionOfCount = "The total number of emitted records from the suppression buffer";
    final String descriptionOfRate = "The average number of emitted records from the suppression buffer per second";
    // The sensor must be registered at node level with DEBUG recording and node-level tags.
    when(streamsMetrics.nodeLevelSensor(THREAD_ID, TASK_ID, PROCESSOR_NODE_ID, metricNamePrefix, RecordingLevel.DEBUG))
        .thenReturn(expectedSensor);
    when(streamsMetrics.nodeLevelTagMap(THREAD_ID, TASK_ID, PROCESSOR_NODE_ID)).thenReturn(tagMap);
    getAndVerifySensor(
        () -> ProcessorNodeMetrics.suppressionEmitSensor(THREAD_ID, TASK_ID, PROCESSOR_NODE_ID, streamsMetrics),
        metricNamePrefix,
        descriptionOfRate,
        descriptionOfCount
    );
}
@Override public void handleRequest(RestRequest request, RequestContext requestContext, Callback<RestResponse> callback) { //This code path cannot accept content types or accept types that contain //multipart/related. This is because these types of requests will usually have very large payloads and therefore //would degrade server performance since RestRequest reads everything into memory. if (!isMultipart(request, requestContext, callback)) { _restRestLiServer.handleRequest(request, requestContext, callback); } }
@Test(dataProvider = TestConstants.RESTLI_PROTOCOL_1_2_PREFIX + "protocolVersions")
public void testMessageAndDetailsErrorFormat(final ProtocolVersion protocolVersion, final String errorResponseHeaderName, final RestOrStream restOrStream) throws Exception {
    final StatusCollectionResource statusResource = getMockResource(StatusCollectionResource.class);
    final DataMap details = new DataMap();
    details.put("errorKey", "errorDetail");
    // The resource throws a 500 carrying both a message and structured error details.
    EasyMock.expect(statusResource.get(eq(1L))).andThrow(new RestLiServiceException(
        HttpStatus.S_500_INTERNAL_SERVER_ERROR, "Mock Exception").setErrorDetails(details)).once();
    replay(statusResource);
    Callback<RestResponse> restResponseCallback = new Callback<RestResponse>() {
        @Override
        public void onSuccess(RestResponse restResponse) {
            fail();
        }

        @Override
        public void onError(Throwable e) {
            assertTrue(e instanceof RestException);
            RestException restException = (RestException) e;
            RestResponse restResponse = restException.getResponse();
            try {
                assertEquals(restResponse.getStatus(), 500);
                assertTrue(restResponse.getEntity().length() > 0);
                assertEquals(restResponse.getHeader(errorResponseHeaderName), RestConstants.HEADER_VALUE_ERROR);
                ErrorResponse responseBody = DataMapUtils.read(restResponse.getEntity().asInputStream(), ErrorResponse.class, restResponse.getHeaders());
                // in this test, we're using the _serverWithCustomErrorResponseConfig (see below), which has been configured to use the
                // MESSAGE_AND_DETAILS ErrorResponseFormat, so stack trace and other error response parts should be absent
                assertEquals(responseBody.getMessage(), "Mock Exception");
                assertEquals(responseBody.getErrorDetails().data().getString("errorKey"), "errorDetail");
                assertFalse(responseBody.hasExceptionClass());
                assertFalse(responseBody.hasStackTrace());
                assertFalse(responseBody.hasStatus());
                EasyMock.verify(statusResource);
                EasyMock.reset(statusResource);
            } catch (Exception e2) {
                fail(e2.toString());
            }
        }
    };
    if (restOrStream == RestOrStream.REST) {
        RestRequest request = new RestRequestBuilder(new URI("/statuses/1"))
            .setHeader(RestConstants.HEADER_RESTLI_PROTOCOL_VERSION, protocolVersion.toString()).build();
        _serverWithCustomErrorResponseConfig.handleRequest(request, new RequestContext(), restResponseCallback);
    } else {
        // Streaming path: adapt the StreamException back into a RestException so the same
        // assertions (in restResponseCallback) can be reused.
        StreamRequest streamRequest = new StreamRequestBuilder(new URI("/statuses/1"))
            .setHeader(RestConstants.HEADER_RESTLI_PROTOCOL_VERSION, protocolVersion.toString())
            .build(EntityStreams.emptyStream());
        Callback<StreamResponse> callback = new Callback<StreamResponse>() {
            @Override
            public void onSuccess(StreamResponse streamResponse) {
                fail();
            }

            @Override
            public void onError(Throwable e) {
                Messages.toRestException((StreamException) e, new Callback<RestException>() {
                    @Override
                    public void onError(Throwable e) {
                        Assert.fail();
                    }

                    @Override
                    public void onSuccess(RestException result) {
                        restResponseCallback.onError(result);
                    }
                });
            }
        };
        _serverWithCustomErrorResponseConfig.handleRequest(streamRequest, new RequestContext(), callback);
    }
}
@Override
public boolean equals(Object o) {
    // Entries are compared by their serialized key only; values do not participate.
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    return keyData.equals(((CachedQueryEntry<?, ?>) o).keyData);
}
@Test
public void testEquals_givenOtherHasDifferentKey_thenReturnFalse() {
    // Entries built over different keys must not compare equal.
    CachedQueryEntry first = createEntry("key1");
    CachedQueryEntry second = createEntry("key2");
    assertFalse(first.equals(second));
}
/**
 * Applies every configured hash type in sequence, feeding the output of one round
 * into the next.
 */
private String hashPassword(String password) {
    String hashed = password;
    for (PasswordType type : passwordTypes) {
        hashed = hashPassword(hashed, type);
    }
    return hashed;
}
@Test
public void hashPassword() throws Exception {
    // Each digest-based password type must reproduce its known hash for PASSWORD.
    assertEquals(MD5_PASSWORD, jdbcAuthProvider.hashPassword(PASSWORD, JDBCAuthProvider.PasswordType.md5));
    assertEquals(SHA1_PASSWORD, jdbcAuthProvider.hashPassword(PASSWORD, JDBCAuthProvider.PasswordType.sha1));
    assertEquals(SHA256_PASSWORD, jdbcAuthProvider.hashPassword(PASSWORD, JDBCAuthProvider.PasswordType.sha256));
    assertEquals(SHA512_PASSWORD, jdbcAuthProvider.hashPassword(PASSWORD, JDBCAuthProvider.PasswordType.sha512));
    // bcrypt is salted, so re-hashing never reproduces an earlier hash verbatim...
    assertNotEquals(BCRYPTED_PASSWORD, jdbcAuthProvider.hashPassword(PASSWORD, JDBCAuthProvider.PasswordType.bcrypt));
    // ...but the stored bcrypt hash still verifies against the plaintext.
    assertTrue(OpenBSDBCrypt.checkPassword(BCRYPTED_PASSWORD, PASSWORD.toCharArray()));
}
/**
 * Encrypts {@code args[0]} and wraps the ciphertext in the replacer's variable syntax,
 * e.g. {@code $ENC{...}}. An optional {@code args[1]} overrides the iteration count.
 * Replacer settings come from the config file named by the {@code hazelcast.config}
 * system property, or from system properties when it is unset.
 */
protected static String encrypt(String... args) throws Exception {
    int iterations = args.length == 2 ? Integer.parseInt(args[1]) : DEFAULT_ITERATIONS;
    EncryptionReplacer replacer = new EncryptionReplacer();
    String xmlPath = System.getProperty("hazelcast.config");
    Properties properties;
    if (xmlPath == null) {
        properties = System.getProperties();
    } else {
        // Close the config stream deterministically; the original leaked the FileInputStream.
        try (FileInputStream configStream = new FileInputStream(xmlPath)) {
            properties = loadPropertiesFromConfig(configStream);
        }
    }
    replacer.init(properties);
    String encrypted = replacer.encrypt(args[0], iterations);
    return "$" + replacer.getPrefix() + "{" + encrypted + "}";
}
@Test
public void testClientGenerateEncrypted() throws Exception {
    assumeDefaultAlgorithmsSupported();
    // Point hazelcast.config at a minimal client XML so encrypt() reads replacer settings from it.
    String xml = "<hazelcast-client xmlns=\"http://www.hazelcast.com/schema/client-config\">\n" + XML_DEFAULT_CONFIG + "</hazelcast-client>";
    File configFile = createFileWithString(xml);
    hazelcastConfigProperty.setOrClearProperty(configFile.getAbsolutePath());
    String encrypted = encrypt("test");
    // The result must be wrapped in the $ENC{...} variable syntax.
    assertThat(encrypted)
        .startsWith("$ENC{")
        .endsWith("}");
}
@Override
public Write.Append append(final Path file, final TransferStatus status) throws BackgroundException {
    try {
        final S3DefaultMultipartService multipartService = new S3DefaultMultipartService(session);
        final List<MultipartUpload> uploads = multipartService.find(file);
        if(!uploads.isEmpty()) {
            // Resume the first pending multipart upload; the append offset is the total size
            // of all parts already transferred.
            Long offset = 0L;
            for(MultipartPart part : multipartService.list(uploads.iterator().next())) {
                offset += part.getSize();
            }
            return new Write.Append(true).withStatus(status).withOffset(offset);
        }
    }
    catch(AccessDeniedException | InteroperabilityException e) {
        log.warn(String.format("Ignore failure listing incomplete multipart uploads. %s", e));
    }
    return Write.override;
}
@Test
public void testSize() throws Exception {
    final S3AccessControlListFeature acl = new S3AccessControlListFeature(session);
    final S3MultipartUploadService feature = new S3MultipartUploadService(session, new S3WriteFeature(session, acl), acl, 5 * 1024L * 1024L, 5);
    // No incomplete multipart upload should exist for this path, so append() is expected to
    // fall back to Write.override (append=false, offset 0) — presumably; confirm fixture state.
    final Write.Append append = feature.append(new Path("/p", EnumSet.of(Path.Type.file)), new TransferStatus().withLength(0L).withRemote(new PathAttributes().withSize(3L)));
    assertFalse(append.append);
    assertEquals(0L, append.offset, 0L);
}
/**
 * Returns whether the given string is acceptable as a root URL,
 * delegating the decision to {@code CustomUrlValidator}.
 */
public static boolean isValidRootUrl(String url) {
    return new CustomUrlValidator().isValid(url);
}
@Test
@Issue("SECURITY-1471")
public void ensureJavaScriptSchemaIsNotAllowed() {
    // javascript: URLs must be rejected as root URLs (XSS hardening, see SECURITY-1471).
    assertFalse(UrlHelper.isValidRootUrl("javascript:alert(123)"));
}
@Override public void handle(final RoutingContext routingContext) { if (routingContext.request().isSSL()) { final String indicatedServerName = routingContext.request().connection() .indicatedServerName(); final String requestHost = routingContext.request().host(); if (indicatedServerName != null && requestHost != null) { // sometimes the port is present in the host header, remove it final String requestHostNoPort = requestHost.replaceFirst(":\\d+", ""); if (!requestHostNoPort.equals(indicatedServerName)) { log.error(String.format( "Sni check failed, host header: %s, sni value %s", requestHostNoPort, indicatedServerName) ); routingContext.fail(MISDIRECTED_REQUEST.code(), new KsqlApiException("This request was incorrectly sent to this ksqlDB server", Errors.ERROR_CODE_MISDIRECTED_REQUEST)); return; } } } routingContext.next(); }
@Test
public void shouldReturnMisdirectedResponseEvenIfPortInHost() {
    // Given: the Host header carries a port and its hostname differs from the SNI value
    when(serverRequest.host()).thenReturn("localhost:80");
    when(httpConnection.indicatedServerName()).thenReturn("anotherhost");

    // When:
    sniHandler.handle(routingContext);

    // Then: the request is failed and never passed down the handler chain
    verify(routingContext).fail(anyInt(), any());
    verify(routingContext, never()).next();
}
@Override
public int compare(T o1, T o2) {
    // Null-safe ordering: nulls sort lower than any non-null value; identical references
    // (including two nulls) are equal. Non-null pairs are delegated to compareNonNull.
    if (o1 == o2) {
        return 0;
    }
    if (o1 == null) {
        return -1;
    }
    if (o2 == null) {
        return 1;
    }
    return compareNonNull(o1, o2);
}
@Test public void should_evaluate_null_instances_as_equal() { // GIVEN Object o1 = null; Object o2 = null; // WHEN int compare = NULL_SAFE_COMPARATOR.compare(o1, o2); // THEN then(compare).isZero(); }
/**
 * Deletes the file or directory at the given path.
 *
 * @param fullFileOrDirPath path of the file or directory to delete
 * @return {@code true} on success; deleting a non-existent path also returns {@code true}
 * @throws IORuntimeException if the deletion fails
 */
public static boolean del(String fullFileOrDirPath) throws IORuntimeException {
    return del(file(fullFileOrDirPath));
}
@Test @Disabled public void delTest() { // 删除一个不存在的文件,应返回true final boolean result = FileUtil.del("e:/Hutool_test_3434543533409843.txt"); assertTrue(result); }
/**
 * Creates a builder for a slot sharing group with the given name.
 *
 * @param name name of the slot sharing group
 * @return a new {@link Builder}
 */
public static Builder newBuilder(String name) {
    return new Builder(name);
}
@Test
void testBuildSlotSharingGroupWithIllegalConfig() {
    // An incomplete/inconsistent resource specification must be rejected at build time.
    assertThatThrownBy(
            () -> SlotSharingGroup.newBuilder("ssg")
                    .setCpuCores(1)
                    .setTaskHeapMemory(MemorySize.ZERO)
                    .setTaskOffHeapMemoryMB(10)
                    .build())
        .isInstanceOf(IllegalArgumentException.class);
}
/**
 * Parses command line arguments; varargs convenience overload of {@code parse(List)}.
 */
public RuntimeOptionsBuilder parse(String... args) {
    return parse(Arrays.asList(args));
}
@Test
void ensure_invalid_ordertype_is_not_allowed() {
    // An unknown --order argument must be rejected with a descriptive message.
    IllegalArgumentException actualThrown = assertThrows(
        IllegalArgumentException.class,
        () -> parser.parse("--order", "invalid").build());
    assertThat(actualThrown.getMessage(),
        is(equalTo("Invalid order. Must be either reverse, random or random:<long>")));
}
@Override
public boolean test(Pickle pickle) {
    // An empty expression list acts as a match-all filter.
    if (expressions.isEmpty()) {
        return true;
    }
    // Every tag expression must accept the pickle's tags.
    final List<String> tags = pickle.getTags();
    return expressions.stream().allMatch(tagExpression -> tagExpression.evaluate(tags));
}
@Test
void empty_tag_predicate_matches_pickle_with_any_tags() {
    // An empty expression imposes no constraint, so any tagged pickle matches.
    TagPredicate predicate = createPredicate("");
    assertTrue(predicate.test(createPickleWithTags("@FOO")));
}
/**
 * Validates each column with the validator registered for its category and
 * returns only the results that did not match.
 */
public List<ColumnMatchResult<?>> getMismatchedColumns(List<Column> columns, ChecksumResult controlChecksum, ChecksumResult testChecksum) {
    return columns.stream()
            .flatMap(column -> columnValidators.get(column.getCategory()).get()
                    .validate(column, controlChecksum, testChecksum).stream())
            .filter(matchResult -> !matchResult.isMatched())
            .collect(toImmutableList());
}
@Test
public void testArray() {
    List<Column> columns = ImmutableList.of(INT_ARRAY_COLUMN, MAP_ARRAY_COLUMN);
    // Control checksum: per array column an element checksum, a cardinality checksum and a
    // cardinality sum. Each mismatch scenario below perturbs exactly one of these families.
    ChecksumResult controlChecksum = new ChecksumResult(
        5,
        ImmutableMap.<String, Object>builder()
            .put("int_array$checksum", new SqlVarbinary(new byte[] {0xa}))
            .put("int_array$cardinality_checksum", new SqlVarbinary(new byte[] {0xb}))
            .put("int_array$cardinality_sum", 1L)
            .put("map_array$checksum", new SqlVarbinary(new byte[] {0xc}))
            .put("map_array$cardinality_checksum", new SqlVarbinary(new byte[] {0xd}))
            .put("map_array$cardinality_sum", 2L)
            .build());

    // Matched
    assertTrue(checksumValidator.getMismatchedColumns(columns, controlChecksum, controlChecksum).isEmpty());

    // Mismatched different elements
    ChecksumResult testChecksum = new ChecksumResult(
        5,
        ImmutableMap.<String, Object>builder()
            .put("int_array$checksum", new SqlVarbinary(new byte[] {0x1a}))
            .put("int_array$cardinality_checksum", new SqlVarbinary(new byte[] {0xb}))
            .put("int_array$cardinality_sum", 1L)
            .put("map_array$checksum", new SqlVarbinary(new byte[] {0x1c}))
            .put("map_array$cardinality_checksum", new SqlVarbinary(new byte[] {0xd}))
            .put("map_array$cardinality_sum", 2L)
            .build());
    assertMismatchedColumns(columns, controlChecksum, testChecksum, INT_ARRAY_COLUMN, MAP_ARRAY_COLUMN);

    // Mismatched different cardinality checksum
    testChecksum = new ChecksumResult(
        5,
        ImmutableMap.<String, Object>builder()
            .put("int_array$checksum", new SqlVarbinary(new byte[] {0xa}))
            .put("int_array$cardinality_checksum", new SqlVarbinary(new byte[] {0x1b}))
            .put("int_array$cardinality_sum", 1L)
            .put("map_array$checksum", new SqlVarbinary(new byte[] {0xc}))
            .put("map_array$cardinality_checksum", new SqlVarbinary(new byte[] {0x1d}))
            .put("map_array$cardinality_sum", 2L)
            .build());
    assertMismatchedColumns(columns, controlChecksum, testChecksum, INT_ARRAY_COLUMN, MAP_ARRAY_COLUMN);

    // Mismatched different cardinality sum
    testChecksum = new ChecksumResult(
        5,
        ImmutableMap.<String, Object>builder()
            .put("int_array$checksum", new SqlVarbinary(new byte[] {0xa}))
            .put("int_array$cardinality_checksum", new SqlVarbinary(new byte[] {0xb}))
            .put("int_array$cardinality_sum", 3L)
            .put("map_array$checksum", new SqlVarbinary(new byte[] {0xc}))
            .put("map_array$cardinality_checksum", new SqlVarbinary(new byte[] {0xd}))
            .put("map_array$cardinality_sum", 4L)
            .build());
    assertMismatchedColumns(columns, controlChecksum, testChecksum, INT_ARRAY_COLUMN, MAP_ARRAY_COLUMN);
}
/**
 * Assigns a synthesized name to a model that has none, built from the source file name,
 * the model's concrete type and its index via MODELNAME_TEMPLATE. Models that already
 * carry a non-empty name are left untouched.
 */
static void populateMissingModelName(final Model model, final String fileName, int i) {
    final String currentName = model.getModelName();
    if (currentName != null && !currentName.isEmpty()) {
        return;
    }
    model.setModelName(String.format(MODELNAME_TEMPLATE, fileName, model.getClass().getSimpleName(), i));
}
@Test
void populateMissingModelName() throws Exception {
    // Load a sample PMML whose first model has no explicit name.
    final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(getFileInputStream(NO_MODELNAME_SAMPLE_NAME));
    final Model model = pmml.getModels().get(0);
    assertThat(model.getModelName()).isNull();

    KiePMMLUtil.populateMissingModelName(model, NO_MODELNAME_SAMPLE_NAME, 0);

    // The generated name must follow MODELNAME_TEMPLATE (file name, model type, index).
    assertThat(model.getModelName())
        .isNotNull()
        .isEqualTo(String.format(MODELNAME_TEMPLATE, NO_MODELNAME_SAMPLE_NAME, model.getClass().getSimpleName(), 0));
}
/**
 * Returns whether the client-side (V1, storage based) authorization checks
 * should run in hcatalog for the given configuration.
 */
public static boolean isAuthorizationEnabled(Configuration conf) {
    if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
        return false;
    }
    // If the V2 api of authorizer in use, the session state getAuthorizer return null.
    // Here we disable authorization if we use V2 api or the DefaultHiveAuthorizationProvider
    // The additional authorization checks happening in hcatalog are designed to
    // work with storage based authorization (on client side). It should not try doing
    // additional checks if a V2 authorizer or DefaultHiveAuthorizationProvider is in use.
    // The recommended configuration is to use storage based authorization in metastore server.
    // However, if user define a custom V1 authorization, it will be honored.
    if (SessionState.get().getAuthorizer() == null
        || DefaultHiveAuthorizationProvider.class.getName().equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER))) {
        LOG.info("Metastore authorizer is skipped for V2 authorizer or" + " DefaultHiveAuthorizationProvider");
        return false;
    }
    return true;
}
@Test
public void authDisabled() throws Exception {
    // With HIVE_AUTHORIZATION_ENABLED switched off, hcat authorization must report disabled.
    HiveConf conf = new HiveConfForTest(this.getClass());
    conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, false);
    SessionState.start(conf);
    assertFalse("hcat auth should be disabled", HCatAuthUtil.isAuthorizationEnabled(conf));
}
/**
 * @return the transaction timeout in milliseconds
 */
@Override
public long getTimeoutMillis() {
    return timeoutMillis;
}
@Test
public void getTimeoutMillis() {
    TransactionImpl tx = new TransactionImpl(txManagerService, nodeEngine, options, UUID.randomUUID());
    // The transaction must report the timeout carried by its TransactionOptions.
    assertEquals(options.getTimeoutMillis(), tx.getTimeoutMillis());
}
/**
 * Filters each field's term list independently and flattens the per-field results
 * into a single list.
 */
public List<ParsedTerm> filterElementsContainingUsefulInformation(final Map<String, List<ParsedTerm>> parsedTermsGroupedByField) {
    return parsedTermsGroupedByField.values()
            .stream()
            .flatMap(termsForField -> this.filterElementsContainingUsefulInformation(termsForField).stream())
            .collect(Collectors.toList());
}
@Test
void limitsToPositionalTermsIfTheyArePresent() {
    final Token token1 = new Token(1, "token1");
    token1.beginLine = 1;
    token1.beginColumn = 1;
    token1.endLine = 1;
    token1.endColumn = 6;
    final ParsedTerm positionalTerm1 = ParsedTerm.builder().field("field").value("nvmd").keyToken(ImmutableToken.create(token1)).build();
    final Token token2 = new Token(1, "token2");
    // Fixed copy-paste bug: these positions were previously assigned to token1 a second time,
    // leaving token2 without any position information.
    token2.beginLine = 1;
    token2.beginColumn = 11;
    token2.endLine = 1;
    token2.endColumn = 16;
    final ParsedTerm positionalTerm2 = ParsedTerm.builder().field("field").value("nvmd").keyToken(ImmutableToken.create(token2)).build();
    // Mix positional terms with position-less ones; only the positional terms must survive.
    final Map<String, List<ParsedTerm>> fieldTerms = Map.of(
        "field", List.of(
            positionalTerm1,
            ParsedTerm.create("field", "ah!"),
            positionalTerm2,
            ParsedTerm.create("field", "eh!")
        )
    );
    assertThat(toTest.filterElementsContainingUsefulInformation(fieldTerms))
        .hasSize(2)
        .contains(positionalTerm1, positionalTerm2);
}
/**
 * Asks the given plugin to verify connectivity for a package within a repository;
 * request/response bodies are (de)serialized by the converter matching the plugin's
 * resolved extension version.
 */
public Result checkConnectionToPackage(String pluginId, final com.thoughtworks.go.plugin.api.material.packagerepository.PackageConfiguration packageConfiguration, final RepositoryConfiguration repositoryConfiguration) {
    return pluginRequestHelper.submitRequest(pluginId, REQUEST_CHECK_PACKAGE_CONNECTION, new DefaultPluginInteractionCallback<>() {
        @Override
        public String requestBody(String resolvedExtensionVersion) {
            return messageConverter(resolvedExtensionVersion).requestMessageForCheckConnectionToPackage(packageConfiguration, repositoryConfiguration);
        }

        @Override
        public Result onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) {
            return messageConverter(resolvedExtensionVersion).responseMessageForCheckConnectionToPackage(responseBody);
        }
    });
}
@Test
public void shouldHandleExceptionDuringPluginInteraction() throws Exception {
    when(pluginManager.isPluginOfType(PACKAGE_MATERIAL_EXTENSION, PLUGIN_ID)).thenReturn(true);
    when(pluginManager.submitTo(eq(PLUGIN_ID), eq(PACKAGE_MATERIAL_EXTENSION), requestArgumentCaptor.capture())).thenThrow(new RuntimeException("exception-from-plugin"));
    try {
        extension.checkConnectionToPackage(PLUGIN_ID, packageConfiguration, repositoryConfiguration);
        // Fixed: the original test passed silently when no exception was thrown.
        // AssertionError is not an Exception, so it escapes the catch block below.
        throw new AssertionError("Expected the plugin exception to propagate");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("exception-from-plugin"));
    }
}
/**
 * Not supported by this tree-backed writer: a raw JSON snippet cannot be spliced
 * into the in-memory JsonElement tree.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public JsonWriter jsonValue(String value) throws IOException {
    throw new UnsupportedOperationException();
}
@Test
public void testJsonValue() throws IOException {
    // Raw JSON values are unsupported by the tree writer, even with an array open.
    JsonTreeWriter treeWriter = new JsonTreeWriter();
    treeWriter.beginArray();
    assertThrows(UnsupportedOperationException.class, () -> treeWriter.jsonValue("test"));
}
/**
 * Reads up to {@code requestedMaximumCount} entries starting at the journal's
 * current read offset.
 */
@Override
public List<JournalReadEntry> read(long requestedMaximumCount) {
    return readNext(nextReadOffset, requestedMaximumCount);
}
@Test
public void truncatedSegment() throws Exception {
    final Size segmentSize = Size.kilobytes(1L);
    final LocalKafkaJournal journal = new LocalKafkaJournal(journalDirectory.toPath(), scheduler, segmentSize, Duration.standardHours(1), Size.kilobytes(10L), Duration.standardDays(1), 1_000_000, Duration.standardMinutes(1), 100, new MetricRegistry(), serverStatus);

    // this will create two segments, each containing 25 messages
    createBulkChunks(journal, segmentSize, 2);

    final Path firstSegmentPath = Paths.get(journalDirectory.getAbsolutePath(), "messagejournal-0", "00000000000000000000.log");
    assertThat(firstSegmentPath).isRegularFile();

    // truncate the first segment so that the last message is cut off
    final File firstSegment = firstSegmentPath.toFile();
    try (FileChannel channel = new FileOutputStream(firstSegment, true).getChannel()) {
        channel.truncate(firstSegment.length() - 1);
    }

    // The truncated final message is dropped (24 instead of 25); reading then continues
    // cleanly into the intact second segment.
    final List<Journal.JournalReadEntry> entriesFromFirstSegment = journal.read(25);
    assertThat(entriesFromFirstSegment).hasSize(24);
    final List<Journal.JournalReadEntry> entriesFromSecondSegment = journal.read(25);
    assertThat(entriesFromSecondSegment).hasSize(25);
}
@Override
@SuppressWarnings("MagicNumber")
public void clear() {
    pos = 0;
    // Replace buffers that have grown beyond 8x the initial capacity with a fresh
    // buffer of exactly that bounded size; smaller (or absent) buffers are kept.
    final int maxRetainedSize = initialSize * 8;
    if (buffer != null && buffer.length > maxRetainedSize) {
        buffer = new byte[maxRetainedSize];
    }
    version = UNKNOWN;
    wanProtocolVersion = UNKNOWN;
}
@Test
public void testClear_bufferNull() {
    // clear() must tolerate a null backing buffer and must not allocate one.
    out.buffer = null;
    out.clear();
    assertNull(out.buffer);
}
/**
 * Builds CreateSourceProperties from WITH-clause literals, translating any underlying
 * ConfigException into a KsqlException whose message speaks of "property" rather
 * than "configuration".
 */
public static CreateSourceProperties from(final Map<String, Literal> literals) {
    try {
        return new CreateSourceProperties(literals, DurationParser::parse, false);
    } catch (final ConfigException e) {
        throw new KsqlException(e.getMessage().replace("configuration", "property"), e);
    }
}
@Test public void shouldFailIfInvalidConfig() { // When: final Exception e = assertThrows( KsqlException.class, () -> CreateSourceProperties.from( ImmutableMap.<String, Literal>builder() .putAll(MINIMUM_VALID_PROPS) .put("foo", new StringLiteral("bar")) .build() ) ); // Then: assertThat(e.getMessage(), containsString("Invalid config variable(s) in the WITH clause: FOO")); }
/**
 * Registers the given master with this sentinel via SENTINEL MONITOR,
 * using the master's name, address and quorum.
 */
@Override
public void monitor(RedisServer master) {
    connection.sync(RedisCommands.SENTINEL_MONITOR, master.getName(), master.getHost(), master.getPort().intValue(), master.getQuorum().intValue());
}
// Re-registers an existing master under a modified name (original name + ":").
// NOTE(review): the test has no assertions — it presumably only checks that monitor()
// completes without throwing; confirm whether a name containing ':' is meant to be valid.
@Test public void testMonitor() { Collection<RedisServer> masters = connection.masters(); RedisServer master = masters.iterator().next(); master.setName(master.getName() + ":"); connection.monitor(master); }
// Parses ls-style command-line flags into the instance fields. Note the precedences
// encoded here: -d (directory) suppresses -R (recursive); -t (mtime ordering) takes
// precedence over -S (size ordering). Defaults to the current directory when no path
// arguments are given, then builds the ordering comparator from the chosen flags.
@Override protected void processOptions(LinkedList<String> args) throws IOException { CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, OPTION_PATHONLY, OPTION_DIRECTORY, OPTION_HUMAN, OPTION_HIDENONPRINTABLE, OPTION_RECURSIVE, OPTION_REVERSE, OPTION_MTIME, OPTION_SIZE, OPTION_ATIME, OPTION_ECPOLICY); cf.parse(args); pathOnly = cf.getOpt(OPTION_PATHONLY); dirRecurse = !cf.getOpt(OPTION_DIRECTORY); setRecursive(cf.getOpt(OPTION_RECURSIVE) && dirRecurse); humanReadable = cf.getOpt(OPTION_HUMAN); hideNonPrintable = cf.getOpt(OPTION_HIDENONPRINTABLE); orderReverse = cf.getOpt(OPTION_REVERSE); orderTime = cf.getOpt(OPTION_MTIME); orderSize = !orderTime && cf.getOpt(OPTION_SIZE); useAtime = cf.getOpt(OPTION_ATIME); displayECPolicy = cf.getOpt(OPTION_ECPOLICY); if (args.isEmpty()) args.add(Path.CUR_DIR); initialiseOrderComparator(); }
// With "-t -u -r", listing must be ordered by access time (atime, via -u overriding
// mtime for -t) in reverse (ascending) order. mtime is deliberately set in a different
// order from atime so the test would fail if mtime were used by mistake.
@Test public void processPathDirOrderAtimeReverse() throws IOException { TestFile testfile01 = new TestFile("testDirectory", "testFile01"); TestFile testfile02 = new TestFile("testDirectory", "testFile02"); TestFile testfile03 = new TestFile("testDirectory", "testFile03"); TestFile testfile04 = new TestFile("testDirectory", "testFile04"); TestFile testfile05 = new TestFile("testDirectory", "testFile05"); TestFile testfile06 = new TestFile("testDirectory", "testFile06"); // set file atime in different order to file names testfile01.setAtime(NOW.getTime() + 10); testfile02.setAtime(NOW.getTime() + 30); testfile03.setAtime(NOW.getTime() + 20); testfile04.setAtime(NOW.getTime() + 60); testfile05.setAtime(NOW.getTime() + 50); testfile06.setAtime(NOW.getTime() + 40); // set file mtime in different order to atime testfile01.setMtime(NOW.getTime() + 60); testfile02.setMtime(NOW.getTime() + 50); testfile03.setMtime(NOW.getTime() + 20); testfile04.setMtime(NOW.getTime() + 30); testfile05.setMtime(NOW.getTime() + 10); testfile06.setMtime(NOW.getTime() + 40); TestFile testDir = new TestFile("", "testDirectory"); testDir.setIsDir(true); testDir.addContents(testfile01, testfile02, testfile03, testfile04, testfile05, testfile06); LinkedList<PathData> pathData = new LinkedList<PathData>(); pathData.add(testDir.getPathData()); PrintStream out = mock(PrintStream.class); Ls ls = new Ls(); ls.out = out; LinkedList<String> options = new LinkedList<String>(); options.add("-t"); options.add("-u"); options.add("-r"); ls.processOptions(options); String lineFormat = TestFile.computeLineFormat(pathData); ls.processArguments(pathData); InOrder inOrder = inOrder(out); inOrder.verify(out).println("Found 6 items"); inOrder.verify(out).println(testfile01.formatLineAtime(lineFormat)); inOrder.verify(out).println(testfile03.formatLineAtime(lineFormat)); inOrder.verify(out).println(testfile02.formatLineAtime(lineFormat)); inOrder.verify(out).println(testfile06.formatLineAtime(lineFormat)); 
inOrder.verify(out).println(testfile05.formatLineAtime(lineFormat)); inOrder.verify(out).println(testfile04.formatLineAtime(lineFormat)); verifyNoMoreInteractions(out); }
// Formats the elapsed time between "then" and now as a human-readable string.
// A null argument is treated as the current instant (yielding a "moments" style result).
public String format(Date then) { if (then == null) then = now(); Duration d = approximateDuration(then); return format(d); }
// A date three weeks in the past must render as "3 weeks ago".
@Test public void testWeeksAgo() throws Exception { PrettyTime t = new PrettyTime(now); Assert.assertEquals("3 weeks ago", t.format(now.minusWeeks(3))); }
// Hadoop FileSystem create() overload without an explicit permission: derives the file
// mode by applying the configured umask (SECURITY_AUTHORIZATION_PERMISSION_UMASK) to the
// default mode, then delegates to the permission-taking create() overload.
@Override public FSDataOutputStream create(Path path, boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress) throws IOException { String confUmask = mAlluxioConf.getString(PropertyKey.SECURITY_AUTHORIZATION_PERMISSION_UMASK); Mode mode = ModeUtils.applyFileUMask(Mode.defaults(), confUmask); return this.create(path, new FsPermission(mode.toShort()), overwrite, bufferSize, replication, blockSize, progress); }
// Smoke test: FileSystem initialization with a mocked UGI user must not throw.
@Test public void initializeWithCustomizedUgi() throws Exception { mockUserGroupInformation("testuser"); final org.apache.hadoop.conf.Configuration conf = getConf(); URI uri = URI.create(Constants.HEADER + "host:1"); org.apache.hadoop.fs.FileSystem.get(uri, conf); // FileSystem.create would have thrown an exception if the initialization failed. }
// PUTs the connector config to Kafka Connect's per-plugin validate endpoint and parses
// the ConfigInfos response. The config is masked (QueryMask) before appearing in any log
// line so secrets are never logged; the unmasked config is what is actually sent.
// Any failure (serialization, transport, handler) is wrapped in KsqlServerException.
@Override public ConnectResponse<ConfigInfos> validate( final String plugin, final Map<String, String> config) { try { final Map<String, String> maskedConfig = QueryMask.getMaskedConnectConfig(config); LOG.debug("Issuing validate request to Kafka Connect at URI {} for plugin {} and config {}", connectUri, plugin, maskedConfig); final ConnectResponse<ConfigInfos> connectResponse = withRetries(() -> Request .put(resolveUri(String.format(VALIDATE_CONNECTOR, plugin))) .setHeaders(requestHeaders) .responseTimeout(Timeout.ofMilliseconds(requestTimeoutMs)) .connectTimeout(Timeout.ofMilliseconds(requestTimeoutMs)) .bodyString(MAPPER.writeValueAsString(config), ContentType.APPLICATION_JSON) .execute(httpClient) .handleResponse( createHandler(HttpStatus.SC_OK, new TypeReference<ConfigInfos>() {}, Function.identity()))); connectResponse.error() .ifPresent(error -> LOG.warn("Did not VALIDATE connector configuration for plugin {} and config {}: {}", plugin, maskedConfig, error)); return connectResponse; } catch (final Exception e) { throw new KsqlServerException(e); } }
// Stubs the Connect validate endpoint with WireMock (auth header required) and checks
// the client deserializes the ConfigInfos body and reports no error.
@Test public void testValidate() throws JsonProcessingException { // Given: final String plugin = SAMPLE_PLUGIN.getClassName(); final String url = String.format(pathPrefix + "/connector-plugins/%s/config/validate", plugin); final ConfigInfos body = new ConfigInfos( plugin, 1, ImmutableList.of("Common"), ImmutableList.of(new ConfigInfo( new ConfigValueInfo( "file", ImmutableList.of( "Missing required configuration \"file\" which has no default value.")) ))); WireMock.stubFor( WireMock.put(WireMock.urlEqualTo(url)) .withHeader(AUTHORIZATION.toString(), new EqualToPattern(AUTH_HEADER)) .willReturn(WireMock.aResponse() .withStatus(HttpStatus.SC_OK) .withBody(MAPPER.writeValueAsString(body))) ); // When: final Map<String, String> config = ImmutableMap.of( "connector.class", plugin, "tasks.max", "1", "topics", "test-topic" ); final ConnectResponse<ConfigInfos> response = client.validate(plugin, config); // Then: assertThat(response.datum(), OptionalMatchers.of(is(body))); assertThat("Expected no error!", !response.error().isPresent()); }
// Convenience wrapper: relative name of item p within group g using display names
// (the boolean flag selects display-name mode in getRelativeNameFrom).
@Nullable public static String getRelativeDisplayNameFrom(@CheckForNull Item p, @CheckForNull ItemGroup g) { return getRelativeNameFrom(p, g, true); }
// With a null group, the relative display name falls back to the item's full display name.
@Test public void testGetRelativeDisplayName() { Item i = mock(Item.class); when(i.getName()).thenReturn("jobName"); when(i.getFullDisplayName()).thenReturn("displayName"); assertEquals("displayName", Functions.getRelativeDisplayNameFrom(i, null)); }
/**
 * Merges this TableStats with another, summing row counts and merging column stats.
 *
 * <p>If either side's row count is unknown (negative), the merged result is
 * {@link TableStats#UNKNOWN}.
 *
 * <p>Fix: the original re-checked {@code this.rowCount >= 0 && other.rowCount >= 0} in a
 * ternary after the guard above had already returned UNKNOWN for any negative count —
 * the condition was always true, so the ternary was dead code and has been removed.
 *
 * @param other the stats to merge into this one (must not be null)
 * @param partitionKeys partition-key column names, forwarded to column-stat merging; may be null
 * @return the merged stats, or {@code TableStats.UNKNOWN} if either row count is unknown
 */
public TableStats merge(TableStats other, @Nullable Set<String> partitionKeys) {
    if (this.rowCount < 0 || other.rowCount < 0) {
        return TableStats.UNKNOWN;
    }
    // Both row counts are known (>= 0) here, so the merged count is simply their sum.
    return new TableStats(this.rowCount + other.rowCount, mergeColumnStates(other, partitionKeys));
}
// Any negative row count on either side (or both) must make merge() return UNKNOWN.
@Test void testMergeUnknownRowCount() { TableStats stats1 = new TableStats(-1, new HashMap<>()); TableStats stats2 = new TableStats(32, new HashMap<>()); assertThat(stats1.merge(stats2, null)).isEqualTo(TableStats.UNKNOWN); stats1 = new TableStats(-1, new HashMap<>()); stats2 = new TableStats(-1, new HashMap<>()); assertThat(stats1.merge(stats2, null)).isEqualTo(TableStats.UNKNOWN); stats1 = new TableStats(-3, new HashMap<>()); stats2 = new TableStats(-2, new HashMap<>()); assertThat(stats1.merge(stats2, null)).isEqualTo(TableStats.UNKNOWN); }
/**
 * Creates a fresh JAXP {@link XPath} instance wired with the supplied namespace context.
 *
 * @param namespaceContext the namespace context to install; the default JAXP
 *     implementation rejects {@code null} with a {@code NullPointerException}
 * @return a new XPath configured with the given namespace context
 */
public static XPath buildXPath(NamespaceContext namespaceContext) {
    final XPath result = XPathFactory.newInstance().newXPath();
    result.setNamespaceContext(namespaceContext);
    return result;
}
// A null namespace context must be rejected (the JAXP default implementation throws NPE
// from setNamespaceContext).
@Test public void testBuildXPathNullPointerExpected() { try { XmlHelper.buildXPath(null); fail("NullPointerException expected"); } catch (NullPointerException e) { // Expected. } }
// Assembles the full Elasticsearch settings map by delegating to the per-concern
// configure* helpers, then logs the resolved HTTP and TCP listen endpoints.
// Returns a plain mutable HashMap despite the "builder" name.
public Map<String, String> build() { Map<String, String> builder = new HashMap<>(); configureFileSystem(builder); configureNetwork(builder); configureCluster(builder); configureSecurity(builder); configureOthers(builder); LOGGER.info("Elasticsearch listening on [HTTP: {}:{}, TCP: {}:{}]", builder.get(ES_HTTP_HOST_KEY), builder.get(ES_HTTP_PORT_KEY), builder.get(ES_TRANSPORT_HOST_KEY), builder.get(ES_TRANSPORT_PORT_KEY)); return builder; }
// Regardless of cluster mode, the configured port must surface as "http.port" with the
// HTTP host bound to loopback.
@Test @UseDataProvider("clusterEnabledOrNot") public void enable_http_connector_on_specified_port(boolean clusterEnabled) throws Exception { String port = "" + 49150; Props props = minProps(clusterEnabled, null, port); Map<String, String> settings = new EsSettings(props, new EsInstallation(props), System2.INSTANCE).build(); assertThat(settings) .containsEntry("http.port", port) .containsEntry("http.host", "127.0.0.1"); }
/**
 * Builds a JAAS configuration entry of the form
 * {@code "<moduleName> required key1=\"value1\" key2=\"value2\";"}.
 *
 * <p>Keys and the module name must not contain {@code '='} or {@code ';'} because those
 * characters would corrupt the JAAS entry syntax; values are emitted verbatim inside
 * double quotes.
 *
 * <p>Fix: the module-name validation previously lived inside the options loop, so an
 * empty options map skipped validation entirely and could produce a malformed entry
 * such as {@code " required ;"}. The check is now performed once, up front.
 *
 * @param moduleName the login module name; must be non-empty and free of '=' and ';'
 * @param options    module options; keys must be free of '=' and ';', no nulls allowed
 * @return the formatted JAAS configuration entry
 * @throws IllegalArgumentException if the module name or any key is invalid
 * @throws NullPointerException if any key or value is null
 */
public static String jaasConfig(String moduleName, Map<String, String> options) {
    // Validate the module name exactly once, before touching the options.
    if (moduleName.isEmpty() || moduleName.contains(";") || moduleName.contains("=")) {
        throw new IllegalArgumentException("module name must be not empty and must not contain '=' or ';'");
    }
    StringJoiner joiner = new StringJoiner(" ");
    for (Entry<String, String> entry : options.entrySet()) {
        String key = Objects.requireNonNull(entry.getKey());
        String value = Objects.requireNonNull(entry.getValue());
        if (key.contains("=") || key.contains(";")) {
            throw new IllegalArgumentException("Keys must not contain '=' or ';'");
        }
        joiner.add(key + "=\"" + value + "\"");
    }
    return moduleName + " required " + joiner + ";";
}
// Values (unlike keys and the module name) MAY contain ';' — they are quoted verbatim.
@Test public void testValueContainsSemicolon() { Map<String, String> options = new HashMap<>(); options.put("key1", ";"); String moduleName = "Module"; String expected = "Module required key1=\";\";"; assertEquals(expected, AuthenticationUtils.jaasConfig(moduleName, options)); }
// Begins a tolerant comparison: returns a continuation whose of(expected) asserts that
// the actual float is within the given tolerance of expected. The actual value must be
// non-null and the tolerance is validated (checkTolerance) before comparing; on failure
// the assertion message reports the expected value and the tolerance it fell outside of.
public TolerantFloatComparison isWithin(float tolerance) { return new TolerantFloatComparison() { @Override public void of(float expected) { Float actual = FloatSubject.this.actual; checkNotNull( actual, "actual value cannot be null. tolerance=%s expected=%s", tolerance, expected); checkTolerance(tolerance); if (!equalWithinTolerance(actual, expected, tolerance)) { failWithoutActual( fact("expected", floatToString(expected)), butWas(), fact("outside tolerance", floatToString(tolerance))); } } }; }
// +0.0f and -0.0f must compare as within tolerance of each other, even at tolerance 0.
@Test public void isWithinOfZero() { assertThat(+0.0f).isWithin(0.00001f).of(+0.0f); assertThat(+0.0f).isWithin(0.00001f).of(-0.0f); assertThat(-0.0f).isWithin(0.00001f).of(+0.0f); assertThat(-0.0f).isWithin(0.00001f).of(-0.0f); assertThat(+0.0f).isWithin(0.0f).of(+0.0f); assertThat(+0.0f).isWithin(0.0f).of(-0.0f); assertThat(-0.0f).isWithin(0.0f).of(+0.0f); assertThat(-0.0f).isWithin(0.0f).of(-0.0f); }
// Returns the source text between the two character offsets (start inclusive,
// end exclusive — StringBuilder.substring semantics).
public String getFragmentByChars(int startPosition, int endPosition) { return sourceBuilder.substring(startPosition, endPosition); }
// Offsets [3, 8) of the fixture source must yield "Lorem" (end offset is exclusive).
@Test public void getFragmentByChars() { assertThat(sourceFile.getFragmentByChars(3, 8)).isEqualTo("Lorem"); }
// Pretty-prints JSON using the default indentation separator (TAB_SEPARATOR).
public static String prettyJSON(String json) { return prettyJSON(json, TAB_SEPARATOR); }
// An empty JSON array pretty-prints as "[\n]" — brackets on separate lines, no content.
@Test public void testRenderResultSimpleArray() throws Exception { assertEquals("[\n]", prettyJSON("[]")); }
// Static factory for an empty Values transform of the given element type.
public static <V> Values<V> create() { return new Values<>(); }
// A freshly created Values transform must report "Values" as its name.
@Test public void testValuesGetName() { assertEquals("Values", Values.<Integer>create().getName()); }
// Inserts or updates a key/value pair in this open-chained hash index. Walks the bucket's
// chain: if an entry with the same hash and key exists, its value is replaced in place;
// otherwise a new entry is appended to the chain tail (or becomes the bucket head when
// the bucket was empty). resize() is invoked after every insertion to grow the table if
// its load threshold is exceeded.
public void put( long key, Long value ) throws KettleValueException { int hashCode = generateHashCode( key ); int indexPointer = indexFor( hashCode, index.length ); LongHashIndexEntry check = index[indexPointer]; LongHashIndexEntry previousCheck = null; while ( check != null ) { // If there is an identical entry in there, we replace the entry // And then we just return... // if ( check.hashCode == hashCode && check.equalsKey( key ) ) { check.value = value; return; } previousCheck = check; check = check.nextEntry; } // Don't forget to link to the previous check entry if there was any... // if ( previousCheck != null ) { previousCheck.nextEntry = new LongHashIndexEntry( hashCode, key, value, null ); } else { index[indexPointer] = new LongHashIndexEntry( hashCode, key, value, null ); } // If required, resize the table... // resize(); }
// Regression test: inserting this fixed set of keys (which includes duplicates such as
// 7568, 6730, 7707, 2382) must complete within the timeout — guarding against an endless
// loop in put()/resize() chain handling.
@Test( timeout = 2000 ) public void testNoEndlessLoop() throws KettleValueException { long[] inputData = { 3034, 2085, 1912, 9367, 8442, 783, 2839, 8610, 5152, 7388, 7511, 1251, 3043, 3889, 9543, 9353, 2241, 5416, 2127, 3513, 2171, 8633, 5594, 7228, 2225, 581, 6524, 7171, 5928, 5710, 804, 9535, 3334, 3383, 3113, 248, 4801, 8927, 1815, 2382, 7043, 2962, 8362, 8353, 5883, 4489, 4554, 218, 50, 200, 9519, 6615, 8264, 5797, 3586, 3185, 299, 2334, 1944, 4953, 9829, 5852, 8424, 1046, 918, 3110, 3145, 7828, 2672, 3890, 8661, 8405, 2653, 1514, 856, 8381, 523, 8343, 6566, 9127, 9679, 8667, 3952, 8310, 164, 8947, 822, 778, 7415, 6463, 6576, 849, 4308, 7044, 7549, 6609, 2494, 1594, 4664, 7937, 2788, 6395, 6875, 6480, 7568, 6789, 2864, 5432, 4163, 3164, 9755, 4898, 5639, 5425, 3291, 6811, 149, 8243, 1423, 3869, 1158, 590, 6376, 5665, 629, 7425, 4414, 9882, 6844, 5941, 4815, 7917, 7673, 4153, 4266, 6673, 9082, 8574, 6424, 8245, 8460, 9875, 6946, 9506, 7479, 2138, 2091, 4336, 5657, 77, 2269, 3682, 7421, 2671, 1413, 4430, 2762, 1174, 9153, 4963, 1959, 3165, 7703, 7539, 9833, 5661, 1189, 839, 1310, 382, 9538, 5953, 2448, 2368, 5385, 1847, 5919, 6954, 9685, 7568, 6915, 4009, 3990, 4742, 5196, 126, 693, 5448, 2405, 1853, 8109, 8198, 6761, 350, 4825, 3288, 9778, 7801, 7950, 2754, 4137, 5682, 6611, 858, 9058, 9616, 5261, 5946, 7339, 5296, 1818, 2591, 2949, 147, 4511, 2431, 7376, 8260, 5719, 264, 5649, 1671, 6014, 9714, 5349, 9824, 8063, 9646, 1203, 5800, 9024, 6730, 1645, 7332, 9524, 4672, 2984, 5491, 8, 8492, 6134, 8895, 9105, 6947, 4917, 9552, 3332, 8117, 3949, 9464, 6730, 7707, 5456, 288, 4462, 6444, 9706, 4575, 8890, 7367, 676, 2974, 2721, 2209, 9692, 6968, 9708, 6959, 2382, 7975, 9866, 2260, 7707, 8916, 1811, 5375, 2490, 7478, 1915, 5538, 4217, 1125, 14, 4033, 8474, 8433, 4315, 4426, 9201, 3205 }; LongHashIndex index = new LongHashIndex(); for ( long currentElement : inputData ) { index.put( currentElement, currentElement ); } }
// REST endpoint: paged query of alert plugin instances. Validates page parameters,
// escapes the search value to prevent injection into downstream queries, then delegates
// to the service layer and wraps the page in a success Result.
@Operation(summary = "queryAlertPluginInstanceListPaging", description = "QUERY_ALERT_PLUGIN_INSTANCE_LIST_PAGING_NOTES") @Parameters({ @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)), @Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")), @Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "20")) }) @GetMapping() @ResponseStatus(HttpStatus.OK) @ApiException(LIST_PAGING_ALERT_PLUGIN_INSTANCE_ERROR) public Result<PageInfo<AlertPluginInstanceVO>> listPaging(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { checkPageParams(pageNo, pageSize); searchVal = ParameterUtils.handleEscapes(searchVal); PageInfo<AlertPluginInstanceVO> alertPluginInstanceVOPageInfo = alertPluginInstanceService.listPaging(loginUser, searchVal, pageNo, pageSize); return Result.success(alertPluginInstanceVOPageInfo); }
// MockMvc round-trip of the paging endpoint: stubs the service, issues the GET with
// session header and query params, and compares the returned Result code/message
// against the expected success response.
@Test public void testListPaging() throws Exception { // Given final MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("pluginDefineId", String.valueOf(pluginDefineId)); paramsMap.add("searchVal", "searchVal"); paramsMap.add("pageNo", String.valueOf(1)); paramsMap.add("pageSize", String.valueOf(10)); when(alertPluginInstanceService.listPaging(eq(user), eq("searchVal"), eq(1), eq(10))) .thenReturn(PageInfo.of(1, 10)); // When final MvcResult mvcResult = mockMvc.perform(get("/alert-plugin-instances") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); // Then final Result actualResponseContent = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); assertThat(actualResponseContent.getMsg()).isEqualTo(expectResponseContent.getMsg()); assertThat(actualResponseContent.getCode()).isEqualTo(expectResponseContent.getCode()); }
// Interceptor hook run after a RocketMQ push consumer starts: caches the consumer,
// records its subscribed topics on the wrapper, then either delegates to a custom
// handler (if configured, returning early) or applies any cached
// prohibit-consumption configuration to the consumer.
@Override public ExecuteContext after(ExecuteContext context) { DefaultMQPushConsumer pushConsumer = (DefaultMQPushConsumer) context.getObject(); RocketMqPushConsumerController.cachePushConsumer(pushConsumer); DefaultMqPushConsumerWrapper pushConsumerWrapper = RocketMqPushConsumerController.getPushConsumerWrapper(pushConsumer); if (pushConsumerWrapper != null) { pushConsumerWrapper.setSubscribedTopics(pushConsumerWrapper.getPushConsumerImpl() .getSubscriptionInner().keySet()); } if (handler != null) { handler.doAfter(context); return context; } // Consumer activation will execute Prohibition of consumption on consumers based on the cached prohibition of // consumption configuration disablePushConsumption(pushConsumerWrapper); return context; }
// After interception, the wrapper must reflect the consumer's subscribed topics.
@Test public void testAfter() { interceptor.after(context); Assert.assertTrue(pushConsumerWrapper.getSubscribedTopics().contains("test-topic")); }
// Reads a SARIF 2.1.0 report from disk. A custom problem handler turns an unparseable
// Version enum into UnsupportedSarifVersionException (any other instantiation problem is
// left to Jackson via NOT_HANDLED). All failures are rethrown as IllegalStateException
// with a message distinguishing unsupported version, JSON syntax error, and I/O error.
@Override public SarifSchema210 deserialize(Path reportPath) { try { return mapper .enable(JsonParser.Feature.INCLUDE_SOURCE_IN_LOCATION) .addHandler(new DeserializationProblemHandler() { @Override public Object handleInstantiationProblem(DeserializationContext ctxt, Class<?> instClass, Object argument, Throwable t) throws IOException { if (!instClass.equals(SarifSchema210.Version.class)) { return NOT_HANDLED; } throw new UnsupportedSarifVersionException(format(UNSUPPORTED_VERSION_MESSAGE_TEMPLATE, argument), t); } }) .readValue(reportPath.toFile(), SarifSchema210.class); } catch (UnsupportedSarifVersionException e) { throw new IllegalStateException(e.getMessage(), e); } catch (JsonMappingException | JsonParseException e) { throw new IllegalStateException(format(SARIF_JSON_SYNTAX_ERROR, reportPath), e); } catch (IOException e) { throw new IllegalStateException(format(SARIF_REPORT_ERROR, reportPath), e); } }
// A nonexistent report path must surface as IllegalStateException with the
// read-failure message (the IOException branch of deserialize()).
@Test public void deserialize_shouldFail_whenFileCantBeFound() { String file = "wrongPathToFile"; Path sarif = Paths.get(file); assertThatThrownBy(() -> serializer.deserialize(sarif)) .isInstanceOf(IllegalStateException.class) .hasMessage("Failed to read SARIF report at 'wrongPathToFile'"); }
// Disables a registered plugin under the read lock: returns false if the plugin is
// unknown or already disabled; otherwise removes it from the quick-lookup indexes
// (it stays in registeredPlugins, so getAllPlugins still reports it) and returns true.
@Override public boolean disable(String pluginId) { return mainLock.applyWithReadLock(() -> { ThreadPoolPlugin plugin = registeredPlugins.get(pluginId); if (Objects.isNull(plugin) || !disabledPlugins.add(pluginId)) { return false; } forQuickIndexes(quickIndex -> quickIndex.removeIfPossible(plugin)); return true; }); }
// disable() is false for an unknown or already-disabled plugin, true exactly once after
// registration; a disabled plugin leaves the quick indexes but remains in getAllPlugins.
@Test public void testDisable() { ThreadPoolPlugin plugin = new TestExecuteAwarePlugin(); Assert.assertFalse(manager.disable(plugin.getId())); manager.register(plugin); Assert.assertTrue(manager.disable(plugin.getId())); Assert.assertFalse(manager.disable(plugin.getId())); Assert.assertTrue(manager.getExecuteAwarePluginList().isEmpty()); Assert.assertEquals(1, manager.getAllPlugins().size()); }
// Deep-clones an arbitrary object, choosing a strategy by runtime type:
//  - String: returned as-is (immutable).
//  - Collection/Map whose first non-null element/entry is NOT Serializable: round-trip
//    through Jackson with an explicit parametric JavaType (element types inferred from
//    that first element — NOTE(review): assumes homogeneous contents).
//  - JsonNode: Jackson deepCopy.
//  - Serializable: Java serialization via SerializationHelper; if the object claims
//    Serializable but its graph is not, the SerializationException is swallowed on
//    purpose and we fall through to JSON cloning.
//  - Anything else: jsonClone fallback.
@Override public <T> T clone(T object) { if (object instanceof String) { return object; } else if (object instanceof Collection) { Object firstElement = findFirstNonNullElement((Collection) object); if (firstElement != null && !(firstElement instanceof Serializable)) { JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass()); return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type); } } else if (object instanceof Map) { Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object); if (firstEntry != null) { Object key = firstEntry.getKey(); Object value = firstEntry.getValue(); if (!(key instanceof Serializable) || !(value instanceof Serializable)) { JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass()); return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type); } } } else if (object instanceof JsonNode) { return (T) ((JsonNode) object).deepCopy(); } if (object instanceof Serializable) { try { return (T) SerializationHelper.clone((Serializable) object); } catch (SerializationException e) { //it is possible that object itself implements java.io.Serializable, but underlying structure does not //in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization } } return jsonClone(object); }
// Exercises the fall-through path: the outer object is Serializable but contains
// non-serializable nested values, so Java serialization fails and JSON cloning must
// produce an equal-but-distinct copy.
@Test public void should_clone_serializable_complex_object_with_non_serializable_nested_object() { Map<String, List<NonSerializableObject>> map = new LinkedHashMap<>(); map.put("key1", Lists.newArrayList(new NonSerializableObject("name1"))); map.put("key2", Lists.newArrayList( new NonSerializableObject("name2"), new NonSerializableObject("name3") )); Object original = new SerializableComplexObjectWithNonSerializableNestedObject(map); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
// Imports service definitions from a repository URL. HTTP(S) URLs are downloaded (with
// optional secret and SSL-validation bypass) and their response headers kept to drive
// relative-reference URL resolution; any other URL is treated as a local file path.
// Builds a ReferenceResolver anchored at the URL so secondary artifacts referenced
// relatively can be fetched, then delegates to the file-based overload.
public List<Service> importServiceDefinition(String repositoryUrl, Secret repositorySecret, boolean disableSSLValidation, boolean mainArtifact) throws MockRepositoryImportException { log.info("Importing service definitions from {}", repositoryUrl); File localFile = null; Map<String, List<String>> fileProperties = null; if (repositoryUrl.startsWith("http")) { try { HTTPDownloader.FileAndHeaders fileAndHeaders = HTTPDownloader .handleHTTPDownloadToFileAndHeaders(repositoryUrl, repositorySecret, disableSSLValidation); localFile = fileAndHeaders.getLocalFile(); fileProperties = fileAndHeaders.getResponseHeaders(); } catch (IOException ioe) { throw new MockRepositoryImportException(repositoryUrl + " cannot be downloaded", ioe); } } else { // Simply build localFile from repository url. localFile = new File(repositoryUrl); } RelativeReferenceURLBuilder referenceURLBuilder = RelativeReferenceURLBuilderFactory .getRelativeReferenceURLBuilder(fileProperties); String artifactName = referenceURLBuilder.getFileName(repositoryUrl, fileProperties); // Initialize a reference resolver to the folder of this repositoryUrl. ReferenceResolver referenceResolver = new ReferenceResolver(repositoryUrl, repositorySecret, disableSSLValidation, referenceURLBuilder); return importServiceDefinition(localFile, referenceResolver, new ArtifactInfo(artifactName, mainArtifact)); }
// Imports a GraphQL schema as the main artifact (no requests/responses yet), then a
// secondary examples artifact that attaches requests/responses to the same service, and
// finally checks dispatch criteria extracted for the "film" operation.
// NOTE(review): the second `assertEquals(1, requests.size())` after loading the examples
// looks like a copy-paste slip — it likely should assert `responses.size()`.
@Test void testImportServiceDefinitionMainGraphQLAndSecondaryExamples() { List<Service> services = null; try { File artifactFile = new File("target/test-classes/io/github/microcks/util/graphql/films.graphql"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("films.graphql", true)); } catch (MockRepositoryImportException mrie) { fail("No MockRepositoryImportException should have be thrown"); } assertNotNull(services); assertEquals(1, services.size()); // Inspect Service own attributes. Service importedSvc = services.get(0); assertEquals("Movie Graph API", importedSvc.getName()); assertEquals("1.0", importedSvc.getVersion()); assertEquals("films.graphql", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(4, importedSvc.getOperations().size()); // Inspect and check requests. List<Request> requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(0, requests.size()); // Inspect and check responses. List<Response> responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(0, responses.size()); try { File artifactFile = new File("target/test-classes/io/github/microcks/util/graphql/films-1.0-examples.yml"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("films-1.0-examples.yml", false)); } catch (MockRepositoryImportException mrie) { fail("No MockRepositoryImportException should have be thrown"); } // Inspect Service own attributes. importedSvc = services.get(0); assertEquals("Movie Graph API", importedSvc.getName()); assertEquals("1.0", importedSvc.getVersion()); assertEquals("films.graphql", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(4, importedSvc.getOperations().size()); // Inspect and check requests. 
requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(1, requests.size()); for (Request request : requests) { assertEquals("films-1.0-examples.yml", request.getSourceArtifact()); } // Inspect and check responses. responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(1, requests.size()); for (Response response : responses) { assertEquals("films-1.0-examples.yml", response.getSourceArtifact()); } // Check that dispatch criteria have been correctly extracted. for (Operation operation : importedSvc.getOperations()) { if ("film".equals(operation.getName())) { requests = requestRepository.findByOperationId(IdBuilder.buildOperationId(importedSvc, operation)); responses = responseRepository.findByOperationId(IdBuilder.buildOperationId(importedSvc, operation)); assertEquals(2, requests.size()); assertEquals(2, responses.size()); for (Response response : responses) { assertTrue("?id=ZmlsbXM6MQ==".equals(response.getDispatchCriteria()) || "?id=ZmlsbXM6Mg==".equals(response.getDispatchCriteria())); } } } }
// Deletes a data source config by id, first validating existence (the validator is
// expected to raise the not-exists service exception when the id is unknown).
@Override public void deleteDataSourceConfig(Long id) { // 校验存在 (validate existence) validateDataSourceConfigExists(id); // 删除 (delete) dataSourceConfigMapper.deleteById(id); }
// Deleting a random, nonexistent id must raise DATA_SOURCE_CONFIG_NOT_EXISTS.
@Test public void testDeleteDataSourceConfig_notExists() { // prepare a random id that does not exist Long id = randomLongId(); // invoke and assert the expected service exception assertServiceException(() -> dataSourceConfigService.deleteDataSourceConfig(id), DATA_SOURCE_CONFIG_NOT_EXISTS); }
// Maps a Java reflection Type to a KSQL ParamType using the default Java-to-arg-type table.
public static ParamType getSchemaFromType(final Type type) { return getSchemaFromType(type, JAVA_TO_ARG_TYPE); }
// java.util.concurrent.TimeUnit must map to the INTERVALUNIT param type.
@Test public void shouldGetIntervalUnitTypeForTimeUnitClass() { assertThat( UdfUtil.getSchemaFromType(TimeUnit.class), is(ParamTypes.INTERVALUNIT) ); }
// Removes the configured key from the user config properties and persists the result.
// Removing a key that is absent is a no-op; always returns exit code 0.
@Override public Integer doCall() throws Exception { CommandLineHelper.loadProperties(properties -> { properties.remove(key); CommandLineHelper.storeProperties(properties, printer()); }); return 0; }
// Unsetting a key that does not exist must print nothing and leave the other
// properties untouched.
@Test public void shouldHandleMissingKeyToUnset() throws Exception { UserConfigHelper.createUserConfig(""" camel-version=latest kamelets-version=greatest """); ConfigUnset command = new ConfigUnset(new CamelJBangMain().withPrinter(printer)); command.key = "foo"; command.doCall(); Assertions.assertEquals("", printer.getOutput()); CommandLineHelper.loadProperties(properties -> { Assertions.assertEquals(2, properties.size()); Assertions.assertEquals("latest", properties.get("camel-version")); Assertions.assertEquals("greatest", properties.get("kamelets-version")); }); }
// Handles an OffsetCommit request for the group coordinator. Validates the commit, and
// for classic-protocol groups in Stable/PreparingRebalance keeps the member session
// alive by rescheduling its heartbeat (classic-protocol commits double as liveness).
// Per partition: oversized metadata yields OFFSET_METADATA_TOO_LARGE for that partition
// only; otherwise an offset-commit record is appended with an expiry computed from the
// request's retention time. Records the commit count metric when any record was written
// and returns both the records to persist and the per-partition response.
public CoordinatorResult<OffsetCommitResponseData, CoordinatorRecord> commitOffset( RequestContext context, OffsetCommitRequestData request ) throws ApiException { Group group = validateOffsetCommit(context, request); // In the old consumer group protocol, the offset commits maintain the session if // the group is in Stable or PreparingRebalance state. if (group.type() == Group.GroupType.CLASSIC) { ClassicGroup classicGroup = (ClassicGroup) group; if (classicGroup.isInState(ClassicGroupState.STABLE) || classicGroup.isInState(ClassicGroupState.PREPARING_REBALANCE)) { groupMetadataManager.rescheduleClassicGroupMemberHeartbeat( classicGroup, classicGroup.member(request.memberId()) ); } } final OffsetCommitResponseData response = new OffsetCommitResponseData(); final List<CoordinatorRecord> records = new ArrayList<>(); final long currentTimeMs = time.milliseconds(); final OptionalLong expireTimestampMs = expireTimestampMs(request.retentionTimeMs(), currentTimeMs); request.topics().forEach(topic -> { final OffsetCommitResponseTopic topicResponse = new OffsetCommitResponseTopic().setName(topic.name()); response.topics().add(topicResponse); topic.partitions().forEach(partition -> { if (isMetadataInvalid(partition.committedMetadata())) { topicResponse.partitions().add(new OffsetCommitResponsePartition() .setPartitionIndex(partition.partitionIndex()) .setErrorCode(Errors.OFFSET_METADATA_TOO_LARGE.code())); } else { log.debug("[GroupId {}] Committing offsets {} for partition {}-{} from member {} with leader epoch {}.", request.groupId(), partition.committedOffset(), topic.name(), partition.partitionIndex(), request.memberId(), partition.committedLeaderEpoch()); topicResponse.partitions().add(new OffsetCommitResponsePartition() .setPartitionIndex(partition.partitionIndex()) .setErrorCode(Errors.NONE.code())); final OffsetAndMetadata offsetAndMetadata = OffsetAndMetadata.fromRequest( partition, currentTimeMs, expireTimestampMs ); 
records.add(GroupCoordinatorRecordHelpers.newOffsetCommitRecord( request.groupId(), topic.name(), partition.partitionIndex(), offsetAndMetadata, metadataImage.features().metadataVersion() )); } }); }); if (!records.isEmpty()) { metrics.record(GroupCoordinatorMetrics.OFFSET_COMMITS_SENSOR_NAME, records.size()); } return new CoordinatorResult<>(records, response); }
// Committing offsets to a classic group in DEAD state must be rejected with
// CoordinatorNotAvailableException (raised by validateOffsetCommit).
@Test public void testGenericGroupOffsetCommitWithDeadGroup() { OffsetMetadataManagerTestContext context = new OffsetMetadataManagerTestContext.Builder().build(); // Create a dead group. ClassicGroup group = context.groupMetadataManager.getOrMaybeCreateClassicGroup( "foo", true ); group.transitionTo(ClassicGroupState.DEAD); // Verify that the request is rejected with the correct exception. assertThrows(CoordinatorNotAvailableException.class, () -> context.commitOffset( new OffsetCommitRequestData() .setGroupId("foo") .setMemberId("member") .setGenerationIdOrMemberEpoch(10) .setTopics(Collections.singletonList( new OffsetCommitRequestData.OffsetCommitRequestTopic() .setName("bar") .setPartitions(Collections.singletonList( new OffsetCommitRequestData.OffsetCommitRequestPartition() .setPartitionIndex(0) .setCommittedOffset(100L) )) )) ) ); }
// Netty ChannelInitializer hook: appends the single configured handler to the
// new channel's pipeline.
@Override protected void initChannel(Channel channel) throws Exception { channel.pipeline().addLast(this.handler); }
// After initChannel, the pipeline must contain exactly the one configured handler.
@Test public void testChannelInitializer() throws Exception { NioDatagramChannel channel = new NioDatagramChannel(); NettyUDPChannelInitializer nettyChannelInitializer = new NettyUDPChannelInitializer( new NettyUDPServerHandler(new NettySource())); nettyChannelInitializer.initChannel(channel); assertNotNull(channel.pipeline().toMap()); assertEquals(1, channel.pipeline().toMap().size()); }
// Step initialization: after base init succeeds, loads the repository object list into
// the step data and resets the row/file counters. Returns false (with the error logged)
// if either base init or the repository fetch fails.
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { // Get the repository objects from the repository... // data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
// Extended repository, root path, recursive, no name/exclude masks, All object types:
// expects 4 objects (parameters interpreted per the shared init(...) helper — confirm
// against its signature).
@Test public void testGetRepoList_withoutNameMask_Extended() throws KettleException { init( repoExtended, "/", true, "", "", All, 4 ); }
// Static factory for a CookieBuilder bound to the given request.
public static CookieBuilder newCookieBuilder(HttpRequest request) { return new CookieBuilder(request); }
// setName(null) on the builder must throw NullPointerException.
@Test public void fail_with_NPE_when_cookie_has_no_name() { assertThatThrownBy(() -> newCookieBuilder(request).setName(null)) .isInstanceOf(NullPointerException.class) .hasMessage(null) == null ? null : null; }
// Code generation: clones the GETKIEPMMLTARGET method from the target template, renames
// its TARGET variable to the field's name, and rewrites the builder initializer chain —
// argument 0 becomes the field name literal, argument 2 the generated TargetField
// instantiation. Throws KiePMMLException if the template is missing its body, the
// expected variable, or the variable's initializer.
static MethodCallExpr getKiePMMLTargetVariableInitializer(final TargetField targetField) { final MethodDeclaration methodDeclaration = TARGET_TEMPLATE.getMethodsByName(GETKIEPMMLTARGET).get(0).clone(); final BlockStmt targetBody = methodDeclaration.getBody().orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration))); final VariableDeclarator variableDeclarator = getVariableDeclarator(targetBody, TARGET).orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, TARGET, targetBody))); variableDeclarator.setName(targetField.getName()); final MethodCallExpr toReturn = variableDeclarator.getInitializer() .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, TARGET, targetBody))) .asMethodCallExpr(); final MethodCallExpr builder = getChainedMethodCallExprFrom("builder", toReturn); final StringLiteralExpr nameExpr = new StringLiteralExpr(targetField.getName()); builder.setArgument(0, nameExpr); final ObjectCreationExpr targetFieldInstantiation = getTargetFieldVariableInitializer(targetField); builder.setArgument(2, targetFieldInstantiation); return toReturn; }
/**
 * The generated initializer expression must match the expected template
 * (AST-equal after substituting the random target's values) and the generated
 * code must compile with the required imports.
 */
@Test
void getKiePMMLTargetValueVariableInitializer() throws IOException {
    // Build a TargetField from a random PMML Target and generate its initializer.
    TargetField kieTargetField = ModelUtils.convertToKieTargetField(getRandomTarget());
    MethodCallExpr retrieved = KiePMMLTargetFactory.getKiePMMLTargetVariableInitializer(kieTargetField);
    // The expected expression is a template file with format placeholders.
    String text = getFileContent(TEST_01_SOURCE);
    List<TargetValue> kieTargetValues = kieTargetField.getTargetValues();
    String opType = OP_TYPE.class.getCanonicalName() + "." + kieTargetField.getOpType().toString();
    String castInteger = CAST_INTEGER.class.getCanonicalName() + "." + kieTargetField.getCastInteger().toString();
    // Placeholders: name, then (value, displayValue, priorProbability,
    // defaultValue) for each of the three target values, then the field config.
    Expression expected = JavaParserUtils.parseExpression(String.format(text,
            kieTargetField.getName(),
            kieTargetValues.get(0).getValue(), kieTargetValues.get(0).getDisplayValue(),
            kieTargetValues.get(0).getPriorProbability(), kieTargetValues.get(0).getDefaultValue(),
            kieTargetValues.get(1).getValue(), kieTargetValues.get(1).getDisplayValue(),
            kieTargetValues.get(1).getPriorProbability(), kieTargetValues.get(1).getDefaultValue(),
            kieTargetValues.get(2).getValue(), kieTargetValues.get(2).getDisplayValue(),
            kieTargetValues.get(2).getPriorProbability(), kieTargetValues.get(2).getDefaultValue(),
            opType,
            kieTargetField.getField(),
            castInteger,
            kieTargetField.getMin(),
            kieTargetField.getMax(),
            kieTargetField.getRescaleConstant(),
            kieTargetField.getRescaleFactor()));
    // AST equality first, then make sure the generated code actually compiles.
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
    List<Class<?>> imports = Arrays.asList(Arrays.class, Collections.class, KiePMMLTarget.class,
            KiePMMLTargetValue.class, TargetField.class, TargetValue.class);
    commonValidateCompilationWithImports(retrieved, imports);
}
/**
 * Serializes a long into a byte array using the library-wide DEFAULT_ORDER
 * byte order; delegates to the two-argument overload.
 *
 * @param longValue value to serialize
 * @return byte representation of {@code longValue} in the default byte order
 */
public static byte[] longToBytes(long longValue) {
    return longToBytes(longValue, DEFAULT_ORDER);
}
/**
 * Round-trip check: bytes produced by longToBytes must decode back to the
 * original value both in the default order (asserted below to behave as
 * little-endian) and with an explicit big-endian order.
 */
@Test
public void bytesToLongTest(){
    long a = RandomUtil.randomLong(0, Long.MAX_VALUE);
    // Default order: the produced bytes decode correctly as little-endian.
    ByteBuffer wrap = ByteBuffer.wrap(ByteUtil.longToBytes(a));
    wrap.order(ByteOrder.LITTLE_ENDIAN);
    long aLong = wrap.getLong();
    assertEquals(a, aLong);
    // Explicit big-endian serialization decodes as big-endian.
    wrap = ByteBuffer.wrap(ByteUtil.longToBytes(a, ByteOrder.BIG_ENDIAN));
    wrap.order(ByteOrder.BIG_ENDIAN);
    aLong = wrap.getLong();
    assertEquals(a, aLong);
}
/**
 * Atomically attempts to transition the lifecycle state to {@code to}.
 * The move is applied only when the TRANSITIONS table allows it from the
 * current state; otherwise the state is left untouched.
 *
 * @param to the requested target state
 * @return true when the state actually changed to {@code to}; false when the
 *         transition is disallowed or the state was already {@code to}
 */
public boolean tryToMoveTo(State to) {
    AtomicReference<State> lastFrom = new AtomicReference<>();
    State newState = this.state.updateAndGet(from -> {
        // Capture the state the move was attempted from (updateAndGet may
        // retry under contention; the last observed value wins).
        lastFrom.set(from);
        if (TRANSITIONS.get(from).contains(to)) {
            return to;
        }
        return from;
    });
    // "Updated" requires landing on the target AND not already being there —
    // a no-op self-transition reports false.
    boolean updated = newState == to && lastFrom.get() != to;
    LOG.trace("tryToMoveTo from {} to {} => {}", lastFrom.get(), to, updated);
    return updated;
}
/**
 * STOPPING must be reachable only from STARTING, STARTED and OPERATIONAL;
 * from every other state the transition must be rejected.
 */
@Test
public void can_move_to_STOPPING_only_from_STARTING_STARTED_and_OPERATIONAL() {
    // Exhaustively probe every state as a starting point.
    for (State state : values()) {
        boolean tryToMoveTo = newLifeCycle(state).tryToMoveTo(STOPPING);
        if (state == STARTING || state == STARTED || state == OPERATIONAL) {
            assertThat(tryToMoveTo).describedAs("from state " + state).isTrue();
        } else {
            assertThat(tryToMoveTo).describedAs("from state " + state).isFalse();
        }
    }
}
/**
 * Convenience overload: true when the given cluster's service type denotes a
 * content service; delegates to the service-type-based overload.
 */
public static boolean isContent(ServiceCluster cluster) {
    return isContent(cluster.serviceType());
}
/**
 * A cluster with an arbitrary, non-content service type ("foo") must not be
 * classified as a content cluster.
 */
@Test
public void verifyNonContentClusterIsNotRecognized() {
    ServiceCluster cluster = createServiceCluster(new ServiceType("foo"));
    assertFalse(VespaModelUtil.isContent(cluster));
}
/**
 * Whether this Maven project produces a WAR. Both plain {@code war} packaging
 * and GWT application ({@code gwt-app}) packaging count as WAR projects.
 */
@Override
public boolean isWarProject() {
  String packaging = project.getPackaging();
  if ("war".equals(packaging)) {
    return true;
  }
  return "gwt-app".equals(packaging);
}
/**
 * "gwt-lib" packaging (a GWT library, unlike "gwt-app") must not be treated
 * as a WAR project.
 */
@Test
public void testIsWarProject_gwtLibPackagingIsNotWar() {
    when(mockMavenProject.getPackaging()).thenReturn("gwt-lib");
    assertThat(mavenProjectProperties.isWarProject()).isFalse();
}
/**
 * Validates an update of an existing SinkConfig against the requested new one
 * and returns the merged result.
 *
 * Identity (tenant/namespace/name) and runtime-affecting settings
 * (subscription name, the set of input topics and their regex-ness,
 * processing guarantees, ordering flags, autoAck) must not change; everything
 * else present in {@code newConfig} overrides the clone of
 * {@code existingConfig}.
 *
 * NOTE(review): {@code newConfig} is mutated here — its input-spec map is
 * created and populated from inputs/topicsPattern/serde/schema maps; callers
 * should not rely on it being untouched.
 * NOTE(review): if {@code existingConfig.getInputSpecs()} is null while
 * {@code newConfig} declares input specs, the containsKey call below will
 * throw NPE — confirm upstream guarantees a non-null map.
 *
 * @param existingConfig the currently deployed sink configuration
 * @param newConfig      the requested update
 * @return a merged configuration based on a clone of {@code existingConfig}
 * @throws IllegalArgumentException when an immutable field differs
 */
public static SinkConfig validateUpdate(SinkConfig existingConfig, SinkConfig newConfig) {
    SinkConfig mergedConfig = clone(existingConfig);
    // Identity fields can never change.
    if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
        throw new IllegalArgumentException("Tenants differ");
    }
    if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
        throw new IllegalArgumentException("Namespaces differ");
    }
    if (!existingConfig.getName().equals(newConfig.getName())) {
        throw new IllegalArgumentException("Sink Names differ");
    }
    if (!StringUtils.isEmpty(newConfig.getClassName())) {
        mergedConfig.setClassName(newConfig.getClassName());
    }
    if (!StringUtils.isEmpty(newConfig.getSourceSubscriptionName()) && !newConfig.getSourceSubscriptionName()
            .equals(existingConfig.getSourceSubscriptionName())) {
        throw new IllegalArgumentException("Subscription Name cannot be altered");
    }
    if (newConfig.getInputSpecs() == null) {
        newConfig.setInputSpecs(new HashMap<>());
    }
    if (mergedConfig.getInputSpecs() == null) {
        mergedConfig.setInputSpecs(new HashMap<>());
    }
    if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
        mergedConfig.setLogTopic(newConfig.getLogTopic());
    }
    // Normalize every way of specifying inputs (plain topic list, regex
    // pattern, serde map, schema map) into newConfig's input-spec map.
    if (newConfig.getInputs() != null) {
        newConfig.getInputs().forEach((topicName -> {
            newConfig.getInputSpecs().putIfAbsent(topicName,
                    ConsumerConfig.builder().isRegexPattern(false).build());
        }));
    }
    if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
        newConfig.getInputSpecs().put(newConfig.getTopicsPattern(), ConsumerConfig.builder()
                .isRegexPattern(true)
                .build());
    }
    if (newConfig.getTopicToSerdeClassName() != null) {
        newConfig.getTopicToSerdeClassName().forEach((topicName, serdeClassName) -> {
            newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder()
                    .serdeClassName(serdeClassName)
                    .isRegexPattern(false)
                    .build());
        });
    }
    if (newConfig.getTopicToSchemaType() != null) {
        newConfig.getTopicToSchemaType().forEach((topicName, schemaClassname) -> {
            newConfig.getInputSpecs().put(topicName, ConsumerConfig.builder()
                    .schemaType(schemaClassname)
                    .isRegexPattern(false)
                    .build());
        });
    }
    // Existing input topics may be re-configured but never added, and a topic
    // may not flip between literal and regex.
    if (!newConfig.getInputSpecs().isEmpty()) {
        SinkConfig finalMergedConfig = mergedConfig;
        newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
            if (!existingConfig.getInputSpecs().containsKey(topicName)) {
                throw new IllegalArgumentException("Input Topics cannot be altered");
            }
            if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
                throw new IllegalArgumentException(
                        "isRegexPattern for input topic " + topicName + " cannot be altered");
            }
            finalMergedConfig.getInputSpecs().put(topicName, consumerConfig);
        });
    }
    if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
            .equals(existingConfig.getProcessingGuarantees())) {
        throw new IllegalArgumentException("Processing Guarantees cannot be altered");
    }
    if (newConfig.getConfigs() != null) {
        mergedConfig.setConfigs(newConfig.getConfigs());
    }
    if (newConfig.getSecrets() != null) {
        mergedConfig.setSecrets(newConfig.getSecrets());
    }
    if (newConfig.getParallelism() != null) {
        mergedConfig.setParallelism(newConfig.getParallelism());
    }
    if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
            .equals(existingConfig.getRetainOrdering())) {
        throw new IllegalArgumentException("Retain Ordering cannot be altered");
    }
    if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
            .equals(existingConfig.getRetainKeyOrdering())) {
        throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
    }
    if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
        throw new IllegalArgumentException("AutoAck cannot be altered");
    }
    if (newConfig.getResources() != null) {
        mergedConfig
                .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
    }
    if (newConfig.getTimeoutMs() != null) {
        mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
    }
    if (newConfig.getCleanupSubscription() != null) {
        mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
    }
    if (!StringUtils.isEmpty(newConfig.getArchive())) {
        mergedConfig.setArchive(newConfig.getArchive());
    }
    if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
        mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
    }
    if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
        mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
    }
    if (newConfig.getTransformFunction() != null) {
        mergedConfig.setTransformFunction(newConfig.getTransformFunction());
    }
    if (newConfig.getTransformFunctionClassName() != null) {
        mergedConfig.setTransformFunctionClassName(newConfig.getTransformFunctionClassName());
    }
    if (newConfig.getTransformFunctionConfig() != null) {
        mergedConfig.setTransformFunctionConfig(newConfig.getTransformFunctionConfig());
    }
    return mergedConfig;
}
/**
 * Updating a sink with a different autoAck value is forbidden; validateUpdate
 * must reject it with "AutoAck cannot be altered".
 */
@Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "AutoAck cannot be altered")
public void testMergeDifferentAutoAck() {
    SinkConfig sinkConfig = createSinkConfig();
    SinkConfig newSinkConfig = createUpdatedSinkConfig("autoAck", false);
    // Expected to throw before the merged config is ever used.
    SinkConfig mergedConfig = SinkConfigUtils.validateUpdate(sinkConfig, newSinkConfig);
}
@Override public void trash(final Local file) throws LocalAccessDeniedException { synchronized(NSWorkspace.class) { if(log.isDebugEnabled()) { log.debug(String.format("Move %s to Trash", file)); } // Asynchronous operation. 0 if the operation is performed synchronously and succeeds, and a positive // integer if the operation is performed asynchronously and succeeds if(!workspace.performFileOperation( NSWorkspace.RecycleOperation, new NFDNormalizer().normalize(file.getParent().getAbsolute()).toString(), StringUtils.EMPTY, NSArray.arrayWithObject(new NFDNormalizer().normalize(file.getName()).toString()))) { throw new LocalAccessDeniedException(String.format("Failed to move %s to Trash", file.getName())); } } }
/**
 * Moving a directory to the Trash must succeed even while a file inside it is
 * still held open (the output stream stays open for the duration of the
 * trash call via try-with-resources).
 */
@Test
public void testTrashOpenFile() throws Exception {
    final Trash trash = new WorkspaceTrashFeature();
    final SupportDirectoryFinder finder = new TemporarySupportDirectoryFinder();
    final Local temp = finder.find();
    // Layout: temp/<uuid>/<uuid>/<uuid>  (directory -> sub -> file)
    final Local directory = LocalFactory.get(temp, UUID.randomUUID().toString());
    directory.mkdir();
    final Local sub = LocalFactory.get(directory, UUID.randomUUID().toString());
    sub.mkdir();
    final Local file = LocalFactory.get(sub, UUID.randomUUID().toString());
    final Touch touch = LocalTouchFactory.get();
    touch.touch(file);
    // Keep the file open while trashing its grandparent directory.
    try (final OutputStream stream = file.getOutputStream(false)) {
        trash.trash(directory);
    }
}
/**
 * Runs a DynamoDB Scan with the configured table, limit, exclusive start key
 * and scan filter, then exposes the response (items, last evaluated key,
 * consumed capacity, counts) via the exchange headers.
 */
@Override
public void execute() {
    ScanRequest request = ScanRequest.builder()
            .tableName(determineTableName())
            .limit(determineLimit())
            .exclusiveStartKey(determineExclusiveStartKey())
            .scanFilter(determineScanFilter())
            .build();
    ScanResponse result = ddbClient.scan(request);

    Map<Object, Object> headers = new HashMap<>();
    headers.put(Ddb2Constants.ITEMS, result.items());
    // Null when the scan is complete and there is no pagination key.
    headers.put(Ddb2Constants.LAST_EVALUATED_KEY, result.hasLastEvaluatedKey() ? result.lastEvaluatedKey() : null);
    headers.put(Ddb2Constants.CONSUMED_CAPACITY, result.consumedCapacity());
    headers.put(Ddb2Constants.COUNT, result.count());
    headers.put(Ddb2Constants.SCANNED_COUNT, result.scannedCount());
    addToResults(headers);
}
/**
 * Scan with a "year > 1985" filter: the command must pass the filter through
 * to the client unchanged and surface items, counts, consumed capacity and
 * the last evaluated key as exchange headers.
 */
@Test
public void execute() {
    Map<String, Condition> scanFilter = new HashMap<>();
    Condition.Builder condition = Condition.builder().comparisonOperator(ComparisonOperator.GT.toString())
        .attributeValueList(AttributeValue.builder().n("1985").build());
    scanFilter.put("year", condition.build());
    exchange.getIn().setHeader(Ddb2Constants.SCAN_FILTER, scanFilter);
    command.execute();
    // Expected values come from the stubbed client's canned ScanResponse.
    Map<String, AttributeValue> mapAssert = new HashMap<>();
    mapAssert.put("1", AttributeValue.builder().s("LAST_KEY").build());
    ConsumedCapacity consumed = (ConsumedCapacity) exchange.getIn().getHeader(Ddb2Constants.CONSUMED_CAPACITY);
    assertEquals(scanFilter, ddbClient.scanRequest.scanFilter());
    assertEquals(Integer.valueOf(10), exchange.getIn().getHeader(Ddb2Constants.SCANNED_COUNT, Integer.class));
    assertEquals(Integer.valueOf(1), exchange.getIn().getHeader(Ddb2Constants.COUNT, Integer.class));
    assertEquals(Double.valueOf(1.0), consumed.capacityUnits());
    assertEquals(mapAssert, exchange.getIn().getHeader(Ddb2Constants.LAST_EVALUATED_KEY, Map.class));
    Map<?, ?> items = (Map<?, ?>) exchange.getIn().getHeader(Ddb2Constants.ITEMS, List.class).get(0);
    assertEquals(AttributeValue.builder().s("attrValue").build(), items.get("attrName"));
}
/**
 * Renders the column list as ", col1, col2, ..." — the leading ", " lets the
 * token be appended directly after an existing column list. Empty string when
 * there are no columns.
 */
@Override
public String toString() {
    if (columns.isEmpty()) {
        return "";
    }
    StringJoiner joined = new StringJoiner(", ", ", ", "");
    for (CharSequence column : columns) {
        joined.add(column);
    }
    return joined.toString();
}
/**
 * Two columns must render as ", id, name" — note the leading ", " so the
 * token can be appended directly after an existing column list.
 */
@Test
void assertToStringWithColumns() {
    assertThat(new InsertColumnsToken(0, Arrays.asList("id", "name")).toString(), is(", id, name"));
}
/**
 * Creates (recovers) the completed-checkpoint store for the given job.
 *
 * @param jobManagerConfig source of the max-retained-checkpoints setting
 * @param recoveryFactory  factory that performs the actual recovery
 * @param ioExecutor       executor for the store's I/O work
 * @param log              logger handed to the retained-count helper
 * @param jobId            job whose checkpoints are recovered
 * @param restoreMode      how previously-owned state is claimed
 * @return the recovered completed-checkpoint store
 * @throws Exception if recovery fails
 */
@VisibleForTesting
static CompletedCheckpointStore createCompletedCheckpointStore(
        Configuration jobManagerConfig,
        CheckpointRecoveryFactory recoveryFactory,
        Executor ioExecutor,
        Logger log,
        JobID jobId,
        RestoreMode restoreMode)
        throws Exception {
    // The number of completed checkpoints to retain comes from the JM config.
    final int maxRetainedCheckpoints =
            DefaultCompletedCheckpointStoreUtils.getMaximumNumberOfRetainedCheckpoints(jobManagerConfig, log);
    return recoveryFactory.createRecoveredCompletedCheckpointStore(
            jobId,
            maxRetainedCheckpoints,
            SharedStateRegistry.DEFAULT_FACTORY,
            ioExecutor,
            restoreMode);
}
/**
 * A placeholder handle registered against the recovered store's
 * SharedStateRegistry must resolve back to the originally registered
 * concrete stream handle.
 */
@Test
void testSharedStateRegistration() throws Exception {
    UUID backendId = UUID.randomUUID();
    String localPath = "k0";
    StreamStateHandle handle = new ByteStreamStateHandle("h0", new byte[] {1, 2, 3});
    // Recover a store that already contains a checkpoint referencing `handle`.
    CheckpointRecoveryFactory recoveryFactory =
            buildRecoveryFactory(buildCheckpoint(buildIncrementalHandle(localPath, handle, backendId)));
    CompletedCheckpointStore checkpointStore =
            SchedulerUtils.createCompletedCheckpointStore(
                    new Configuration(),
                    recoveryFactory,
                    Executors.directExecutor(),
                    log,
                    new JobID(),
                    RestoreMode.CLAIM);
    SharedStateRegistry sharedStateRegistry = checkpointStore.getSharedStateRegistry();
    // Register a placeholder with the same handle ID; the registry should
    // substitute the real handle on registration.
    IncrementalRemoteKeyedStateHandle newHandle =
            buildIncrementalHandle(
                    localPath,
                    new PlaceholderStreamStateHandle(
                            handle.getStreamStateHandleID(), handle.getStateSize(), false),
                    backendId);
    newHandle.registerSharedStates(sharedStateRegistry, 1L);
    assertThat(
            newHandle.getSharedState().stream()
                    .filter(e -> e.getLocalPath().equals(localPath))
                    .findFirst()
                    .get()
                    .getHandle())
            .isEqualTo(handle);
}
/**
 * Describes a list of Iceberg expressions as a single comma-separated string,
 * using the per-expression describe overload for each element.
 */
public static String describe(List<org.apache.iceberg.expressions.Expression> exprs) {
    return exprs.stream()
            .map(expr -> Spark3Util.describe(expr))
            .collect(Collectors.joining(", "));
}
/**
 * describe(Schema) must render nested list/map/timestamp types and mark
 * required fields with "not null".
 */
@Test
public void testDescribeSchema() {
    Schema schema = new Schema(
        required(1, "data", Types.ListType.ofRequired(2, Types.StringType.get())),
        optional(3, "pairs", Types.MapType.ofOptional(4, 5, Types.StringType.get(), Types.LongType.get())),
        required(6, "time", Types.TimestampType.withoutZone()));
    Assert.assertEquals(
        "Schema description isn't correct.",
        "struct<data: list<string> not null,pairs: map<string, bigint>,time: timestamp not null>",
        Spark3Util.describe(schema));
}
/**
 * Writes a non-null Boolean parameter for PostgreSQL by mapping it to an int
 * column value: {@code true -> 1}, {@code false -> 0}.
 *
 * @throws SQLException if the driver rejects the parameter
 */
@Override
public void setNonNullParameter(final PreparedStatement preparedStatement, final int columnIndex,
                                final Boolean columnValue, final JdbcType jdbcType) throws SQLException {
    final int intValue;
    if (columnValue) {
        intValue = 1;
    } else {
        intValue = 0;
    }
    preparedStatement.setInt(columnIndex, intValue);
}
/**
 * Writing TRUE through the handler (as int 1) must not throw, regardless of
 * the declared JdbcType.
 */
@Test
public void setNonNullParameterTest() {
    final PostgreSQLBooleanHandler postgreSQLBooleanHandler = new PostgreSQLBooleanHandler();
    Assertions.assertDoesNotThrow(
        () -> postgreSQLBooleanHandler.setNonNullParameter(mock(PreparedStatement.class), 1, true, JdbcType.BIGINT));
}
/**
 * Checks whether the given path exists in the Swift store. The root always
 * exists; everything else is probed through the attributes feature, with a
 * NotfoundException translated to {@code false}. Other failures propagate.
 */
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        // The container root is always present.
        return true;
    }
    try {
        // Any successful metadata lookup proves existence.
        new SwiftAttributesFinderFeature(session).find(file, listener);
    }
    catch(NotfoundException e) {
        return false;
    }
    return true;
}
/**
 * find() must match on the exact object key only: a sibling object sharing
 * the prefix, the wrong type (directory vs file), or keys that merely start
 * or end with the same characters must not be reported as found.
 */
@Test
public void testFindKeyWithSamePrefix() throws Exception {
    final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("IAD");
    final String prefix = new AlphanumericRandomStringService().random();
    // Create "<prefix>.<random>" first so a prefix-only match would find it.
    final Path other = new Path(container, String.format("%s.%s", prefix, new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.file));
    new SwiftTouchFeature(session, new SwiftRegionService(session)).touch(other, new TransferStatus());
    final Path file = new Path(container, prefix, EnumSet.of(Path.Type.file));
    final SwiftFindFeature feature = new SwiftFindFeature(session);
    // Not created yet: must not be found as file nor as directory.
    assertFalse(feature.find(file));
    assertFalse(feature.find(new Path(file).withType(EnumSet.of(Path.Type.directory))));
    new SwiftTouchFeature(session, new SwiftRegionService(session)).touch(file, new TransferStatus());
    // Exists as a file only.
    assertTrue(feature.find(file));
    assertFalse(feature.find(new Path(file).withType(EnumSet.of(Path.Type.directory))));
    // Near-miss keys must not match.
    assertFalse(feature.find(new Path(String.format("%s-", file.getAbsolute()), EnumSet.of(Path.Type.file))));
    assertFalse(feature.find(new Path(String.format("%s-", file.getAbsolute()), EnumSet.of(Path.Type.directory))));
    assertFalse(feature.find(new Path(String.format("-%s", file.getAbsolute()), EnumSet.of(Path.Type.file))));
    assertFalse(feature.find(new Path(String.format("-%s", file.getAbsolute()), EnumSet.of(Path.Type.directory))));
    assertNotNull(new SwiftAttributesFinderFeature(session).find(file));
    new SwiftDeleteFeature(session).delete(Arrays.asList(file, other), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Resolves and instantiates the AddressHolder extension configured on the
 * consumer. Any failure — unknown extension alias or instantiation error —
 * is surfaced as a SofaRpcRuntimeException carrying the alias in its message.
 *
 * @param consumerBootstrap bootstrap holding the consumer configuration
 * @return a new AddressHolder instance bound to the bootstrap
 * @throws SofaRpcRuntimeException when the extension cannot be loaded
 */
public static AddressHolder getAddressHolder(ConsumerBootstrap consumerBootstrap) {
    // Declared outside the try so the catch clause can report the alias.
    String holderAlias = null;
    try {
        holderAlias = consumerBootstrap.getConsumerConfig().getAddressHolder();
        ExtensionClass<AddressHolder> extensionClass = ExtensionLoaderFactory
            .getExtensionLoader(AddressHolder.class)
            .getExtensionClass(holderAlias);
        if (extensionClass == null) {
            throw new SofaRpcRuntimeException(LogCodes.getLog(LogCodes.ERROR_LOAD_ADDRESS_HOLDER, holderAlias));
        }
        return extensionClass.getExtInstance(
            new Class[] { ConsumerBootstrap.class }, new Object[] { consumerBootstrap });
    } catch (SofaRpcRuntimeException e) {
        // Already wrapped: propagate unchanged.
        throw e;
    } catch (Throwable e) {
        throw new SofaRpcRuntimeException(LogCodes.getLog(LogCodes.ERROR_LOAD_ADDRESS_HOLDER, holderAlias), e);
    }
}
/**
 * A registered "test" address-holder extension must resolve to
 * TestAddressHolder; an unknown extension alias must raise an exception
 * rather than return null.
 */
@Test
public void getAddressHolder() throws Exception {
    ConsumerConfig consumerConfig = new ConsumerConfig().setBootstrap("test").setAddressHolder("test");
    ConsumerBootstrap bootstrap = Bootstraps.from(consumerConfig);
    Assert.assertEquals(AddressHolderFactory.getAddressHolder(bootstrap).getClass(), TestAddressHolder.class);
    boolean error = false;
    try {
        // Unknown alias: the factory must fail.
        consumerConfig.setAddressHolder("xasdsa");
        AddressHolderFactory.getAddressHolder(bootstrap);
    } catch (Exception e) {
        error = true;
    }
    Assert.assertTrue(error);
}
/**
 * Publishes the per-file changed lines reported by the SCM provider relative
 * to the configured target branch. When no target branch is configured this
 * is a no-op.
 */
@Override
public void publish(ScannerReportWriter writer) {
    Optional<String> targetBranch = getTargetBranch();
    if (!targetBranch.isPresent()) {
        // Nothing to diff against.
        return;
    }
    Profiler profiler = Profiler.create(LOG).startInfo(LOG_MSG);
    int count = writeChangedLines(scmConfiguration.provider(), writer, targetBranch.get());
    LOG.debug("SCM reported changed lines for {} {} in the branch", count, ScannerUtils.pluralize("file", count));
    profiler.stopInfo();
}
/**
 * path1 has SCM-reported changed lines {1,2,3}; its last line (4) is
 * non-empty and unreported, so it must not be marked changed. path2 has no
 * SCM data at all and must be published with an empty changed-line set.
 */
@Test
public void do_not_write_last_line_as_changed_if_its_not_empty() {
    DefaultInputFile fileWithChangedLines = createInputFile("path1", "l1\nl2\nl3\nl4");
    DefaultInputFile fileWithoutChangedLines = createInputFile("path2", "l1\nl2\nl3\nl4");
    Set<Path> paths = new HashSet<>(Arrays.asList(BASE_DIR.resolve("path1"), BASE_DIR.resolve("path2")));
    Set<Integer> lines = new HashSet<>(Arrays.asList(1, 2, 3));
    // Only path1 gets changed-line data back from the provider.
    when(provider.branchChangedLines(TARGET_BRANCH, BASE_DIR, paths)).thenReturn(Collections.singletonMap(BASE_DIR.resolve("path1"), lines));
    when(inputComponentStore.allChangedFilesToPublish()).thenReturn(Arrays.asList(fileWithChangedLines, fileWithoutChangedLines));
    publisher.publish(writer);
    assertPublished(fileWithChangedLines, new HashSet<>(Arrays.asList(1, 2, 3)));
    assertPublished(fileWithoutChangedLines, Collections.emptySet());
}
/**
 * Resolves the consumer id registered for the given auth token; delegates
 * directly to the consumer service.
 *
 * @param token the consumer's auth token
 * @return the consumer id the service associates with the token
 */
public Long getConsumerId(String token) {
    return consumerService.getConsumerIdByToken(token);
}
/**
 * getConsumerId must delegate the token lookup to the consumer service
 * exactly once and return its result unchanged.
 */
@Test
public void testGetConsumerId() throws Exception {
    String someToken = "someToken";
    Long someConsumerId = 1L;
    when(consumerService.getConsumerIdByToken(someToken)).thenReturn(someConsumerId);
    assertEquals(someConsumerId, consumerAuthUtil.getConsumerId(someToken));
    verify(consumerService, times(1)).getConsumerIdByToken(someToken);
}