focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override
public boolean equals(@Nullable Object other) {
  // Tuples are equal only when they belong to the same pipeline and map the
  // exact same tags to the same PCollections.
  if (!(other instanceof PCollectionRowTuple)) {
    return false;
  }
  PCollectionRowTuple that = (PCollectionRowTuple) other;
  return pipeline.equals(that.pipeline) && pcollectionMap.equals(that.pcollectionMap);
}
@Test
public void testEquals() {
  // Verifies PCollectionRowTuple#equals via Guava's EqualsTester: tuples are
  // equal iff they share a pipeline and an identical tag->PCollection mapping.
  TestPipeline p = TestPipeline.create();
  String intTag = "int";
  String strTag = "strs";
  PCollection<Row> ints = p.apply("ints", Create.of(toRows(Arrays.asList(3, -42, 77), INT_SCHEMA)));
  PCollection<Row> strs = p.apply("strs", Create.of(toRows(Arrays.asList("ab", "cd", "ef"), STRING_SCHEMA)));
  EqualsTester tester = new EqualsTester();
  // Empty tuples in the same pipeline are equal
  tester.addEqualityGroup(PCollectionRowTuple.empty(p), PCollectionRowTuple.empty(p));
  // Two tuples built from the same tags and collections are equal.
  tester.addEqualityGroup(
      PCollectionRowTuple.of(intTag, ints).and(strTag, strs),
      PCollectionRowTuple.of(intTag, ints).and(strTag, strs));
  // Single-entry tuples differ from the combined tuple and from each other.
  tester.addEqualityGroup(PCollectionRowTuple.of(intTag, ints));
  tester.addEqualityGroup(PCollectionRowTuple.of(strTag, strs));
  TestPipeline otherPipeline = TestPipeline.create();
  // Empty tuples in different pipelines are not equal
  tester.addEqualityGroup(PCollectionRowTuple.empty(otherPipeline));
  tester.testEquals();
}
@Override
public List<IdpEntry> listAvailableIdps() {
  // Fetch the raw IDP entities and project them onto the public IdpEntry view.
  final var fetched = mustFetchIdpList();
  return fetched.stream()
      .map(entity -> new IdpEntry(entity.iss(), entity.organizationName(), entity.logoUri()))
      .toList();
}
@Test
void getList() {
  // End-to-end: the client resolves the idp-list endpoint from the federation
  // master's entity configuration, fetches the list through the API client,
  // and returns the entities as IdpEntry views in order.
  var client = new FederationMasterClientImpl(FEDERATION_MASTER, federationApiClient, clock);
  var idpListEndpoint = FEDERATION_MASTER.resolve("/idplist");
  // Entity configuration advertising where the IDP list lives.
  var es = EntityStatement.create()
      .metadata(
          Metadata.create()
              .federationEntity(
                  FederationEntity.create()
                      .idpListEndpoint(idpListEndpoint.toString())
                      .build())
              .build())
      .build();
  var jws = new EntityStatementJWS(null, es);
  when(federationApiClient.fetchEntityConfiguration(FEDERATION_MASTER)).thenReturn(jws);
  var idp1Name = "AOK Testfalen";
  var idp2Name = "AOK Nordheim";
  var idpListJws = new IdpListJWS(
      null,
      new IdpList(
          null,
          0,
          0,
          0,
          List.of(
              new IdpEntity(null, idp1Name, null, null, true),
              new IdpEntity(null, idp2Name, null, null, true))));
  when(federationApiClient.fetchIdpList(idpListEndpoint)).thenReturn(idpListJws);
  // when
  var got = client.listAvailableIdps();
  // then: entries come back in the order the list returned them.
  assertEquals(idp1Name, got.get(0).name());
  assertEquals(idp2Name, got.get(1).name());
}
@Override
public void describe(SensorDescriptor descriptor) {
  // Register this sensor for both Xoo dialects and declare the rule
  // repositories it raises issues for.
  descriptor.name("One Quick Fix Per Line");
  descriptor.onlyOnLanguages(Xoo.KEY, Xoo2.KEY);
  descriptor.createIssuesForRuleRepositories(
      XooRulesDefinition.XOO_REPOSITORY, XooRulesDefinition.XOO2_REPOSITORY);
}
@Test
public void testDescriptor() {
  // The sensor must declare exactly the two Xoo rule repositories.
  DefaultSensorDescriptor captured = new DefaultSensorDescriptor();
  sensor.describe(captured);
  assertThat(captured.ruleRepositories())
      .containsOnly(XooRulesDefinition.XOO_REPOSITORY, XooRulesDefinition.XOO2_REPOSITORY);
}
public static Map<String, String> generateMetricsAndLogConfigMapData(
    Reconciliation reconciliation, AbstractModel model, MetricsAndLogging metricsAndLogging) {
  // At most two entries: one for the logging config, one for the metrics JSON.
  final Map<String, String> data = new HashMap<>(2);
  if (model instanceof SupportsLogging loggingModel) {
    data.put(
        loggingModel.logging().configMapKey(),
        loggingModel.logging().loggingConfiguration(reconciliation, metricsAndLogging.loggingCm()));
  }
  if (model instanceof SupportsMetrics metricsModel) {
    String metricsJson = metricsModel.metrics().metricsJson(reconciliation, metricsAndLogging.metricsCm());
    // Metrics may be absent/disabled, in which case no entry is added.
    if (metricsJson != null) {
      data.put(MetricsModel.CONFIG_MAP_KEY, metricsJson);
    }
  }
  return data;
}
@Test
public void testConfigMapDataNoMetricsNoLogging() {
  // A model supporting neither metrics nor logging yields an empty payload.
  Kafka kafka = new KafkaBuilder().withNewMetadata().endMetadata().build();
  Map<String, String> configMapData = ConfigMapUtils.generateMetricsAndLogConfigMapData(
      Reconciliation.DUMMY_RECONCILIATION,
      new ModelWithoutMetricsAndLogging(kafka),
      new MetricsAndLogging(null, null));
  assertThat(configMapData.size(), is(0));
}
public static Predicate parse(String expression) {
  // Shunting-yard style parser: builds a Predicate tree from a boolean
  // expression over fully-qualified Predicate class names combined with the
  // operator characters in OPERATORS (including parentheses).
  final Stack<Predicate> predicateStack = new Stack<>();
  final Stack<Character> operatorStack = new Stack<>();
  // Strip whitespace, then tokenize keeping the operator characters as tokens.
  final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll("");
  final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true);
  // When false, the next iteration replays an operator popped for
  // precedence handling instead of consuming a new token.
  boolean isTokenMode = true;
  while (true) {
    final Character operator;
    final String token;
    if (isTokenMode) {
      if (tokenizer.hasMoreTokens()) {
        token = tokenizer.nextToken();
      } else {
        break;
      }
      if (OPERATORS.contains(token)) {
        operator = token.charAt(0);
      } else {
        operator = null;
      }
    } else {
      // Re-process the operator popped on the previous iteration.
      operator = operatorStack.pop();
      token = null;
    }
    isTokenMode = true;
    if (operator == null) {
      // Operand: instantiate the named Predicate implementation reflectively.
      try {
        predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance());
      } catch (ClassCastException e) {
        throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    } else {
      if (operatorStack.empty() || operator == '(') {
        operatorStack.push(operator);
      } else if (operator == ')') {
        // Close group: evaluate back to the matching '(' then discard it.
        while (operatorStack.peek() != '(') {
          evaluate(predicateStack, operatorStack);
        }
        operatorStack.pop();
      } else {
        // Lower-precedence operator: evaluate the tighter-binding one first,
        // then revisit this operator on the next pass.
        if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek())) {
          evaluate(predicateStack, operatorStack);
          isTokenMode = false;
        }
        operatorStack.push(operator);
      }
    }
  }
  // Fold any remaining operators into the predicate tree.
  while (!operatorStack.empty()) {
    evaluate(predicateStack, operatorStack);
  }
  if (predicateStack.size() > 1) {
    throw new RuntimeException("Invalid logical expression");
  }
  return predicateStack.pop();
}
@Test
public void testAndOrAnd() {
  // '&' binds tighter than '|', so "T & F | F & T" parses as (T & F) | (F & T).
  final Predicate parsed = PredicateExpressionParser.parse("com.linkedin.data.it.AlwaysTruePredicate & com.linkedin.data.it.AlwaysFalsePredicate | com.linkedin.data.it.AlwaysFalsePredicate & com.linkedin.data.it.AlwaysTruePredicate");
  Assert.assertEquals(parsed.getClass(), OrPredicate.class);
  final List<Predicate> orChildren = ((OrPredicate) parsed).getChildPredicates();
  Assert.assertEquals(orChildren.get(0).getClass(), AndPredicate.class);
  Assert.assertEquals(orChildren.get(1).getClass(), AndPredicate.class);
  // Left AND: (True & False).
  final List<Predicate> andChildren0 = ((AndPredicate) orChildren.get(0)).getChildPredicates();
  Assert.assertEquals(andChildren0.get(0).getClass(), AlwaysTruePredicate.class);
  Assert.assertEquals(andChildren0.get(1).getClass(), AlwaysFalsePredicate.class);
  // Right AND: (False & True).
  final List<Predicate> andChildren1 = ((AndPredicate) orChildren.get(1)).getChildPredicates();
  Assert.assertEquals(andChildren1.get(0).getClass(), AlwaysFalsePredicate.class);
  Assert.assertEquals(andChildren1.get(1).getClass(), AlwaysTruePredicate.class);
}
@Override
public Collection<String> childNames() {
  // Default resources render as just the type name; named resources render
  // as "TYPE:name".
  ArrayList<String> names = new ArrayList<>();
  for (ConfigResource resource : image.resourceData().keySet()) {
    String typeName = resource.type().name();
    names.add(resource.isDefault() ? typeName : typeName + ":" + resource.name());
  }
  return names;
}
@Test
public void testNodeChildNameParsing() {
  // Round trip: childNames() renders resources as "TYPE" or "TYPE:name";
  // resourceFromName must parse them back. Sorted for a deterministic order.
  List<ConfigResource> childResources = NODE.childNames().stream().
      sorted().
      map(ConfigurationsImageNode::resourceFromName).
      collect(Collectors.toList());
  // Includes default resources (empty name) and a name containing colons.
  assertEquals(Arrays.asList(
      new ConfigResource(BROKER, ""),
      new ConfigResource(BROKER, "0"),
      new ConfigResource(TOPIC, ""),
      new ConfigResource(TOPIC, ":colons:"),
      new ConfigResource(TOPIC, "__internal"),
      new ConfigResource(TOPIC, "foobar")), childResources);
}
@Override
public Collection<MaintenanceAssociation> getAllMaintenanceAssociation(MdId mdName) {
  log.debug("Retrieving all MA of MD {} from distributed store", mdName);
  // An unknown MD name is rejected with IllegalArgumentException rather than
  // returning an empty collection.
  return store.getMaintenanceDomain(mdName)
      .orElseThrow(() -> new IllegalArgumentException("Unknown MD " + mdName))
      .maintenanceAssociationList();
}
@Test public void testGetAllMaintenanceAssociation() { Collection<MaintenanceAssociation> maListMd1 = service.getAllMaintenanceAssociation( MdIdCharStr.asMdId("test-md-1")); assertEquals(2, maListMd1.size()); maListMd1.iterator().forEachRemaining(ma -> assertTrue(ma.maId().maName().endsWith(String.valueOf(ma.maNumericId()))) ); //Now try with an invalid name try { service.getAllMaintenanceAssociation( MdIdCharStr.asMdId("test-md-2")); } catch (IllegalArgumentException e) { assertEquals("Unknown MD test-md-2", e.getMessage()); } }
public static ParamType getSchemaFromType(final Type type) {
  // Convenience overload: resolve using the default Java-to-ParamType mapping.
  return getSchemaFromType(type, JAVA_TO_ARG_TYPE);
}
@Test
public void shouldGetPartialGenericTriFunction() throws NoSuchMethodException {
  // Given: the generic return type exposed by the fixture method.
  // NOTE(review): expected shape (T, Boolean, U) -> Integer is taken from the
  // assertion below — confirm against partialGenericTriFunctionType().
  final Type genericType = getClass().getMethod("partialGenericTriFunctionType").getGenericReturnType();
  // When:
  final ParamType returnType = UdfUtil.getSchemaFromType(genericType);
  // Then: type variables survive as GenericType placeholders; concrete types
  // map to their ParamTypes equivalents.
  assertThat(returnType, is(LambdaType.of(ImmutableList.of(GenericType.of("T"), ParamTypes.BOOLEAN, GenericType.of("U")), ParamTypes.INTEGER)));
}
public <T> boolean execute(final Predicate<T> predicate, final T arg) {
  // Always attempt at least once; keep retrying until the predicate succeeds
  // or the timeout elapses. Returns false only after a timed-out failure.
  while (true) {
    if (predicate.test(arg)) {
      return true;
    }
    if (isTimeout()) {
      return false;
    }
  }
}
@Test
void assertExecuteWithNeverTimeout() {
  // The predicate only succeeds on its 11th invocation; with these executor
  // arguments (presumably -1L disables the timeout — test name suggests so)
  // the retry loop must keep going until it returns true.
  assertTrue(new RetryExecutor(-1L, 2L).execute(new Predicate<Integer>() {

    // Invocation counter; incremented on every test() call.
    private int currentCount;

    @Override
    public boolean test(final Integer value) {
      return ++currentCount > 10;
    }
  }, null));
}
public static void main(String[] args) {
  final var scene = new Scene();
  // First frame: draw three pixels and report the buffer contents.
  scene.draw(List.of(
      new MutablePair<>(1, 1),
      new MutablePair<>(5, 6),
      new MutablePair<>(3, 2)));
  printBlackPixelCoordinate(scene.getBuffer());
  // Second frame: draw two more pixels and report again.
  scene.draw(List.of(
      new MutablePair<>(3, 7),
      new MutablePair<>(6, 1)));
  printBlackPixelCoordinate(scene.getBuffer());
}
@Test
void shouldExecuteApplicationWithoutException() {
  // Smoke test: the demo entry point must run to completion.
  assertDoesNotThrow(() -> App.main(new String[0]));
}
@Override
public boolean add(final Integer value) {
  // Boxed overload: unbox and delegate to the primitive add(int).
  // Throws NullPointerException if value is null (intValue() on null).
  return add(value.intValue());
}
@Test
public void setsWithTheDifferentValuesAreNotEqual() {
  // Two sets sharing one element (1001) but differing in another (1 vs 2)
  // must compare unequal.
  final IntHashSet other = new IntHashSet(100, -1);
  other.add(2);
  other.add(1001);
  set.add(1);
  set.add(1001);
  assertNotEquals(set, other);
}
public static Duration parse(String str) {
  // Parses a Duration from one of: a simple "<number><unit>" form (ms/d/h/m/s),
  // an ISO-8601 duration string, or a bare integer interpreted as milliseconds.
  // Blank input falls back to DEFAULT_DURATION; anything unparsable raises
  // UnsupportedOperationException.
  if (StringUtils.isBlank(str)) {
    return DEFAULT_DURATION;
  }
  if (SIMPLE.matcher(str).matches()) {
    // NOTE(review): the millisecond unit is probed first — presumably because
    // "s" is a substring of "ms"; reordering these branches would change results.
    if (str.contains(MILLIS_SECOND_UNIT)) {
      long value = doParse(MILLIS_SECOND_UNIT, str);
      return Duration.ofMillis(value);
    } else if (str.contains(DAY_UNIT)) {
      long value = doParse(DAY_UNIT, str);
      return Duration.ofDays(value);
    } else if (str.contains(HOUR_UNIT)) {
      long value = doParse(HOUR_UNIT, str);
      return Duration.ofHours(value);
    } else if (str.contains(MINUTE_UNIT)) {
      long value = doParse(MINUTE_UNIT, str);
      return Duration.ofMinutes(value);
    } else if (str.contains(SECOND_UNIT)) {
      long value = doParse(SECOND_UNIT, str);
      return Duration.ofSeconds(value);
    } else {
      throw new UnsupportedOperationException("\"" + str + "\" can't parse to Duration");
    }
  }
  // ISO-8601 form such as "PT20S".
  try {
    if (ISO8601.matcher(str).matches()) {
      return Duration.parse(str);
    }
  } catch (DateTimeParseException e) {
    throw new UnsupportedOperationException("\"" + str + "\" can't parse to Duration", e);
  }
  // Last resort: treat the whole string as a millisecond count.
  try {
    int millis = Integer.parseInt(str);
    return Duration.ofMillis(millis);
  } catch (Exception e) {
    throw new UnsupportedOperationException("\"" + str + "\" can't parse to Duration", e);
  }
}
@Test
public void testParseThrowException() {
  // Every bare unit or garbage token must be rejected.
  for (String invalid : new String[] {"a", "as", "d", "h", "m", "s", "ms"}) {
    Assertions.assertThrows(UnsupportedOperationException.class, () -> DurationUtil.parse(invalid));
  }
}
public static String formatSql(final AstNode root) {
  // Render the AST, then drop the trailing newlines added by the formatter.
  final StringBuilder sql = new StringBuilder();
  new Formatter(sql).process(root, 0);
  return StringUtils.stripEnd(sql.toString(), "\n");
}
@Test public void shouldFormatDropStreamStatementIfExistsDeleteTopic() { // Given: final DropStream dropStream = new DropStream(SOMETHING, true, true); // When: final String formatted = SqlFormatter.formatSql(dropStream); // Then: assertThat(formatted, is("DROP STREAM IF EXISTS SOMETHING DELETE TOPIC")); }
@Override
public Batch read(@Nullable ShufflePosition startPosition, @Nullable ShufflePosition endPosition)
    throws IOException {
  // The (start, end) pair is the cache key; a miss triggers the loader.
  final BatchRange key = new BatchRange(startPosition, endPosition);
  try {
    return cache.get(key);
  } catch (RuntimeException | ExecutionException e) {
    // Unwrap and rethrow IOExceptions raised by the loader; anything else
    // is unexpected and surfaces as a RuntimeException.
    Throwables.propagateIfPossible(e, IOException.class);
    throw new RuntimeException("unexpected", e);
  }
}
@Test
public void readerShouldCacheReads() throws IOException {
  // Repeated reads of the same (start, end) range must hit the cache: the
  // underlying reader is consulted exactly once.
  ShuffleBatchReader base = mock(ShuffleBatchReader.class);
  CachingShuffleBatchReader reader = new CachingShuffleBatchReader(base);
  when(base.read(null, null)).thenReturn(testBatch);
  // N.B. We need to capture the result of reader.read() in order to ensure
  // that there's a strong reference to it, preventing it from being
  // collected. Not that this should be an issue in tests, but it's good to
  // be solid.
  ShuffleBatchReader.Batch read = reader.read(null, null);
  assertThat(read, equalTo(testBatch));
  assertThat(reader.read(null, null), equalTo(testBatch));
  assertThat(reader.read(null, null), equalTo(testBatch));
  assertThat(reader.read(null, null), equalTo(testBatch));
  assertThat(reader.read(null, null), equalTo(testBatch));
  verify(base, times(1)).read(null, null);
}
public static RpcClient createClusterClient(String clientName, ConnectionType connectionType, Map<String, String> labels) {
  // Convenience overload: delegate to the 5-arg factory with two arguments
  // left unset. NOTE(review): the semantics of the two nulls are not visible
  // here — confirm against the 5-arg createClusterClient overload.
  return createClusterClient(clientName, connectionType, null, null, labels);
}
@Test
void testCreatedClusterClientWhenConnectionTypeNotMappingThenThrowException() {
  // A mocked ConnectionType has no client implementation mapped to it,
  // so factory creation must fail.
  assertThrows(Exception.class, () -> RpcClientFactory.createClusterClient(
      "testClient", mock(ConnectionType.class), Collections.singletonMap("labelKey", "labelValue")));
}
@Override
public int hashCode() {
  // Hash over the same fields equals() compares: id, name and conditions.
  return Objects.hash(id, name, conditions);
}
@Test
public void hashcode_is_based_on_all_fields() {
  // Equal id + name + conditions (set, order-insensitive) => same hash.
  assertThat(underTest)
      .hasSameHashCodeAs(underTest)
      .hasSameHashCodeAs(new QualityGate(QUALIGATE_ID, QUALIGATE_NAME, ImmutableSet.of(CONDITION_2, CONDITION_1)));
  // Changing any single field must change the hash.
  assertThat(underTest.hashCode()).isNotEqualTo(new Object().hashCode())
      .isNotEqualTo(new QualityGate("other_id", QUALIGATE_NAME, ImmutableSet.of(CONDITION_2, CONDITION_1)).hashCode())
      .isNotEqualTo(new QualityGate(QUALIGATE_ID, "other_name", ImmutableSet.of(CONDITION_2, CONDITION_1)).hashCode())
      .isNotEqualTo(new QualityGate(QUALIGATE_ID, QUALIGATE_NAME, emptySet()).hashCode())
      .isNotEqualTo(new QualityGate(QUALIGATE_ID, QUALIGATE_NAME, ImmutableSet.of(CONDITION_1)).hashCode())
      .isNotEqualTo(new QualityGate(QUALIGATE_ID, QUALIGATE_NAME, ImmutableSet.of(CONDITION_2)).hashCode())
      .isNotEqualTo(
          new QualityGate(QUALIGATE_ID, QUALIGATE_NAME, ImmutableSet.of(CONDITION_1, CONDITION_2, new Condition("new", Condition.Operator.GREATER_THAN, "a"))).hashCode());
}
@Override
public ObjectNode encode(MappingAddress address, CodecContext context) {
  // Delegate the per-address-type JSON encoding to the helper.
  return new EncodeMappingAddressCodecHelper(address, context).encode();
}
@Test
public void ipv6MappingAddressTest() {
  // Round trip: the encoded JSON must match the IPv6 address it came from.
  MappingAddress ipv6 = MappingAddresses.ipv6MappingAddress(IPV6_PREFIX);
  ObjectNode json = addressCodec.encode(ipv6, context);
  assertThat(json, matchesMappingAddress(ipv6));
}
public static List<TypedExpression> coerceCorrectConstructorArguments(
    final Class<?> type,
    List<TypedExpression> arguments,
    List<Integer> emptyCollectionArgumentsIndexes) {
  // Finds a constructor of `type` matching `arguments`, flipping the arguments
  // at the given indexes between their empty-collection representations until
  // a constructor resolves. Returns the coerced argument list on success and
  // the original `arguments` when no combination resolves.
  Objects.requireNonNull(type, "Type parameter cannot be null as the method searches constructors from that class!");
  Objects.requireNonNull(arguments, "Arguments parameter cannot be null! Use an empty list instance if needed instead.");
  Objects.requireNonNull(emptyCollectionArgumentsIndexes, "EmptyListArgumentIndexes parameter cannot be null! Use an empty list instance if needed instead.");
  if (emptyCollectionArgumentsIndexes.size() > arguments.size()) {
    throw new IllegalArgumentException("There cannot be more empty collection arguments than all arguments! emptyCollectionArgumentsIndexes parameter has more items than arguments parameter. "
        + "(" + emptyCollectionArgumentsIndexes.size() + " > " + arguments.size() + ")");
  }
  // Rather work only with the argumentsType and when a method is resolved, flip the arguments list based on it.
  final List<TypedExpression> coercedArgumentsTypesList = new ArrayList<>(arguments);
  Constructor<?> constructor = resolveConstructor(type, coercedArgumentsTypesList);
  if (constructor != null) {
    return coercedArgumentsTypesList;
  } else {
    // This needs to go through all possible combinations.
    final int indexesListSize = emptyCollectionArgumentsIndexes.size();
    for (int numberOfProcessedIndexes = 0; numberOfProcessedIndexes < indexesListSize; numberOfProcessedIndexes++) {
      for (int indexOfEmptyListIndex = numberOfProcessedIndexes; indexOfEmptyListIndex < indexesListSize; indexOfEmptyListIndex++) {
        // Flip one candidate index, try to resolve, and flip it back on failure.
        switchCollectionClassInArgumentsByIndex(coercedArgumentsTypesList, emptyCollectionArgumentsIndexes.get(indexOfEmptyListIndex));
        constructor = resolveConstructor(type, coercedArgumentsTypesList);
        if (constructor != null) {
          return coercedArgumentsTypesList;
        }
        switchCollectionClassInArgumentsByIndex(coercedArgumentsTypesList, emptyCollectionArgumentsIndexes.get(indexOfEmptyListIndex));
      }
      // Keep this index permanently flipped before widening the combination.
      switchCollectionClassInArgumentsByIndex(coercedArgumentsTypesList, emptyCollectionArgumentsIndexes.get(numberOfProcessedIndexes));
    }
    // No constructor found, return the original arguments.
    return arguments;
  }
}
@Test
public void coerceCorrectConstructorArgumentsEmptyCollectionIndexesAreNull() {
  // A null emptyCollectionArgumentsIndexes list must be rejected eagerly.
  Assertions.assertThatThrownBy(() ->
          MethodResolutionUtils.coerceCorrectConstructorArguments(Person.class, Collections.emptyList(), null))
      .isInstanceOf(NullPointerException.class);
}
public FilterAggregationBuilder buildTermTopAggregation(
    String topAggregationName,
    TopAggregationDefinition<?> topAggregation,
    @Nullable Integer numberOfTerms,
    Consumer<BoolQueryBuilder> extraFilters,
    Consumer<FilterAggregationBuilder> otherSubAggregations) {
  // Prepend the terms sub-aggregation, then let the caller add any extras.
  Consumer<FilterAggregationBuilder> withTermsSubAgg = agg -> {
    agg.subAggregation(subAggregationHelper.buildTermsAggregation(topAggregationName, topAggregation, numberOfTerms));
    otherSubAggregations.accept(agg);
  };
  return buildTopAggregation(topAggregationName, topAggregation, extraFilters, withTermsSubAgg);
}
@Test
public void buildTermTopAggregation_adds_filter_from_FiltersComputer_for_TopAggregation() {
  // The built aggregation must carry the filter the FiltersComputer produced
  // for THIS top-aggregation — not the one registered for any other.
  SimpleFieldTopAggregationDefinition topAggregation = new SimpleFieldTopAggregationDefinition("bar", false);
  SimpleFieldTopAggregationDefinition otherTopAggregation = new SimpleFieldTopAggregationDefinition("acme", false);
  BoolQueryBuilder computerFilter = boolQuery();
  BoolQueryBuilder otherFilter = boolQuery();
  when(filtersComputer.getTopAggregationFilter(topAggregation)).thenReturn(Optional.of(computerFilter));
  when(filtersComputer.getTopAggregationFilter(otherTopAggregation)).thenReturn(Optional.of(otherFilter));
  String topAggregationName = randomAlphabetic(10);
  TermsAggregationBuilder termSubAgg = AggregationBuilders.terms("foo");
  when(subAggregationHelper.buildTermsAggregation(topAggregationName, topAggregation, null)).thenReturn(termSubAgg);
  FilterAggregationBuilder aggregationBuilder = underTest.buildTermTopAggregation(
      topAggregationName, topAggregation, null, NO_EXTRA_FILTER, NO_OTHER_SUBAGGREGATION);
  assertThat(aggregationBuilder.getName()).isEqualTo(topAggregationName);
  // Same instance, not merely an equal query.
  assertThat(aggregationBuilder.getFilter()).isSameAs(computerFilter);
}
public static int getTerm(int y, int n) {
  // Returns the day-of-month of the n-th (1..24) solar term of year y,
  // decoded from the per-year hex table S_TERM_INFO. Out-of-range inputs
  // return -1 (supported years: 1900..2100).
  if (y < 1900 || y > 2100) {
    return -1;
  }
  if (n < 1 || n > 24) {
    return -1;
  }
  final String _table = S_TERM_INFO[y - 1900];
  Integer[] _info = new Integer[6];
  // Each year row packs six 5-hex-digit groups.
  for (int i = 0; i < 6; i++) {
    _info[i] = Integer.parseInt(_table.substring(i * 5, 5 * (i + 1)), 16);
  }
  String[] _calday = new String[24];
  // Split the decimal rendering of each group into four day numbers using
  // fixed digit widths (1, 2, 1, 2).
  // NOTE(review): this assumes every decoded value renders as at least 6
  // decimal digits; a smaller value would misalign the substrings — confirm
  // against the S_TERM_INFO table contents.
  for (int i = 0; i < 6; i++) {
    _calday[4 * i] = _info[i].toString().substring(0, 1);
    _calday[4 * i + 1] = _info[i].toString().substring(1, 3);
    _calday[4 * i + 2] = _info[i].toString().substring(3, 4);
    _calday[4 * i + 3] = _info[i].toString().substring(4, 6);
  }
  return NumberUtil.parseInt(_calday[n - 1]);
}
@Test
public void getTermTest() {
  // All 24 solar terms of 2021, verified against published almanac dates.
  // NOTE(review): this calls a 3-arg (year, month, day) overload, not the
  // 2-arg getTerm(y, n) — it appears to return the term NAME for a date.
  assertEquals("小寒", SolarTerms.getTerm(2021, 1, 5));
  assertEquals("大寒", SolarTerms.getTerm(2021, 1, 20));
  assertEquals("立春", SolarTerms.getTerm(2021, 2, 3));
  assertEquals("雨水", SolarTerms.getTerm(2021, 2, 18));
  assertEquals("惊蛰", SolarTerms.getTerm(2021, 3, 5));
  assertEquals("春分", SolarTerms.getTerm(2021, 3, 20));
  assertEquals("清明", SolarTerms.getTerm(2021, 4, 4));
  assertEquals("谷雨", SolarTerms.getTerm(2021, 4, 20));
  assertEquals("立夏", SolarTerms.getTerm(2021, 5, 5));
  assertEquals("小满", SolarTerms.getTerm(2021, 5, 21));
  assertEquals("芒种", SolarTerms.getTerm(2021, 6, 5));
  assertEquals("夏至", SolarTerms.getTerm(2021, 6, 21));
  assertEquals("小暑", SolarTerms.getTerm(2021, 7, 7));
  assertEquals("大暑", SolarTerms.getTerm(2021, 7, 22));
  assertEquals("立秋", SolarTerms.getTerm(2021, 8, 7));
  assertEquals("处暑", SolarTerms.getTerm(2021, 8, 23));
  assertEquals("白露", SolarTerms.getTerm(2021, 9, 7));
  assertEquals("秋分", SolarTerms.getTerm(2021, 9, 23));
  assertEquals("寒露", SolarTerms.getTerm(2021, 10, 8));
  assertEquals("霜降", SolarTerms.getTerm(2021, 10, 23));
  assertEquals("立冬", SolarTerms.getTerm(2021, 11, 7));
  assertEquals("小雪", SolarTerms.getTerm(2021, 11, 22));
  assertEquals("大雪", SolarTerms.getTerm(2021, 12, 7));
  assertEquals("冬至", SolarTerms.getTerm(2021, 12, 21));
}
public static String getAcceptEncoding(HttpServletRequest req) {
  // Return the first encoding token from the Accept-Encoding header, dropping
  // any quality parameters ("gzip;q=0.8" -> "gzip").
  // NOTE(review): an absent/empty header defaults to "UTF-8" (a charset name),
  // matching the existing tests.
  String encoding = StringUtils.defaultIfEmpty(req.getHeader(ACCEPT_ENCODING), StandardCharsets.UTF_8.name());
  int comma = encoding.indexOf(COMMA);
  if (comma >= 0) {
    encoding = encoding.substring(0, comma);
  }
  int semi = encoding.indexOf(SEMI);
  return semi >= 0 ? encoding.substring(0, semi) : encoding;
}
@Test
void testGetAcceptEncoding() {
  MockHttpServletRequest request = new MockHttpServletRequest();
  // No header at all: fall back to the UTF-8 default.
  assertEquals(StandardCharsets.UTF_8.name(), WebUtils.getAcceptEncoding(request));
  // Multi-valued header: only the first token is returned.
  request.addHeader(HttpHeaderConsts.ACCEPT_ENCODING, "gzip, deflate, br");
  assertEquals("gzip", WebUtils.getAcceptEncoding(request));
}
public void checkExecutePrerequisites(final ExecutionContext executionContext) {
  // Reject execution when the prerequisites are not met, surfacing the
  // offending table in a TableModifyInTransactionException.
  ShardingSpherePreconditions.checkState(isValidExecutePrerequisites(executionContext),
      () -> new TableModifyInTransactionException(getTableName(executionContext)));
}
@Test
void assertCheckExecutePrerequisitesWhenExecuteDDLNotInXATransaction() {
  // A CREATE TABLE outside of a transaction must pass the prerequisite check
  // (the call below completes without throwing).
  when(connectionSession.getTransactionStatus().isInTransaction()).thenReturn(false);
  ExecutionContext executionContext = new ExecutionContext(
      new QueryContext(createMySQLCreateTableStatementContext(), "", Collections.emptyList(), new HintValueContext(), mockConnectionContext(), mock(ShardingSphereMetaData.class)),
      Collections.emptyList(), mock(RouteContext.class));
  new ProxySQLExecutor(JDBCDriverType.STATEMENT, databaseConnectionManager, mock(DatabaseConnector.class), mockQueryContext()).checkExecutePrerequisites(executionContext);
}
public static <T> List<List<T>> splitBySize(List<T> list, int expectedSize)
    throws NullPointerException, IllegalArgumentException {
  // Distribute the elements of `list` round-robin across at most
  // `expectedSize` buckets (never more buckets than elements).
  Preconditions.checkNotNull(list, "list must not be null");
  Preconditions.checkArgument(expectedSize > 0, "expectedSize must larger than 0");
  // Single bucket: hand back the original list wrapped as-is.
  if (expectedSize == 1) {
    return Collections.singletonList(list);
  }
  final int buckets = Math.min(expectedSize, list.size());
  final List<List<T>> result = new ArrayList<>(buckets);
  for (int i = 0; i < buckets; i++) {
    result.add(new ArrayList<>());
  }
  int cursor = 0;
  for (T element : list) {
    result.get(cursor).add(element);
    cursor = (cursor + 1) % buckets;
  }
  return result;
}
@Test
public void testSplitBySizeWithNegativeSize() {
  // expectedSize <= 0 must be rejected with IllegalArgumentException.
  List<Integer> lists = Lists.newArrayList(1, 2, 3);
  int expectSize = -1;
  // JUnit 4 ExpectedException rule: the expectation must be declared before
  // the call under test.
  expectedEx.expect(IllegalArgumentException.class);
  expectedEx.expectMessage("expectedSize must larger than 0");
  ListUtil.splitBySize(lists, expectSize);
}
static ObjectCreationExpr getObjectCreationExprFromInterval(Interval source) {
  // Build a "new Interval(left, right)" expression; a missing margin is
  // rendered as an explicit null literal.
  final ObjectCreationExpr creation = new ObjectCreationExpr();
  creation.setType(Interval.class.getCanonicalName());
  final NodeList<Expression> arguments = new NodeList<>();
  arguments.add(source.getLeftMargin() == null
      ? new NullLiteralExpr()
      : new NameExpr(source.getLeftMargin().toString()));
  arguments.add(source.getRightMargin() == null
      ? new NullLiteralExpr()
      : new NameExpr(source.getRightMargin().toString()));
  creation.setArguments(arguments);
  return creation;
}
@Test
void getObjectCreationExprFromInterval() {
  // Case 1: null left margin -> first argument is a null literal.
  Interval interval = new Interval(null, -14);
  ObjectCreationExpr retrieved = org.kie.pmml.compiler.commons.codegenfactories.KiePMMLModelFactoryUtils.getObjectCreationExprFromInterval(interval);
  assertThat(retrieved).isNotNull();
  assertThat(retrieved.getType().asString()).isEqualTo(Interval.class.getCanonicalName());
  NodeList<Expression> arguments = retrieved.getArguments();
  assertThat(arguments).hasSize(2);
  assertThat(arguments.get(0)).isInstanceOf(NullLiteralExpr.class);
  assertThat(arguments.get(1).asNameExpr().toString()).isEqualTo(String.valueOf(interval.getRightMargin()));
  // Case 2: both margins present -> both arguments are name expressions.
  interval = new Interval(-13, 10);
  retrieved = org.kie.pmml.compiler.commons.codegenfactories.KiePMMLModelFactoryUtils.getObjectCreationExprFromInterval(interval);
  assertThat(retrieved).isNotNull();
  assertThat(retrieved.getType().asString()).isEqualTo(Interval.class.getCanonicalName());
  arguments = retrieved.getArguments();
  assertThat(arguments).hasSize(2);
  assertThat(arguments.get(0).asNameExpr().toString()).isEqualTo(String.valueOf(interval.getLeftMargin()));
  assertThat(arguments.get(1).asNameExpr().toString()).isEqualTo(String.valueOf(interval.getRightMargin()));
  // Case 3: null right margin -> second argument is a null literal.
  interval = new Interval(-13, null);
  retrieved = org.kie.pmml.compiler.commons.codegenfactories.KiePMMLModelFactoryUtils.getObjectCreationExprFromInterval(interval);
  assertThat(retrieved).isNotNull();
  assertThat(retrieved.getType().asString()).isEqualTo(Interval.class.getCanonicalName());
  arguments = retrieved.getArguments();
  assertThat(arguments).hasSize(2);
  assertThat(arguments.get(0).asNameExpr().toString()).isEqualTo(String.valueOf(interval.getLeftMargin()));
  assertThat(arguments.get(1)).isInstanceOf(NullLiteralExpr.class);
}
public void decode(ByteBuf buffer) {
  // State-machine decoder for SPDY frames. Each pass through the switch
  // consumes one header/payload unit; the method returns as soon as the
  // buffer does not yet hold enough readable bytes for the current state,
  // and resumes from the saved state on the next call.
  boolean last;
  int statusCode;
  while (true) {
    switch(state) {
      case READ_COMMON_HEADER:
        if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
          return;
        }
        int frameOffset = buffer.readerIndex();
        int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
        int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
        buffer.skipBytes(SPDY_HEADER_SIZE);
        // High bit of the first byte distinguishes control from data frames.
        boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;
        int version;
        int type;
        if (control) {
          // Decode control frame common header
          version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
          type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
          streamId = 0; // Default to session Stream-ID
        } else {
          // Decode data frame common header
          version = spdyVersion; // Default to expected version
          type = SPDY_DATA_FRAME;
          streamId = getUnsignedInt(buffer, frameOffset);
        }
        flags = buffer.getByte(flagsOffset);
        length = getUnsignedMedium(buffer, lengthOffset);
        // Check version first then validity
        if (version != spdyVersion) {
          state = State.FRAME_ERROR;
          delegate.readFrameError("Invalid SPDY Version");
        } else if (!isValidFrameHeader(streamId, type, flags, length)) {
          state = State.FRAME_ERROR;
          delegate.readFrameError("Invalid Frame Error");
        } else {
          state = getNextState(type, length);
        }
        break;
      case READ_DATA_FRAME:
        // Zero-length data frame: emit immediately (may carry the FIN flag).
        if (length == 0) {
          state = State.READ_COMMON_HEADER;
          delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
          break;
        }
        // Generate data frames that do not exceed maxChunkSize
        int dataLength = Math.min(maxChunkSize, length);
        // Wait until entire frame is readable
        if (buffer.readableBytes() < dataLength) {
          return;
        }
        ByteBuf data = buffer.alloc().buffer(dataLength);
        data.writeBytes(buffer, dataLength);
        length -= dataLength;
        if (length == 0) {
          state = State.READ_COMMON_HEADER;
        }
        // FIN is only reported on the final chunk of the frame.
        last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);
        delegate.readDataFrame(streamId, last, data);
        break;
      case READ_SYN_STREAM_FRAME:
        // Fixed 10-byte SYN_STREAM header before the header block.
        if (buffer.readableBytes() < 10) {
          return;
        }
        int offset = buffer.readerIndex();
        streamId = getUnsignedInt(buffer, offset);
        int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
        // Priority lives in the top 3 bits of the 9th byte.
        byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
        last = hasFlag(flags, SPDY_FLAG_FIN);
        boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
        buffer.skipBytes(10);
        length -= 10;
        if (streamId == 0) {
          state = State.FRAME_ERROR;
          delegate.readFrameError("Invalid SYN_STREAM Frame");
        } else {
          state = State.READ_HEADER_BLOCK;
          delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
        }
        break;
      case READ_SYN_REPLY_FRAME:
        if (buffer.readableBytes() < 4) {
          return;
        }
        streamId = getUnsignedInt(buffer, buffer.readerIndex());
        last = hasFlag(flags, SPDY_FLAG_FIN);
        buffer.skipBytes(4);
        length -= 4;
        if (streamId == 0) {
          state = State.FRAME_ERROR;
          delegate.readFrameError("Invalid SYN_REPLY Frame");
        } else {
          state = State.READ_HEADER_BLOCK;
          delegate.readSynReplyFrame(streamId, last);
        }
        break;
      case READ_RST_STREAM_FRAME:
        if (buffer.readableBytes() < 8) {
          return;
        }
        streamId = getUnsignedInt(buffer, buffer.readerIndex());
        statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
        buffer.skipBytes(8);
        if (streamId == 0 || statusCode == 0) {
          state = State.FRAME_ERROR;
          delegate.readFrameError("Invalid RST_STREAM Frame");
        } else {
          state = State.READ_COMMON_HEADER;
          delegate.readRstStreamFrame(streamId, statusCode);
        }
        break;
      case READ_SETTINGS_FRAME:
        if (buffer.readableBytes() < 4) {
          return;
        }
        boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);
        numSettings = getUnsignedInt(buffer, buffer.readerIndex());
        buffer.skipBytes(4);
        length -= 4;
        // Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
        if ((length & 0x07) != 0 || length >> 3 != numSettings) {
          state = State.FRAME_ERROR;
          delegate.readFrameError("Invalid SETTINGS Frame");
        } else {
          state = State.READ_SETTING;
          delegate.readSettingsFrame(clear);
        }
        break;
      case READ_SETTING:
        // All entries consumed: signal the end of the settings frame.
        if (numSettings == 0) {
          state = State.READ_COMMON_HEADER;
          delegate.readSettingsEnd();
          break;
        }
        if (buffer.readableBytes() < 8) {
          return;
        }
        byte settingsFlags = buffer.getByte(buffer.readerIndex());
        int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
        int value = getSignedInt(buffer, buffer.readerIndex() + 4);
        boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
        boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
        buffer.skipBytes(8);
        --numSettings;
        delegate.readSetting(id, value, persistValue, persisted);
        break;
      case READ_PING_FRAME:
        if (buffer.readableBytes() < 4) {
          return;
        }
        int pingId = getSignedInt(buffer, buffer.readerIndex());
        buffer.skipBytes(4);
        state = State.READ_COMMON_HEADER;
        delegate.readPingFrame(pingId);
        break;
      case READ_GOAWAY_FRAME:
        if (buffer.readableBytes() < 8) {
          return;
        }
        int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
        statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
        buffer.skipBytes(8);
        state = State.READ_COMMON_HEADER;
        delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
        break;
      case READ_HEADERS_FRAME:
        if (buffer.readableBytes() < 4) {
          return;
        }
        streamId = getUnsignedInt(buffer, buffer.readerIndex());
        last = hasFlag(flags, SPDY_FLAG_FIN);
        buffer.skipBytes(4);
        length -= 4;
        if (streamId == 0) {
          state = State.FRAME_ERROR;
          delegate.readFrameError("Invalid HEADERS Frame");
        } else {
          state = State.READ_HEADER_BLOCK;
          delegate.readHeadersFrame(streamId, last);
        }
        break;
      case READ_WINDOW_UPDATE_FRAME:
        if (buffer.readableBytes() < 8) {
          return;
        }
        streamId = getUnsignedInt(buffer, buffer.readerIndex());
        int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
        buffer.skipBytes(8);
        if (deltaWindowSize == 0) {
          state = State.FRAME_ERROR;
          delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
        } else {
          state = State.READ_COMMON_HEADER;
          delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
        }
        break;
      case READ_HEADER_BLOCK:
        // Header block fully consumed: signal its end.
        if (length == 0) {
          state = State.READ_COMMON_HEADER;
          delegate.readHeaderBlockEnd();
          break;
        }
        if (!buffer.isReadable()) {
          return;
        }
        // Forward whatever compressed header bytes are available so far.
        int compressedBytes = Math.min(buffer.readableBytes(), length);
        ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes);
        headerBlock.writeBytes(buffer, compressedBytes);
        length -= compressedBytes;
        delegate.readHeaderBlock(headerBlock);
        break;
      case DISCARD_FRAME:
        // Skip the remainder of a frame we chose not to process.
        int numBytes = Math.min(buffer.readableBytes(), length);
        buffer.skipBytes(numBytes);
        length -= numBytes;
        if (length == 0) {
          state = State.READ_COMMON_HEADER;
          break;
        }
        return;
      case FRAME_ERROR:
        // Terminal state: drain everything without further decoding.
        buffer.skipBytes(buffer.readableBytes());
        return;
      default:
        throw new Error("Shouldn't reach here.");
    }
  }
}
// A WINDOW_UPDATE frame whose declared length is not the mandatory 8 bytes
// must be reported as a frame error, and the decoder must consume (discard)
// the entire frame payload.
@Test
public void testInvalidSpdyWindowUpdateFrameLength() throws Exception {
    short type = 9; // SPDY WINDOW_UPDATE control frame type
    byte flags = 0;
    int length = 12; // invalid length
    int streamId = RANDOM.nextInt() & 0x7FFFFFFF; // stream ids are 31-bit
    int deltaWindowSize = RANDOM.nextInt() & 0x7FFFFFFF | 0x01; // non-zero delta
    ByteBuf buf = Unpooled.buffer(SPDY_HEADER_SIZE + length);
    encodeControlFrameHeader(buf, type, flags, length);
    buf.writeInt(streamId);
    buf.writeInt(deltaWindowSize);
    decoder.decode(buf);
    verify(delegate).readFrameError(anyString());
    // The whole buffer must have been drained despite the error
    assertFalse(buf.isReadable());
    buf.release();
}
/**
 * Returns {@code value} when it is non-null, otherwise signals a missing
 * resource by throwing a {@link NotFoundException} built from the formatted
 * message.
 *
 * @param value the value to check, may be null
 * @param message message template handed to {@code format}
 * @param messageArguments arguments substituted into the template
 * @return the non-null {@code value}
 * @throws NotFoundException if {@code value} is null
 */
public static <T> T checkFound(@Nullable T value, String message, Object... messageArguments) {
    if (value != null) {
        return value;
    }
    throw new NotFoundException(format(message, messageArguments));
}
// checkFound must raise NotFoundException carrying exactly the supplied
// message when given a null value.
@Test
public void checkFound_type_throws_NotFoundException_if_parameter_is_null() {
    String message = randomAlphabetic(12);
    assertThatExceptionOfType(NotFoundException.class)
        .isThrownBy(() -> checkFound(null, message))
        .withMessage(message);
}
/**
 * Applies the supported GSUB (glyph substitution) features to the glyph ids
 * in a fixed feature order, then repositions the resulting glyphs.
 *
 * @param originalGlyphIds glyph ids derived from the original text
 * @return an unmodifiable list of glyph ids after substitution and repositioning
 */
@Override
public List<Integer> applyTransforms(List<Integer> originalGlyphIds) {
    List<Integer> intermediateGlyphsFromGsub = originalGlyphIds;
    for (String feature : FEATURES_IN_ORDER) {
        // Skip features this font's GSUB data does not provide
        if (!gsubData.isFeatureSupported(feature)) {
            LOG.debug("the feature {} was not found", feature);
            continue;
        }
        LOG.debug("applying the feature {}", feature);
        ScriptFeature scriptFeature = gsubData.getFeature(feature);
        // Each feature consumes the output of the previous one
        intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature, intermediateGlyphsFromGsub);
    }
    return Collections.unmodifiableList(repositionGlyphs(intermediateGlyphsFromGsub));
}
// Applying the Bengali GSUB transforms to "ব্যাস" must yield the expected
// glyph sequence after the ja-phala substitution.
@Test
void testApplyTransforms_ja_phala() {
    // given
    List<Integer> glyphsAfterGsub = Arrays.asList(89, 156, 101, 97);
    // when
    List<Integer> result = gsubWorkerForBengali.applyTransforms(getGlyphIds("ব্যাস"));
    // then
    assertEquals(glyphsAfterGsub, result);
}
/** Delegates to the static {@code of} computation for the two label assignments. */
@Override
public double score(int[] truth, int[] cluster) {
    return of(truth, cluster);
}
// Rand index of two largely agreeing clusterings (labels renamed, a few
// assignments differing) must match the precomputed value to 4 decimals.
@Test
public void test() {
    System.out.println("rand index");
    int[] clusters = {2, 3, 3, 1, 1, 3, 3, 1, 3, 1, 1, 3, 3, 3, 3, 3, 2, 3, 3, 1, 1, 1, 1, 1, 1, 4, 1, 3, 3, 3, 3, 3, 1, 4, 4, 4, 3, 1, 1, 3, 1, 4, 3, 3, 3, 3, 1, 1, 3, 1, 1, 3, 3, 3, 3, 4, 3, 1, 3, 1, 3, 1, 1, 1, 1, 1, 3, 3, 2, 3, 3, 1, 1, 3, 3, 3, 3, 3, 3, 1, 1, 3, 2, 3, 2, 2, 4, 1, 3, 1, 3, 1, 1, 3, 4, 4, 4, 1, 2, 3, 1, 1, 3, 1, 1, 1, 4, 3, 3, 2, 3, 3, 1, 3, 3, 1, 1, 1, 3, 4, 4, 2, 3, 3, 3, 3, 1, 1, 1, 3, 3, 3, 2, 3, 3, 3, 2, 3, 3, 1, 3, 1, 3, 3, 1, 1, 3, 3, 3, 1, 1, 1, 1, 3, 3, 4, 3, 2, 3, 1, 1, 3, 1, 2, 3, 1, 1, 3, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 3, 1, 3, 1, 1, 3, 1, 1, 1, 3, 2, 1, 2, 1, 1, 1, 1, 1, 3, 1, 1, 3, 3, 1, 3, 3, 3};
    int[] alt = {3, 2, 2, 0, 0, 2, 2, 0, 2, 0, 0, 2, 2, 2, 2, 2, 3, 2, 2, 0, 0, 0, 0, 0, 0, 3, 0, 2, 2, 2, 2, 2, 0, 3, 3, 3, 2, 0, 0, 2, 0, 3, 2, 2, 2, 2, 0, 0, 2, 0, 0, 2, 2, 2, 2, 3, 2, 0, 2, 0, 2, 0, 0, 0, 0, 0, 2, 2, 3, 2, 2, 0, 0, 2, 2, 2, 2, 2, 2, 0, 0, 2, 3, 2, 0, 3, 3, 0, 2, 0, 2, 0, 0, 2, 3, 3, 3, 0, 3, 2, 0, 0, 2, 0, 0, 0, 3, 2, 2, 3, 2, 2, 0, 2, 2, 0, 0, 0, 2, 3, 3, 3, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 0, 2, 0, 2, 2, 0, 0, 2, 1, 2, 0, 0, 0, 0, 2, 2, 3, 2, 1, 2, 0, 0, 2, 0, 3, 2, 0, 0, 2, 2, 0, 0, 0, 0, 0, 2, 0, 2, 0, 2, 0, 0, 0, 0, 2, 0, 0, 0, 2, 3, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 2, 2, 0, 2, 2, 2};
    RandIndex instance = new RandIndex();
    double expResult = 0.9651;
    double result = instance.score(clusters, alt);
    assertEquals(expResult, result, 1E-4);
}
/**
 * Formats a message pattern containing "{}" placeholders with the given
 * argument array (SLF4J-style).
 *
 * <p>If the last argument is a Throwable and not all arguments were consumed
 * by placeholders, the Throwable is carried separately in the returned tuple
 * instead of being formatted into the message. A placeholder can be escaped
 * with a single escape char, and the escape itself with a doubled one
 * ("abc x:\\{}").
 *
 * @param messagePattern pattern possibly containing "{}" anchors, may be null
 * @param argArray arguments to substitute, may be null or empty
 * @return the formatted message plus the trailing Throwable, if any
 */
public static FormattingTuple arrayFormat(final String messagePattern, final Object[] argArray) {
    if (argArray == null || argArray.length == 0) {
        return new FormattingTuple(messagePattern, null);
    }
    int lastArrIdx = argArray.length - 1;
    Object lastEntry = argArray[lastArrIdx];
    Throwable throwable = lastEntry instanceof Throwable? (Throwable) lastEntry : null;
    if (messagePattern == null) {
        return new FormattingTuple(null, throwable);
    }
    int j = messagePattern.indexOf(DELIM_STR);
    if (j == -1) {
        // this is a simple string
        return new FormattingTuple(messagePattern, throwable);
    }
    StringBuilder sbuf = new StringBuilder(messagePattern.length() + 50);
    int i = 0; // start of the not-yet-copied tail of the pattern
    int L = 0; // index of the next argument to consume
    do {
        boolean notEscaped = j == 0 || messagePattern.charAt(j - 1) != ESCAPE_CHAR;
        if (notEscaped) {
            // normal case
            sbuf.append(messagePattern, i, j);
        } else {
            // drop the escape char from the output
            sbuf.append(messagePattern, i, j - 1);
            // check that escape char is not is escaped: "abc x:\\{}"
            notEscaped = j >= 2 && messagePattern.charAt(j - 2) == ESCAPE_CHAR;
        }
        i = j + 2;
        if (notEscaped) {
            deeplyAppendParameter(sbuf, argArray[L], null);
            L++;
            if (L > lastArrIdx) {
                break;
            }
        } else {
            // escaped placeholder: emit the literal "{}"
            sbuf.append(DELIM_STR);
        }
        j = messagePattern.indexOf(DELIM_STR, i);
    } while (j != -1);
    // append the characters following the last {} pair.
    sbuf.append(messagePattern, i, messagePattern.length());
    // Report the throwable separately only if it was not consumed as a
    // regular argument above
    return new FormattingTuple(sbuf.toString(), L <= lastArrIdx? throwable : null);
}
// A null argument array must leave every message pattern untouched,
// regardless of how many placeholders it contains.
@Test
public void testNullArray() {
    String msg0 = "msg0";
    String msg1 = "msg1 {}";
    String msg2 = "msg2 {} {}";
    String msg3 = "msg3 {} {} {}";
    Object[] args = null;
    String result = MessageFormatter.arrayFormat(msg0, args).getMessage();
    assertEquals(msg0, result);
    result = MessageFormatter.arrayFormat(msg1, args).getMessage();
    assertEquals(msg1, result);
    result = MessageFormatter.arrayFormat(msg2, args).getMessage();
    assertEquals(msg2, result);
    result = MessageFormatter.arrayFormat(msg3, args).getMessage();
    assertEquals(msg3, result);
}
/**
 * Dispatches a task: if a processor is registered for the tag, it handles the
 * task directly; otherwise the task goes to the worker selected for this tag.
 */
@Override
public void addTask(Object tag, AbstractExecuteTask task) {
    NacosTaskProcessor processor = getProcessor(tag);
    if (null != processor) {
        processor.process(task);
        return;
    }
    TaskExecuteWorker worker = getWorker(tag);
    worker.process(task);
}
// A task added without a registered processor must be executed by a worker
// and leave the engine empty afterwards.
@Test
void testAddTask() throws InterruptedException {
    executeTaskExecuteEngine.addTask("test", task);
    // Give the asynchronous worker time to run the task
    TimeUnit.SECONDS.sleep(1);
    verify(task).run();
    assertTrue(executeTaskExecuteEngine.isEmpty());
    assertEquals(0, executeTaskExecuteEngine.size());
}
/**
 * Formats the logging event's timestamp with the cached date formatter.
 */
public String convert(ILoggingEvent le) {
    long timestamp = le.getTimeStamp();
    return cachingDateFormatter.format(timestamp);
}
// The converter must honour an explicit locale language ("fr") together with
// an explicit time zone ("UTC").
@Test
public void convertsDateWithSpecifiedLocaleLang() {
    assertEquals(FRENCH_TIME_UTC, convert(_timestamp, DATETIME_PATTERN, "UTC", "fr"));
}
/**
 * Resolves a node id to its host name through the cache; missing entries are
 * populated by the cache's loader.
 *
 * @param nodeId the cluster node id
 * @return the host name, or an empty Optional if unknown
 */
public Optional<String> getHostName(String nodeId) {
    return hostNameCache.getUnchecked(nodeId);
}
// A known node id must resolve to the host name reported by the cluster.
@Test
public void getHostNameReturnsNodeNameIfNodeIdIsValid() {
    when(cluster.nodeIdToHostName("node_id")).thenReturn(Optional.of("node-hostname"));
    assertThat(nodeInfoCache.getHostName("node_id")).contains("node-hostname");
}
/**
 * Appends an element, flushing first when the current batch has reached
 * BATCHSIZE. The element is also written to the compressed stream right away.
 *
 * @param element the raw bytes to buffer
 * @throws IOException if flushing or writing to the compressed stream fails
 */
public synchronized void add(byte[] element) throws IOException {
    if (currentBatch.size() >= BATCHSIZE) {
        flush();
    }
    currentBatch.add(element);
    writeIntoCompressedStream(element);
}
// Benchmark only (disabled by default): adds a very large number of elements
// to exercise batching and compression throughput.
@Test
@Disabled
void benchmark() throws IOException {
    final int count = BATCHSIZE * 100000;
    for (int i = 1; i <= count; i++) {
        buffer.add(("Loop " + i + "\n").getBytes(StandardCharsets.UTF_8));
    }
}
/**
 * Runs CONFIG GET with the given pattern against the specified cluster node
 * and converts the flat key/value reply list into Properties.
 *
 * @return the matching config entries, or null if the node returned none
 */
@Override
public Properties getConfig(RedisClusterNode node, String pattern) {
    RedisClient entry = getEntry(node);
    RFuture<List<String>> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_GET, pattern);
    List<String> r = syncFuture(f);
    if (r != null) {
        return Converters.toProperties(r);
    }
    return null;
}
// CONFIG GET "*" on a master must return a well-populated Properties set.
@Test
public void testGetConfig() {
    RedisClusterNode master = getFirstMaster();
    Properties config = connection.getConfig(master, "*");
    assertThat(config.size()).isGreaterThan(20);
}
/**
 * Generates a random 8-character, zero-padded, lowercase hexadecimal id.
 *
 * <p>Uses {@code ThreadLocalRandom} instead of allocating a fresh
 * {@code java.util.Random} on every call, avoiding per-call seeding cost and
 * needless allocation.
 *
 * @return an 8-character hex string
 */
static String newId() {
    return String.format("%08x", java.util.concurrent.ThreadLocalRandom.current().nextInt());
}
// newId must produce a non-null identifier exactly 8 characters long.
@Test
public void newId() {
    String id = DaemonConnector.newId();
    assertNotNull(id);
    assertEquals(8, id.length());
}
/**
 * Checks whether the given string is a valid Jenkins root URL according to
 * {@code CustomUrlValidator}.
 */
public static boolean isValidRootUrl(String url) {
    UrlValidator validator = new CustomUrlValidator();
    return validator.isValid(url);
}
// Host names ending in one or more dots (fully-qualified form) must be
// accepted as valid root URLs.
@Test
@Issue("JENKINS-51158")
public void trailingDotsAreAccepted() {
    assertTrue(UrlHelper.isValidRootUrl("http://jenkins.internal./"));
    assertTrue(UrlHelper.isValidRootUrl("http://jenkins.internal......./"));
    assertTrue(UrlHelper.isValidRootUrl("http://my-server.domain.:8080/jenkins"));
    assertTrue(UrlHelper.isValidRootUrl("http://my-server.domain......:8080/jenkins"));
    assertTrue(UrlHelper.isValidRootUrl("http://jenkins.com."));
    assertTrue(UrlHelper.isValidRootUrl("http://jenkins.com......"));
}
/** Returns the rule that pulls expressions out of lambdas in ProjectNodes. */
public Rule<ProjectNode> projectNodeRule() {
    return new PullUpExpressionInLambdaProjectNodeRule();
}
// A lambda that calls the non-deterministic random() must NOT have its
// expression pulled out of the lambda — the rule must not fire.
@Test
public void testNonDeterministicProjection() {
    tester().assertThat(new PullUpExpressionInLambdaRules(getFunctionManager()).projectNodeRule())
        .setSystemProperty(PULL_EXPRESSION_FROM_LAMBDA_ENABLED, "true")
        .on(p -> {
            p.variable("idmap", new MapType(BIGINT, BIGINT, KEY_BLOCK_EQUALS, KEY_BLOCK_HASH_CODE));
            return p.project(
                Assignments.builder().put(p.variable("expr"), p.rowExpression("map_filter(idmap, (k, v) -> array_position(array[random()], k) <= 200)")).build(),
                p.values(p.variable("idmap", new MapType(BIGINT, BIGINT, KEY_BLOCK_EQUALS, KEY_BLOCK_HASH_CODE))));
        }).doesNotFire();
}
/**
 * Counts the reported characters via {@code advance} before forwarding the
 * event to the wrapped handler.
 */
@Override
public void characters(char[] ch, int start, int length) throws SAXException {
    advance(length);
    super.characters(ch, start, length);
}
// Feeding the handler one character per consumed stream byte must not raise
// a SAXException.
@Test
public void testOneCharacterPerByte() throws IOException {
    try {
        char[] ch = new char[1];
        for (int i = 0; i < MANY_BYTES; i++) {
            stream.read();
            handler.characters(ch, 0, ch.length);
        }
    } catch (SAXException e) {
        fail("Unexpected SAXException");
    }
}
/**
 * Renders the configured template against the event's context.
 *
 * <p>The data model exposes fields under "source": the message context's
 * fields when present, otherwise the event context's DTO fields.
 *
 * @return the rendered string value, or an error FieldValue if the template
 *         is invalid for this data model or rendering throws
 */
@Override
protected FieldValue doGet(String fieldName, EventWithContext eventWithContext) {
    final ImmutableMap.Builder<String, Object> dataModelBuilder = ImmutableMap.builder();
    if (eventWithContext.messageContext().isPresent()) {
        dataModelBuilder.put("source", eventWithContext.messageContext().get().getFields());
    } else if (eventWithContext.eventContext().isPresent()) {
        dataModelBuilder.put("source", eventWithContext.eventContext().get().toDto().fields());
    }
    final ImmutableMap<String, Object> dataModel = dataModelBuilder.build();
    // Reject templates that cannot be rendered against this data model
    if (!isValidTemplate(config.template(), dataModel)) {
        return FieldValue.error();
    }
    try {
        return FieldValue.string(templateEngine.transform(config.template(), dataModel));
    } catch (Exception e) {
        LOG.error("Couldn't render field template \"{}\"", config.template(), e);
        return FieldValue.error();
    }
}
// Template variables must resolve against the fields of the message context.
@Test
public void templateWithMessageContext() {
    final TestEvent event = new TestEvent();
    final EventWithContext eventWithContext = EventWithContext.create(event, newMessage(ImmutableMap.of("hello", "world")));
    final FieldValue fieldValue = newTemplate("hello: ${source.hello}").doGet("test", eventWithContext);
    assertThat(fieldValue.value()).isEqualTo("hello: world");
}
/**
 * Creates a matcher that parses the examined value as JSON and applies the
 * given matcher to the resulting ReadContext.
 */
public IsJson(Matcher<? super ReadContext> jsonMatcher) {
    this.jsonMatcher = jsonMatcher;
}
// A JSON string whose path evaluates to false must not match.
@Test
public void shouldNotMatchJsonStringEvaluatedToFalse() {
    assertThat(BOOKS_JSON_STRING, not(isJson(withPathEvaluatedTo(false))));
}
/**
 * Sanitises a metric name fragment for statsd by replacing every ':' (the
 * statsd name/value separator) with '-'.
 *
 * <p>{@link String#replace(char, char)} already provides the original's
 * copy-on-write behaviour — it returns the same instance when no ':' occurs —
 * so the manual char-array scan is unnecessary.
 *
 * @param input the metric name fragment to sanitise
 * @return the input with every ':' replaced by '-'
 */
@Override
public String filterCharacters(String input) {
    return input.replace(':', '-');
}
// Every ':' must become '-'; all other input passes through unchanged.
@Test
void testReplaceInvalidChars() {
    StatsDReporter reporter = new StatsDReporter();
    assertThat(reporter.filterCharacters("")).isEqualTo("");
    assertThat(reporter.filterCharacters("abc")).isEqualTo("abc");
    assertThat(reporter.filterCharacters("a:b::")).isEqualTo("a-b--");
}
/**
 * Watch loop: processes watch notifications until shutdown is requested or
 * the watch key becomes invalid (e.g. the watched directory disappeared).
 * Always closes the watch service on exit.
 */
@Override
public void run() {
    log.info("Starting file watcher to watch for changes: " + file);
    try {
        while (!shutdown && key.isValid()) {
            try {
                handleNextWatchNotification();
            } catch (Exception e) {
                // One failed notification must not stop the watch loop
                log.error("Watch service caught exception, will continue:" + e);
            }
        }
    } finally {
        log.info("Stopped watching for TLS cert changes");
        try {
            watchService.close();
        } catch (IOException e) {
            log.info("Error closing watch service", e);
        }
    }
}
// Deleting the watched directory must terminate the watcher thread without
// ever invoking the change callback.
@Test
public void willStopIfDirectoryDeleted() throws Exception {
    // Given:
    watcher = new FileWatcher(filePath, callback);
    watcher.start();
    // When:
    final Path parent = filePath.getParent();
    if (parent != null) {
        Files.delete(parent);
    } else {
        Assert.fail("Expected parent for " + filePath);
    }
    // Then:
    verify(callback, never()).run();
    assertThatEventually(watcher::isAlive, is(false));
}
/**
 * Looks up the lookup table identified by the descriptor and exports it as a
 * content-pack entity; empty if no such table exists.
 */
@Override
public Optional<Entity> exportEntity(EntityDescriptor entityDescriptor, EntityDescriptorIds entityDescriptorIds) {
    final ModelId modelId = entityDescriptor.id();
    return lookupTableService.get(modelId.id()).map(lookupTableDto -> exportNativeEntity(lookupTableDto, entityDescriptorIds));
}
// Exporting a lookup table must produce a LOOKUP_TABLE_V1 entity whose data
// references the adapter and cache through their descriptor ids and carries
// all default-value settings.
@Test
public void exportEntity() {
    final EntityDescriptor tableDescriptor = EntityDescriptor.create("1234567890", ModelTypes.LOOKUP_TABLE_V1);
    final EntityDescriptor adapterDescriptor = EntityDescriptor.create("data-adapter-1234", ModelTypes.LOOKUP_ADAPTER_V1);
    final EntityDescriptor cacheDescriptor = EntityDescriptor.create("cache-1234", ModelTypes.LOOKUP_CACHE_V1);
    final EntityDescriptorIds entityDescriptorIds = EntityDescriptorIds.of(
        tableDescriptor,
        adapterDescriptor,
        cacheDescriptor
    );
    final LookupTableDto lookupTableDto = LookupTableDto.builder()
        .id("1234567890")
        .name("lookup-table-name")
        .title("Lookup Table Title")
        .description("Lookup Table Description")
        .dataAdapterId("data-adapter-1234")
        .cacheId("cache-1234")
        .defaultSingleValue("default-single")
        .defaultSingleValueType(LookupDefaultValue.Type.STRING)
        .defaultMultiValue("default-multi")
        .defaultMultiValueType(LookupDefaultValue.Type.STRING)
        .build();
    final Entity entity = facade.exportNativeEntity(lookupTableDto, entityDescriptorIds);
    assertThat(entity).isInstanceOf(EntityV1.class);
    assertThat(entity.id()).isEqualTo(ModelId.of(entityDescriptorIds.get(tableDescriptor).orElse(null)));
    assertThat(entity.type()).isEqualTo(ModelTypes.LOOKUP_TABLE_V1);
    final EntityV1 entityV1 = (EntityV1) entity;
    final LookupTableEntity lookupTableEntity = objectMapper.convertValue(entityV1.data(), LookupTableEntity.class);
    assertThat(lookupTableEntity.name()).isEqualTo(ValueReference.of("lookup-table-name"));
    assertThat(lookupTableEntity.title()).isEqualTo(ValueReference.of("Lookup Table Title"));
    assertThat(lookupTableEntity.description()).isEqualTo(ValueReference.of("Lookup Table Description"));
    // Adapter and cache are referenced by their content-pack descriptor ids
    assertThat(lookupTableEntity.dataAdapterName()).isEqualTo(ValueReference.of(entityDescriptorIds.get(adapterDescriptor).orElse(null)));
    assertThat(lookupTableEntity.cacheName()).isEqualTo(ValueReference.of(entityDescriptorIds.get(cacheDescriptor).orElse(null)));
    assertThat(lookupTableEntity.defaultSingleValue()).isEqualTo(ValueReference.of("default-single"));
    assertThat(lookupTableEntity.defaultSingleValueType()).isEqualTo(ValueReference.of(LookupDefaultValue.Type.STRING));
    assertThat(lookupTableEntity.defaultMultiValue()).isEqualTo(ValueReference.of("default-multi"));
    assertThat(lookupTableEntity.defaultMultiValueType()).isEqualTo(ValueReference.of(LookupDefaultValue.Type.STRING));
}
/**
 * Normalises a table name pattern to upper case for this dialect's metadata
 * lookups.
 *
 * <p>Uses {@code Locale.ROOT} so the mapping does not depend on the JVM's
 * default locale (e.g. the Turkish locale would otherwise map 'i' to a
 * dotted capital 'İ', breaking identifier matching).
 *
 * @param tableNamePattern the pattern supplied by the caller
 * @return the upper-cased pattern
 */
@Override
public String formatTableNamePattern(final String tableNamePattern) {
    return tableNamePattern.toUpperCase(java.util.Locale.ROOT);
}
// Table name patterns must be upper-cased for this dialect.
@Test
void assertFormatTableNamePattern() {
    assertThat(dialectDatabaseMetaData.formatTableNamePattern("tbl"), is("TBL"));
}
/**
 * Returns the token's remaining validity in seconds.
 *
 * <p>When auth is disabled the configured default validity is returned
 * without parsing the token at all.
 *
 * @throws AccessException if the token cannot be parsed
 */
public long getExpiredTimeInSeconds(String token) throws AccessException {
    if (!authConfigs.isAuthEnabled()) {
        return tokenValidityInSeconds;
    }
    return jwtParser.getExpireTimeInSeconds(token);
}
// A freshly created token must report a positive remaining validity.
@Test
void testGetExpiredTimeInSeconds() throws AccessException {
    assertTrue(jwtTokenManager.getExpiredTimeInSeconds(jwtTokenManager.createToken("nacos")) > 0);
}
/** Parses a serialized metadata update from its JSON representation. */
public static MetadataUpdate fromJson(String json) {
    return JsonUtil.parse(json, MetadataUpdateParser::fromJson);
}
// A remove-snapshots JSON document must parse into the equivalent
// RemoveSnapshot update.
@Test
public void testRemoveSnapshotsFromJson() {
    String action = MetadataUpdateParser.REMOVE_SNAPSHOTS;
    long snapshotId = 2L;
    String json = String.format("{\"action\":\"%s\",\"snapshot-ids\":[2]}", action);
    MetadataUpdate expected = new MetadataUpdate.RemoveSnapshot(snapshotId);
    assertEquals(action, expected, MetadataUpdateParser.fromJson(json));
}
/**
 * Builds a ProxyProvider from standard JVM proxy properties, preferring an
 * HTTP(S) proxy over a SOCKS proxy when both are configured.
 *
 * @param properties the source properties, must not be null
 * @return the provider, or null if no proxy host property is set
 */
@Nullable
static ProxyProvider createFrom(Properties properties) {
    Objects.requireNonNull(properties, "properties");
    if (properties.containsKey(HTTP_PROXY_HOST) || properties.containsKey(HTTPS_PROXY_HOST)) {
        return createHttpProxyFrom(properties);
    }
    if (properties.containsKey(SOCKS_PROXY_HOST)) {
        return createSocksProxyFrom(properties);
    }
    return null;
}
// Setting http.proxyUser without http.proxyPassword must fail fast with a
// descriptive NullPointerException.
@Test
void proxyFromSystemProperties_npeWhenHttpProxyUsernameIsSetButNotPassword() {
    Properties properties = new Properties();
    properties.setProperty(ProxyProvider.HTTP_PROXY_HOST, "host");
    properties.setProperty(ProxyProvider.HTTP_PROXY_USER, "user");
    Throwable throwable = catchThrowable(() -> ProxyProvider.createFrom(properties));
    assertThat(throwable)
        .isInstanceOf(NullPointerException.class)
        .hasMessage("Proxy username is set via 'http.proxyUser', but 'http.proxyPassword' is not set.");
}
/**
 * Convenience overload that masks query parameters using the default
 * StringBuilder capacity.
 */
public static String maskUrlQueryParameters(List<NameValuePair> keyValueList,
    Set<String> queryParamsForFullMask,
    Set<String> queryParamsForPartialMask) {
    return maskUrlQueryParameters(keyValueList, queryParamsForFullMask, queryParamsForPartialMask,
        DEFAULT_QUERY_STRINGBUILDER_CAPACITY);
}
// Covers full masking, partial masking, GUID masking, precedence of full over
// partial, duplicate keys, empty values/keys, and null values.
@Test
// If a config for partial masking is introduced, this test will have to be
// modified for the config-controlled partial mask length
public void testMaskUrlQueryParameters() throws Exception {
    Set<String> fullMask = new HashSet<>(Arrays.asList("abc", "bcd"));
    Set<String> partialMask = new HashSet<>(Arrays.asList("pqr", "xyz"));
    //Partial and full masking test
    List<NameValuePair> keyValueList = URLEncodedUtils
        .parse("abc=123&pqr=45678&def=789&bcd=012&xyz=678", StandardCharsets.UTF_8);
    Assert.assertEquals("Incorrect masking",
        "abc=XXXXX&pqr=456XX&def=789&bcd=XXXXX&xyz=67X",
        UriUtils.maskUrlQueryParameters(keyValueList, fullMask, partialMask));
    //Mask GUIDs
    keyValueList = URLEncodedUtils
        .parse("abc=123&pqr=256877f2-c094-48c8-83df-ddb5825694fd&def=789", StandardCharsets.UTF_8);
    Assert.assertEquals("Incorrect partial masking for guid",
        "abc=XXXXX&pqr=256877f2-c094-48c8XXXXXXXXXXXXXXXXXX&def=789",
        UriUtils.maskUrlQueryParameters(keyValueList, fullMask, partialMask));
    //For params entered for both full and partial masks, full mask applies
    partialMask.add("abc");
    Assert.assertEquals("Full mask should apply",
        "abc=XXXXX&pqr=256877f2-c094-48c8XXXXXXXXXXXXXXXXXX&def=789",
        UriUtils.maskUrlQueryParameters(keyValueList, fullMask, partialMask));
    //Duplicate key (to be masked) with different values
    keyValueList = URLEncodedUtils
        .parse("abc=123&pqr=4561234&abc=789", StandardCharsets.UTF_8);
    Assert.assertEquals("Duplicate key: Both values should get masked",
        "abc=XXXXX&pqr=4561XXX&abc=XXXXX",
        UriUtils.maskUrlQueryParameters(keyValueList, fullMask, partialMask));
    //Duplicate key (not to be masked) with different values
    keyValueList = URLEncodedUtils
        .parse("abc=123&def=456&pqrs=789&def=000", StandardCharsets.UTF_8);
    Assert.assertEquals("Duplicate key: Values should not get masked",
        "abc=XXXXX&def=456&pqrs=789&def=000",
        UriUtils.maskUrlQueryParameters(keyValueList, fullMask, partialMask));
    //Empty param value
    keyValueList = URLEncodedUtils
        .parse("abc=123&def=&pqr=789&s=1", StandardCharsets.UTF_8);
    Assert.assertEquals("Incorrect url with empty query value",
        "abc=XXXXX&def=&pqr=78X&s=1",
        UriUtils.maskUrlQueryParameters(keyValueList, fullMask, partialMask));
    //Empty param key
    keyValueList = URLEncodedUtils
        .parse("def=2&pqr=789&s=1", StandardCharsets.UTF_8);
    keyValueList.add(new BasicNameValuePair("", "m1"));
    List<NameValuePair> finalKeyValueList = keyValueList;
    intercept(IllegalArgumentException.class, () -> UriUtils
        .maskUrlQueryParameters(finalKeyValueList, fullMask, partialMask));
    //Param (not to be masked) with null value
    keyValueList = URLEncodedUtils
        .parse("abc=123&s=1", StandardCharsets.UTF_8);
    keyValueList.add(new BasicNameValuePair("null1", null));
    Assert.assertEquals("Null value, incorrect query construction",
        "abc=XXXXX&s=1&null1=",
        UriUtils.maskUrlQueryParameters(keyValueList, fullMask, partialMask));
    //Param (to be masked) with null value
    keyValueList.add(new BasicNameValuePair("null2", null));
    fullMask.add("null2");
    Assert.assertEquals("No mask should be added for null value",
        "abc=XXXXX&s=1&null1=&null2=",
        UriUtils.maskUrlQueryParameters(keyValueList, fullMask, partialMask)); //no mask
}
/**
 * Parses a Kubernetes-style CPU quantity into milli-CPUs.
 *
 * <p>Values with an {@code m} suffix are already milli-CPUs and must be whole
 * numbers (e.g. {@code "100m"} -> 100); plain numbers are CPUs and are
 * multiplied by 1000 (e.g. {@code "0.5"} -> 500, truncated toward zero).
 *
 * @param cpu the CPU quantity string, e.g. "1", "0.5" or "100m"
 * @return the quantity in milli-CPUs
 * @throws IllegalArgumentException if the quantity is empty or cannot be parsed
 */
public static int parseCpuAsMilliCpus(String cpu) {
    // Guard empty input explicitly rather than failing with an obscure
    // StringIndexOutOfBoundsException from substring()
    if (cpu == null || cpu.isEmpty()) {
        throw new IllegalArgumentException("Failed to parse CPU quantity \"" + cpu + "\"");
    }
    int suffixIndex = cpu.length() - 1;
    try {
        if ("m".equals(cpu.substring(suffixIndex))) {
            // "m" suffix: the numeric part must be a whole number of milli-CPUs
            return Integer.parseInt(cpu.substring(0, suffixIndex));
        } else {
            return (int) (Double.parseDouble(cpu) * 1000L);
        }
    } catch (NumberFormatException e) {
        // Preserve the cause so callers can see what failed to parse
        throw new IllegalArgumentException("Failed to parse CPU quantity \"" + cpu + "\"", e);
    }
}
// CPU quantities parse to milli-CPUs; fractional values with the "m" suffix
// are rejected.
@Test
public void testParse() {
    assertThat(parseCpuAsMilliCpus("100000"), is(100000000));
    assertThat(parseCpuAsMilliCpus("1"), is(1000));
    assertThat(parseCpuAsMilliCpus("1m"), is(1));
    assertThat(parseCpuAsMilliCpus("0.5"), is(500));
    assertThat(parseCpuAsMilliCpus("0"), is(0));
    assertThat(parseCpuAsMilliCpus("0m"), is(0));
    assertThat(parseCpuAsMilliCpus("0.0"), is(0));
    assertThat(parseCpuAsMilliCpus("0.000001"), is(0));
    assertThrows(IllegalArgumentException.class, () -> parseCpuAsMilliCpus("0.0m"));
    assertThrows(IllegalArgumentException.class, () -> parseCpuAsMilliCpus("0.1m"));
}
/** Returns a coder for Avro generic records of the given schema. */
public static AvroGenericCoder of(Schema schema) {
    return AvroGenericCoder.of(schema);
}
// String coders are deterministic; stringable classes are flagged because
// their #toString() may not be deterministic.
@Test
public void testDeterminismStringable() {
    assertDeterministic(AvroCoder.of(String.class));
    assertNonDeterministic(
        AvroCoder.of(StringableClass.class),
        reasonClass(StringableClass.class, "may not have deterministic #toString()"));
}
/**
 * Validates that the given column name is non-null and contains only
 * characters legal in a DB identifier.
 *
 * @param columnName the column name to validate, must not be null
 * @return the validated column name
 * @throws NullPointerException if {@code columnName} is null
 */
public static String validateColumnName(@Nullable String columnName) {
    String name = requireNonNull(columnName, "Column name cannot be null");
    // Use the null-checked local consistently rather than the raw parameter
    checkDbIdentifierCharacters(name, "Column name");
    return name;
}
// Well-formed column names must pass validation without throwing.
@Test
public void accept_valid_table_name() {
    validateColumnName("date_in_ms");
    validateColumnName("date_in_ms_1");
}
/**
 * Joins path segments into one path, collapsing slashes at the joints while
 * preserving a leading slash on the first non-empty segment and a trailing
 * slash on the last one. Null/empty segments are skipped.
 *
 * @param paths the segments to join, may be null or empty
 * @return the joined path, never null
 */
public static String joinPaths(final String... paths) {
    if (paths == null || paths.length == 0) {
        return "";
    }
    final StringBuilder joined = new StringBuilder();
    // Build right-to-left: only once a later segment has been added do we
    // know a trailing '/' on the current segment must be stripped.
    boolean addedLast = false;
    for (int i = paths.length - 1; i >= 0; i--) {
        String path = paths[i];
        if (ObjectHelper.isNotEmpty(path)) {
            if (addedLast) {
                path = stripSuffix(path, "/");
            }
            addedLast = true;
            if (path.charAt(0) == '/') {
                // Segment brings its own separator: prepend as-is
                joined.insert(0, path);
            } else {
                if (i > 0) {
                    // Not the first array element: insert the joining '/'
                    joined.insert(0, '/').insert(1, path);
                } else {
                    joined.insert(0, path);
                }
            }
        }
    }
    return joined.toString();
}
// Exercises joinPaths over null/empty segments, leading/trailing slash
// combinations, and skipped interior segments.
@Test
public void shouldJoinPaths() {
    assertThat(URISupport.joinPaths(null, null)).isEmpty();
    assertThat(URISupport.joinPaths("", null)).isEmpty();
    assertThat(URISupport.joinPaths(null, "")).isEmpty();
    assertThat(URISupport.joinPaths("", "")).isEmpty();
    assertThat(URISupport.joinPaths("a", "")).isEqualTo("a");
    assertThat(URISupport.joinPaths("a", "b")).isEqualTo("a/b");
    assertThat(URISupport.joinPaths("/a", "b")).isEqualTo("/a/b");
    assertThat(URISupport.joinPaths("/a", "b/")).isEqualTo("/a/b/");
    assertThat(URISupport.joinPaths("/a/", "b/")).isEqualTo("/a/b/");
    assertThat(URISupport.joinPaths("/a/", "/b/")).isEqualTo("/a/b/");
    assertThat(URISupport.joinPaths("a", "b", "c")).isEqualTo("a/b/c");
    assertThat(URISupport.joinPaths("a", null, "c")).isEqualTo("a/c");
    assertThat(URISupport.joinPaths("/a/", "/b", "c/", "/d/")).isEqualTo("/a/b/c/d/");
    assertThat(URISupport.joinPaths("/a/", "/b", "c/", null)).isEqualTo("/a/b/c/");
    assertThat(URISupport.joinPaths("/a/", null, null, null)).isEqualTo("/a/");
    assertThat(URISupport.joinPaths("a/", "/b", null, null)).isEqualTo("a/b");
}
/** Evaluates the conditions with the strategy instance selected by the code. */
public static boolean match(final Integer strategy, final List<ConditionData> conditionDataList, final ServerWebExchange exchange) {
    return newInstance(strategy).match(conditionDataList, exchange);
}
// For this fixture, AND across the conditions fails while OR succeeds.
@Test
public void testMatch() {
    assertFalse(MatchStrategyFactory.match(MatchModeEnum.AND.getCode(), conditionDataList, exchange));
    assertTrue(MatchStrategyFactory.match(MatchModeEnum.OR.getCode(), conditionDataList, exchange));
}
/**
 * Returns a new CumulativeLatencies whose histograms contain the merged
 * recordings of this instance and {@code toAdd}; neither operand is modified.
 */
public CumulativeLatencies plus(CumulativeLatencies toAdd) {
    CumulativeLatencies result = new CumulativeLatencies();
    // Fold both operands into the fresh result
    result.publishLatency.add(this.publishLatency);
    result.publishDelayLatency.add(this.publishDelayLatency);
    result.endToEndLatency.add(this.endToEndLatency);
    result.publishLatency.add(toAdd.publishLatency);
    result.publishDelayLatency.add(toAdd.publishDelayLatency);
    result.endToEndLatency.add(toAdd.endToEndLatency);
    return result;
}
// Adding two empty latency sets must yield histograms equal to the operands'.
@Test
void zeroPlus() {
    CumulativeLatencies one = new CumulativeLatencies();
    CumulativeLatencies two = new CumulativeLatencies();
    CumulativeLatencies result = one.plus(two);
    assertThat(result)
        .satisfies(
            r -> {
                assertThat(r.publishLatency).isEqualTo(two.publishLatency);
                assertThat(r.publishDelayLatency).isEqualTo(two.publishDelayLatency);
                assertThat(r.endToEndLatency).isEqualTo(two.endToEndLatency);
            });
}
/**
 * Runs the consumer-status admin command: prints the running info of the
 * consumers in a group.
 *
 * <p>Options used here: {@code -n} name server address, {@code -g} consumer
 * group, {@code -b} broker address (restricts the connection lookup),
 * {@code -i} client id (queries only that client), {@code -s} include jstack
 * in the running info.
 *
 * @throws SubCommandException if querying the consumer info fails
 */
@Override
public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException {
    DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook);
    defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
    if (commandLine.hasOption('n')) {
        defaultMQAdminExt.setNamesrvAddr(commandLine.getOptionValue('n').trim());
    }
    try {
        defaultMQAdminExt.start();
        String group = commandLine.getOptionValue('g').trim();
        // With -b, look up the connection info on the given broker only
        ConsumerConnection cc = commandLine.hasOption('b')
            ? defaultMQAdminExt.examineConsumerConnectionInfo(group, commandLine.getOptionValue('b').trim())
            : defaultMQAdminExt.examineConsumerConnectionInfo(group);
        boolean jstack = commandLine.hasOption('s');
        if (!commandLine.hasOption('i')) {
            // No client id given: dump every client's running info to a file
            // and analyze subscription consistency / rebalance status
            int i = 1;
            long now = System.currentTimeMillis();
            final TreeMap<String/* clientId */, ConsumerRunningInfo> criTable = new TreeMap<>();
            System.out.printf("%-10s %-40s %-20s %s%n", "#Index", "#ClientId", "#Version", "#ConsumerRunningInfoFile");
            for (Connection conn : cc.getConnectionSet()) {
                try {
                    ConsumerRunningInfo consumerRunningInfo = defaultMQAdminExt.getConsumerRunningInfo(group, conn.getClientId(), jstack);
                    if (consumerRunningInfo != null) {
                        criTable.put(conn.getClientId(), consumerRunningInfo);
                        String filePath = now + "/" + conn.getClientId();
                        MixAll.string2FileNotSafe(consumerRunningInfo.formatString(), filePath);
                        System.out.printf("%-10d %-40s %-20s %s%n", i++, conn.getClientId(), MQVersion.getVersionDesc(conn.getVersion()), filePath);
                    }
                } catch (Exception e) {
                    // Best-effort: keep querying the remaining clients
                    e.printStackTrace();
                }
            }
            if (!criTable.isEmpty()) {
                boolean subSame = ConsumerRunningInfo.analyzeSubscription(criTable);
                // Rebalance is only analyzed when subscriptions agree
                boolean rebalanceOK = subSame && ConsumerRunningInfo.analyzeRebalance(criTable);
                if (subSame) {
                    System.out.printf("%n%nSame subscription in the same group of consumer");
                    System.out.printf("%n%nRebalance %s%n", rebalanceOK ? "OK" : "Failed");
                    Iterator<Entry<String, ConsumerRunningInfo>> it = criTable.entrySet().iterator();
                    while (it.hasNext()) {
                        Entry<String, ConsumerRunningInfo> next = it.next();
                        String result = ConsumerRunningInfo.analyzeProcessQueue(next.getKey(), next.getValue());
                        if (result.length() > 0) {
                            System.out.printf("%s", result);
                        }
                    }
                } else {
                    System.out.printf("%n%nWARN: Different subscription in the same group of consumer!!!");
                }
            }
        } else {
            // -i given: print the running info of that single client
            String clientId = commandLine.getOptionValue('i').trim();
            ConsumerRunningInfo consumerRunningInfo = defaultMQAdminExt.getConsumerRunningInfo(group, clientId, jstack);
            if (consumerRunningInfo != null) {
                System.out.printf("%s", consumerRunningInfo.formatString());
            }
        }
    } catch (Exception e) {
        throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e);
    } finally {
        defaultMQAdminExt.shutdown();
    }
}
// The command must run end-to-end against the mocked name server without
// throwing a SubCommandException.
@Test
public void testExecute() throws SubCommandException {
    ConsumerStatusSubCommand cmd = new ConsumerStatusSubCommand();
    Options options = ServerUtil.buildCommandlineOptions(new Options());
    String[] subargs = new String[] {"-g default-group", "-i cid_one", String.format("-n localhost:%d", nameServerMocker.listenPort())};
    final CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new DefaultParser());
    cmd.execute(commandLine, options, null);
}
/**
 * Registers a custom parser for one CSV column: the lambda converts the raw
 * cell text of {@code fieldName} into the field's value, overriding the
 * default conversion for that column.
 *
 * @return this, for chaining further configuration
 */
public <OutputT extends @NonNull Object> CsvIOParse<T> withCustomRecordParsing(
    String fieldName, SerializableFunction<String, OutputT> customRecordParsingFn) {

    Map<String, SerializableFunction<String, Object>> customProcessingMap =
        getConfigBuilder().getOrCreateCustomProcessingMap();
    // Method reference erases OutputT to Object for storage in the map
    customProcessingMap.put(fieldName, customRecordParsingFn::apply);
    getConfigBuilder().setCustomProcessingMap(customProcessingMap);
    return this;
}
// Two custom parsing lambdas registered on different columns must both be
// applied, yielding the expected POJO with no errors.
@Test
public void givenMultipleCustomParsingLambdas_parsesPOJOs() {
    PCollection<String> records = csvRecords(
        pipeline,
        "instant,instantList",
        "2024-01-23@10:00:05,10-00-05-2024-01-23;12-59-59-2024-01-24");
    TimeContaining want = timeContaining(
        Instant.parse("2024-01-23T10:00:05.000Z"),
        Arrays.asList(
            Instant.parse("2024-01-23T10:00:05.000Z"),
            Instant.parse("2024-01-24T12:59:59.000Z")));
    CsvIOParse<TimeContaining> underTest = underTest(
        TIME_CONTAINING_SCHEMA,
        CSVFormat.DEFAULT
            .withHeader("instant", "instantList")
            .withAllowDuplicateHeaderNames(false),
        new HashMap<>(),
        timeContainingFromRowFn(),
        TIME_CONTAINING_CODER)
        .withCustomRecordParsing(
            "instant",
            input -> DateTimeFormat.forPattern("yyyy-MM-dd@HH:mm:ss")
                .parseDateTime(input)
                .toInstant())
        .withCustomRecordParsing("instantList", instantListParsingLambda());
    CsvIOParseResult<TimeContaining> result = records.apply(underTest);
    PAssert.that(result.getOutput()).containsInAnyOrder(want);
    PAssert.that(result.getErrors()).empty();
    pipeline.run();
}
/** Returns a trigger that fires when the watermark passes the end of the window. */
public static FromEndOfWindow pastEndOfWindow() {
    return new FromEndOfWindow();
}
// Early/late firings that can never fire must be omitted from toString().
@Test
public void testToStringExcludesNeverTrigger() {
    TriggerStateMachine trigger =
        AfterWatermarkStateMachine.pastEndOfWindow()
            .withEarlyFirings(NeverStateMachine.ever())
            .withLateFirings(NeverStateMachine.ever());
    assertEquals("AfterWatermark.pastEndOfWindow()", trigger.toString());
}
/**
 * Attempts to unblock the ring buffer after a producer died mid-claim,
 * leaving the consumer stuck at the head position.
 *
 * <p>Two blocked states are handled at the consumer index: a negative length
 * means a producer claimed space but never committed, so that record is
 * converted to padding; a zero length means the claim itself was never
 * written, so we scan forward for the next committed record and, if the gap
 * is confirmed still zeroed, pad over the gap.
 *
 * @return true if a blocked record was converted to padding
 */
public boolean unblock() {
    final AtomicBuffer buffer = this.buffer;
    final long headPosition = buffer.getLongVolatile(headPositionIndex);
    final long tailPosition = buffer.getLongVolatile(tailPositionIndex);
    if (headPosition == tailPosition) {
        // Buffer is empty: nothing can be blocked
        return false;
    }
    final int mask = capacity - 1;
    final int consumerIndex = (int)(headPosition & mask);
    final int producerIndex = (int)(tailPosition & mask);
    boolean unblocked = false;
    int length = buffer.getIntVolatile(consumerIndex);
    if (length < 0) {
        // Claimed but not committed: turn the whole claim into padding
        buffer.putInt(typeOffset(consumerIndex), PADDING_MSG_TYPE_ID);
        buffer.putIntOrdered(lengthOffset(consumerIndex), -length);
        unblocked = true;
    } else if (0 == length) {
        // go from (consumerIndex to producerIndex) or (consumerIndex to capacity)
        final int limit = producerIndex > consumerIndex ? producerIndex : capacity;
        int i = consumerIndex + ALIGNMENT;
        do {
            // read the top int of every long (looking for length aligned to 8=ALIGNMENT)
            length = buffer.getIntVolatile(i);
            if (0 != length) {
                // Re-check the gap is still zeroed to avoid racing a producer
                // that committed while we were scanning forward
                if (scanBackToConfirmStillZeroed(buffer, i, consumerIndex)) {
                    buffer.putInt(typeOffset(consumerIndex), PADDING_MSG_TYPE_ID);
                    buffer.putIntOrdered(lengthOffset(consumerIndex), i - consumerIndex);
                    unblocked = true;
                }
                break;
            }
            i += ALIGNMENT;
        } while (i < limit);
    }
    return unblocked;
}
// If a producer commits into the gap while the forward scan is racing (the
// back-scan sees a non-zero value), unblock must NOT pad over the gap.
@Test
void shouldNotUnblockGapWithMessageRaceWhenScanForwardTakesAnInterrupt() {
    final int messageLength = ALIGNMENT * 4;
    when(buffer.getLongVolatile(HEAD_COUNTER_INDEX)).thenReturn((long)messageLength);
    when(buffer.getLongVolatile(TAIL_COUNTER_INDEX)).thenReturn((long)messageLength * 3);
    // First read sees zero, the re-read during back-scan sees the commit
    when(buffer.getIntVolatile(messageLength * 2)).thenReturn(0).thenReturn(messageLength);
    when(buffer.getIntVolatile(messageLength * 2 + ALIGNMENT)).thenReturn(7);
    assertFalse(ringBuffer.unblock());
    verify(buffer, never()).putInt(typeOffset(messageLength), PADDING_MSG_TYPE_ID);
}
/** Registers the service descriptor under the given interface name. */
public static void addDescriptor(String interfaceName, ServiceDescriptor serviceDescriptor) {
    SERVICE_DESCRIPTOR_MAP.put(interfaceName, serviceDescriptor);
}
// A descriptor registered via addDescriptor must be retrievable by its
// service name.
@Test
void addDescriptor() {
    ServiceDescriptor descriptor = Mockito.mock(ServiceDescriptor.class);
    StubSuppliers.addDescriptor(serviceName, descriptor);
    Assertions.assertEquals(descriptor, StubSuppliers.getServiceDescriptor(serviceName));
}
/**
 * Pulls a full snapshot of distro data from the given cluster member via RPC.
 *
 * @param targetServer address of the member to request the snapshot from
 * @return the snapshot carried by the {@link DistroDataResponse}
 * @throws DistroException if the target member is unhealthy, the response
 *         reports a failure, or the RPC itself throws a {@link NacosException}
 */
@Override
public DistroData getDatumSnapshot(String targetServer) {
    Member member = memberManager.find(targetServer);
    // Fail fast rather than sending an RPC to a server known to be down.
    if (checkTargetServerStatusUnhealthy(member)) {
        throw new DistroException(
                String.format("[DISTRO] Cancel get snapshot caused by target server %s unhealthy", targetServer));
    }
    DistroDataRequest request = new DistroDataRequest();
    request.setDataOperation(DataOperation.SNAPSHOT);
    try {
        Response response = clusterRpcClientProxy
                .sendRequest(member, request, DistroConfig.getInstance().getLoadDataTimeoutMillis());
        if (checkResponse(response)) {
            return ((DistroDataResponse) response).getDistroData();
        } else {
            throw new DistroException(
                    String.format("[DISTRO-FAILED] Get snapshot request to %s failed, code: %d, message: %s",
                            targetServer, response.getErrorCode(), response.getMessage()));
        }
    } catch (NacosException e) {
        // Wrap to the distro-layer exception type, preserving the cause.
        throw new DistroException("[DISTRO-FAILED] Get distro snapshot failed! ", e);
    }
}
// An unhealthy target member must abort the snapshot pull with a
// DistroException before any RPC is attempted.
@Test
void testGetDatumSnapshotForMemberUnhealthy() {
    assertThrows(DistroException.class, () -> {
        when(memberManager.find(member.getAddress())).thenReturn(member);
        transportAgent.getDatumSnapshot(member.getAddress());
    });
}
/**
 * Declares that this transformation needs managed memory for the given
 * operator-scope use case with the given relative weight.
 *
 * @param managedMemoryUseCase use case to declare; must have OPERATOR scope
 * @param weight relative weight; must be strictly positive
 * @return the previously declared weight for this use case, if any
 * @throws NullPointerException if {@code managedMemoryUseCase} is null
 * @throws IllegalArgumentException if the scope is not OPERATOR or weight <= 0
 */
public Optional<Integer> declareManagedMemoryUseCaseAtOperatorScope(
        ManagedMemoryUseCase managedMemoryUseCase, int weight) {
    checkNotNull(managedMemoryUseCase);
    checkArgument(
            managedMemoryUseCase.scope == ManagedMemoryUseCase.Scope.OPERATOR,
            "Use case is not operator scope.");
    checkArgument(weight > 0, "Weights for operator scope use cases must be greater than 0.");
    // Map.put returns the previous value (or null), surfaced as an Optional.
    return Optional.ofNullable(
            managedMemoryOperatorScopeUseCaseWeights.put(managedMemoryUseCase, weight));
}
// A zero weight is invalid (weights must be > 0) and must be rejected.
@Test
void testDeclareManagedMemoryOperatorScopeUseCaseFailZeroWeight() {
    assertThatThrownBy(
            () -> transformation.declareManagedMemoryUseCaseAtOperatorScope(
                    ManagedMemoryUseCase.OPERATOR, 0))
            .isInstanceOf(IllegalArgumentException.class);
}
/**
 * Returns the Accept-Language ranges stored under the context key, or an empty
 * Optional if the key has not been populated for the current call.
 */
public static Optional<List<Locale.LanguageRange>> getAcceptableLanguages() {
    return Optional.ofNullable(ACCEPT_LANGUAGE_CONTEXT_KEY.get());
}
// Request attributes must reflect the connection manager's answer: empty when
// absent, and the parsed "en,ja" ranges (in order) when present.
@Test
void getAcceptableLanguages() {
    when(clientConnectionManager.getAcceptableLanguages(any()))
            .thenReturn(Optional.empty());
    assertTrue(getRequestAttributes().getAcceptableLanguagesList().isEmpty());
    when(clientConnectionManager.getAcceptableLanguages(any()))
            .thenReturn(Optional.of(Locale.LanguageRange.parse("en,ja")));
    assertEquals(List.of("en", "ja"), getRequestAttributes().getAcceptableLanguagesList());
}
/**
 * Sets the registry server value.
 *
 * @param server server value to set
 * @return this builder, for call chaining
 */
public RegistryBuilder server(String server) {
    this.server = server;
    return getThis();
}
// Round-trip: the value passed to the builder must appear on the built config.
@Test
void server() {
    RegistryBuilder builder = new RegistryBuilder();
    builder.server("server");
    Assertions.assertEquals("server", builder.build().getServer());
}
/**
 * Splits the inbound byte stream into complete openGauss packets.
 * Packets are only emitted once fully buffered; certain packet types are
 * aggregated into pendingMessages before being flushed downstream.
 */
@Override
public void decode(final ChannelHandlerContext context, final ByteBuf in, final List<Object> out) {
    while (isValidHeader(in.readableBytes())) {
        // Startup-phase packets are framed differently and handled separately.
        if (startupPhase) {
            handleStartupPhase(in, out);
            return;
        }
        // The 4-byte payload length follows the 1-byte message type.
        int payloadLength = in.getInt(in.readerIndex() + 1);
        if (in.readableBytes() < MESSAGE_TYPE_LENGTH + payloadLength) {
            // Incomplete packet; wait for more bytes.
            return;
        }
        byte type = in.getByte(in.readerIndex());
        CommandPacketType commandPacketType = OpenGaussCommandPacketType.valueOf(type);
        if (requireAggregation(commandPacketType)) {
            // Buffer this packet; it must be delivered together with its peers.
            pendingMessages.add(in.readRetainedSlice(MESSAGE_TYPE_LENGTH + payloadLength));
        } else if (pendingMessages.isEmpty()) {
            out.add(in.readRetainedSlice(MESSAGE_TYPE_LENGTH + payloadLength));
        } else {
            // Flush buffered packets along with this terminating one.
            handlePendingMessages(context, in, out, payloadLength);
        }
    }
}
// With 51 readable bytes (then 47, then 0 on subsequent reads) the decoder is
// expected to emit exactly one decoded packet.
@Test
void assertDecode() {
    when(byteBuf.readableBytes()).thenReturn(51, 47, 0);
    List<Object> out = new LinkedList<>();
    new OpenGaussPacketCodecEngine().decode(context, byteBuf, out);
    assertThat(out.size(), is(1));
}
public static Map<String, String[]> getQueryMap(String query) { Map<String, String[]> map = new HashMap<>(); String[] params = query.split(PARAM_CONCATENATE); for (String param : params) { String[] paramSplit = param.split("="); if (paramSplit.length == 0) { continue; // We found no key-/value-pair, so continue on the next param } String name = decodeQuery(paramSplit[0]); // hack for SOAP request (generally) if (name.trim().startsWith("<?")) { // $NON-NLS-1$ map.put(" ", new String[] {query}); //blank name // $NON-NLS-1$ return map; } // the post payload is not key=value if((param.startsWith("=") && paramSplit.length == 1) || paramSplit.length > 2) { map.put(" ", new String[] {query}); //blank name // $NON-NLS-1$ return map; } String value = ""; if(paramSplit.length>1) { value = decodeQuery(paramSplit[1]); } String[] known = map.get(name); if(known == null) { known = new String[] {value}; } else { String[] tmp = new String[known.length+1]; tmp[tmp.length-1] = value; System.arraycopy(known, 0, tmp, 0, known.length); known = tmp; } map.put(name, known); } return map; }
// A trailing "key=" parameter must yield an empty-string value, not be dropped.
@Test
void testGetQueryMapWithEmptyValue() {
    String query = "postalCode=59115&qrcode=";
    Map<String, String[]> params = RequestViewHTTP.getQueryMap(query);
    Assertions.assertNotNull(params);
    Assertions.assertEquals(2, params.size());
    String[] param1 = params.get("postalCode");
    Assertions.assertNotNull(param1);
    Assertions.assertEquals(1, param1.length);
    Assertions.assertEquals("59115", param1[0]);
    String[] param2 = params.get("qrcode");
    Assertions.assertNotNull(param2);
    Assertions.assertEquals(1, param2.length);
    Assertions.assertEquals("", param2[0]);
}
/**
 * Fits a one-class SVM with default hyper-parameters (nu = 0.5, tol = 1E-3).
 *
 * @param x training instances
 * @param kernel the Mercer kernel
 * @return the fitted model
 */
public static <T> SVM<T> fit(T[] x, MercerKernel<T> kernel) {
    return fit(x, kernel, 0.5, 1E-3);
}
// Smoke test: fits a one-class SVM on the sincos dataset and evaluates the
// score over a 51x51 grid. NOTE(review): contains no assertions — only checks
// that fitting/scoring complete without throwing (plot calls are commented out).
@Test
public void testSinCos() throws Exception {
    System.out.println("SinCos");
    CSVFormat format = CSVFormat.Builder.create().setDelimiter('\t').build();
    double[][] data = Read.csv(Paths.getTestData("clustering/sincos.txt"), format).toArray();
    SVM<double[]> model = SVM.fit(data, new GaussianKernel(0.5));
    double[] x = new double[51];
    double[] y = new double[51];
    for (int i = 0; i < x.length; i++) {
        x[i] = -2 + i * 0.1;
        y[i] = -2 + i * 0.1;
    }
    double[][] grid = new double[51][51];
    for (int i = 0; i < grid.length; i++) {
        for (int j = 0; j < grid[i].length; j++) {
            double[] point = {-2 + i * 0.1, -2 + j * 0.1};
            grid[j][i] = model.score(point);
        }
    }
    // ScatterPlot.of(data).canvas().window();
    // Heatmap.of(x, y, grid).canvas().window();
}
/**
 * Moves (renames) a file or directory on the WebDAV server.
 * When the session supports locking and a lock token is set, the token is sent
 * in the If header so the server accepts the known state token. Directory
 * targets get a trailing slash appended to the destination URL.
 *
 * @return the renamed path carrying the original file's attributes
 * @throws BackgroundException mapped from Sardine or I/O failures
 */
@Override
public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback callback,
                 final ConnectionCallback connectionCallback) throws BackgroundException {
    try {
        final String target = new DefaultUrlProvider(session.getHost()).toUrl(renamed).find(DescriptiveUrl.Type.provider).getUrl();
        if(session.getFeature(Lock.class) != null && status.getLockId() != null) {
            // Indicate that the client has knowledge of that state token
            session.getClient().move(new DAVPathEncoder().encode(file),
                    file.isDirectory() ? String.format("%s/", target) : target, status.isExists(),
                    Collections.singletonMap(HttpHeaders.IF, String.format("(<%s>)", status.getLockId())));
        }
        else {
            session.getClient().move(new DAVPathEncoder().encode(file),
                    file.isDirectory() ? String.format("%s/", target) : target, status.isExists());
        }
        // Copy original file attributes
        return renamed.withAttributes(file.attributes());
    }
    catch(SardineException e) {
        throw new DAVExceptionMappingService().map("Cannot rename {0}", e, file);
    }
    catch(IOException e) {
        throw new HttpExceptionMappingService().map(e, file);
    }
}
// Moving onto an existing target must fail with ConflictException unless the
// transfer status declares exists(true), in which case the target is replaced.
@Test
public void testMoveOverride() throws Exception {
    final Path test = new Path(new DefaultHomeFinderService(session).find(),
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new DAVTouchFeature(session).touch(test, new TransferStatus());
    final Path target = new Path(new DefaultHomeFinderService(session).find(),
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new DAVTouchFeature(session).touch(target, new TransferStatus());
    assertThrows(ConflictException.class,
            () -> new DAVMoveFeature(session).move(test, target, new TransferStatus().exists(false),
                    new Delete.DisabledCallback(), new DisabledConnectionCallback()));
    new DAVMoveFeature(session).move(test, target, new TransferStatus().exists(true),
            new Delete.DisabledCallback(), new DisabledConnectionCallback());
    assertFalse(new DAVFindFeature(session).find(test));
    assertTrue(new DAVFindFeature(session).find(target));
    new DAVDeleteFeature(session).delete(Collections.<Path>singletonList(target),
            new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * JMX attribute: true when a job executor is configured and currently active.
 */
@ManagedAttribute(description = "check if the job executor is activated")
public boolean isJobExecutorActivated() {
    return jobExecutor != null && jobExecutor.isActive();
}
// An inactive executor must report the MBean attribute as false.
@Test
public void TestIsJobExecutorActivatedFalse() {
    when(jobExecutor.isActive()).thenReturn(false);
    boolean result = jobExecutorMbean.isJobExecutorActivated();
    verify(jobExecutor).isActive();
    assertThat(result).isFalse();
}
/**
 * Persists the model and publishes a {@link UserChangedEvent} carrying the new
 * id on the server event bus.
 *
 * @return the id of the saved entity
 * @throws ValidationException if the model fails validation in the superclass save
 * @throws IllegalStateException when attempting to save the immutable local admin user
 */
@Override
public <T extends Persisted> String save(T model) throws ValidationException {
    if (model instanceof UserImpl.LocalAdminUser) {
        throw new IllegalStateException("Cannot modify local root user, this is a bug.");
    }
    final String userId = super.save(model);
    serverEventBus.post(UserChangedEvent.create(userId));
    return userId;
}
// Saving a user must persist the expected Mongo document fields; the password
// is stored hashed/non-empty rather than as the raw value.
@Test
public void testSave() throws Exception {
    final User user = userService.create();
    user.setName("TEST");
    user.setFullName("TEST");
    user.setEmail("test@example.com");
    user.setTimeZone(DateTimeZone.UTC);
    user.setPassword("TEST");
    user.setPermissions(Collections.<String>emptyList());
    final String id = userService.save(user);
    final DBObject query = BasicDBObjectBuilder.start("_id", new ObjectId(id)).get();
    @SuppressWarnings("deprecation")
    final DBObject dbObject = mongoConnection.getDatabase().getCollection(UserImpl.COLLECTION_NAME).findOne(query);
    assertThat(dbObject.get("username")).isEqualTo("TEST");
    assertThat(dbObject.get("full_name")).isEqualTo("TEST");
    assertThat(dbObject.get("email")).isEqualTo("test@example.com");
    assertThat(dbObject.get("timezone")).isEqualTo("UTC");
    assertThat((String) dbObject.get("password")).isNotEmpty();
}
/**
 * Builds a CreateStreamCommand from a physical-plan output node, copying sink
 * name, schema, timestamp column, topic name/formats and window info.
 * The trailing Optional.of(false) flag is hard-coded — presumably marking the
 * sink as not a source; TODO confirm against the command's constructor.
 */
public CreateStreamCommand createStreamCommand(final KsqlStructuredDataOutputNode outputNode) {
    return new CreateStreamCommand(
            outputNode.getSinkName().get(),
            outputNode.getSchema(),
            outputNode.getTimestampColumn(),
            outputNode.getKsqlTopic().getKafkaTopicName(),
            Formats.from(outputNode.getKsqlTopic()),
            outputNode.getKsqlTopic().getKeyFormat().getWindowInfo(),
            Optional.of(outputNode.getOrReplace()),
            Optional.of(false)
    );
}
// CREATE STREAM IF NOT EXISTS must build a valid command rather than throwing.
@Test
public void shouldNotThrowOnCreateStreamIfNotExistsIsSet() {
    // Given:
    final CreateStream ddlStatement =
            new CreateStream(SOME_NAME, STREAM_ELEMENTS, false, true, withProperties, false);
    // When:
    final CreateStreamCommand result = createSourceFactory
            .createStreamCommand(ddlStatement, ksqlConfig);
    // Then:
    assertThat(result.getSourceName(), is(SOME_NAME));
}
/**
 * Looks up a maintenance domain by id directly from the distributed store.
 *
 * @return the domain if present, otherwise an empty Optional
 */
@Override
public Optional<MaintenanceDomain> getMaintenanceDomain(MdId mdName) {
    log.debug("Retrieving MD {} from distributed store", mdName);
    return store.getMaintenanceDomain(mdName);
}
// A known id resolves to the stored domain (with its numeric id and MA list);
// an unknown id yields an empty Optional.
@Test
public void testGetMaintenanceDomain() {
    Optional<MaintenanceDomain> md = service.getMaintenanceDomain(MdIdCharStr.asMdId("test-md-1"));
    assertTrue(md.isPresent());
    assertEquals(1, md.get().mdNumericId());
    assertEquals(2, md.get().maintenanceAssociationList().size());
    //Now try an invalid name
    Optional<MaintenanceDomain> mdInvalid = service.getMaintenanceDomain(MdIdCharStr.asMdId("test-md-3"));
    assertFalse(mdInvalid.isPresent());
}
/**
 * Maps ownCloud/Nextcloud custom DAV properties onto path attributes:
 * oc:fileid -> file id, oc:size -> directory size, oc:checksums -> checksum.
 *
 * Fix: checksum tokens are split once and validated before indexing. The
 * original split each token twice and indexed [1] unconditionally, so a token
 * without a ':' separator threw ArrayIndexOutOfBoundsException — which is not
 * an IllegalArgumentException and therefore escaped the catch, aborting
 * attribute parsing entirely. Malformed tokens are now logged and skipped.
 */
@Override
public PathAttributes toAttributes(final DavResource resource) {
    final PathAttributes attributes = super.toAttributes(resource);
    final Map<QName, String> properties = resource.getCustomPropsNS();
    if(null != properties) {
        if(properties.containsKey(OC_FILEID_CUSTOM_NAMESPACE)) {
            final String value = properties.get(OC_FILEID_CUSTOM_NAMESPACE);
            attributes.setFileId(value);
        }
        if(resource.isDirectory()) {
            // Directories carry their aggregate size in a custom property.
            if(properties.containsKey(OC_SIZE_CUSTOM_NAMESPACE)) {
                final String value = properties.get(OC_SIZE_CUSTOM_NAMESPACE);
                attributes.setSize(Long.parseLong(value));
            }
        }
        if(properties.containsKey(OC_CHECKSUMS_CUSTOM_NAMESPACE)) {
            // Property value is a space-separated list of "algorithm:hash" tokens.
            for(String v : StringUtils.split(properties.get(OC_CHECKSUMS_CUSTOM_NAMESPACE), StringUtils.SPACE)) {
                final String[] pair = StringUtils.split(v, ":");
                if(null == pair || pair.length < 2) {
                    // Malformed token without separator; skip instead of failing the listing.
                    log.warn(String.format("Unsupported checksum %s", v));
                    continue;
                }
                try {
                    attributes.setChecksum(new Checksum(HashAlgorithm.valueOf(StringUtils.lowerCase(pair[0])),
                            StringUtils.lowerCase(pair[1])));
                }
                catch(IllegalArgumentException e) {
                    // Unknown hash algorithm name.
                    log.warn(String.format("Unsupported checksum %s", v));
                }
            }
        }
    }
    return attributes;
}
// NOTE(review): when the server-side custom timestamp equals the resource's
// actual modified date, the client-set custom LAST_MODIFIED timestamp is
// expected to win as the reported modification date.
@Test
public void testCustomModified_NotModified() throws Exception {
    final NextcloudAttributesFinderFeature f = new NextcloudAttributesFinderFeature(null);
    final DavResource mock = mock(DavResource.class);
    Map<QName, String> map = new HashMap<>();
    final String ts = "Mon, 29 Oct 2018 21:14:06 UTC";
    map.put(DAVTimestampFeature.LAST_MODIFIED_CUSTOM_NAMESPACE, ts);
    map.put(DAVTimestampFeature.LAST_MODIFIED_SERVER_CUSTOM_NAMESPACE, "Thu, 01 Nov 2018 15:31:57 UTC");
    when(mock.getModified()).thenReturn(new DateTime("2018-11-01T15:31:57Z").toDate());
    when(mock.getCustomPropsNS()).thenReturn(map);
    final PathAttributes attrs = f.toAttributes(mock);
    assertEquals(new RFC1123DateFormatter().parse(ts).getTime(), attrs.getModificationDate());
}
/**
 * Migration: converts plain-text secret fields in input configurations into
 * {@link EncryptedValue} map representations. Idempotent — skips work when the
 * recorded migrated-field set already matches, and leaves values that are
 * already maps (assumed already encrypted) untouched.
 */
@Override
public void upgrade() {
    Map<String, Set<String>> encryptedFieldsByInputType = getEncryptedFieldsByInputType();
    if (getMigratedField().equals(encryptedFieldsByInputType)) {
        LOG.debug("Migration already completed.");
        return;
    }
    final MongoCollection<Document> collection = getCollection();
    // Only inputs of types that have encrypted fields need rewriting.
    final FindIterable<Document> documents =
            collection.find(in(FIELD_TYPE, encryptedFieldsByInputType.keySet()));
    documents.forEach(doc -> {
        @SuppressWarnings("unchecked")
        final Map<String, Object> config =
                new HashMap<>((Map<String, Object>) doc.getOrDefault(FIELD_CONFIGURATION, Map.of()));
        final Set<String> encryptedFields =
                encryptedFieldsByInputType.getOrDefault((String) doc.get(FIELD_TYPE), Set.of());
        encryptedFields.forEach(fieldName -> {
            final Object value = config.get(fieldName);
            // Assume that in case of a Map, the value is already encrypted and doesn't need conversion.
            if (config.containsKey(fieldName) && !(value instanceof Map)) {
                final EncryptedValue encryptedValue = objectMapper.convertValue(value, EncryptedValue.class);
                config.put(fieldName, dbObjectMapper.convertValue(encryptedValue, TypeReferences.MAP_STRING_OBJECT));
            }
        });
        collection.updateOne(eq(FIELD_ID, doc.getObjectId(FIELD_ID)), Updates.set(FIELD_CONFIGURATION, config));
    });
    // Record completion so subsequent runs short-circuit.
    saveMigrationCompleted(encryptedFieldsByInputType);
}
// A null/absent secret must migrate to an EncryptedValue with isSet() == false.
@SuppressWarnings("unchecked")
@Test
public void migrateNullSecret() {
    migration.upgrade();
    final Document migrated = collection.find(Filters.eq(FIELD_TITLE, "null-secret")).first();
    assertThat(migrated).isNotNull().satisfies(doc ->
            assertThat((Map<String, Object>) doc.get(FIELD_CONFIGURATION)).satisfies(config -> {
                final EncryptedValue encryptedValue =
                        dbObjectMapper.convertValue(config.get(ENCRYPTED_FIELD), EncryptedValue.class);
                assertThat(encryptedValue.isSet()).isFalse();
            })
    );
}
public String toString() { // ensure minimum precision of 3 decimal places by using our own 3-decimal-place formatter when we have no nanos. final DateTimeFormatter formatter = (instant.getNano() == 0 ? ISO_INSTANT_MILLIS : DateTimeFormatter.ISO_INSTANT); return formatter.format(instant); }
// Locales using a comma decimal separator (e.g. German) must parse both
// comma- and dot-separated fractional seconds to the same instant.
@Test
public void testParsingDateTimeWithCommaDecimalStyleLocale() throws Exception {
    final Locale germanLocale = Locale.GERMANY;
    final Clock germanClock = Clock.systemUTC().withZone(ZoneId.of("+02:00")); // DST doesn't matter
    // comma-decimal
    final Timestamp t1 = new Timestamp("2014-09-23T13:49:52,987654321Z", germanClock, germanLocale);
    assertEquals("2014-09-23T13:49:52.987654321Z", t1.toString());
    // fallback to stop-decimal
    final Timestamp t2 = new Timestamp("2014-09-23T13:49:52.987654321Z", germanClock, germanLocale);
    assertEquals("2014-09-23T13:49:52.987654321Z", t2.toString());
}
/**
 * HTTP endpoint: creates a flow from its raw YAML source. The YAML is parsed
 * (validating it) before the flow is stored together with the source text.
 */
@ExecuteOn(TaskExecutors.IO)
@Post(consumes = MediaType.APPLICATION_YAML)
@Operation(tags = {"Flows"}, summary = "Create a flow from yaml source")
public HttpResponse<FlowWithSource> create(
        @Parameter(description = "The flow") @Body String flow
) throws ConstraintViolationException {
    Flow flowParsed = yamlFlowParser.parse(flow, Flow.class);
    return HttpResponse.ok(doCreate(flowParsed, flow));
}
// PATCHing an existing task updates it; a task-id mismatch between path and
// body is UNPROCESSABLE_ENTITY, and patching an unknown task id is NOT_FOUND.
@Test
void updateTaskFlow() throws InternalException {
    String flowId = IdUtils.create();
    Flow flow = generateFlowWithFlowable(flowId, "io.kestra.unittest", "a");
    Flow result = client.toBlocking().retrieve(POST("/api/v1/flows", flow), Flow.class);
    assertThat(result.getId(), is(flow.getId()));
    Task task = generateTask("test2", "updated task");
    Flow get = client.toBlocking().retrieve(
            PATCH("/api/v1/flows/" + flow.getNamespace() + "/" + flow.getId() + "/" + task.getId(), task),
            Flow.class
    );
    assertThat(get.getId(), is(flow.getId()));
    assertThat(((Return) get.findTaskByTaskId("test2")).getFormat(), is("updated task"));
    HttpClientResponseException e = assertThrows(HttpClientResponseException.class, () -> {
        client.toBlocking().retrieve(
                PATCH("/api/v1/flows/" + flow.getNamespace() + "/" + flow.getId() + "/test6", task),
                Flow.class
        );
    });
    assertThat(e.getStatus(), is(UNPROCESSABLE_ENTITY));
    e = assertThrows(HttpClientResponseException.class, () -> {
        client.toBlocking().retrieve(
                PATCH("/api/v1/flows/" + flow.getNamespace() + "/" + flow.getId() + "/test6",
                        generateTask("test6", "updated task")),
                Flow.class
        );
    });
    assertThat(e.getStatus(), is(NOT_FOUND));
}
/**
 * FEEL ends with(string, match): true when {@code string} ends with
 * {@code match}. Null parameters yield an error result (InvalidParametersEvent)
 * rather than throwing; the string parameter is checked first.
 */
public FEELFnResult<Boolean> invoke(@ParameterName( "string" ) String string, @ParameterName( "match" ) String match) {
    if ( string == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "string", "cannot be null"));
    }
    if ( match == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "match", "cannot be null"));
    }
    return FEELFnResult.ofResult( string.endsWith( match ) );
}
// Suffixes of every length, up to the full string, must all return true.
@Test
void invokeEndsWith() {
    FunctionTestUtil.assertResult(endsWithFunction.invoke("test", "t"), true);
    FunctionTestUtil.assertResult(endsWithFunction.invoke("test", "st"), true);
    FunctionTestUtil.assertResult(endsWithFunction.invoke("test", "est"), true);
    FunctionTestUtil.assertResult(endsWithFunction.invoke("test", "test"), true);
}
/**
 * Ensures the session is connected, validating credentials first.
 *
 * @return false if the session was already connected, true after a new
 *         connection has been opened
 * @throws ConnectionCanceledException when the protocol allows a configurable
 *         hostname but none is set
 * @throws BackgroundException from credential validation or connecting
 */
@Override
public boolean check(final Session<?> session, final CancelCallback callback) throws BackgroundException {
    final Host bookmark = session.getHost();
    if(bookmark.getProtocol().isHostnameConfigurable() && StringUtils.isBlank(bookmark.getHostname())) {
        throw new ConnectionCanceledException();
    }
    if(session.isConnected()) {
        if(log.isDebugEnabled()) {
            log.debug(String.format("Skip opening connection for session %s", session));
        }
        // Connection already open
        return false;
    }
    // Obtain password from keychain or prompt
    synchronized(login) {
        login.validate(bookmark, prompt, new LoginOptions(bookmark.getProtocol()));
    }
    this.connect(session, callback);
    return true;
}
// With an HTTP proxy configured, connecting to an unresolvable hostname must
// not fail on local DNS resolution; the disabled login prompt cancels instead,
// so LoginCanceledException is the expected outcome.
@Test(expected = LoginCanceledException.class)
public void testNoResolveForHTTPProxy() throws Exception {
    final Session session = new NullSession(new Host(new TestProtocol(), "unknownhost.local",
            new Credentials("user", ""))) {
        @Override
        public boolean isConnected() {
            return false;
        }
    };
    final LoginConnectionService s = new LoginConnectionService(new DisabledLoginCallback(),
            new DisabledHostKeyCallback(), new DisabledPasswordStore(), new DisabledProgressListener(),
            new DisabledProxyFinder() {
        @Override
        public Proxy find(final String target) {
            return new Proxy(Proxy.Type.HTTP, "proxy.local", 6666);
        }
    });
    s.check(session, new DisabledCancelCallback());
}
/**
 * Applies stored cookies that match the request origin to the outgoing
 * request, executes the request, then captures cookies from the response back
 * into the store. Whether a cookie may be added when one of the same name is
 * already on the request is governed by
 * allowMultipleCookiesWithTheSameNameOrCookieNotPreviouslyDefined.
 */
public Response filter(FilterableRequestSpecification requestSpec,
                       FilterableResponseSpecification responseSpec, FilterContext ctx) {
    final CookieOrigin cookieOrigin = cookieOriginFromUri(requestSpec.getURI());
    for (Cookie cookie : cookieStore.getCookies()) {
        if (cookieSpec.match(cookie, cookieOrigin)
                && allowMultipleCookiesWithTheSameNameOrCookieNotPreviouslyDefined(requestSpec, cookie)) {
            requestSpec.cookie(cookie.getName(), cookie.getValue());
        }
    }
    final Response response = ctx.next(requestSpec, responseSpec);
    // Persist any cookies the server set for subsequent requests.
    List<Cookie> responseCookies = extractResponseCookies(response, cookieOrigin);
    cookieStore.addCookies(responseCookies.toArray(new Cookie[0]));
    return response;
}
// After two filter passes (store populated on the first), the request for a
// matching origin must carry both stored cookies with their values.
@Test
public void addCookiesToMatchingUrlRequest() {
    cookieFilter.filter(reqOriginDomain, response, testFilterContext);
    cookieFilter.filter(reqOriginDomain, response, testFilterContext);
    assertThat(reqOriginDomain.getCookies().size(), Matchers.is(2));
    assertThat(reqOriginDomain.getCookies().hasCookieWithName("cookieName1"), Matchers.is(true));
    assertThat(reqOriginDomain.getCookies().getValue("cookieName1"), Matchers.is("cookieValue1"));
    assertThat(reqOriginDomain.getCookies().hasCookieWithName("cookieName2"), Matchers.is(true));
    assertThat(reqOriginDomain.getCookies().getValue("cookieName2"), Matchers.is("cookieValue2"));
}
/**
 * Records the steady fair share: memory (MB), vcores and — when custom
 * resource metrics are configured — each custom resource value.
 */
public void setSteadyFairShare(Resource resource) {
    steadyFairShareMB.set(resource.getMemorySize());
    steadyFairShareVCores.set(resource.getVirtualCores());
    if (customResources != null) {
        customResources.setSteadyFairShare(resource);
    }
}
// Setting a share with a custom resource records it; re-setting with a
// Resource lacking the custom entry must drop that custom value back to 0.
@Test
public void testSetSteadyFairShare() {
    FSQueueMetrics metrics = setupMetrics(RESOURCE_NAME);
    Resource res = Resource.newInstance(2048L, 4, ImmutableMap.of(RESOURCE_NAME, 20L));
    metrics.setSteadyFairShare(res);
    assertEquals(getErrorMessage("steadyFairShareMB"), 2048L, metrics.getSteadyFairShareMB());
    assertEquals(getErrorMessage("steadyFairShareVcores"), 4L, metrics.getSteadyFairShareVCores());
    Resource steadyFairShare = metrics.getSteadyFairShare();
    assertEquals(getErrorMessage("steadyFairShareMB"), 2048L, steadyFairShare.getMemorySize());
    assertEquals(getErrorMessage("steadyFairShareVcores"), 4L, steadyFairShare.getVirtualCores());
    assertEquals(getErrorMessage("steadyFairShare for resource: " + RESOURCE_NAME), 20L,
            steadyFairShare.getResourceValue(RESOURCE_NAME));
    res = Resource.newInstance(2049L, 5);
    metrics.setSteadyFairShare(res);
    assertEquals(getErrorMessage("steadyFairShareMB"), 2049L, metrics.getSteadyFairShareMB());
    assertEquals(getErrorMessage("steadyFairShareVcores"), 5L, metrics.getSteadyFairShareVCores());
    steadyFairShare = metrics.getSteadyFairShare();
    assertEquals(getErrorMessage("steadyFairShareMB"), 2049L, steadyFairShare.getMemorySize());
    assertEquals(getErrorMessage("steadyFairShareVcores"), 5L, steadyFairShare.getVirtualCores());
    assertEquals(getErrorMessage("steadyFairShare for resource: " + RESOURCE_NAME), 0,
            steadyFairShare.getResourceValue(RESOURCE_NAME));
}
/**
 * Creates read-only HTTP/2 trailers from alternating name/value pairs
 * (no pseudo-headers).
 *
 * @param validateHeaders whether to validate names/values
 * @param otherHeaders alternating name, value entries
 */
public static ReadOnlyHttp2Headers trailers(boolean validateHeaders, AsciiString... otherHeaders) {
    return new ReadOnlyHttp2Headers(validateHeaders, EMPTY_ASCII_STRINGS, otherHeaders);
}
// An odd-length / null-element varargs array is not valid name/value pairing
// and must be rejected with IllegalArgumentException.
@Test
public void notKeyValuePairThrows() {
    assertThrows(IllegalArgumentException.class, new Executable() {
        @Override
        public void execute() {
            ReadOnlyHttp2Headers.trailers(false, new AsciiString[]{ null });
        }
    });
}
/**
 * Parses runtime options from a property map by adapting it to a
 * name-to-value lookup function.
 */
public RuntimeOptionsBuilder parse(Map<String, String> properties) {
    return parse(properties::get);
}
// A features property of the form "@<file>" is a rerun file; its contents
// provide the feature URIs.
@Test
void should_parse_rerun_file() throws IOException {
    Path path = mockFileResource("classpath:path/to.feature");
    properties.put(Constants.FEATURES_PROPERTY_NAME, "@" + path.toString());
    RuntimeOptions options = cucumberPropertiesParser.parse(properties).build();
    assertThat(options.getFeaturePaths(), containsInAnyOrder(URI.create("classpath:path/to.feature")));
}
/**
 * Serializes a health checker to its JSON representation.
 *
 * @throws NacosSerializationException wrapping any Jackson processing failure
 */
public static String serialize(AbstractHealthChecker healthChecker) {
    try {
        return MAPPER.writeValueAsString(healthChecker);
    } catch (JsonProcessingException e) {
        throw new NacosSerializationException(healthChecker.getClass(), e);
    }
}
// The serialized JSON of a Tcp checker must carry its type discriminator.
@Test
void testSerialize() {
    Tcp tcp = new Tcp();
    String actual = HealthCheckerFactory.serialize(tcp);
    assertTrue(actual.contains("\"type\":\"TCP\""));
}
/**
 * Creates a symbolic link {@code linkname} pointing at {@code target} by
 * shelling out to the platform's symlink command.
 *
 * @return 0 on success, the shell command's exit code on failure, or 1 when
 *         either argument is null
 * @throws IOException if executing the shell command itself fails (rethrown)
 */
public static int symLink(String target, String linkname) throws IOException{
    if (target == null || linkname == null) {
        LOG.warn("Can not create a symLink with a target = " + target + " and link =" + linkname);
        return 1;
    }
    // Run the input paths through Java's File so that they are converted to the
    // native OS form
    File targetFile = new File(
            Path.getPathWithoutSchemeAndAuthority(new Path(target)).toString());
    File linkFile = new File(
            Path.getPathWithoutSchemeAndAuthority(new Path(linkname)).toString());
    String[] cmd = Shell.getSymlinkCommand(
            targetFile.toString(),
            linkFile.toString());
    ShellCommandExecutor shExec;
    try {
        if (Shell.WINDOWS && linkFile.getParentFile() != null
                && !new Path(target).isAbsolute()) {
            // Relative links on Windows must be resolvable at the time of
            // creation. To ensure this we run the shell command in the directory
            // of the link.
            //
            shExec = new ShellCommandExecutor(cmd, linkFile.getParentFile());
        } else {
            shExec = new ShellCommandExecutor(cmd);
        }
        shExec.execute();
    } catch (Shell.ExitCodeException ec) {
        int returnVal = ec.getExitCode();
        if (Shell.WINDOWS && returnVal == SYMLINK_NO_PRIVILEGE) {
            // Windows denies symlink creation to non-elevated users by default.
            LOG.warn("Fail to create symbolic links on Windows. "
                    + "The default security settings in Windows disallow non-elevated "
                    + "administrators and all non-administrators from creating symbolic links. "
                    + "This behavior can be changed in the Local Security Policy management console");
        } else if (returnVal != 0) {
            LOG.warn("Command '" + StringUtils.join(" ", cmd) + "' failed "
                    + returnVal + " with: " + ec.getMessage());
        }
        // Non-zero exit codes are reported to the caller, not thrown.
        return returnVal;
    } catch (IOException e) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Error while create symlink " + linkname + " to " + target + "."
                    + " Exception: " + StringUtils.stringifyException(e));
        }
        throw e;
    }
    return shExec.getExitCode();
}
// Linking a path onto itself must succeed (exit code 0).
@Test
public void testSymlinkSameFile() throws IOException {
    File file = new File(del, FILE);
    Verify.delete(file);
    // Create a symbolic link
    // The operation should succeed
    int result = FileUtil.symLink(file.getAbsolutePath(), file.getAbsolutePath());
    Assert.assertEquals(0, result);
}
/**
 * Logs an authentication failure at DEBUG level with cause, auth method,
 * provider, client IPs and a flood-protected login. No-op when DEBUG logging
 * is disabled.
 *
 * @throws NullPointerException if the exception is null
 */
@Override
public void loginFailure(HttpRequest request, AuthenticationException e) {
    checkRequest(request);
    requireNonNull(e, "AuthenticationException can't be null");
    // Skip building the message entirely when DEBUG is off.
    if (!LOGGER.isDebugEnabled()) {
        return;
    }
    Source source = e.getSource();
    LOGGER.debug("login failure [cause|{}][method|{}][provider|{}|{}][IP|{}|{}][login|{}]",
            emptyIfNull(e.getMessage()), source.getMethod(), source.getProvider(), source.getProviderName(),
            request.getRemoteAddr(), getAllIps(request), preventLogFlood(emptyIfNull(e.getLogin())));
}
// The DEBUG line must contain cause, method, provider name and login verbatim.
@Test
public void login_failure_creates_DEBUG_log_with_method_provider_and_login() {
    AuthenticationException exception = newBuilder()
            .setSource(Source.realm(Method.BASIC, "some provider name"))
            .setMessage("something got terribly wrong")
            .setLogin("BaR")
            .build();
    underTest.loginFailure(mockRequest(), exception);
    verifyLog("login failure [cause|something got terribly wrong][method|BASIC][provider|REALM|some provider name][IP||][login|BaR]",
            Set.of("logout", "login success"));
}
/**
 * Loads the capacity record for a group id.
 *
 * @return the first matching GroupCapacity, or null when none exists
 */
public GroupCapacity getGroupCapacity(String groupId) {
    GroupCapacityMapper groupCapacityMapper =
            mapperManager.findMapper(dataSourceService.getDataSourceType(), TableConstant.GROUP_CAPACITY);
    // Column names are backtick-quoted where they collide with SQL keywords.
    String sql = groupCapacityMapper.select(
            Arrays.asList("id", "quota", "`usage`", "`max_size`", "max_aggr_count", "max_aggr_size", "group_id"),
            Collections.singletonList("group_id"));
    List<GroupCapacity> list = jdbcTemplate.query(sql, new Object[] {groupId}, GROUP_CAPACITY_ROW_MAPPER);
    if (list.isEmpty()) {
        return null;
    }
    return list.get(0);
}
// The first row returned by the mapped query must be handed back unchanged.
@Test
void testGetGroupCapacity() {
    List<GroupCapacity> list = new ArrayList<>();
    GroupCapacity groupCapacity = new GroupCapacity();
    groupCapacity.setGroup("test");
    list.add(groupCapacity);
    String groupId = "testId";
    when(jdbcTemplate.query(anyString(), eq(new Object[] {groupId}), any(RowMapper.class))).thenReturn(list);
    GroupCapacity ret = service.getGroupCapacity(groupId);
    assertEquals(groupCapacity.getGroup(), ret.getGroup());
}
protected static VplsOperation getOptimizedVplsOperation(Deque<VplsOperation> operations) { if (operations.isEmpty()) { return null; } // no need to optimize if the queue contains only one operation if (operations.size() == 1) { return operations.getFirst(); } final VplsOperation firstOperation = operations.peekFirst(); final VplsOperation lastOperation = operations.peekLast(); final VplsOperation.Operation firstOp = firstOperation.op(); final VplsOperation.Operation lastOp = lastOperation.op(); if (firstOp.equals(VplsOperation.Operation.REMOVE)) { if (lastOp.equals(VplsOperation.Operation.REMOVE)) { // case 1: both first and last operation are REMOVE; do remove return firstOperation; } else if (lastOp.equals(VplsOperation.Operation.ADD)) { // case 2: if first is REMOVE, and last is ADD; do update return VplsOperation.of(lastOperation.vpls(), VplsOperation.Operation.UPDATE); } else { // case 3: first is REMOVE, last is UPDATE; do update return lastOperation; } } else if (firstOp.equals(VplsOperation.Operation.ADD)) { if (lastOp.equals(VplsOperation.Operation.REMOVE)) { // case 4: first is ADD, last is REMOVE; nothing to do return null; } else if (lastOp.equals(VplsOperation.Operation.ADD)) { // case 5: both first and last are ADD, do add return VplsOperation.of(lastOperation.vpls(), VplsOperation.Operation.ADD); } else { // case 6: first is ADD and last is update, do add return VplsOperation.of(lastOperation.vpls(), VplsOperation.Operation.ADD); } } else { if (lastOp.equals(VplsOperation.Operation.REMOVE)) { // case 7: last is remove, do remove return lastOperation; } else if (lastOp.equals(VplsOperation.Operation.ADD)) { // case 8: do update only return VplsOperation.of(lastOperation.vpls(), VplsOperation.Operation.UPDATE); } else { // case 9: from UPDATE to UPDATE // only need last UPDATE operation return VplsOperation.of(lastOperation.vpls(), VplsOperation.Operation.UPDATE); } } }
// Degenerate queues need no optimization: empty -> null, single op -> itself.
@Test
public void testOptimizeOperationsNoOptimize() {
    // empty queue
    Deque<VplsOperation> operations = new ArrayDeque<>();
    VplsOperation vplsOperation = VplsOperationManager.getOptimizedVplsOperation(operations);
    assertNull(vplsOperation);
    // one operation
    VplsData vplsData = VplsData.of(VPLS1);
    vplsOperation = VplsOperation.of(vplsData, VplsOperation.Operation.ADD);
    operations.add(vplsOperation);
    VplsOperation result = VplsOperationManager.getOptimizedVplsOperation(operations);
    assertEquals(vplsOperation, result);
}
/**
 * Convenience overload: builds the resource name for a Dubbo method invocation
 * without including group/version information.
 */
public static String getMethodResourceName(Invoker<?> invoker, Invocation invocation){
    return getMethodResourceName(invoker, invocation, false);
}
// With the useGroupAndVersion flag, the resource name must embed version and
// group between the interface and the method signature.
@Test
public void testGetResourceNameWithGroupAndVersion() throws NoSuchMethodException {
    Invoker invoker = mock(Invoker.class);
    URL url = URL.valueOf("dubbo://127.0.0.1:2181")
            .addParameter(CommonConstants.VERSION_KEY, "1.0.0")
            .addParameter(CommonConstants.GROUP_KEY, "grp1")
            .addParameter(CommonConstants.INTERFACE_KEY, DemoService.class.getName());
    when(invoker.getUrl()).thenReturn(url);
    when(invoker.getInterface()).thenReturn(DemoService.class);
    Invocation invocation = mock(Invocation.class);
    Method method = DemoService.class.getDeclaredMethod("sayHello", String.class, int.class);
    when(invocation.getMethodName()).thenReturn(method.getName());
    when(invocation.getParameterTypes()).thenReturn(method.getParameterTypes());
    String resourceNameUseGroupAndVersion = DubboUtils.getMethodResourceName(invoker, invocation, true);
    assertEquals("com.alibaba.csp.sentinel.adapter.dubbo.provider.DemoService:1.0.0:grp1:sayHello(java.lang.String,int)",
            resourceNameUseGroupAndVersion);
}
/**
 * Routes client change events: verify-failure events are re-synced to the
 * single server that failed verification; any other client event is synced to
 * all servers. Standalone mode has no cluster peers, so events are ignored.
 */
@Override
public void onEvent(Event event) {
    if (EnvUtil.getStandaloneMode()) {
        return;
    }
    if (event instanceof ClientEvent.ClientVerifyFailedEvent) {
        syncToVerifyFailedServer((ClientEvent.ClientVerifyFailedEvent) event);
    } else {
        syncToAllServer((ClientEvent) event);
    }
}
// A verify-failed event for a client this node is not responsible for must
// trigger neither a targeted nor a broadcast sync.
@Test
void testOnClientVerifyFailedEventWithoutResponsible() {
    when(clientManager.isResponsibleClient(client)).thenReturn(false);
    distroClientDataProcessor.onEvent(new ClientEvent.ClientVerifyFailedEvent(CLIENT_ID, MOCK_TARGET_SERVER));
    verify(distroProtocol, never()).syncToTarget(any(), any(), anyString(), anyLong());
    verify(distroProtocol, never()).sync(any(), any());
}
/**
 * Creates the file at {@code path/fileName} if absent, or updates its
 * last-modified timestamp — like Unix touch.
 *
 * @throws IOException if the file cannot be created or touched
 */
public static void touch(String path, String fileName) throws IOException {
    FileUtils.touch(Paths.get(path, fileName).toFile());
}
@Test
void testTouchWithFileName() throws IOException {
    // A random file name guarantees the target does not exist beforehand.
    File file = Paths.get(EnvUtil.getNacosTmpDir(), UUID.randomUUID().toString()).toFile();
    assertFalse(file.exists());
    DiskUtils.touch(file.getParent(), file.getName());
    assertTrue(file.exists());
    // Clean up the created file when the JVM exits.
    file.deleteOnExit();
}
/**
 * Appends a protocol instance to the tail of the event queue.
 *
 * @param p the protocol to enqueue
 */
public void add(TProtocol p) {
    events.addLast(p);
}
@Test
public void testOneOfEach() throws TException {
    // Single-element container fields for the struct.
    final List<Byte> bytes = new ArrayList<Byte>();
    bytes.add((byte) 1);
    final List<Short> shorts = new ArrayList<Short>();
    shorts.add((short) 1);
    final List<Long> longs = new ArrayList<Long>();
    longs.add((long) 1);

    // Populate every field type once, including a non-ASCII string ("å")
    // to exercise UTF-8 handling, then round-trip via validate().
    OneOfEach a = new OneOfEach(
            true,
            false,
            (byte) 8,
            (short) 16,
            (int) 32,
            (long) 64,
            (double) 1234,
            "string",
            "å",
            false,
            ByteBuffer.wrap("a".getBytes()),
            bytes,
            shorts,
            longs);
    validate(a);
}
/**
 * Returns the message associated with the wrapped source record, if present.
 * Pure delegation — no additional logic is applied here.
 */
@Override
public Optional<Message<T>> getMessage() {
    return sourceRecord.getMessage();
}
@Test
public void testCustomAck() {
    // cumulativeAck on a SinkRecord wrapping a PulsarRecord must delegate
    // to the underlying PulsarRecord exactly once.
    PulsarRecord pulsarRecord = Mockito.mock(PulsarRecord.class);
    SinkRecord sinkRecord = new SinkRecord<>(pulsarRecord, new Object());
    sinkRecord.cumulativeAck();
    Mockito.verify(pulsarRecord, Mockito.times(1)).cumulativeAck();

    // individualAck requires a PulsarRecord source; any other Record
    // implementation must trigger a RuntimeException with a clear message.
    sinkRecord = new SinkRecord(Mockito.mock(Record.class), new Object());
    try {
        sinkRecord.individualAck();
        fail("Should throw runtime exception");
    } catch (Exception e) {
        assertTrue(e instanceof RuntimeException);
        assertEquals(e.getMessage(), "SourceRecord class type must be PulsarRecord");
    }
}
/**
 * Issues CONFIG RESETSTAT against the given cluster node, resetting the
 * server statistics, and blocks until the command completes.
 *
 * @param node the cluster node whose stats are reset
 */
@Override
public void resetConfigStats(RedisClusterNode node) {
    RedisClient entry = getEntry(node);
    // Fire the command asynchronously, then wait for completion.
    RFuture<Void> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_RESETSTAT);
    syncFuture(f);
}
@Test
public void testResetConfigStats() {
    // Smoke test: resetting stats on the first master must not throw.
    RedisClusterNode master = getFirstMaster();
    connection.resetConfigStats(master);
}
/**
 * Static factory for a member-select node {@code expression.identifier}
 * with the given static type.
 *
 * @param expression the receiver expression
 * @param identifier the name of the selected member
 * @param type       the static type of the whole select expression
 * @return a new {@code UMemberSelect} (AutoValue-backed)
 */
public static UMemberSelect create(UExpression expression, CharSequence identifier, UType type) {
    return new AutoValue_UMemberSelect(expression, StringName.of(identifier), type);
}
@Test
public void equality() {
    UType stringTy = UClassType.create("java.lang.String");
    // int String.indexOf(int)
    UMethodType indexOfIntTy = UMethodType.create(UPrimitiveType.INT, UPrimitiveType.INT);
    // int String.indexOf(String)
    UMethodType indexOfStringTy = UMethodType.create(UPrimitiveType.INT, stringTy);
    UExpression fooLit = ULiteral.stringLit("foo");
    UExpression barLit = ULiteral.stringLit("bar");

    // Selects differing in receiver OR in member type must be unequal;
    // each addEqualityGroup forms its own equivalence class.
    new EqualsTester()
        .addEqualityGroup(UMemberSelect.create(fooLit, "indexOf", indexOfIntTy))
        .addEqualityGroup(UMemberSelect.create(fooLit, "indexOf", indexOfStringTy))
        .addEqualityGroup(UMemberSelect.create(barLit, "indexOf", indexOfIntTy))
        .testEquals();
}
@Override @SuppressWarnings("rawtypes") public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) { final long timestamp = clock.getTime() / 1000; // oh it'd be lovely to use Java 7 here try { graphite.connect(); for (Map.Entry<String, Gauge> entry : gauges.entrySet()) { reportGauge(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Counter> entry : counters.entrySet()) { reportCounter(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Histogram> entry : histograms.entrySet()) { reportHistogram(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Meter> entry : meters.entrySet()) { reportMetered(entry.getKey(), entry.getValue(), timestamp); } for (Map.Entry<String, Timer> entry : timers.entrySet()) { reportTimer(entry.getKey(), entry.getValue(), timestamp); } graphite.flush(); } catch (IOException e) { LOGGER.warn("Unable to report to Graphite", graphite, e); } finally { try { graphite.close(); } catch (IOException e1) { LOGGER.warn("Error closing Graphite", graphite, e1); } } }
@Test
public void reportsTimers() throws Exception {
    // Timer with fixed rates; snapshot values are in nanoseconds so that the
    // reporter's millisecond conversion is exercised (100 ms -> "100.00").
    final Timer timer = mock(Timer.class);
    when(timer.getCount()).thenReturn(1L);
    when(timer.getMeanRate()).thenReturn(2.0);
    when(timer.getOneMinuteRate()).thenReturn(3.0);
    when(timer.getFiveMinuteRate()).thenReturn(4.0);
    when(timer.getFifteenMinuteRate()).thenReturn(5.0);

    final Snapshot snapshot = mock(Snapshot.class);
    when(snapshot.getMax()).thenReturn(TimeUnit.MILLISECONDS.toNanos(100));
    when(snapshot.getMean()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(200));
    when(snapshot.getMin()).thenReturn(TimeUnit.MILLISECONDS.toNanos(300));
    when(snapshot.getStdDev()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(400));
    when(snapshot.getMedian()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(500));
    when(snapshot.get75thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(600));
    when(snapshot.get95thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(700));
    when(snapshot.get98thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(800));
    when(snapshot.get99thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS.toNanos(900));
    when(snapshot.get999thPercentile()).thenReturn((double) TimeUnit.MILLISECONDS
            .toNanos(1000));

    when(timer.getSnapshot()).thenReturn(snapshot);

    reporter.report(map(), map(), map(), map(), map("timer", timer));

    // Verify the exact send sequence: connect, every metric line, flush, close.
    final InOrder inOrder = inOrder(graphite);
    inOrder.verify(graphite).connect();
    inOrder.verify(graphite).send("prefix.timer.max", "100.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.mean", "200.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.min", "300.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.stddev", "400.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.p50", "500.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.p75", "600.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.p95", "700.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.p98", "800.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.p99", "900.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.p999", "1000.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.count", "1", timestamp);
    inOrder.verify(graphite).send("prefix.timer.m1_rate", "3.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.m5_rate", "4.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.m15_rate", "5.00", timestamp);
    inOrder.verify(graphite).send("prefix.timer.mean_rate", "2.00", timestamp);
    inOrder.verify(graphite).flush();
    inOrder.verify(graphite).close();

    verifyNoMoreInteractions(graphite);
    reporter.close();
}
/**
 * Applies the script's GSUB features to the glyph sequence in the required
 * order and returns the substituted sequence.
 *
 * @param originalGlyphIds the glyph ids before substitution
 * @return an unmodifiable list of glyph ids after all supported features ran
 */
@Override
public List<Integer> applyTransforms(List<Integer> originalGlyphIds)
{
    // Pre-GSUB reordering so the substitution tables see glyphs in the
    // order the font's lookup rules expect.
    List<Integer> intermediateGlyphsFromGsub = adjustRephPosition(originalGlyphIds);
    intermediateGlyphsFromGsub = repositionGlyphs(intermediateGlyphsFromGsub);

    for (String feature : FEATURES_IN_ORDER)
    {
        if (!gsubData.isFeatureSupported(feature))
        {
            // Fonts lacking 'rkrf' can emulate it from the 'vatu' feature data.
            if (feature.equals(RKRF_FEATURE) && gsubData.isFeatureSupported(VATU_FEATURE))
            {
                // Create your own rkrf feature from vatu feature
                intermediateGlyphsFromGsub = applyRKRFFeature(
                        gsubData.getFeature(VATU_FEATURE),
                        intermediateGlyphsFromGsub);
            }
            LOG.debug("the feature {} was not found", feature);
            continue;
        }

        LOG.debug("applying the feature {}", feature);

        ScriptFeature scriptFeature = gsubData.getFeature(feature);
        intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature, intermediateGlyphsFromGsub);
    }
    return Collections.unmodifiableList(intermediateGlyphsFromGsub);
}
// NOTE(review): disabled — presumably pending rkrf support; confirm before enabling.
@Disabled
@Test
void testApplyTransforms_rkrf()
{
    // given
    List<Integer> glyphsAfterGsub = Arrays.asList(588, 597, 595, 602);

    // when
    List<Integer> result = gsubWorkerForDevanagari.applyTransforms(getGlyphIds("क्रब्रप्रह्र"));

    // then
    assertEquals(glyphsAfterGsub, result);
}
/**
 * Fetches Kafka offsets for a batch of requests via the proxy API.
 * Pure delegation to {@code PROXY_API}.
 *
 * @param requests the offset proxy requests to resolve
 * @return one proxy result per request
 * @throws UserException if the proxy call fails
 */
public static List<PKafkaOffsetProxyResult> getBatchOffsets(List<PKafkaOffsetProxyRequest> requests)
        throws UserException {
    return PROXY_API.getBatchOffsets(requests);
}
@Test
public void testGetInfoFailed() throws UserException, RpcException {
    // A live backend whose RPC answer carries a non-OK status.
    Backend backend = new Backend(1L, "127.0.0.1", 9050);
    backend.setBeRpcPort(8060);
    backend.setAlive(true);

    PProxyResult proxyResult = new PProxyResult();
    StatusPB status = new StatusPB();
    // cancelled
    status.statusCode = 1;
    status.errorMsgs = Lists.newArrayList("be process failed");
    proxyResult.status = status;

    // Stub the node lookup and the RPC to return an already-completed future
    // that resolves to the failed proxy result.
    new Expectations() {
        {
            service.getBackendOrComputeNode(anyLong);
            result = backend;

            client.getInfo((TNetworkAddress) any, (PProxyRequest) any);
            result = new Future<PProxyResult>() {
                @Override
                public boolean cancel(boolean mayInterruptIfRunning) {
                    return false;
                }

                @Override
                public boolean isCancelled() {
                    return false;
                }

                @Override
                public boolean isDone() {
                    return true;
                }

                @Override
                public PProxyResult get() throws InterruptedException, ExecutionException {
                    return proxyResult;
                }

                @Override
                public PProxyResult get(long timeout, @NotNull TimeUnit unit)
                        throws InterruptedException, ExecutionException, TimeoutException {
                    return proxyResult;
                }
            };
        }
    };

    // The non-OK status must surface as a LoadException carrying the BE error.
    KafkaUtil.ProxyAPI api = new KafkaUtil.ProxyAPI();
    LoadException e = Assert.assertThrows(LoadException.class, () -> api.getBatchOffsets(null));
    Assert.assertTrue(e.getMessage().contains("be process failed"));
}
public static List<TargetInfo> parseOptTarget(CommandLine cmd, AlluxioConfiguration conf) throws IOException { String[] targets; if (cmd.hasOption(TARGET_OPTION_NAME)) { String argTarget = cmd.getOptionValue(TARGET_OPTION_NAME); if (StringUtils.isBlank(argTarget)) { throw new IOException("Option " + TARGET_OPTION_NAME + " can not be blank."); } else if (argTarget.contains(TARGET_SEPARATOR)) { targets = argTarget.split(TARGET_SEPARATOR); } else { targets = new String[]{argTarget}; } } else { // By default we set on all targets (master/workers/job_master/job_workers) targets = new String[]{ROLE_MASTER, ROLE_JOB_MASTER, ROLE_WORKERS, ROLE_JOB_WORKERS}; } return getTargetInfos(targets, conf); }
@Test
public void unrecognizedTarget() throws Exception {
    // "localhost" is not a valid role name, so parsing must fail.
    String allTargets = "localhost";
    CommandLine mockCommandLine = mock(CommandLine.class);
    String[] mockArgs = new String[]{"--target", allTargets};
    when(mockCommandLine.getArgs()).thenReturn(mockArgs);
    when(mockCommandLine.hasOption(LogLevel.TARGET_OPTION_NAME)).thenReturn(true);
    when(mockCommandLine.getOptionValue(LogLevel.TARGET_OPTION_NAME)).thenReturn(mockArgs[1]);

    assertThrows("Unrecognized target argument: localhost", IOException.class,
        () -> LogLevel.parseOptTarget(mockCommandLine, mConf));
}
/**
 * Executes all tasks synchronously on the calling thread and returns their
 * results as already-completed futures.
 *
 * <p>Each task runs immediately in iteration order. A task that throws has
 * its exception captured in the corresponding future rather than propagated
 * to the caller.
 *
 * @param tasks the tasks to run
 * @return one completed future per task, in iteration order
 */
@Override
@Nonnull
public <T> List<Future<T>> invokeAll(@Nonnull Collection<? extends Callable<T>> tasks) {
    throwRejectedExecutionExceptionIfShutdown();

    // Presize: the result always holds exactly one entry per task.
    ArrayList<Future<T>> result = new ArrayList<>(tasks.size());
    for (Callable<T> task : tasks) {
        try {
            result.add(new CompletedFuture<>(task.call(), null));
        } catch (Exception e) {
            result.add(new CompletedFuture<>(null, e));
        }
    }
    return result;
}
@Test
void testRejectedInvokeAllWithEmptyList() {
    // Even with no tasks, invokeAll on a shut-down executor must reject.
    testRejectedExecutionException(
            testInstance -> testInstance.invokeAll(Collections.emptyList()));
}
/**
 * Checks whether the authenticated requestor is allowed to fetch edit logs.
 *
 * <p>A requestor is valid if its full Kerberos principal matches any
 * configured NameNode principal (or the SecondaryNameNode principal, when
 * resolvable), or — to allow JournalNode-to-JournalNode requests during
 * recovery — if its short name matches this process's login user.
 *
 * @param request the incoming HTTP request
 * @param conf    configuration providing the valid principals
 * @return true if the requestor may proceed, false otherwise
 * @throws IOException if the requestor's UGI cannot be determined
 */
protected boolean isValidRequestor(HttpServletRequest request, Configuration conf)
    throws IOException {
  UserGroupInformation ugi = getUGI(request, conf);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Validating request made by " + ugi.getUserName() +
        " / " + ugi.getShortUserName() + ". This user is: " +
        UserGroupInformation.getLoginUser());
  }

  // Build the allow-list: all NameNode principals plus (best-effort) the
  // SecondaryNameNode principal.
  Set<String> validRequestors = new HashSet<String>();
  validRequestors.addAll(DFSUtil.getAllNnPrincipals(conf));
  try {
    validRequestors.add(
        SecurityUtil.getServerPrincipal(conf
            .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_PRINCIPAL_KEY),
            SecondaryNameNode.getHttpAddress(conf).getHostName()));
  } catch (Exception e) {
    // Don't halt if SecondaryNameNode principal could not be added.
    LOG.debug("SecondaryNameNode principal could not be added", e);
    String msg = String.format(
        "SecondaryNameNode principal not considered, %s = %s, %s = %s",
        DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_PRINCIPAL_KEY,
        conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_PRINCIPAL_KEY),
        DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
        conf.get(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
            DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_DEFAULT));
    LOG.warn(msg);
  }

  // Check the full principal name of all the configured valid requestors.
  for (String v : validRequestors) {
    if (LOG.isDebugEnabled())
      LOG.debug("isValidRequestor is comparing to valid requestor: " + v);
    if (v != null && v.equals(ugi.getUserName())) {
      if (LOG.isDebugEnabled())
        LOG.debug("isValidRequestor is allowing: " + ugi.getUserName());
      return true;
    }
  }

  // Additionally, we compare the short name of the requestor to this JN's
  // username, because we want to allow requests from other JNs during
  // recovery, but we can't enumerate the full list of JNs.
  if (ugi.getShortUserName().equals(
      UserGroupInformation.getLoginUser().getShortUserName())) {
    if (LOG.isDebugEnabled())
      LOG.debug("isValidRequestor is allowing other JN principal: " +
          ugi.getUserName());
    return true;
  }

  if (LOG.isDebugEnabled())
    LOG.debug("isValidRequestor is rejecting: " + ugi.getUserName());
  return false;
}
@Test
public void testWithoutUser() throws IOException {
    // Test: Make a request without specifying a user
    HttpServletRequest request = mock(HttpServletRequest.class);
    boolean isValid = SERVLET.isValidRequestor(request, CONF);

    // Verify: The request is invalid
    assertThat(isValid).isFalse();
}
/**
 * Incrementally parses HTTP/2 frames from {@code input}, invoking the
 * listener for each complete frame.
 *
 * <p>This reader is stateful across calls: {@code readingHeaders} tracks
 * whether the next bytes are a frame header or a payload, and once a
 * connection-level error has occurred ({@code readError}) all further input
 * is discarded.
 *
 * @param ctx      the channel context for listener callbacks
 * @param input    the accumulated inbound bytes (may hold partial frames)
 * @param listener receives parsed frame events
 * @throws Http2Exception on protocol violations
 */
@Override
public void readFrame(ChannelHandlerContext ctx, ByteBuf input, Http2FrameListener listener)
        throws Http2Exception {
    if (readError) {
        // After a connection error the stream of bytes is unusable: drain it.
        input.skipBytes(input.readableBytes());
        return;
    }
    try {
        do {
            if (readingHeaders && !preProcessFrame(input)) {
                // Not enough bytes yet for a full frame header.
                return;
            }
            // The header is complete, fall into the next case to process the payload.
            // This is to ensure the proper handling of zero-length payloads. In this
            // case, we don't want to loop around because there may be no more data
            // available, causing us to exit the loop. Instead, we just want to perform
            // the first pass at payload processing now.

            // Wait until the entire payload has been read.
            if (input.readableBytes() < payloadLength) {
                return;
            }

            // Slice to work only on the frame being read
            ByteBuf framePayload = input.readSlice(payloadLength);
            // We have consumed the data for this frame, next time we read,
            // we will be expecting to read a new frame header.
            readingHeaders = true;
            verifyFrameState();
            processPayloadState(ctx, framePayload, listener);
        } while (input.isReadable());
    } catch (Http2Exception e) {
        // Stream errors are recoverable; anything else poisons the connection.
        readError = !Http2Exception.isStreamError(e);
        throw e;
    } catch (RuntimeException e) {
        readError = true;
        throw e;
    } catch (Throwable cause) {
        readError = true;
        PlatformDependent.throwException(cause);
    }
}
@Test
public void readHeaderFrame() throws Http2Exception {
    final int streamId = 1;

    ByteBuf input = Unpooled.buffer();
    try {
        // A minimal request header block, END_HEADERS and END_STREAM set.
        Http2Headers headers = new DefaultHttp2Headers()
                .authority("foo")
                .method("get")
                .path("/")
                .scheme("https");
        Http2Flags flags = new Http2Flags().endOfHeaders(true).endOfStream(true);
        writeHeaderFrame(input, streamId, headers, flags);
        frameReader.readFrame(ctx, input, listener);

        // The listener must see the decoded headers with endOfStream=true.
        verify(listener).onHeadersRead(ctx, 1, headers, 0, true);
    } finally {
        input.release();
    }
}