focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Adds private "createTransformationDictionary" / "createLocalTransformations"
// methods to toPopulate (one per non-null input) and registers their invocation
// in the class' default constructor.
// Throws KiePMMLInternalException when toPopulate has no default constructor.
public static void addTransformationsInClassOrInterfaceDeclaration(final ClassOrInterfaceDeclaration toPopulate,
                                                                   final TransformationDictionary transformationDictionary,
                                                                   final LocalTransformations localTransformations) {
    // Stays null when the input is absent; the constructor populator treats
    // null as "no method to invoke".
    String createTransformationDictionary = null;
    if (transformationDictionary != null) {
        BlockStmt createTransformationDictionaryBody = KiePMMLTransformationDictionaryFactory.getKiePMMLTransformationDictionaryVariableDeclaration(transformationDictionary);
        createTransformationDictionaryBody.addStatement(getReturnStmt(TRANSFORMATION_DICTIONARY));
        createTransformationDictionary = "createTransformationDictionary";
        MethodDeclaration createTransformationDictionaryMethod = toPopulate.addMethod(createTransformationDictionary, Modifier.Keyword.PRIVATE);
        createTransformationDictionaryMethod.setType(KiePMMLTransformationDictionary.class.getName());
        createTransformationDictionaryMethod.setBody(createTransformationDictionaryBody);
    }
    String createLocalTransformations = null;
    if (localTransformations != null) {
        BlockStmt createLocalTransformationsBody = KiePMMLLocalTransformationsFactory.getKiePMMLLocalTransformationsVariableDeclaration(localTransformations);
        createLocalTransformationsBody.addStatement(getReturnStmt(LOCAL_TRANSFORMATIONS));
        createLocalTransformations = "createLocalTransformations";
        MethodDeclaration createLocalTransformationsMethod = toPopulate.addMethod(createLocalTransformations, Modifier.Keyword.PRIVATE);
        createLocalTransformationsMethod.setType(KiePMMLLocalTransformations.class.getName());
        createLocalTransformationsMethod.setBody(createLocalTransformationsBody);
    }
    final ConstructorDeclaration constructorDeclaration =
            toPopulate.getDefaultConstructor().orElseThrow(() -> new KiePMMLInternalException(String.format(MISSING_DEFAULT_CONSTRUCTOR, toPopulate.getName())));
    populateTransformationsInConstructor(constructorDeclaration, createTransformationDictionary, createLocalTransformations);
}
// Verifies that exactly one "createTransformationDictionary" and one
// "createLocalTransformations" method are added, and that each generated method
// matches its checked-in source fixture.
@Test
void addTransformationsInClassOrInterfaceDeclaration() throws IOException {
    // Methods must not exist before the call.
    assertThat(classOrInterfaceDeclaration.getMethodsByName("createTransformationDictionary")).isEmpty();
    assertThat(classOrInterfaceDeclaration.getMethodsByName("createLocalTransformations")).isEmpty();
    org.kie.pmml.compiler.commons.codegenfactories.KiePMMLModelFactoryUtils.addTransformationsInClassOrInterfaceDeclaration(classOrInterfaceDeclaration,
            pmmlModel.getTransformationDictionary(), model.getLocalTransformations());
    assertThat(classOrInterfaceDeclaration.getMethodsByName("createTransformationDictionary")).hasSize(1);
    assertThat(classOrInterfaceDeclaration.getMethodsByName("createLocalTransformations")).hasSize(1);
    // Compare generated methods against the expected source fixtures.
    String text = getFileContent(TEST_01_SOURCE);
    MethodDeclaration expected = JavaParserUtils.parseMethod(text);
    MethodDeclaration retrieved = classOrInterfaceDeclaration.getMethodsByName("createTransformationDictionary").get(0);
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
    text = getFileContent(TEST_02_SOURCE);
    expected = JavaParserUtils.parseMethod(text);
    retrieved = classOrInterfaceDeclaration.getMethodsByName("createLocalTransformations").get(0);
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
}
// Serializes this worker identity to its protobuf form; delegates to Parsers.
public alluxio.grpc.WorkerIdentity toProto() {
    return Parsers.toProto(this);
}
// A legacy (version 0) identity must convert to a proto carrying the same
// version and the same raw identifier bytes.
@Test
public void legacyConvertToProto() throws Exception {
    WorkerIdentity identity = new WorkerIdentity(Longs.toByteArray(1L), 0);
    alluxio.grpc.WorkerIdentity identityProto = identity.toProto();
    assertEquals(alluxio.grpc.WorkerIdentity.newBuilder()
            .setVersion(0)
            .setIdentifier(ByteString.copyFrom(Longs.toByteArray(1L)))
            .build(), identityProto);
}
// Decides whether the given statement may be executed as a scalable push query
// (push v2). Every condition below must hold; any unsupported construct
// (joins, group by, windows, having, partition by, pull semantics, disallowed
// pseudo columns) opts the query out.
@SuppressWarnings({"BooleanExpressionComplexity", "CyclomaticComplexity"})
public static boolean isScalablePushQuery(
    final Statement statement,
    final KsqlExecutionContext ksqlEngine,
    final KsqlConfig ksqlConfig,
    final Map<String, Object> overrides
) {
  if (!isPushV2Enabled(ksqlConfig, overrides)) {
    return false;
  }
  if (!(statement instanceof Query)) {
    return false;
  }
  final Query query = (Query) statement;
  final SourceFinder sourceFinder = new SourceFinder();
  sourceFinder.process(query.getFrom(), null);
  // It will be present if it's not a join, which we don't handle
  if (!sourceFinder.getSourceName().isPresent()) {
    return false;
  }
  // Find all of the writers to this particular source.
  final SourceName sourceName = sourceFinder.getSourceName().get();
  final Set<QueryId> upstreamQueries = ksqlEngine.getQueriesWithSink(sourceName);
  // See if the config or override have set the stream to be "latest"
  final boolean isLatest = isLatest(ksqlConfig, overrides);
  // Cannot be a pull query, i.e. must be a push
  return !query.isPullQuery()
      // Group by is not supported
      && !query.getGroupBy().isPresent()
      // Windowing is not supported
      && !query.getWindow().isPresent()
      // Having clause is not supported
      && !query.getHaving().isPresent()
      // Partition by is not supported
      && !query.getPartitionBy().isPresent()
      // There must be an EMIT CHANGES clause
      && (query.getRefinement().isPresent()
          && query.getRefinement().get().getOutputRefinement() == OutputRefinement.CHANGES)
      // Must be reading from "latest"
      && isLatest
      // We only handle a single sink source at the moment from a CTAS/CSAS
      && upstreamQueries.size() == 1
      // ROWPARTITION and ROWOFFSET are not currently supported in SPQs
      && !containsDisallowedColumns(query);
}
@Test public void shouldNotMakeQueryWithRowoffsetInSelectClauseScalablePush() { try(MockedStatic<ColumnExtractor> columnExtractor = mockStatic(ColumnExtractor.class)) { // Given: expectIsSPQ(SystemColumns.ROWOFFSET_NAME, columnExtractor); // When: final boolean isScalablePush = ScalablePushUtil.isScalablePushQuery( query, ksqlEngine, ksqlConfig, overrides ); // Then: assert(!isScalablePush); } }
/**
 * Converts the given scalar operators into one Delta Lake {@link Predicate} by
 * AND-ing every operator the visitor can translate. Untranslatable operators
 * (visitor returns null) are skipped; when nothing is translatable the result
 * is {@code ALWAYS_TRUE}.
 */
public Predicate convert(List<ScalarOperator> operators, DeltaLakeContext context) {
    DeltaLakeExprVisitor visitor = new DeltaLakeExprVisitor();
    return operators.stream()
            .map(scalarOperator -> scalarOperator.accept(visitor, context))
            .filter(predicate -> predicate != null)
            .reduce(And::new)
            .orElse(ALWAYS_TRUE);
}
// Exercises constant conversion for every supported constant type (boolean,
// integral widths, float/double, date, timestamp with and without time zone,
// varchar, char, HLL), comparing the converted predicate's string form against
// a hand-built expected Predicate.
@Test
public void testConvertConstType() {
    ScalarOperationToDeltaLakeExpr converter = new ScalarOperationToDeltaLakeExpr();
    ScalarOperationToDeltaLakeExpr.DeltaLakeContext context =
            new ScalarOperationToDeltaLakeExpr.DeltaLakeContext(schema, new HashSet<>());
    ScalarOperator operator;
    List<ScalarOperator> operators;
    Predicate convertExpr;
    Predicate expectedExpr;
    ConstantOperator value;

    // Boolean
    value = ConstantOperator.createBoolean(true);
    operator = new BinaryPredicateOperator(BinaryType.LT, cBoolCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaBoolCol, Literal.ofBoolean(true));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // Tinyint
    value = ConstantOperator.createTinyInt((byte) 5);
    operator = new BinaryPredicateOperator(BinaryType.LT, cShortCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaShortCol, Literal.ofShort((short) 5));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // Smallint
    value = ConstantOperator.createSmallInt((short) 5);
    operator = new BinaryPredicateOperator(BinaryType.LT, cShortCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaShortCol, Literal.ofShort((short) 5));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // int
    value = ConstantOperator.createInt(5);
    operator = new BinaryPredicateOperator(BinaryType.LT, cIntCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaIntCol, Literal.ofInt(5));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // bigint
    value = ConstantOperator.createBigint(5);
    operator = new BinaryPredicateOperator(BinaryType.LT, cLongCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaLongCol, Literal.ofLong(5));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // float
    value = ConstantOperator.createFloat(5.5);
    operator = new BinaryPredicateOperator(BinaryType.LT, cFloatCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaFloatCol, Literal.ofFloat((float) 5.5));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // double
    // NOTE(review): the expected literal for the double column is built with
    // Literal.ofFloat — presumably matching the converter's behavior; confirm.
    value = ConstantOperator.createDouble(5.5);
    operator = new BinaryPredicateOperator(BinaryType.LT, cDoubleCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaDoubleCol, Literal.ofFloat((float) 5.5));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // date
    LocalDateTime localDateTime = LocalDateTime.now();
    value = ConstantOperator.createDate(localDateTime);
    operator = new BinaryPredicateOperator(BinaryType.LT, cDateCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaDateCol, Literal.ofDate((int) localDateTime.toLocalDate().toEpochDay()));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // datetime (timestamp) — expected micros computed in the session time zone
    localDateTime = LocalDateTime.now();
    value = ConstantOperator.createDatetime(localDateTime);
    operator = new BinaryPredicateOperator(BinaryType.LT, cTimestampCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    ZoneId zoneId = TimeUtils.getTimeZone().toZoneId();
    long timestamp = localDateTime.atZone(zoneId).toEpochSecond() * 1000 * 1000 + localDateTime.getNano() / 1000;
    expectedExpr = new Predicate("<", cDeltaTimestampCol, Literal.ofTimestamp(timestamp));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // datetime (timestamp_ntz) — expected micros computed against UTC
    localDateTime = LocalDateTime.now();
    value = ConstantOperator.createDatetime(localDateTime);
    operator = new BinaryPredicateOperator(BinaryType.LT, cTimestampNTZCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    timestamp = localDateTime.atZone(ZoneOffset.UTC).toEpochSecond() * 1000 * 1000 + localDateTime.getNano() / 1000;
    expectedExpr = new Predicate("<", cDeltaTimestampNTZCol, Literal.ofTimestamp(timestamp));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // varchar
    value = ConstantOperator.createVarchar("12345");
    operator = new BinaryPredicateOperator(BinaryType.LT, cVarcharCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaVarcharCol, Literal.ofString("12345"));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // char
    value = ConstantOperator.createChar("12345");
    operator = new BinaryPredicateOperator(BinaryType.LT, cCharCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaCharCol, Literal.ofString("12345"));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());

    // hll
    value = ConstantOperator.createObject("12345", Type.HLL);
    operator = new BinaryPredicateOperator(BinaryType.LT, cHLLCol, value);
    operators = new ArrayList<>(List.of(operator));
    convertExpr = converter.convert(operators, context);
    expectedExpr = new Predicate("<", cDeltaHLLCol, Literal.ofString("12345"));
    Assert.assertEquals(convertExpr.toString(), expectedExpr.toString());
}
/**
 * Validates a {@code WindowConfig}: exactly one of the count-based or
 * time-based window length must be set and positive; sliding interval, max lag
 * and watermark emit interval must be positive when present (lag may be zero,
 * and lag/watermark are only checked when a timestamp extractor is configured).
 *
 * @throws IllegalArgumentException when any constraint is violated
 */
public static void validate(WindowConfig windowConfig) {
    final Long lengthDurationMs = windowConfig.getWindowLengthDurationMs();
    final Integer lengthCount = windowConfig.getWindowLengthCount();
    if (lengthDurationMs == null && lengthCount == null) {
        throw new IllegalArgumentException("Window length is not specified");
    }
    if (lengthDurationMs != null && lengthCount != null) {
        throw new IllegalArgumentException(
                "Window length for time and count are set! Please set one or the other.");
    }
    if (lengthCount != null && lengthCount <= 0) {
        throw new IllegalArgumentException(
                "Window length must be positive [" + lengthCount + "]");
    }
    if (lengthDurationMs != null && lengthDurationMs <= 0) {
        throw new IllegalArgumentException(
                "Window length must be positive [" + lengthDurationMs + "]");
    }
    final Integer slidingCount = windowConfig.getSlidingIntervalCount();
    if (slidingCount != null && slidingCount <= 0) {
        throw new IllegalArgumentException(
                "Sliding interval must be positive [" + slidingCount + "]");
    }
    final Long slidingDurationMs = windowConfig.getSlidingIntervalDurationMs();
    if (slidingDurationMs != null && slidingDurationMs <= 0) {
        throw new IllegalArgumentException(
                "Sliding interval must be positive [" + slidingDurationMs + "]");
    }
    // Lag and watermark settings only apply with a timestamp extractor.
    if (windowConfig.getTimestampExtractorClassName() != null) {
        final Long maxLagMs = windowConfig.getMaxLagMs();
        if (maxLagMs != null && maxLagMs < 0) {
            throw new IllegalArgumentException(
                    "Lag duration must be positive [" + maxLagMs + "]");
        }
        final Long watermarkIntervalMs = windowConfig.getWatermarkEmitIntervalMs();
        if (watermarkIntervalMs != null && watermarkIntervalMs <= 0) {
            throw new IllegalArgumentException(
                    "Watermark interval must be positive [" + watermarkIntervalMs + "]");
        }
    }
}
// validate() must reject null, zero and negative tumbling window counts and
// accept positive ones; the loop sweeps representative values for both paths.
@Test
public void testSettingTumblingCountWindow() throws Exception {
    final Object[] args = new Object[]{-1, 0, 1, 2, 5, 10, null};
    for (Object arg : args) {
        Object arg0 = arg;
        try {
            Integer windowLengthCount = null;
            if (arg0 != null) {
                windowLengthCount = (Integer) arg0;
            }
            WindowConfig windowConfig = new WindowConfig();
            windowConfig.setWindowLengthCount(windowLengthCount);
            WindowConfigUtils.validate(windowConfig);
            // Reaching this point means validate() accepted the value; only
            // positive counts may be accepted.
            if (arg0 == null) {
                fail(String.format("Window length cannot be null -- windowLengthCount: %s", arg0));
            }
            if ((Integer) arg0 <= 0) {
                fail(String.format("Window length cannot be zero or less -- windowLengthCount: %s", arg0));
            }
        } catch (IllegalArgumentException e) {
            // validate() rejected the value; rejection is only allowed for
            // null or non-positive counts.
            if (arg0 != null && (Integer) arg0 > 0) {
                fail(String.format("Exception: %s thrown on valid input -- windowLengthCount: %s", e.getMessage(), arg0));
            }
        }
    }
}
static long calculateGrouping(Set<Integer> groupingSet, List<Integer> columns) { long grouping = (1L << columns.size()) - 1; for (int index = 0; index < columns.size(); index++) { int column = columns.get(index); if (groupingSet.contains(column)) { // Leftmost argument to grouping() (i.e. when index = 0) corresponds to // the most significant bit in the result. That is why we shift 1L starting // from the columns.size() - 1 bit index. grouping = grouping & ~(1L << (columns.size() - 1 - index)); } } return grouping; }
// Grouping sets containing column ordinals above 32 must still produce correct
// long bitmasks (the result would overflow a 32-bit representation).
@Test
public void testMoreThanThirtyTwoArguments() {
    List<Set<Integer>> groupingSetOrdinals = ImmutableList.of(
            ImmutableSet.of(20, 2, 13, 33, 40, 9, 14),
            ImmutableSet.of(28, 4, 5, 29, 31, 10));
    List<Long> expectedResults = ImmutableList.of(822283861886L, 995358664191L);
    for (int groupId = 0; groupId < groupingSetOrdinals.size(); groupId++) {
        Set<Integer> groupingSet = groupingSetOrdinals.get(groupId);
        assertEquals(Long.valueOf(calculateGrouping(groupingSet, fortyIntegers)), expectedResults.get(groupId));
    }
}
// A name is valid in this chrooted view iff the base file system accepts the
// fully qualified (chroot-prefixed) form of the path.
@Override
public boolean isValidName(String src) {
    return myFs.isValidName(fullPath(new Path(src)).toUri().toString());
}
// isValidName must delegate to the base file system with the chroot prefix
// applied to the queried path.
@Test
public void testIsValidNameValidInBaseFs() throws Exception {
    AbstractFileSystem baseFs = Mockito.spy(fc.getDefaultFileSystem());
    ChRootedFs chRootedFs = new ChRootedFs(baseFs, new Path("/chroot"));
    Mockito.doReturn(true).when(baseFs).isValidName(Mockito.anyString());
    Assert.assertTrue(chRootedFs.isValidName("/test"));
    // The base fs must see the chroot-prefixed path, not the raw input.
    Mockito.verify(baseFs).isValidName("/chroot/test");
}
// Creates a Growth transform whose outputs are their own deduplication keys
// (output type doubles as key type) and which never terminates per input by
// default.
public static <InputT, OutputT> Growth<InputT, OutputT, OutputT> growthOf(
    Growth.PollFn<InputT, OutputT> pollFn, Requirements requirements) {
  return new AutoValue_Watch_Growth.Builder<InputT, OutputT, OutputT>()
      .setTerminationPerInput(Growth.never())
      .setPollFn(Contextful.of(pollFn, requirements))
      // use null as a signal that this is the identity function and output coder can be
      // reused as key coder
      .setOutputKeyFn(null)
      .build();
}
// Polls KV pairs whose keys repeat across polls; with a key extractor and an
// explicit output key coder, Watch must deduplicate per key so each distinct
// key is emitted exactly once.
@Test
@Category({NeedsRunner.class, UsesUnboundedSplittableParDo.class})
public void testMultiplePollsWithKeyExtractor() {
  List<KV<Integer, String>> polls =
      Arrays.asList(
          KV.of(0, "0"), KV.of(10, "10"), KV.of(20, "20"), KV.of(30, "30"),
          KV.of(40, "40"), KV.of(40, "40.1"), KV.of(20, "20.1"), KV.of(50, "50"),
          KV.of(10, "10.1"), KV.of(10, "10.2"), KV.of(60, "60"), KV.of(70, "70"),
          KV.of(60, "60.1"), KV.of(80, "80"), KV.of(40, "40.2"), KV.of(90, "90"),
          KV.of(90, "90.1"));
  List<Integer> expected = Arrays.asList(0, 10, 20, 30, 40, 50, 60, 70, 80, 90);
  PCollection<Integer> res =
      p.apply(Create.of("a"))
          .apply(
              Watch.growthOf(
                      Contextful.of(
                          new TimedPollFn<String, KV<Integer, String>>(
                              polls,
                              standardSeconds(1) /* timeToOutputEverything */,
                              standardSeconds(3) /* timeToDeclareOutputFinal */,
                              standardSeconds(30) /* timeToFail */),
                          Requirements.empty()),
                      KV::getKey)
                  .withTerminationPerInput(Growth.afterTotalOf(standardSeconds(5)))
                  .withPollInterval(Duration.millis(100))
                  .withOutputCoder(KvCoder.of(VarIntCoder.of(), StringUtf8Coder.of()))
                  .withOutputKeyCoder(VarIntCoder.of()))
          .apply("Drop input", Values.create())
          .apply("Drop auxiliary string", Keys.create());
  PAssert.that(res).containsInAnyOrder(expected);
  p.run();
}
/**
 * Returns the string for the given Unicode code point, or null when the input
 * is null or not a valid code point.
 */
@Udf
public String chr(@UdfParameter(description = "Decimal codepoint") final Integer decimalCode) {
    // Single early return covers both the null and out-of-range cases.
    if (decimalCode == null || !Character.isValidCodePoint(decimalCode)) {
        return null;
    }
    // Supplementary code points expand to a surrogate pair (two chars).
    return new String(Character.toChars(decimalCode));
}
// U+FFFF is the highest code point in the Basic Multilingual Plane, so chr()
// must return it as exactly one Java char (no surrogate pair).
@Test
public void shouldReturnSingleCharForMaxBMPDecimal() {
    final String result = udf.chr(65535);
    assertThat(result.codePointAt(0), is(65535));
    assertThat(result.toCharArray().length, is(1));
}
// Returns true when exactly one coordinator sidecar is active, false when none
// are, and fails fast with TOO_MANY_SIDECARS when more than one is present.
public boolean hasRequiredCoordinatorSidecars() {
    if (currentCoordinatorSidecarCount > 1) {
        throw new PrestoException(TOO_MANY_SIDECARS,
                format("Expected a single active coordinator sidecar. Found %s active coordinator sidecars", currentCoordinatorSidecarCount));
    }
    return currentCoordinatorSidecarCount == 1;
}
// With more than one active sidecar the monitor must throw PrestoException
// (asserted via the annotation) instead of reporting the requirement as met.
@Test(expectedExceptions = PrestoException.class, expectedExceptionsMessageRegExp = "Expected a single active coordinator sidecar. Found 2 active coordinator sidecars")
public void testHasRequiredCoordinatorSidecarsMoreThanOne() throws InterruptedException {
    // No sidecars registered yet.
    assertFalse(monitor.hasRequiredCoordinatorSidecars());
    // Register one more sidecar than desired to trip the exception below.
    for (int i = numCoordinatorSidecars.get(); i < DESIRED_COORDINATOR_SIDECAR_COUNT + 1; i++) {
        addCoordinatorSidecar(nodeManager);
    }
    assertFalse(monitor.hasRequiredCoordinatorSidecars());
}
// Decodes a MaintenanceAssociation from JSON. "maName" and "component-list"
// are required; "maNameType" defaults to CHARACTERSTRING; "maNumericId",
// "ccmInterval" and "rmep-list" are optional. Returns null for null or
// non-object input; CfmConfigException is rethrown as IllegalArgumentException.
public MaintenanceAssociation decode(ObjectNode json, CodecContext context, int mdNameLen) {
    if (json == null || !json.isObject()) {
        return null;
    }
    JsonNode maNode = json.get(MA);
    String maName = nullIsIllegal(maNode.get(MA_NAME), "maName is required").asText();
    String maNameType = MaIdShort.MaIdType.CHARACTERSTRING.name();
    if (maNode.get(MA_NAME_TYPE) != null) {
        maNameType = maNode.get(MA_NAME_TYPE).asText();
    }
    try {
        MaIdShort maId = MdMaNameUtil.parseMaName(maNameType, maName);
        MaBuilder builder = DefaultMaintenanceAssociation.builder(maId, mdNameLen);
        JsonNode maNumericIdNode = maNode.get(MA_NUMERIC_ID);
        if (maNumericIdNode != null) {
            // NOTE(review): local is named mdNumericId but carries the MA numeric id.
            short mdNumericId = (short) maNumericIdNode.asInt();
            builder = builder.maNumericId(mdNumericId);
        }
        if (maNode.get(CCM_INTERVAL) != null) {
            builder.ccmInterval(CcmInterval.valueOf(maNode.get(CCM_INTERVAL).asText()));
        }
        List<Component> componentList = (new ComponentCodec()).decode((ArrayNode)
                nullIsIllegal(maNode.get(COMPONENT_LIST), "component-list is required"), context);
        for (Component component : componentList) {
            builder = builder.addToComponentList(component);
        }
        JsonNode rmepListJson = maNode.get(RMEP_LIST);
        if (rmepListJson != null) {
            List<MepId> remoteMeps = (new RMepCodec()).decode((ArrayNode) rmepListJson, context);
            for (MepId remoteMep : remoteMeps) {
                builder = builder.addToRemoteMepIdList(remoteMep);
            }
        }
        return builder.build();
    } catch (CfmConfigException e) {
        throw new IllegalArgumentException(e);
    }
}
// Decodes an MA whose name is numeric with the PRIMARYVID name type and
// verifies the resulting maId.
@Test
public void testDecodeMa2() throws IOException {
    String mdString = "{\"ma\": { \"maName\": 1234," +
            "\"maNameType\": \"PRIMARYVID\"," +
            "\"component-list\": [], " +
            "\"rmep-list\": [], " +
            "\"maNumericId\": 2}}";
    InputStream input = new ByteArrayInputStream(mdString.getBytes(StandardCharsets.UTF_8));
    JsonNode cfg = mapper.readTree(input);
    MaintenanceAssociation maDecode2 = ((MaintenanceAssociationCodec) context
            .codec(MaintenanceAssociation.class))
            .decode((ObjectNode) cfg, context, 10);
    assertEquals(MAID2_VID, maDecode2.maId());
}
@Override public boolean consume(CodeReader code, TokenQueue output) { if (code.popTo(matcher, tmpBuilder) > 0) { // see SONAR-2499 Cursor previousCursor = code.getPreviousCursor(); if (normalizationValue != null) { output.add(new Token(normalizationValue, previousCursor.getLine(), previousCursor.getColumn())); } else { output.add(new Token(tmpBuilder.toString(), previousCursor.getLine(), previousCursor.getColumn())); } // Godin: note that other channels use method delete in order to do the same thing tmpBuilder.setLength(0); return true; } return false; }
// A token whose text spans a newline must be positioned at the token's start
// while the reader cursor advances past the newline.
@Test
public void shouldCorrectlyDeterminePositionWhenTokenSpansMultipleLines() {
    TokenChannel channel = new TokenChannel("AB\nC");
    TokenQueue output = mock(TokenQueue.class);
    CodeReader codeReader = new CodeReader("AB\nCD");
    assertThat(channel.consume(codeReader, output), is(true));
    ArgumentCaptor<Token> token = ArgumentCaptor.forClass(Token.class);
    verify(output).add(token.capture());
    // Token is anchored at line 1, column 0 (start of the match).
    assertThat(token.getValue(), is(new Token("AB\nC", 1, 0)));
    verifyNoMoreInteractions(output);
    // The reader has moved past the newline to line 2, column 1.
    assertThat(codeReader.getLinePosition(), is(2));
    assertThat(codeReader.getColumnPosition(), is(1));
}
// Parses a JSON stream into an immutable list of global settings.
public ImmutableList<GlobalSetting> parse(final InputStream is) {
    return Jsons.toObjects(is, GlobalSetting.class);
}
// Glob patterns in the settings file must expand to the concrete JSON files.
@Test
public void should_parse_glob_settings_file() {
    InputStream stream = getResourceAsStream("settings/glob-settings.json");
    ImmutableList<GlobalSetting> globalSettings = parser.parse(stream);
    ImmutableList<String> includes = globalSettings.get(0).includes();
    assertThat(includes.contains(join("src", "test", "resources", "settings", "details", "foo.json")), is(true));
    assertThat(includes.contains(join("src", "test", "resources", "settings", "details", "bar.json")), is(true));
}
// Delegates REST request handling to disruptRequest, which decides whether and
// how to disrupt before passing control down the filter chain.
@Override
public void onRestRequest(RestRequest req, RequestContext requestContext, Map<String, String> wireAttrs,
    NextFilter<RestRequest, RestResponse> nextFilter) {
  disruptRequest(req, requestContext, wireAttrs, nextFilter);
}
// An error disrupt context must complete the request through
// NextFilter.onError with a DisruptedException, never via onRequest/onResponse.
@Test
public void testRestErrorDisrupt() throws Exception {
    final RequestContext requestContext = new RequestContext();
    requestContext.putLocalAttr(DISRUPT_CONTEXT_KEY, DisruptContexts.error(REQUEST_LATENCY));
    final DisruptFilter filter = new DisruptFilter(_scheduler, _executor, REQUEST_TIMEOUT, _clock);
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicBoolean success = new AtomicBoolean(false);
    final NextFilter<RestRequest, RestResponse> next = new NextFilter<RestRequest, RestResponse>() {
        @Override
        public void onRequest(RestRequest restRequest, RequestContext requestContext, Map<String, String> wireAttrs) {
            // Reaching here leaves success == false, failing the test below.
            latch.countDown();
        }

        @Override
        public void onResponse(RestResponse restResponse, RequestContext requestContext, Map<String, String> wireAttrs) {
            latch.countDown();
        }

        @Override
        public void onError(Throwable ex, RequestContext requestContext, Map<String, String> wireAttrs) {
            // Only the error path records success, and only for the expected type.
            success.set(ex instanceof DisruptedException);
            latch.countDown();
        }
    };
    filter.onRestRequest(new RestRequestBuilder(new URI(URI)).build(), requestContext, Collections.emptyMap(), next);
    Assert.assertTrue(latch.await(TEST_TIMEOUT, TimeUnit.MILLISECONDS), "Missing NextFilter invocation");
    Assert.assertTrue(success.get(), "Unexpected method invocation");
}
// Builds the response envelope for a batch-create request. Two shapes:
// - BatchCreateKVResult with return-entity requested: each item carries the
//   (projected) created entity plus id, status and Location header.
// - Otherwise: each item carries only id/status/Location.
// Null result lists or null list elements are developer errors -> 500.
@Override
public RestLiResponseData<BatchCreateResponseEnvelope> buildRestLiResponseData(Request request,
    RoutingResult routingResult, Object result, Map<String, String> headers, List<HttpCookie> cookies) {
  // Translate ids back to the alternative key form when the request used one.
  Object altKey = null;
  if (routingResult.getContext().hasParameter(RestConstants.ALT_KEY_PARAM)) {
    altKey = routingResult.getContext().getParameter(RestConstants.ALT_KEY_PARAM);
  }
  final ProtocolVersion protocolVersion = ProtocolVersionUtil.extractProtocolVersion(headers);
  final ResourceContext resourceContext = routingResult.getContext();
  if (result instanceof BatchCreateKVResult && resourceContext.isReturnEntityRequested()) {
    BatchCreateKVResult<?, ?> list = (BatchCreateKVResult<?, ?>) result;
    if (list.getResults() == null) {
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Unexpected null encountered. Null List inside of a BatchCreateKVResult returned by the resource method: " + routingResult.getResourceMethod());
    }
    List<BatchCreateResponseEnvelope.CollectionCreateResponseItem> collectionCreateList = new ArrayList<>(list.getResults().size());
    TimingContextUtil.beginTiming(routingResult.getContext().getRawRequestContext(),
        FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key());
    for (CreateKVResponse<?, ?> createKVResponse : list.getResults()) {
      if (createKVResponse == null) {
        throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
            "Unexpected null encountered. Null element inside of List inside of a BatchCreateKVResult returned by the resource method: " + routingResult.getResourceMethod());
      } else {
        Object id = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(createKVResponse.getId(), routingResult);
        if (createKVResponse.getError() == null) {
          // Apply the requested projection to the returned entity.
          DataMap entityData = createKVResponse.getEntity() != null ? createKVResponse.getEntity().data() : null;
          final DataMap data = RestUtils.projectFields(entityData, resourceContext);
          CreateIdEntityStatus<Object, RecordTemplate> entry = new CreateIdEntityStatus<>(
              createKVResponse.getStatus().getCode(),
              id,
              new AnyRecord(data),
              getLocationUri(request, id, altKey, protocolVersion), // location uri
              null,
              protocolVersion);
          collectionCreateList.add(new BatchCreateResponseEnvelope.CollectionCreateResponseItem(entry));
        } else {
          collectionCreateList.add(new BatchCreateResponseEnvelope.CollectionCreateResponseItem(createKVResponse.getError()));
        }
      }
    }
    TimingContextUtil.endTiming(routingResult.getContext().getRawRequestContext(),
        FrameworkTimingKeys.SERVER_RESPONSE_RESTLI_PROJECTION_APPLY.key());
    return new RestLiResponseDataImpl<>(new BatchCreateResponseEnvelope(HttpStatus.S_200_OK, collectionCreateList, true), headers, cookies);
  } else {
    List<? extends CreateResponse> createResponses = extractCreateResponseList(result);
    //Verify that a null list was not passed into the BatchCreateResult. If so, this is a developer error.
    if (createResponses == null) {
      throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
          "Unexpected null encountered. Null List inside of a BatchCreateResult returned by the resource method: " + routingResult.getResourceMethod());
    }
    List<BatchCreateResponseEnvelope.CollectionCreateResponseItem> collectionCreateList = new ArrayList<>(createResponses.size());
    for (CreateResponse createResponse : createResponses) {
      //Verify that a null element was not passed into the BatchCreateResult list. If so, this is a developer error.
      if (createResponse == null) {
        throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
            "Unexpected null encountered. Null element inside of List inside of a BatchCreateResult returned by the resource method: " + routingResult.getResourceMethod());
      } else {
        Object id = ResponseUtils.translateCanonicalKeyToAlternativeKeyIfNeeded(createResponse.getId(), routingResult);
        if (createResponse.getError() == null) {
          CreateIdStatus<Object> entry = new CreateIdStatus<>(
              createResponse.getStatus().getCode(),
              id,
              getLocationUri(request, id, altKey, protocolVersion), // location uri
              null,
              protocolVersion);
          collectionCreateList.add(new BatchCreateResponseEnvelope.CollectionCreateResponseItem(entry));
        } else {
          collectionCreateList.add(new BatchCreateResponseEnvelope.CollectionCreateResponseItem(createResponse.getError()));
        }
      }
    }
    return new RestLiResponseDataImpl<>(new BatchCreateResponseEnvelope(HttpStatus.S_200_OK, collectionCreateList, false), headers, cookies);
  }
}
// With return-entity requested and a positive projection mask on "fruitsField",
// the builder must project the created entity down to that single field and
// populate the Location header for the new resource.
@Test
@SuppressWarnings("unchecked")
public void testProjectionInBuildRestLiResponseData() throws URISyntaxException {
    MaskTree maskTree = new MaskTree();
    maskTree.addOperation(new PathSpec("fruitsField"), MaskOperation.POSITIVE_MASK_OP);
    ServerResourceContext mockContext = EasyMock.createMock(ServerResourceContext.class);
    EasyMock.expect(mockContext.hasParameter(RestConstants.ALT_KEY_PARAM)).andReturn(false).atLeastOnce();
    EasyMock.expect(mockContext.isReturnEntityRequested()).andReturn(true);
    EasyMock.expect(mockContext.getProjectionMode()).andReturn(ProjectionMode.AUTOMATIC);
    EasyMock.expect(mockContext.getProjectionMask()).andReturn(maskTree);
    EasyMock.expect(mockContext.getRawRequestContext()).andReturn(new RequestContext()).anyTimes();
    EasyMock.expect(mockContext.getAlwaysProjectedFields()).andReturn(Collections.emptySet()).anyTimes();
    EasyMock.replay(mockContext);
    ResourceMethodDescriptor mockDescriptor = getMockResourceMethodDescriptor(null);
    RoutingResult routingResult = new RoutingResult(mockContext, mockDescriptor);
    List<CreateKVResponse<Long, Foo>> createKVResponses = new ArrayList<>();
    Foo foo = new Foo();
    foo.setStringField("foo1");
    foo.setFruitsField(Fruits.APPLE);
    createKVResponses.add(new CreateKVResponse<>(1L, foo));
    BatchCreateKVResult<Long, Foo> results = new BatchCreateKVResult<>(createKVResponses);
    BatchCreateResponseBuilder responseBuilder = new BatchCreateResponseBuilder(new ErrorResponseBuilder());
    RestRequest request = new RestRequestBuilder(new URI("/foo")).build();
    RestLiResponseData<BatchCreateResponseEnvelope> responseData =
            responseBuilder.buildRestLiResponseData(request, routingResult, results, Collections.emptyMap(), Collections.emptyList());
    Assert.assertTrue(responseData.getResponseEnvelope().isGetAfterCreate());
    CreateIdEntityStatus<Long, Foo> item =
            (CreateIdEntityStatus<Long, Foo>) responseData.getResponseEnvelope().getCreateResponses().get(0).getRecord();
    Assert.assertEquals(item.getLocation(), "/foo/1");
    // Only the masked field may survive projection.
    DataMap dataMap = item.data().getDataMap("entity");
    Assert.assertEquals(dataMap.size(), 1);
    Assert.assertEquals(dataMap.get("fruitsField"), Fruits.APPLE.toString());
    EasyMock.verify(mockContext);
}
// Streams keys of a single cluster node via SCAN. The anonymous cursor pins
// the scan to one RedisClient so successive pages come from the same node; the
// client reference is cleared once the server reports cursor "0" (scan done).
@Override
public Cursor<byte[]> scan(RedisClusterNode node, ScanOptions options) {
    return new ScanCursor<byte[]>(0, options) {

        private RedisClient client = getEntry(node);

        @Override
        protected ScanIteration<byte[]> doScan(long cursorId, ScanOptions options) {
            if (isQueueing() || isPipelined()) {
                throw new UnsupportedOperationException("'SSCAN' cannot be called in pipeline / transaction mode.");
            }
            // A null client means the previous page was the last one.
            if (client == null) {
                return null;
            }
            List<Object> args = new ArrayList<Object>();
            // Redis cursors are unsigned 64-bit values.
            args.add(Long.toUnsignedString(cursorId));
            if (options.getPattern() != null) {
                args.add("MATCH");
                args.add(options.getPattern());
            }
            if (options.getCount() != null) {
                args.add("COUNT");
                args.add(options.getCount());
            }
            RFuture<ListScanResult<byte[]>> f = executorService.readAsync(client, ByteArrayCodec.INSTANCE, RedisCommands.SCAN, args.toArray());
            ListScanResult<byte[]> res = syncFuture(f);
            String pos = res.getPos();
            client = res.getRedisClient();
            if ("0".equals(pos)) {
                client = null;
            }
            return new ScanIteration<byte[]>(Long.parseUnsignedLong(pos), res.getValues());
        }
    }.open();
}
@Test public void testScan() { testInCluster(connection -> { Map<byte[], byte[]> map = new HashMap<>(); for (int i = 0; i < 10000; i++) { map.put(RandomString.make(32).getBytes(), RandomString.make(32).getBytes(StandardCharsets.UTF_8)); } connection.mSet(map); Cursor<byte[]> b = connection.scan(ScanOptions.scanOptions().build()); Set<String> sett = new HashSet<>(); int counter = 0; while (b.hasNext()) { byte[] tt = b.next(); sett.add(new String(tt)); counter++; } assertThat(sett.size()).isEqualTo(map.size()); assertThat(counter).isEqualTo(map.size()); }); }
@Override public Dimension render(Graphics2D graphics) { Font originalFont = null; if (font != null) { originalFont = graphics.getFont(); graphics.setFont(font); } final FontMetrics fontMetrics = graphics.getFontMetrics(); Matcher matcher = COL_TAG_PATTERN.matcher(text); Color textColor = color; int idx = 0; int width = 0; while (matcher.find()) { String color = matcher.group(1); String s = text.substring(idx, matcher.start()); idx = matcher.end(); renderText(graphics, textColor, position.x + width, position.y, s); width += fontMetrics.stringWidth(s); textColor = Color.decode("#" + color); } { String s = text.substring(idx); renderText(graphics, textColor, position.x + width, position.y, s); width += fontMetrics.stringWidth(s); } int height = fontMetrics.getHeight(); if (originalFont != null) { graphics.setFont(originalFont); } return new Dimension(width, height); }
@Test public void testRender() { TextComponent textComponent = new TextComponent(); textComponent.setText("test"); textComponent.setColor(Color.RED); textComponent.render(graphics); verify(graphics, times(2)).drawString(eq("test"), anyInt(), anyInt()); verify(graphics).setColor(Color.RED); }
public static void appendFlagsAsChars(final short flags, final Appendable appendable) { final int length = 8; short mask = (short)(1 << (length - 1)); try { for (int i = 0; i < length; i++) { appendable.append((flags & mask) == mask ? '1' : '0'); mask >>= 1; } } catch (final IOException ex) { LangUtil.rethrowUnchecked(ex); } }
@Test void shouldAppendFlags() { final short flags = 0b01100000; final StringBuilder builder = new StringBuilder(); HeaderFlyweight.appendFlagsAsChars(flags, builder); assertEquals("01100000", builder.toString()); }
public MultivariateGaussianDistribution(double[] mean, double variance) { if (variance <= 0) { throw new IllegalArgumentException("Variance is not positive: " + variance); } mu = mean; sigma = Matrix.diag(mu.length, variance); diagonal = true; length = mu.length + 1; init(); }
@Test public void testMultivariateGaussianDistribution() { System.out.println("MultivariateGaussianDistribution"); MathEx.setSeed(19650218); // to get repeatable results. MultivariateGaussianDistribution instance = new MultivariateGaussianDistribution(mu, sigma[0]); double[][] data = instance.rand(2000); MultivariateGaussianDistribution est = MultivariateGaussianDistribution.fit(data, true); assertArrayEquals(mu, est.mean(), 5E-2); for (int i = 0; i < mu.length; i++) { assertEquals(sigma[0][i], est.sigma.get(i, i), 5E-2); for (int j = 0; j < mu.length; j++) { if (i != j) { assertEquals(0, est.sigma.get(i, j), 1E-10); } } } instance = new MultivariateGaussianDistribution(mu, Matrix.of(sigma)); data = instance.rand(2000); est = MultivariateGaussianDistribution.fit(data); assertArrayEquals(mu, est.mean(), 5E-2); for (int i = 0; i < mu.length; i++) { for (int j = 0; j < mu.length; j++) { assertEquals(sigma[i][j], est.sigma.get(i, j), 5E-2); } } est = MultivariateGaussianDistribution.fit(data, true); assertArrayEquals(mu, est.mean(), 5E-2); for (int i = 0; i < mu.length; i++) { for (int j = 0; j < mu.length; j++) { if (i == j) { assertEquals(sigma[i][j], est.sigma.get(i, j), 5E-2); } else { assertEquals(0.0, est.sigma.get(i, j), 1E-10); } } } }
@Override public SGDVector add(SGDVector other) { if (other.size() != size) { throw new IllegalArgumentException("Can't add two vectors of different dimension, this = " + size + ", other = " + other.size()); } if (other instanceof DenseVector) { return other.add(this); } else if (other instanceof SparseVector) { Map<Integer, Double> values = new HashMap<>(); for (VectorTuple tuple : this) { values.put(tuple.index, tuple.value); } for (VectorTuple tuple : other) { values.merge(tuple.index, tuple.value, Double::sum); } return createSparseVector(size, values); } else { throw new IllegalArgumentException("Vector other is not dense or sparse."); } }
@Test public void add() { SparseVector a = generateVectorA(); SparseVector b = generateVectorB(); SparseVector c = generateVectorC(); SparseVector empty = generateEmptyVector(); assertEquals(a,a.add(empty), "A + empty"); assertEquals(b,b.add(empty), "B + empty"); assertEquals(c,c.add(empty), "C + empty"); assertEquals(scale(a,2.0),a.add(a), "A * 2"); assertEquals(scale(b,2.0),b.add(b), "B * 2"); assertEquals(scale(c,2.0),c.add(c), "C * 2"); SparseVector aAddB = generateVectorAAddB(); SparseVector aAddC = generateVectorAAddC(); SparseVector bAddC = generateVectorBAddC(); assertEquals(aAddB, a.add(b), "A + B"); assertEquals(aAddC, a.add(c), "A + C"); assertEquals(aAddB, b.add(a), "B + A"); assertEquals(bAddC, b.add(c), "B + C"); assertEquals(aAddC, c.add(a), "C + A"); assertEquals(bAddC, c.add(b), "C + B"); }
/**
 * Parses DistCp command-line arguments into a {@link DistCpOptions} object.
 *
 * @param args the raw command-line arguments
 * @return the populated, validated options
 * @throws IllegalArgumentException if the arguments cannot be parsed or carry invalid values
 */
public static DistCpOptions parse(String[] args)
    throws IllegalArgumentException {
  CommandLineParser parser = new CustomParser();

  CommandLine command;
  try {
    command = parser.parse(cliOptions, args, true);
  } catch (ParseException e) {
    throw new IllegalArgumentException("Unable to parse arguments. " +
        Arrays.toString(args), e);
  }

  DistCpOptions.Builder builder = parseSourceAndTargetPaths(command);
  // Boolean switches map directly onto builder flags ("-blocking" is inverted by design).
  builder
      .withAtomicCommit(
          command.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()))
      .withSyncFolder(
          command.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()))
      .withDeleteMissing(
          command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()))
      .withIgnoreFailures(
          command.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()))
      .withOverwrite(
          command.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()))
      .withAppend(
          command.hasOption(DistCpOptionSwitch.APPEND.getSwitch()))
      .withSkipCRC(
          command.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()))
      .withBlocking(
          !command.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch()))
      .withVerboseLog(
          command.hasOption(DistCpOptionSwitch.VERBOSE_LOG.getSwitch()))
      .withDirectWrite(
          command.hasOption(DistCpOptionSwitch.DIRECT_WRITE.getSwitch()))
      .withUseIterator(
          command.hasOption(DistCpOptionSwitch.USE_ITERATOR.getSwitch()))
      .withUpdateRoot(
          command.hasOption(DistCpOptionSwitch.UPDATE_ROOT.getSwitch()));

  // Snapshot diff options require exactly two snapshot names.
  if (command.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
    String[] snapshots = getVals(command,
        DistCpOptionSwitch.DIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    builder.withUseDiff(snapshots[0], snapshots[1]);
  }
  if (command.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
    String[] snapshots = getVals(command,
        DistCpOptionSwitch.RDIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    builder.withUseRdiff(snapshots[0], snapshots[1]);
  }

  if (command.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
    builder.withFiltersFile(
        getVal(command, DistCpOptionSwitch.FILTERS.getSwitch()));
  }

  if (command.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
    builder.withLogPath(
        new Path(getVal(command, DistCpOptionSwitch.LOG_PATH.getSwitch())));
  }

  if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())) {
    final String workPath = getVal(command,
        DistCpOptionSwitch.WORK_PATH.getSwitch());
    if (workPath != null && !workPath.isEmpty()) {
      builder.withAtomicWorkPath(new Path(workPath));
    }
  }
  if (command.hasOption(DistCpOptionSwitch.TRACK_MISSING.getSwitch())) {
    builder.withTrackMissing(
        new Path(getVal(
            command,
            DistCpOptionSwitch.TRACK_MISSING.getSwitch())));
  }

  if (command.hasOption(DistCpOptionSwitch.BANDWIDTH.getSwitch())) {
    try {
      final Float mapBandwidth = Float.parseFloat(
          getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()));
      builder.withMapBandwidth(mapBandwidth);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Bandwidth specified is invalid: " +
          getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()), e);
    }
  }

  if (command.hasOption(
      DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch())) {
    try {
      final Integer numThreads = Integer.parseInt(getVal(command,
          DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()));
      builder.withNumListstatusThreads(numThreads);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException(
          "Number of liststatus threads is invalid: " + getVal(command,
              DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()), e);
    }
  }

  if (command.hasOption(DistCpOptionSwitch.MAX_MAPS.getSwitch())) {
    try {
      final Integer maps = Integer.parseInt(
          getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()));
      builder.maxMaps(maps);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Number of maps is invalid: " +
          getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()), e);
    }
  }

  if (command.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
    builder.withCopyStrategy(
        getVal(command, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
  }

  if (command.hasOption(DistCpOptionSwitch.PRESERVE_STATUS.getSwitch())) {
    builder.preserve(
        getVal(command, DistCpOptionSwitch.PRESERVE_STATUS.getSwitch()));
  }

  if (command.hasOption(DistCpOptionSwitch.FILE_LIMIT.getSwitch())) {
    LOG.warn(DistCpOptionSwitch.FILE_LIMIT.getSwitch() + " is a deprecated" +
        " option. Ignoring.");
  }

  if (command.hasOption(DistCpOptionSwitch.SIZE_LIMIT.getSwitch())) {
    LOG.warn(DistCpOptionSwitch.SIZE_LIMIT.getSwitch() + " is a deprecated" +
        " option. Ignoring.");
  }

  if (command.hasOption(DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch())) {
    // BUGFIX: trim() was previously applied to the switch name (a no-op, since
    // switch names carry no whitespace) instead of the user-supplied value.
    // Trim the value so padded numbers parse cleanly.
    final String chunkSizeStr = getVal(command,
        DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch()).trim();
    try {
      int csize = Integer.parseInt(chunkSizeStr);
      csize = csize > 0 ? csize : 0;
      LOG.info("Set distcp blocksPerChunk to " + csize);
      builder.withBlocksPerChunk(csize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("blocksPerChunk is invalid: " +
          chunkSizeStr, e);
    }
  }

  if (command.hasOption(DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch())) {
    // BUGFIX: same misplaced trim() as blocksPerChunk — trim the value, not the switch name.
    final String copyBufferSizeStr = getVal(command,
        DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch()).trim();
    try {
      int copyBufferSize = Integer.parseInt(copyBufferSizeStr);
      builder.withCopyBufferSize(copyBufferSize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("copyBufferSize is invalid: " +
          copyBufferSizeStr, e);
    }
  }

  return builder.build();
}
@Test public void testMissingTarget() { try { OptionsParser.parse(new String[] { "-f", "hdfs://localhost:8020/source"}); Assert.fail("Missing target allowed"); } catch (IllegalArgumentException ignore) {} }
@Override public ServletStream stream() { return stream; }
@Test public void setCharacterEncoding_encodingIsSet() { underTest.stream().setCharacterEncoding("UTF-8"); verify(response).setCharacterEncoding("UTF-8"); }
public static Reachability get() { return new ProtocolAwareReachability(new ReachabilityFactory().create()); }
@Test public void testGet() { assertNotNull(ReachabilityFactory.get()); }
@Override public Endpoints leaderEndpoints() { return localListeners; }
@Test public void testLeaderEndpoints() { VoterSet voters = VoterSetTest.voterSet(Stream.of(localReplicaKey)); LeaderState<?> state = newLeaderState(voters, 0L); assertNotEquals(Endpoints.empty(), state.leaderEndpoints()); assertEquals(voters.listeners(localReplicaKey.id()), state.leaderEndpoints()); }
@Override public final void run() { long valueCount = collector.getMergingValueCount(); if (valueCount == 0) { return; } runInternal(); assert operationCount > 0 : "No merge operations have been invoked in AbstractContainerMerger"; try { long timeoutMillis = Math.max(valueCount * TIMEOUT_FACTOR, MINIMAL_TIMEOUT_MILLIS); if (!semaphore.tryAcquire(operationCount, timeoutMillis, TimeUnit.MILLISECONDS)) { logger.warning("Split-brain healing for " + getLabel() + " didn't finish within the timeout..."); } } catch (InterruptedException e) { logger.finest("Interrupted while waiting for split-brain healing of " + getLabel() + "..."); Thread.currentThread().interrupt(); } finally { collector.destroy(); } }
@Test @RequireAssertEnabled public void testMergerRun_whenMergerIsInterrupted_thenMergerFinishesEventually() { TestMergeOperation operation = new TestMergeOperation(BLOCKS); final TestContainerMerger merger = new TestContainerMerger(collector, nodeEngine, operation); Thread thread = new Thread(() -> merger.run()); thread.start(); thread.interrupt(); assertJoinable(thread); operation.unblock(); // we cannot assert if the operation has been invoked, since the interruption could be faster assertTrue("Expected collected containers to be destroyed", collector.onDestroyHasBeenCalled); }
@Override public CloseableIterator<String> readScannerLogs() { ensureInitialized(); File file = delegate.getFileStructure().analysisLog(); if (!file.exists()) { return CloseableIterator.emptyCloseableIterator(); } try { InputStreamReader reader = new InputStreamReader(FileUtils.openInputStream(file), UTF_8); return new LineReaderIterator(reader); } catch (IOException e) { throw new IllegalStateException("Fail to open file " + file, e); } }
@Test public void readScannerLogs_no_logs() { CloseableIterator<String> logs = underTest.readScannerLogs(); assertThat(logs.hasNext()).isFalse(); }
/**
 * Converts a SeaTunnel column definition back into a XuGu column type definition,
 * clamping out-of-range precision/scale values and logging a warning on every adjustment.
 *
 * @param column the SeaTunnel column to convert
 * @return the XuGu type definition
 * @throws SeaTunnelRuntimeException (via CommonError) for SQL types XuGu does not support
 */
@Override
public BasicTypeDefine reconvert(Column column) {
    BasicTypeDefine.BasicTypeDefineBuilder builder =
            BasicTypeDefine.builder()
                    .name(column.getName())
                    .nullable(column.isNullable())
                    .comment(column.getComment())
                    .defaultValue(column.getDefaultValue());
    switch (column.getDataType().getSqlType()) {
        case BOOLEAN:
            builder.columnType(XUGU_BOOLEAN);
            builder.dataType(XUGU_BOOLEAN);
            break;
        case TINYINT:
            builder.columnType(XUGU_TINYINT);
            builder.dataType(XUGU_TINYINT);
            break;
        case SMALLINT:
            builder.columnType(XUGU_SMALLINT);
            builder.dataType(XUGU_SMALLINT);
            break;
        case INT:
            builder.columnType(XUGU_INTEGER);
            builder.dataType(XUGU_INTEGER);
            break;
        case BIGINT:
            builder.columnType(XUGU_BIGINT);
            builder.dataType(XUGU_BIGINT);
            break;
        case FLOAT:
            builder.columnType(XUGU_FLOAT);
            builder.dataType(XUGU_FLOAT);
            break;
        case DOUBLE:
            builder.columnType(XUGU_DOUBLE);
            builder.dataType(XUGU_DOUBLE);
            break;
        case DECIMAL:
            // Clamp precision and scale into XuGu's supported range.
            DecimalType decimalType = (DecimalType) column.getDataType();
            long precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            if (precision <= 0) {
                precision = DEFAULT_PRECISION;
                scale = DEFAULT_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is precision less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (precision > MAX_PRECISION) {
                // Reducing precision shrinks scale by the same amount (never below 0).
                scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
                precision = MAX_PRECISION;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum precision of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_PRECISION,
                        precision,
                        scale);
            }
            if (scale < 0) {
                scale = 0;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is scale less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (scale > MAX_SCALE) {
                scale = MAX_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum scale of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_SCALE,
                        precision,
                        scale);
            }
            builder.columnType(String.format("%s(%s,%s)", XUGU_NUMERIC, precision, scale));
            builder.dataType(XUGU_NUMERIC);
            builder.precision(precision);
            builder.scale(scale);
            break;
        case BYTES:
            // Unknown/zero length or oversize -> BLOB; otherwise fixed BINARY.
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                builder.columnType(XUGU_BLOB);
                builder.dataType(XUGU_BLOB);
            } else if (column.getColumnLength() <= MAX_BINARY_LENGTH) {
                builder.columnType(XUGU_BINARY);
                builder.dataType(XUGU_BINARY);
            } else {
                builder.columnType(XUGU_BLOB);
                builder.dataType(XUGU_BLOB);
            }
            break;
        case STRING:
            // Unknown length -> widest VARCHAR; oversize -> CLOB.
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                builder.columnType(String.format("%s(%s)", XUGU_VARCHAR, MAX_VARCHAR_LENGTH));
                builder.dataType(XUGU_VARCHAR);
            } else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", XUGU_VARCHAR, column.getColumnLength()));
                builder.dataType(XUGU_VARCHAR);
            } else {
                builder.columnType(XUGU_CLOB);
                builder.dataType(XUGU_CLOB);
            }
            break;
        case DATE:
            builder.columnType(XUGU_DATE);
            builder.dataType(XUGU_DATE);
            break;
        case TIME:
            builder.dataType(XUGU_TIME);
            if (column.getScale() != null && column.getScale() > 0) {
                Integer timeScale = column.getScale();
                if (timeScale > MAX_TIME_SCALE) {
                    timeScale = MAX_TIME_SCALE;
                    // BUGFIX: this warning previously logged MAX_SCALE, but the limit
                    // actually applied to TIME columns is MAX_TIME_SCALE.
                    log.warn(
                            "The time column {} type time({}) is out of range, "
                                    + "which exceeds the maximum scale of {}, "
                                    + "it will be converted to time({})",
                            column.getName(),
                            column.getScale(),
                            MAX_TIME_SCALE,
                            timeScale);
                }
                builder.columnType(String.format("%s(%s)", XUGU_TIME, timeScale));
                builder.scale(timeScale);
            } else {
                builder.columnType(XUGU_TIME);
            }
            break;
        case TIMESTAMP:
            if (column.getScale() == null || column.getScale() <= 0) {
                builder.columnType(XUGU_TIMESTAMP);
            } else {
                int timestampScale = column.getScale();
                if (column.getScale() > MAX_TIMESTAMP_SCALE) {
                    timestampScale = MAX_TIMESTAMP_SCALE;
                    log.warn(
                            "The timestamp column {} type timestamp({}) is out of range, "
                                    + "which exceeds the maximum scale of {}, "
                                    + "it will be converted to timestamp({})",
                            column.getName(),
                            column.getScale(),
                            MAX_TIMESTAMP_SCALE,
                            timestampScale);
                }
                builder.columnType(String.format("TIMESTAMP(%s)", timestampScale));
                builder.scale(timestampScale);
            }
            builder.dataType(XUGU_TIMESTAMP);
            break;
        default:
            throw CommonError.convertToConnectorTypeError(
                    DatabaseIdentifier.XUGU,
                    column.getDataType().getSqlType().name(),
                    column.getName());
    }
    return builder.build();
}
@Test public void testReconvertUnsupported() { Column column = PhysicalColumn.of( "test", new MapType<>(BasicType.STRING_TYPE, BasicType.STRING_TYPE), (Long) null, true, null, null); try { XuguTypeConverter.INSTANCE.reconvert(column); Assertions.fail(); } catch (SeaTunnelRuntimeException e) { // ignore } catch (Throwable e) { Assertions.fail(); } }
public void add(Boolean bool) { elements.add(bool == null ? JsonNull.INSTANCE : new JsonPrimitive(bool)); }
@Test public void testCharPrimitiveAddition() { JsonArray jsonArray = new JsonArray(); jsonArray.add('a'); jsonArray.add('e'); jsonArray.add('i'); jsonArray.add((char) 111); jsonArray.add((Character) null); jsonArray.add('u'); jsonArray.add("and sometimes Y"); assertThat(jsonArray.toString()) .isEqualTo("[\"a\",\"e\",\"i\",\"o\",null,\"u\",\"and sometimes Y\"]"); }
public JobTriggerDto create(JobTriggerDto trigger) { requireNonNull(trigger, "trigger cannot be null"); // Make sure we don't save triggers that have an ID. That would potentially overwrite an existing trigger // and destroy locks and other data. if (trigger.id() != null) { throw new IllegalArgumentException("New trigger must not have an ID"); } var id = insertedIdAsString(collection.insertOne(trigger)); return trigger.toBuilder().id(id).build(); }
@Test public void createTrigger() { final JobTriggerDto trigger = dbJobTriggerService.create(JobTriggerDto.Builder.create(clock) .jobDefinitionId("abc-123") .jobDefinitionType("event-processor-execution-v1") .schedule(IntervalJobSchedule.builder() .interval(1) .unit(TimeUnit.SECONDS) .build()) .build()); assertThat(trigger.id()).isNotBlank(); assertThat(trigger.status()).isEqualTo(JobTriggerStatus.RUNNABLE); assertThat(trigger.lock()).isEqualTo(JobTriggerLock.empty()); assertThatCode(() -> dbJobTriggerService.create(null)) .isInstanceOf(NullPointerException.class) .hasMessageContaining("trigger cannot be null"); }
@Override protected boolean isStepCompleted(@NonNull Context context) { // note: we can not use mSharedPrefs, since this method might be // called before onAttached is called. return (mSharedPrefs == null ? DirectBootAwareSharedPreferences.create(context) : mSharedPrefs) .getBoolean(SKIPPED_PREF_KEY, false) || SetupSupport.hasLanguagePackForCurrentLocale( AnyApplication.getKeyboardFactory(context).getAllAddOns()); }
@Test public void testHappyPath() { Locale.setDefault(Locale.FRANCE); WizardLanguagePackFragment fragment = startFragment(); Assert.assertFalse(fragment.isStepCompleted(getApplicationContext())); ImageView stateIcon = fragment.getView().findViewById(R.id.step_state_icon); Assert.assertNotNull(stateIcon); Assert.assertTrue(stateIcon.isClickable()); Assert.assertEquals( R.drawable.ic_wizard_download_pack_missing, Shadows.shadowOf(stateIcon.getDrawable()).getCreatedFromResId()); View.OnClickListener stateIconClickHandler = Shadows.shadowOf(stateIcon).getOnClickListener(); View.OnClickListener linkClickHandler = Shadows.shadowOf((View) fragment.getView().findViewById(R.id.go_to_download_packs_action)) .getOnClickListener(); Assert.assertNotNull(stateIconClickHandler); Assert.assertSame(stateIconClickHandler, linkClickHandler); Assert.assertNull( Shadows.shadowOf((Application) ApplicationProvider.getApplicationContext()) .getNextStartedActivity()); stateIconClickHandler.onClick(null); Assert.assertEquals( "Leaving AnySoftKeyboard", GeneralDialogTestUtil.getTitleFromDialog(GeneralDialogTestUtil.getLatestShownDialog())); }
@Override public String resolve(Method method, Object[] arguments, String spelExpression) { if (StringUtils.isEmpty(spelExpression)) { return spelExpression; } if (spelExpression.matches(PLACEHOLDER_SPEL_REGEX) && stringValueResolver != null) { return stringValueResolver.resolveStringValue(spelExpression); } if (spelExpression.matches(METHOD_SPEL_REGEX)) { SpelRootObject rootObject = new SpelRootObject(method, arguments); MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer); Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext); return (String) evaluated; } if (spelExpression.matches(BEAN_SPEL_REGEX)) { SpelRootObject rootObject = new SpelRootObject(method, arguments); MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer); evaluationContext.setBeanResolver(new BeanFactoryResolver(this.beanFactory)); Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext); return (String) evaluated; } return spelExpression; }
@Test public void testRootArgs() throws Exception { String testExpression = "#root.args[0]"; String firstArgument = "test"; DefaultSpelResolverTest target = new DefaultSpelResolverTest(); Method testMethod = target.getClass().getMethod("testMethod", String.class); String result = sut.resolve(testMethod, new Object[]{firstArgument}, testExpression); assertThat(result).isEqualTo(firstArgument); }
public static Optional<String> extract(final HttpServletRequest request) { String header = request.getHeader(AUTHORIZATION_HEADER); if (!StringUtils.hasText(header)) { return Optional.empty(); } return extractFromHeader(header.split(HEADER_SPLIT_DELIMITER)); }
@Test void 토큰_헤더가_없다면_빈_값이_반환된다() { // given when(request.getHeader(AUTHORIZATION_HEADER)).thenReturn("InvalidType token"); // when Optional<String> result = AuthenticationExtractor.extract(request); // then assertThat(result).isEmpty(); }
public static OP_TYPE getOpType(final List<Field<?>> fields, final Model model, final String targetFieldName) { return Stream.of(getOpTypeFromTargets(model.getTargets(), targetFieldName), getOpTypeFromMiningFields(model.getMiningSchema(), targetFieldName), getOpTypeFromFields(fields, targetFieldName)) .filter(Optional::isPresent) .map(Optional::get) .findFirst() .orElseThrow(() -> new KiePMMLInternalException(String.format("Failed to find OpType for field" + " %s", targetFieldName))); }
@Test void getOpTypeByTargetsNotFound() { assertThatExceptionOfType(KiePMMLInternalException.class).isThrownBy(() -> { final Model model = new RegressionModel(); final DataDictionary dataDictionary = new DataDictionary(); final MiningSchema miningSchema = new MiningSchema(); final Targets targets = new Targets(); IntStream.range(0, 3).forEach(i -> { String fieldName = "field" + i; final DataField dataField = getRandomDataField(); dataField.setName(fieldName); dataDictionary.addDataFields(dataField); final MiningField miningField = getRandomMiningField(); miningField.setName(dataField.getName()); miningSchema.addMiningFields(miningField); final Target targetField = getRandomTarget(); targetField.setField(dataField.getName()); targets.addTargets(targetField); }); model.setMiningSchema(miningSchema); model.setTargets(targets); org.kie.pmml.compiler.api.utils.ModelUtils.getOpType(getFieldsFromDataDictionary(dataDictionary), model, "NOT_EXISTING"); }); }
/**
 * Registers JVM memory gauges: combined totals, heap, non-heap, and
 * per-memory-pool metrics. The returned map is unmodifiable.
 */
@Override
public Map<String, Metric> getMetrics() {
    final Map<String, Metric> gauges = new HashMap<>();

    // Totals combine heap and non-heap usage.
    gauges.put("total.init", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getInit() + mxBean.getNonHeapMemoryUsage().getInit());
    gauges.put("total.used", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getUsed() + mxBean.getNonHeapMemoryUsage().getUsed());
    // -1 means "undefined max"; propagate it rather than adding it into the total.
    gauges.put("total.max", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getMax() == -1 ? -1 : mxBean.getHeapMemoryUsage().getMax() + mxBean.getNonHeapMemoryUsage().getMax());
    gauges.put("total.committed", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getCommitted() + mxBean.getNonHeapMemoryUsage().getCommitted());

    gauges.put("heap.init", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getInit());
    gauges.put("heap.used", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getUsed());
    gauges.put("heap.max", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getMax());
    gauges.put("heap.committed", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getCommitted());
    gauges.put("heap.usage", new RatioGauge() {
        @Override
        protected Ratio getRatio() {
            final MemoryUsage usage = mxBean.getHeapMemoryUsage();
            return Ratio.of(usage.getUsed(), usage.getMax());
        }
    });

    gauges.put("non-heap.init", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getInit());
    gauges.put("non-heap.used", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getUsed());
    gauges.put("non-heap.max", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getMax());
    gauges.put("non-heap.committed", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getCommitted());
    gauges.put("non-heap.usage", new RatioGauge() {
        @Override
        protected Ratio getRatio() {
            final MemoryUsage usage = mxBean.getNonHeapMemoryUsage();
            // Fall back to the committed size when no max is defined (-1).
            return Ratio.of(usage.getUsed(), usage.getMax() == -1 ? usage.getCommitted() : usage.getMax());
        }
    });

    for (final MemoryPoolMXBean pool : memoryPools) {
        // Pool names may contain spaces; normalize them for metric naming.
        final String poolName = name("pools", WHITESPACE.matcher(pool.getName()).replaceAll("-"));
        gauges.put(name(poolName, "usage"), new RatioGauge() {
            @Override
            protected Ratio getRatio() {
                MemoryUsage usage = pool.getUsage();
                return Ratio.of(usage.getUsed(), usage.getMax() == -1 ? usage.getCommitted() : usage.getMax());
            }
        });
        gauges.put(name(poolName, "max"), (Gauge<Long>) () -> pool.getUsage().getMax());
        gauges.put(name(poolName, "used"), (Gauge<Long>) () -> pool.getUsage().getUsed());
        gauges.put(name(poolName, "committed"), (Gauge<Long>) () -> pool.getUsage().getCommitted());
        // Only register GC usage metrics if the memory pool supports usage statistics.
        if (pool.getCollectionUsage() != null) {
            gauges.put(name(poolName, "used-after-gc"), (Gauge<Long>) () -> pool.getCollectionUsage().getUsed());
        }
        gauges.put(name(poolName, "init"), (Gauge<Long>) () -> pool.getUsage().getInit());
    }

    return Collections.unmodifiableMap(gauges);
}
@Test public void hasAGaugeForNonHeapInit() { final Gauge gauge = (Gauge) gauges.getMetrics().get("non-heap.init"); assertThat(gauge.getValue()) .isEqualTo(2L); }
public static boolean containsWord(String word, String body) { // $ is quite a common character for a drools binding but it's not considered a word for the regexp engine // By converting to a character is easier to write the regexp final String wordWithDollarReplaced = word.replaceAll("\\$", "_DOLLAR_"); final String bodyWithDollarReplaced = body.replaceAll("\\$", "_DOLLAR_"); Pattern p = Pattern.compile("\\b" + wordWithDollarReplaced + "\\b", Pattern.UNICODE_CHARACTER_CLASS); return p.matcher(bodyWithDollarReplaced).find(); }
@Test public void containsWordTest() throws Exception { assertThat(containsWord("$cheesery", "results.add($cheeseryResult);\n")).isFalse(); assertThat(containsWord("$cheeseryResult", "results.add($cheeseryResult);\n")).isTrue(); assertThat(containsWord("cheesery", "results.add($cheesery);\n")).isFalse(); }
public List<String> build() { return switch (dialect.getId()) { case Oracle.ID -> forOracle(tableName); case H2.ID, PostgreSql.ID -> singletonList("drop table if exists " + tableName); case MsSql.ID -> // "if exists" is supported only since MSSQL 2016. singletonList("drop table " + tableName); default -> throw new IllegalStateException("Unsupported DB: " + dialect.getId()); }; }
@Test public void drop_columns_on_oracle() { assertThat(new DropTableBuilder(new Oracle(), "issues") .build()).containsExactly( "BEGIN\n" + "EXECUTE IMMEDIATE 'DROP SEQUENCE issues_seq';\n" + "EXCEPTION\n" + "WHEN OTHERS THEN\n" + " IF SQLCODE != -2289 THEN\n" + " RAISE;\n" + " END IF;\n" + "END;", "BEGIN\n" + "EXECUTE IMMEDIATE 'DROP TRIGGER issues_idt';\n" + "EXCEPTION\n" + "WHEN OTHERS THEN\n" + " IF SQLCODE != -4080 THEN\n" + " RAISE;\n" + " END IF;\n" + "END;", "BEGIN\n" + "EXECUTE IMMEDIATE 'DROP TABLE issues';\n" + "EXCEPTION\n" + "WHEN OTHERS THEN\n" + " IF SQLCODE != -942 THEN\n" + " RAISE;\n" + " END IF;\n" + "END;"); }
@Override public Set<Long> calculateUsers(DelegateExecution execution, String param) { Set<Long> groupIds = StrUtils.splitToLongSet(param); List<BpmUserGroupDO> groups = userGroupService.getUserGroupList(groupIds); return convertSetByFlatMap(groups, BpmUserGroupDO::getUserIds, Collection::stream); }
@Test public void testCalculateUsers() { // 准备参数 String param = "1,2"; // mock 方法 BpmUserGroupDO userGroup1 = randomPojo(BpmUserGroupDO.class, o -> o.setUserIds(asSet(11L, 12L))); BpmUserGroupDO userGroup2 = randomPojo(BpmUserGroupDO.class, o -> o.setUserIds(asSet(21L, 22L))); when(userGroupService.getUserGroupList(eq(asSet(1L, 2L)))).thenReturn(Arrays.asList(userGroup1, userGroup2)); // 调用 Set<Long> results = strategy.calculateUsers(null, param); // 断言 assertEquals(asSet(11L, 12L, 21L, 22L), results); }
public static CommandExecutor newInstance(final PostgreSQLCommandPacketType commandPacketType, final PostgreSQLCommandPacket commandPacket, final ConnectionSession connectionSession, final PortalContext portalContext) throws SQLException { if (commandPacket instanceof SQLReceivedPacket) { log.debug("Execute packet type: {}, sql: {}", commandPacketType, ((SQLReceivedPacket) commandPacket).getSQL()); } else { log.debug("Execute packet type: {}", commandPacketType); } if (!(commandPacket instanceof PostgreSQLAggregatedCommandPacket)) { return getCommandExecutor(commandPacketType, commandPacket, connectionSession, portalContext); } PostgreSQLAggregatedCommandPacket aggregatedCommandPacket = (PostgreSQLAggregatedCommandPacket) commandPacket; if (aggregatedCommandPacket.isContainsBatchedStatements()) { return new PostgreSQLAggregatedCommandExecutor(getExecutorsOfAggregatedBatchedStatements(aggregatedCommandPacket, connectionSession, portalContext)); } List<CommandExecutor> result = new ArrayList<>(aggregatedCommandPacket.getPackets().size()); for (PostgreSQLCommandPacket each : aggregatedCommandPacket.getPackets()) { result.add(getCommandExecutor((PostgreSQLCommandPacketType) each.getIdentifier(), each, connectionSession, portalContext)); } return new PostgreSQLAggregatedCommandExecutor(result); }
@Test
void assertAggregatedPacketNotBatchedStatements() throws SQLException {
    // Build one mock per protocol phase (parse/bind/describe/execute/sync),
    // each reporting its own command-type identifier.
    PostgreSQLComParsePacket parsePacket = mock(PostgreSQLComParsePacket.class);
    when(parsePacket.getIdentifier()).thenReturn(PostgreSQLCommandPacketType.PARSE_COMMAND);
    PostgreSQLComBindPacket bindPacket = mock(PostgreSQLComBindPacket.class);
    when(bindPacket.getIdentifier()).thenReturn(PostgreSQLCommandPacketType.BIND_COMMAND);
    PostgreSQLComDescribePacket describePacket = mock(PostgreSQLComDescribePacket.class);
    when(describePacket.getIdentifier()).thenReturn(PostgreSQLCommandPacketType.DESCRIBE_COMMAND);
    PostgreSQLComExecutePacket executePacket = mock(PostgreSQLComExecutePacket.class);
    when(executePacket.getIdentifier()).thenReturn(PostgreSQLCommandPacketType.EXECUTE_COMMAND);
    PostgreSQLComSyncPacket syncPacket = mock(PostgreSQLComSyncPacket.class);
    when(syncPacket.getIdentifier()).thenReturn(PostgreSQLCommandPacketType.SYNC_COMMAND);
    // Aggregated packet WITHOUT batched statements must fan out per inner packet.
    PostgreSQLAggregatedCommandPacket packet = mock(PostgreSQLAggregatedCommandPacket.class);
    when(packet.isContainsBatchedStatements()).thenReturn(false);
    when(packet.getPackets()).thenReturn(Arrays.asList(parsePacket, bindPacket, describePacket, executePacket, syncPacket));
    CommandExecutor actual = PostgreSQLCommandExecutorFactory.newInstance(null, packet, connectionSession, portalContext);
    assertThat(actual, instanceOf(PostgreSQLAggregatedCommandExecutor.class));
    // Verify one executor per inner packet, in the original packet order.
    Iterator<CommandExecutor> actualPacketsIterator = getExecutorsFromAggregatedCommandExecutor((PostgreSQLAggregatedCommandExecutor) actual).iterator();
    assertThat(actualPacketsIterator.next(), instanceOf(PostgreSQLComParseExecutor.class));
    assertThat(actualPacketsIterator.next(), instanceOf(PostgreSQLComBindExecutor.class));
    assertThat(actualPacketsIterator.next(), instanceOf(PostgreSQLComDescribeExecutor.class));
    assertThat(actualPacketsIterator.next(), instanceOf(PostgreSQLComExecuteExecutor.class));
    assertThat(actualPacketsIterator.next(), instanceOf(PostgreSQLComSyncExecutor.class));
    assertFalse(actualPacketsIterator.hasNext());
}
/**
 * Sets the query for this search. May be called at most once.
 *
 * @param queryBuilder the query to run; must not be {@code null}
 * @return this builder, for chaining
 * @throws IllegalStateException if a query was already set
 * @throws NullPointerException if {@code queryBuilder} is {@code null}
 */
public SearchBuilder query(QueryBuilder queryBuilder) {
    // Reject a second assignment explicitly instead of silently overwriting.
    if (this.queryBuilder != null) {
        throw new IllegalStateException("queryBuilder is already set");
    }
    this.queryBuilder = requireNonNull(queryBuilder, "queryBuilder");
    return this;
}
@Test
public void searchQueryBuilderShouldNotBeSetMultipleTimes() {
    // Setting the query twice on the same builder must fail fast.
    assertThrows(IllegalStateException.class, () -> {
        final BoolQueryBuilder queryBuilder = Query.bool();
        final SearchBuilder searchBuilder = Search.builder().query(queryBuilder);
        // Second call is expected to throw IllegalStateException.
        searchBuilder.query(Query.bool());
    });
}
/**
 * Opens an output stream at the given path.
 * Delegates to the wider overload with {@code createParent == true}, so missing
 * parent directories are created implicitly.
 */
@Override
public FSDataOutputStream create(Path f, FsPermission permission,
    boolean overwrite, int bufferSize, short replication, long blockSize,
    Progressable progress) throws IOException {
    // The extra boolean argument is createParent; always true for this entry point.
    return create(f, permission, overwrite, true, bufferSize, replication, blockSize, progress);
}
@Test
public void testMultiChunkFile() throws Exception {
    Path testPath = new Path(TEST_ROOT_DIR, "testMultiChunk");
    FSDataOutputStream fout = localFs.create(testPath);
    // Write enough data to span multiple checksum chunks.
    for (int i = 0; i < 1000; i++) {
        fout.write(("testing" + i).getBytes());
    }
    fout.close();
    // Exercise some boundary cases - a divisor of the chunk size
    // the chunk size, 2x chunk size, and +/-1 around these.
    readFile(localFs, testPath, 128);
    readFile(localFs, testPath, 511);
    readFile(localFs, testPath, 512);
    readFile(localFs, testPath, 513);
    readFile(localFs, testPath, 1023);
    readFile(localFs, testPath, 1024);
    readFile(localFs, testPath, 1025);
}
/**
 * Pulls items in batches from {@code itemSupplier}, converts each batch to jobs and
 * processes them, until the supplier returns an empty batch. The supplier receives the
 * previously processed batch (or {@code null} on the first call) so it can decide
 * whether to continue.
 *
 * @param itemSupplier supplies the next batch given the previous one
 * @param toJobFunction converts one item into a {@link Job}
 * @param amountOfProcessedJobsConsumer receives the total number of processed items
 */
protected final <T> void convertAndProcessManyJobs(Function<List<T>, List<T>> itemSupplier,
                                                   Function<T, Job> toJobFunction,
                                                   Consumer<Integer> amountOfProcessedJobsConsumer) {
    int processedCount = 0;
    // First fetch passes null; each later fetch passes the batch just processed.
    for (List<T> batch = getItemsToProcess(itemSupplier, null);
         !batch.isEmpty();
         batch = getItemsToProcess(itemSupplier, batch)) {
        convertAndProcessJobs(batch, toJobFunction);
        processedCount += batch.size();
    }
    amountOfProcessedJobsConsumer.accept(processedCount);
}
@Test
void convertAndProcessManyJobsReturnsPreviousResultsSoSupplierCanChooseToContinueOrNot() {
    // GIVEN a supplier that inspects the batch it was previously handed:
    // null -> first call, return two jobs; size 2 -> previous batch echoed back, stop;
    // anything else -> the task failed to pass the previous results through.
    Function<List<Job>, List<Job>> itemSupplier = jobs -> {
        if (jobs == null) return asList(anEnqueuedJob().build(), anEnqueuedJob().build());
        else if (jobs.size() != 2) throw new IllegalStateException("Previous list with size 2 was expected");
        else return emptyList();
    };
    Function<Job, Job> toJobFunction = job -> job;
    // WHEN & THEN: no IllegalStateException means the previous batch was passed correctly.
    assertThatCode(() -> task.convertAndProcessManyJobs(itemSupplier, toJobFunction, System.out::println))
        .doesNotThrowAnyException();
}
/**
 * Returns the recorded cluster-state history entries.
 * Delegates to the underlying history holder; ordering and mutability of the
 * returned list are defined by that holder.
 */
public List<ClusterStateHistoryEntry> getClusterStateHistory() {
    return clusterStateHistory.getHistory();
}
@Test
void state_history_is_initially_empty() {
    // A freshly created tracker must have recorded no cluster states yet.
    final StateVersionTracker versionTracker = createWithMockedMetrics();
    assertTrue(versionTracker.getClusterStateHistory().isEmpty());
}
/**
 * Translates a workflow instance into the step-transition DAG to run.
 * <p>Works on a deep copy of the instance (via Jackson round-trip) so the caller's
 * object is never mutated. For RESTART_FROM_INCOMPLETE / RESTART_FROM_SPECIFIC runs,
 * the start steps are derived from the aggregated step statuses; otherwise explicit
 * start/end step ids from the run config are copied over.
 */
@Override
public Map<String, StepTransition> translate(WorkflowInstance workflowInstance) {
    // Deep copy so mutations below do not leak into the caller's instance.
    WorkflowInstance instance = objectMapper.convertValue(workflowInstance, WorkflowInstance.class);
    if (instance.getRunConfig() != null) {
        if (instance.getRunConfig().getPolicy() == RunPolicy.RESTART_FROM_INCOMPLETE
            || instance.getRunConfig().getPolicy() == RunPolicy.RESTART_FROM_SPECIFIC) {
            // Snapshot each step's aggregated status.
            Map<String, StepInstance.Status> statusMap =
                instance.getAggregatedInfo().getStepAggregatedViews().entrySet().stream()
                    .collect(
                        Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().getStatus()));
            if (!statusMap.isEmpty()) {
                // Restart from steps that did not complete: either terminal-but-incomplete
                // (e.g. failed/stopped) or never created.
                instance
                    .getRunConfig()
                    .setStartStepIds(
                        statusMap.entrySet().stream()
                            .filter(
                                entry ->
                                    !entry.getValue().isComplete()
                                        && (entry.getValue().isTerminal()
                                            || entry.getValue() == StepInstance.Status.NOT_CREATED))
                            .map(Map.Entry::getKey)
                            .collect(Collectors.toList()));
            }
            // handle the special case of restarting from a completed step:
            // the explicitly requested step must be a start step even if it completed.
            if (instance.getRunConfig().getPolicy() == RunPolicy.RESTART_FROM_SPECIFIC) {
                String restartStepId =
                    RunRequest.getCurrentNode(instance.getRunConfig().getRestartConfig()).getStepId();
                if (!instance.getRunConfig().getStartStepIds().contains(restartStepId)) {
                    instance.getRunConfig().getStartStepIds().add(restartStepId);
                }
            }
        } else {
            // Non-restart run: copy the caller-provided start/end step ids (defensive copies).
            if (workflowInstance.getRunConfig().getStartStepIds() != null) {
                instance
                    .getRunConfig()
                    .setStartStepIds(new ArrayList<>(workflowInstance.getRunConfig().getStartStepIds()));
            }
            if (workflowInstance.getRunConfig().getEndStepIds() != null) {
                instance
                    .getRunConfig()
                    .setEndStepIds(new ArrayList<>(workflowInstance.getRunConfig().getEndStepIds()));
            }
        }
    }
    // Null run config (or null ids) means: compute the DAG over the whole workflow.
    List<String> startStepIds =
        instance.getRunConfig() != null && instance.getRunConfig().getStartStepIds() != null
            ? instance.getRunConfig().getStartStepIds()
            : null;
    List<String> endStepIds =
        instance.getRunConfig() != null && instance.getRunConfig().getEndStepIds() != null
            ? instance.getRunConfig().getEndStepIds()
            : null;
    return WorkflowGraph.computeDag(instance.getRuntimeWorkflow(), startStepIds, endStepIds);
}
@Test
public void testTranslateForRestartFromSpecific() {
    // Restart specifically from "job3"; the DAG must start there and include downstream steps.
    instance.getRunConfig().setPolicy(RunPolicy.RESTART_FROM_SPECIFIC);
    instance
        .getRunConfig()
        .setRestartConfig(
            RestartConfig.builder()
                .addRestartNode("sample-dag-test-3", 1, "job3")
                .restartPolicy(RunPolicy.RESTART_FROM_SPECIFIC)
                .build());
    Map<String, StepTransition> dag = translator.translate(instance);
    // Only job3 and its downstream steps remain in the computed DAG.
    Assert.assertEquals(new HashSet<>(Arrays.asList("job.2", "job3", "job4")), dag.keySet());
    // job.2: successor of job3, predecessor of job4.
    StepTransition jobTransition = new StepTransition();
    jobTransition.setPredecessors(Collections.singletonList("job3"));
    jobTransition.setSuccessors(Collections.singletonMap("job4", "true"));
    Assert.assertEquals(jobTransition, dag.get("job.2"));
    // job3: restart root, no predecessors, fans out to job.2 and job4.
    jobTransition.setPredecessors(Collections.emptyList());
    jobTransition.setSuccessors(new HashMap<>());
    jobTransition.getSuccessors().put("job.2", "true");
    jobTransition.getSuccessors().put("job4", "true");
    Assert.assertEquals(jobTransition, dag.get("job3"));
    // job4: terminal join of job3 and job.2.
    jobTransition.setPredecessors(Arrays.asList("job3", "job.2"));
    jobTransition.setSuccessors(Collections.emptyMap());
    Assert.assertEquals(jobTransition, dag.get("job4"));
}
/**
 * Fetches the plugin's static assets via the plugin request helper.
 * The response body is decoded by the message converter matching the
 * resolved extension version.
 *
 * @param pluginId id of the analytics plugin to query
 * @return the static assets payload extracted from the plugin response
 */
public String getStaticAssets(String pluginId) {
    return pluginRequestHelper.submitRequest(pluginId, REQUEST_GET_STATIC_ASSETS, new DefaultPluginInteractionCallback<>() {
        @Override
        public String onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) {
            // Converter choice depends on the extension version negotiated with the plugin.
            return getMessageConverter(resolvedExtensionVersion).getStaticAssetsFromResponseBody(responseBody);
        }
    });
}
@Test
public void shouldFetchStaticAssets() throws Exception {
    // Plugin responds with a JSON body containing the assets payload.
    String responseBody = "{ \"assets\": \"assets payload\" }";
    when(pluginManager.submitTo(eq(PLUGIN_ID), eq(ANALYTICS_EXTENSION), requestArgumentCaptor.capture())).thenReturn(new DefaultGoPluginApiResponse(SUCCESS_RESPONSE_CODE, responseBody));
    String assets = analyticsExtension.getStaticAssets(PLUGIN_ID);
    // The request sent to the plugin carries the expected extension/version/name and no body.
    assertRequest(requestArgumentCaptor.getValue(), ANALYTICS_EXTENSION, "1.0", REQUEST_GET_STATIC_ASSETS, null);
    // The converter must have unwrapped the "assets" field from the JSON response.
    assertThat(assets, is("assets payload"));
}
/**
 * Deletes the ACL access config identified by {@code accessKey} from its backing file.
 *
 * @param accessKey the access key to delete; must be non-empty
 * @return {@code true} if the entry was removed and persisted, or if the backing file
 *         had no data; {@code false} if the key is unknown, empty, or not found in the file
 */
public boolean deleteAccessConfig(String accessKey) {
    if (StringUtils.isEmpty(accessKey)) {
        log.error("Parameter value accessKey is null or empty String,Please check your parameter");
        return false;
    }
    if (accessKeyTable.containsKey(accessKey)) {
        // Each access key maps to the ACL file that declares it.
        String aclFileName = accessKeyTable.get(accessKey);
        PlainAccessData aclAccessConfigData = AclUtils.getYamlDataObject(aclFileName, PlainAccessData.class);
        if (aclAccessConfigData == null) {
            // File has no parseable data: treat deletion as a no-op success.
            log.warn("No data found in {} when deleting access config of {}", aclFileName, accessKey);
            return true;
        }
        List<PlainAccessConfig> accounts = aclAccessConfigData.getAccounts();
        Iterator<PlainAccessConfig> itemIterator = accounts.iterator();
        while (itemIterator.hasNext()) {
            if (itemIterator.next().getAccessKey().equals(accessKey)) {
                // Delete the related acl config element
                itemIterator.remove();
                accessKeyTable.remove(accessKey);
                aclAccessConfigData.setAccounts(accounts);
                // Persist the updated config with a bumped file version.
                return AclUtils.writeDataObject(aclFileName, updateAclConfigFileVersion(aclFileName, aclAccessConfigData));
            }
        }
    }
    // NOTE(review): if the key is in accessKeyTable but absent from the file's accounts,
    // this returns false and leaves the stale table entry in place — confirm intended.
    return false;
}
@Test
public void deleteAccessConfigTest() throws InterruptedException {
    // Deleting a not-yet-existing accessConfig must report failure.
    // NOTE(review): the bare `assert` keyword is a no-op unless the JVM runs with -ea;
    // consider Assert.assertFalse/assertTrue so the checks always execute.
    final boolean flag1 = plainPermissionManager.deleteAccessConfig("test_delete");
    assert !flag1;
    // Create the accessConfig that will be deleted below.
    plainAccessConfig.setAccessKey("test_delete");
    plainAccessConfig.setSecretKey("12345678");
    plainAccessConfig.setWhiteRemoteAddress("192.168.1.1");
    plainAccessConfig.setAdmin(false);
    plainAccessConfig.setDefaultGroupPerm(AclConstants.SUB_PUB);
    plainAccessConfig.setTopicPerms(Arrays.asList(DEFAULT_TOPIC + "=" + AclConstants.PUB));
    plainAccessConfig.setGroupPerms(Lists.newArrayList("groupA=SUB"));
    plainPermissionManager.updateAccessConfig(plainAccessConfig);
    // Deleting the existing accessConfig must now succeed.
    final boolean flag2 = plainPermissionManager.deleteAccessConfig("test_delete");
    assert flag2;
}
/**
 * Builds the Spark driver configuration for the given application.
 * Effective conf = app spec conf, overlaid with {@code confOverrides}, plus derived
 * settings (namespace, primary resource, master URL, app id). The primary resource is
 * chosen by precedence: jars, then PyFiles, then SparkR files.
 *
 * @param app the SparkApplication custom resource
 * @param confOverrides overrides applied on top of the spec's Spark conf (may be empty)
 */
protected SparkAppDriverConf buildDriverConf(
    SparkApplication app, Map<String, String> confOverrides) {
    ApplicationSpec applicationSpec = app.getSpec();
    SparkConf effectiveSparkConf = new SparkConf();
    // Base layer: conf from the application spec.
    if (MapUtils.isNotEmpty(applicationSpec.getSparkConf())) {
        for (String confKey : applicationSpec.getSparkConf().keySet()) {
            effectiveSparkConf.set(confKey, applicationSpec.getSparkConf().get(confKey));
        }
    }
    // Override layer: caller-supplied entries win over the spec.
    if (MapUtils.isNotEmpty(confOverrides)) {
        for (Map.Entry<String, String> entry : confOverrides.entrySet()) {
            effectiveSparkConf.set(entry.getKey(), entry.getValue());
        }
    }
    // Namespace always follows the custom resource's metadata.
    effectiveSparkConf.set("spark.kubernetes.namespace", app.getMetadata().getNamespace());
    // Primary resource precedence: jars > pyFiles > SparkR files > empty Java resource.
    MainAppResource primaryResource = new JavaMainAppResource(Option.empty());
    if (StringUtils.isNotEmpty(applicationSpec.getJars())) {
        primaryResource = new JavaMainAppResource(Option.apply(applicationSpec.getJars()));
        effectiveSparkConf.setIfMissing("spark.jars", applicationSpec.getJars());
    } else if (StringUtils.isNotEmpty(applicationSpec.getPyFiles())) {
        primaryResource = new PythonMainAppResource(applicationSpec.getPyFiles());
        effectiveSparkConf.setIfMissing("spark.submit.pyFiles", applicationSpec.getPyFiles());
    } else if (StringUtils.isNotEmpty(applicationSpec.getSparkRFiles())) {
        primaryResource = new RMainAppResource(applicationSpec.getSparkRFiles());
    }
    // Default master points at the in-cluster Kubernetes API endpoint.
    String sparkMasterUrlPrefix =
        effectiveSparkConf.get(MASTER_URL_PREFIX_PROPS_NAME, DEFAULT_MASTER_URL_PREFIX);
    effectiveSparkConf.setIfMissing(
        "spark.master",
        sparkMasterUrlPrefix + "https://$KUBERNETES_SERVICE_HOST:$KUBERNETES_SERVICE_PORT");
    String appId = generateSparkAppId(app);
    effectiveSparkConf.setIfMissing("spark.app.id", appId);
    return SparkAppDriverConf.create(
        effectiveSparkConf,
        appId,
        primaryResource,
        applicationSpec.getMainClass(),
        applicationSpec.getDriverArgs().toArray(String[]::new),
        Option.apply(applicationSpec.getProxyUser()));
}
@Test
void buildDriverConfForRApp() {
    // Capture the constructor arguments of every SparkAppDriverConf created during the test.
    Map<SparkAppDriverConf, List<Object>> constructorArgs = new HashMap<>();
    try (MockedConstruction<SparkAppDriverConf> mocked =
        mockConstruction(
            SparkAppDriverConf.class,
            (mock, context) -> constructorArgs.put(mock, new ArrayList<>(context.arguments())))) {
        SparkApplication mockApp = mock(SparkApplication.class);
        ApplicationSpec mockSpec = mock(ApplicationSpec.class);
        ObjectMeta appMeta = new ObjectMetaBuilder().withName("app1").withNamespace("ns1").build();
        when(mockApp.getSpec()).thenReturn(mockSpec);
        when(mockApp.getMetadata()).thenReturn(appMeta);
        // Only SparkR files are set, so the R branch of the resource precedence must win.
        when(mockSpec.getSparkRFiles()).thenReturn("foo");
        SparkAppSubmissionWorker submissionWorker = new SparkAppSubmissionWorker();
        SparkAppDriverConf conf = submissionWorker.buildDriverConf(mockApp, Collections.emptyMap());
        assertEquals(6, constructorArgs.get(conf).size());
        // validate main resources: argument index 2 is the primary resource.
        assertInstanceOf(RMainAppResource.class, constructorArgs.get(conf).get(2));
        RMainAppResource mainResource = (RMainAppResource) constructorArgs.get(conf).get(2);
        assertEquals("foo", mainResource.primaryResource());
    }
}
@Udf(description = "Converts the number of days since 1970-01-01 00:00:00 UTC/GMT to a date " + "string using the given format pattern. The format pattern should be in the format" + " expected by java.time.format.DateTimeFormatter") public String formatDate( @UdfParameter( description = "The date to convert") final Date date, @UdfParameter( description = "The format pattern should be in the format expected by" + " java.time.format.DateTimeFormatter.") final String formatPattern) { if (date == null || formatPattern == null) { return null; } try { final DateTimeFormatter formatter = formatters.get(formatPattern); return LocalDate.ofEpochDay(TimeUnit.MILLISECONDS.toDays(date.getTime())).format(formatter); } catch (final ExecutionException | RuntimeException e) { throw new KsqlFunctionException("Failed to format date " + date + " with formatter '" + formatPattern + "': " + e.getMessage(), e); } }
@Test
public void shouldByThreadSafeAndWorkWithManyDifferentFormatters() {
    // Hammer the UDF from many threads with 10k distinct patterns to exercise
    // the formatter cache under concurrency.
    IntStream.range(0, 10_000)
        .parallel()
        .forEach(idx -> {
            try {
                // Each pattern embeds its index as a literal so results are distinguishable.
                final String pattern = "yyyy-MM-dd'X" + idx + "'";
                final String result = udf.formatDate(Date.valueOf("2021-05-18"), pattern);
                assertThat(result, is("2021-05-18X" + idx));
            } catch (final Exception e) {
                fail(e.getMessage());
            }
        });
}
/**
 * Expands this snippet with the given cell value, dispatching on the snippet type
 * detected at construction: FORALL templates, indexed multi-placeholder templates,
 * or (default) a single-placeholder/plain snippet.
 */
public String build( final String cellValue ) {
    switch ( type ) {
        case FORALL:
            return buildForAll( cellValue );
        case INDEXED:
            return buildMulti( cellValue );
        default:
            // SINGLE (and any plain snippet without placeholders).
            return buildSingle( cellValue );
    }
}
@Test
public void testBuildSnippetNoPlaceHolder() {
    // A snippet containing no placeholder must be returned verbatim,
    // regardless of the cell value supplied.
    final String snippet = "something.getAnother().equals(blah);";
    final SnippetBuilder snip = new SnippetBuilder(snippet);
    assertThat(snip.build("this is ignored...")).isEqualTo(snippet);
}
/**
 * Creates a cached-style thread pool with explicit core/max sizes.
 * Threads above the core count are reclaimed after one minute of idleness.
 *
 * @param corePoolSize number of threads to keep alive even when idle
 * @param maximumPoolSize upper bound on pool size
 * @return the configured executor
 */
public static ThreadPoolExecutor newCachedThreadPool(int corePoolSize, int maximumPoolSize) {
    // SynchronousQueue hands tasks off directly: a free thread takes it or a new one is spawned.
    final SynchronousQueue<Runnable> handOffQueue = new SynchronousQueue<Runnable>();
    return new ThreadPoolExecutor(
            corePoolSize,
            maximumPoolSize,
            DateUtils.MILLISECONDS_PER_MINUTE,
            TimeUnit.MILLISECONDS,
            handOffQueue);
}
@Test
public void newCachedThreadPool1() throws Exception {
    // NOTE: this exercises the 3-arg overload (explicit queue), not the 2-arg variant.
    BlockingQueue<Runnable> queue = new SynchronousQueue<Runnable>();
    ThreadFactory factory = new NamedThreadFactory("xxx");
    ThreadPoolExecutor executor = ThreadPoolUtils.newCachedThreadPool(10, 20, queue);
    // Pool bounds and the supplied queue must be wired through unchanged.
    Assert.assertEquals(executor.getCorePoolSize(), 10);
    Assert.assertEquals(executor.getMaximumPoolSize(), 20);
    Assert.assertEquals(executor.getQueue(), queue);
}
/**
 * @deprecated see the overload taking the snapshot iterable directly.
 * Resolves the Iceberg table behind the Hive table and delegates to
 * {@code hasAppendsOnly(snapshots, since)}.
 */
@Deprecated
@Override
public Boolean hasAppendsOnly(org.apache.hadoop.hive.ql.metadata.Table hmsTable, SnapshotContext since) {
    TableDesc tableDesc = Utilities.getTableDesc(hmsTable);
    Table table = IcebergTableUtil.getTable(conf, tableDesc.getProperties());
    return hasAppendsOnly(table.snapshots(), since);
}
/**
 * An empty snapshot list with no reference snapshot vacuously contains only appends,
 * so the check must report {@code true}.
 */
// Fix: the old name claimed "ReturnsNull" while the assertion checks `is(true)` —
// renamed so the test name matches the asserted behavior.
@Test
public void testHasAppendsOnlyReturnsTrueWhenTableIsEmptyAndGivenSnapShotIsNull() {
    HiveIcebergStorageHandler storageHandler = new HiveIcebergStorageHandler();
    Boolean result = storageHandler.hasAppendsOnly(Collections.emptyList(), null);
    assertThat(result, is(true));
}
/**
 * Sets the layout scale factor.
 *
 * @param scale the new scale; must lie within the allowed bounds
 *              (validated by {@code scaleWithinBounds})
 * @return this layout, for chaining
 * @throws IllegalArgumentException if the scale is out of bounds
 */
public UiTopoLayout scale(double scale) {
    checkArgument(scaleWithinBounds(scale), E_SCALE_OOB);
    this.scale = scale;
    return this;
}
// A scale above the permitted maximum must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void scaleTooBig() {
    mkRootLayout();
    layout.scale(100.009);
}
/**
 * Records a metric value as a query parameter.
 *
 * @param metric the metric whose query-parameter name keys the value
 * @param value the value to record; its {@code toString()} form is stored
 * @throws IllegalArgumentException if the parameter was already collected
 */
public void collect(@Nonnull Metric metric, @Nonnull Object value) {
    String parameter = metric.getQueryParameter();
    String valueStr = value.toString();
    // Atomically insert; a non-null previous mapping means a duplicate parameter.
    if (parameters.putIfAbsent(parameter, valueStr) != null) {
        throw new IllegalArgumentException("Parameter " + parameter + " is already added");
    }
    // Invalidate the cached query string so it is rebuilt with the new parameter.
    query = null;
}
@Test
public void checkDuplicateKey() {
    MetricsCollectionContext context = new MetricsCollectionContext();
    // First collection of parameter "1" succeeds.
    context.collect(() -> "1", "hazelcast");
    // A second value for the same parameter must be rejected.
    assertThatThrownBy(() -> context.collect(() -> "1", "phonehome"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessageContaining("Parameter 1 is already added");
}
/**
 * Resolves the cache directory for a layer: {@code <layersDir>/<digest-hash>}.
 */
Path getLayerDirectory(DescriptorDigest layerDigest) {
    return getLayersDirectory().resolve(layerDigest.getHash());
}
@Test
public void testGetLayerDirectory() throws DigestException {
    // 64 hex chars — a syntactically valid SHA-256 digest.
    DescriptorDigest layerDigest =
        DescriptorDigest.fromHash(
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
    // The layer directory is <cache>/layers/<digest-hash>.
    Assert.assertEquals(
        Paths.get(
            "cache",
            "directory",
            "layers",
            "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
        TEST_CACHE_STORAGE_FILES.getLayerDirectory(layerDigest));
}
/**
 * Derives the {@link QueryId} for a statement.
 * <p>Precedence: explicit WITH query id &gt; source-table (CST) id &gt; transient id for
 * sink-less queries &gt; INSERT id for non-create-into &gt; reuse of an existing query id
 * when REPLACE is enabled &gt; a fresh CTAS/CSAS id.
 *
 * @throws KsqlException if multiple existing queries write to the sink
 * @throws UnsupportedOperationException if the sink exists and REPLACE is disabled
 */
static QueryId buildId(
    final Statement statement,
    final EngineContext engineContext,
    final QueryIdGenerator idGenerator,
    final OutputNode outputNode,
    final boolean createOrReplaceEnabled,
    final Optional<String> withQueryId) {
    // Explicit WITH(QUERY_ID=...) always wins, normalized to upper case.
    if (withQueryId.isPresent()) {
        final String queryId = withQueryId.get().toUpperCase();
        validateWithQueryId(queryId);
        return new QueryId(queryId);
    }
    if (statement instanceof CreateTable && ((CreateTable) statement).isSource()) {
        // Use the CST name as part of the QueryID
        final String suffix = ((CreateTable) statement).getName().text().toUpperCase()
            + "_" + idGenerator.getNext().toUpperCase();
        return new QueryId(ReservedQueryIdsPrefixes.CST + suffix);
    }
    // No sink: transient (e.g. push) query — id is random, derived from the source alias.
    if (!outputNode.getSinkName().isPresent()) {
        final String prefix =
            "transient_" + outputNode.getSource().getLeftmostSourceNode().getAlias().text() + "_";
        return new QueryId(prefix + Math.abs(ThreadLocalRandom.current().nextLong()));
    }
    final KsqlStructuredDataOutputNode structured = (KsqlStructuredDataOutputNode) outputNode;
    // INSERT INTO (not CREATE ... AS) gets the INSERT prefix.
    if (!structured.createInto()) {
        return new QueryId(ReservedQueryIdsPrefixes.INSERT + idGenerator.getNext());
    }
    final SourceName sink = outputNode.getSinkName().get();
    final Set<QueryId> queriesForSink = engineContext.getQueryRegistry().getQueriesWithSink(sink);
    if (queriesForSink.size() > 1) {
        // REPLACE would be ambiguous with several writers.
        throw new KsqlException("REPLACE for sink " + sink + " is not supported because there are "
            + "multiple queries writing into it: " + queriesForSink);
    } else if (!queriesForSink.isEmpty()) {
        if (!createOrReplaceEnabled) {
            final String type = outputNode.getNodeOutputType().getKsqlType().toLowerCase();
            throw new UnsupportedOperationException(
                String.format(
                    "Cannot add %s '%s': A %s with the same name already exists",
                    type, sink.text(), type));
        }
        // CREATE OR REPLACE: reuse the single existing query's id.
        return Iterables.getOnlyElement(queriesForSink);
    }
    // Fresh create: prefix by output kind (table -> CTAS, stream -> CSAS).
    final String suffix = outputNode.getId().toString().toUpperCase()
        + "_" + idGenerator.getNext().toUpperCase();
    return new QueryId(
        outputNode.getNodeOutputType() == DataSourceType.KTABLE
            ? ReservedQueryIdsPrefixes.CTAS + suffix
            : ReservedQueryIdsPrefixes.CSAS + suffix
    );
}
@Test
public void shouldThrowIfMultipleQueriesExist() {
    // Given: the sink already has two writers, making REPLACE ambiguous.
    when(plan.getSinkName()).thenReturn(Optional.of(SINK));
    when(plan.createInto()).thenReturn(true);
    when(queryRegistry.getQueriesWithSink(SINK))
        .thenReturn(ImmutableSet.of(new QueryId("CTAS_FOO_1"), new QueryId("INSERTQUERY_1")));
    // When:
    final KsqlException e = assertThrows(KsqlException.class,
        () -> QueryIdUtil.buildId(statement, engineContext, idGenerator, plan, false, Optional.empty()));
    // Then: the error explains that multiple queries write to the sink.
    assertThat(e.getMessage(), containsString("there are multiple queries writing"));
}
/**
 * Returns whether the given action is enabled, using the action's ordinal as
 * its bit index in the backing bit set.
 */
public boolean contains(AclAction action) {
    return mActions.get(action.ordinal());
}
@Test
public void contains() {
    final AclActions actions = new AclActions();
    final AclAction[] allActions = {AclAction.READ, AclAction.WRITE, AclAction.EXECUTE};
    // A fresh instance contains no actions at all.
    for (AclAction action : allActions) {
        assertFalse(actions.contains(action));
    }
    // Adding each action makes exactly that action visible.
    for (AclAction action : allActions) {
        actions.add(action);
        assertTrue(actions.contains(action));
    }
}
/**
 * Guards against "zip slip" style path escapes: when both files are non-null,
 * {@code file} must be located under {@code parentFile}.
 *
 * @param parentFile the directory that must contain {@code file}; skipped if null
 * @param file the candidate file; skipped if null
 * @return {@code file}, unchanged
 * @throws IllegalArgumentException if {@code file} lies outside {@code parentFile}
 */
public static File checkSlip(File parentFile, File file) throws IllegalArgumentException {
    // Only validate when both endpoints are present; otherwise pass through.
    final boolean bothPresent = parentFile != null && file != null;
    if (bothPresent && !isSub(parentFile, file)) {
        throw new IllegalArgumentException("New file is outside of the parent dir: " + file.getName());
    }
    return file;
}
@Test
public void checkSlipTest() {
    // "test/../a" resolves outside "test/a", so the slip check must reject it.
    assertThrows(IllegalArgumentException.class, ()->{
        FileUtil.checkSlip(FileUtil.file("test/a"), FileUtil.file("test/../a"));
    });
}
/**
 * Starts a builder whose report-side crawl depth is limited to the given type.
 *
 * @param reportMaxDepth the deepest report component type to crawl
 */
public static Builder reportMaxDepth(Component.Type reportMaxDepth) {
    return new Builder(reportMaxDepth);
}
@Test @UseDataProvider("reportTypes") public void withViewsMaxDepth_throws_IAE_if_type_is_report(Type reportType) { assertThatThrownBy(() -> CrawlerDepthLimit.reportMaxDepth(reportType).withViewsMaxDepth(reportType)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("A Views max depth must be a views type"); }
/**
 * Serializes an object to its JSON string form.
 *
 * @param obj the object to serialize; {@code null} yields the empty string
 * @return an Optional that is always present: the JSON string, or the empty
 *         string when input is null or serialization fails (failure is logged)
 */
public static Optional<String> serialize2String(Object obj) {
    // Null input degrades to the empty string rather than an empty Optional.
    if (obj == null) {
        return Optional.of(CommonConst.EMPTY_STR);
    }
    try {
        return Optional.of(MAPPER.writeValueAsString(obj));
    } catch (JsonProcessingException ex) {
        // Serialization failure is logged and degraded to the empty string.
        LOGGER.warning(String.format(Locale.ENGLISH, "Can not serialize class [%s] to string",
                obj.getClass().getName()));
        return Optional.of(CommonConst.EMPTY_STR);
    }
}
@Test
public void test() {
    // A plain Object serializes to some present JSON string.
    final Object target = new Object();
    final Optional<String> serialize2String = SerializeUtils.serialize2String(target);
    Assert.assertTrue(serialize2String.isPresent());
    // Null input degrades to a present empty string, never an empty Optional.
    final Optional<String> serialize2String1 = SerializeUtils.serialize2String(null);
    Assert.assertTrue(serialize2String1.isPresent());
    Assert.assertEquals("", serialize2String1.get());
    // A richer bean also serializes to a present value.
    final Optional<String> serialize2String2 = SerializeUtils.serialize2String(new FaultException(-1, "test", new FaultRule()));
    Assert.assertTrue(serialize2String2.isPresent());
}
/**
 * Returns a deserializer for the LACP terminator TLV.
 * The TLV carries no payload, so after validating the fixed length the
 * deserializer simply returns a fresh instance.
 */
public static Deserializer<LacpTerminatorTlv> deserializer() {
    return (data, offset, length) -> {
        // Validates bounds/length against the fixed LENGTH of this TLV.
        checkInput(data, offset, length, LENGTH);
        return new LacpTerminatorTlv();
    };
}
@Test
public void deserializer() throws Exception {
    // Smoke test: deserializing the fixture must not throw.
    // NOTE(review): no assertion here and the local is unused — consider asserting
    // the result is non-null (e.g. assertNotNull) so the test checks more than "no exception".
    LacpTerminatorTlv lacpTerminatorTlv = LacpTerminatorTlv.deserializer().deserialize(data, 0, data.length);
}
/**
 * Processes the Rest.li annotations on a resource class into its model.
 * Delegates to the two-arg overload with no parent resource.
 */
public static ResourceModel processResource(final Class<?> resourceClass) {
    return processResource(resourceClass, null);
}
// Two @Action methods sharing the name "duplicate" must be rejected at processing time.
@Test(expectedExceptions = ResourceConfigException.class)
public void failsOnDuplicateActionMethod() {
    @RestLiCollection(name = "duplicateActionMethod")
    class LocalClass extends CollectionResourceTemplate<Long, EmptyRecord> {
        @Action(name = "duplicate")
        public EmptyRecord getThis(@ActionParam("id") Long id) {
            return new EmptyRecord();
        }
        @Action(name = "duplicate")
        public EmptyRecord getThat(@ActionParam("id") Long id) {
            return new EmptyRecord();
        }
    }
    RestLiAnnotationReader.processResource(LocalClass.class);
    // Unreachable if the expected exception is thrown.
    Assert.fail("#getActionReturnClass should fail throwing a ResourceConfigException");
}
/**
 * Converts an array of Spark filters into a single Iceberg expression by AND-ing
 * the individual conversions together.
 *
 * @param filters the Spark filters; an empty array yields {@code alwaysTrue()}
 * @return the combined Iceberg expression
 * @throws IllegalArgumentException if any filter has no Iceberg equivalent
 */
public static Expression convert(Filter[] filters) {
    Expression combined = Expressions.alwaysTrue();
    for (Filter current : filters) {
        Expression translated = convert(current);
        // Unsupported filters convert to null; surface them explicitly.
        Preconditions.checkArgument(
            translated != null, "Cannot convert filter to Iceberg: %s", current);
        combined = Expressions.and(combined, translated);
    }
    return combined;
}
@Test
public void testTimestampFilterConversion() {
    // Both java.time.Instant and java.sql.Timestamp literals must convert to the same
    // epoch-microsecond comparison expression.
    Instant instant = Instant.parse("2018-10-18T00:00:57.907Z");
    Timestamp timestamp = Timestamp.from(instant);
    long epochMicros = ChronoUnit.MICROS.between(Instant.EPOCH, instant);
    Expression instantExpression = SparkFilters.convert(GreaterThan.apply("x", instant));
    Expression timestampExpression = SparkFilters.convert(GreaterThan.apply("x", timestamp));
    Expression rawExpression = Expressions.greaterThan("x", epochMicros);
    assertThat(timestampExpression.toString())
        .as("Generated Timestamp expression should be correct")
        .isEqualTo(rawExpression.toString());
    assertThat(instantExpression.toString())
        .as("Generated Instant expression should be correct")
        .isEqualTo(rawExpression.toString());
}
/**
 * Returns the given model, or the process-wide default model when the
 * argument is {@code null}.
 */
public static ApplicationModel ofNullable(ApplicationModel applicationModel) {
    return applicationModel == null ? defaultModel() : applicationModel;
}
@Test
void testOfNullable() {
    // Null input falls back to the default model.
    ApplicationModel applicationModel = ApplicationModel.ofNullable(null);
    Assertions.assertEquals(ApplicationModel.defaultModel(), applicationModel);
    applicationModel.getFrameworkModel().destroy();
    // Non-null input is returned as-is.
    FrameworkModel frameworkModel = new FrameworkModel();
    ApplicationModel applicationModel1 = frameworkModel.newApplication();
    ApplicationModel applicationModel2 = ApplicationModel.ofNullable(applicationModel1);
    Assertions.assertEquals(applicationModel1, applicationModel2);
    frameworkModel.destroy();
}
/**
 * Equality is fully defined by the superclass.
 * NOTE(review): this override adds nothing — presumably kept for documentation or
 * tooling; hashCode is assumed to come from the same superclass (confirm).
 */
@Override
public boolean equals(Object o) {
    return super.equals(o);
}
@Test
void equals() {
    // Two ids built from identical components are equal.
    LocalComponentIdRedirectPmml original = new LocalComponentIdRedirectPmml(redirectModel, fileName, name);
    LocalUriId compare = new LocalComponentIdRedirectPmml(redirectModel, fileName, name);
    assertThat(original.equals(compare)).isTrue();
    // Round-tripping through the string path representation preserves equality
    // against the generic ModelLocalUriId type.
    String path = original.fullPath();
    LocalUri parsed = LocalUri.parse(path);
    compare = new ModelLocalUriId(parsed);
    assertThat(original.equals(compare)).isTrue();
}
/**
 * Builds the CSV output file for a metric: the metric name is sanitized
 * (see {@code sanitize}) and suffixed with {@code .csv} inside the directory.
 */
@Override
public File getFile(File directory, String metricName) {
    return new File(directory, sanitize(metricName) + ".csv");
}
@Test
public void testGetFileSanitize() {
    FixedNameCsvFileProvider provider = new FixedNameCsvFileProvider();
    // Path separators in the metric name must be sanitized (here: '/' -> '.').
    File file = provider.getFile(dataDirectory, "/myfake/uri");
    assertThat(file.getParentFile()).isEqualTo(dataDirectory);
    assertThat(file.getName()).isEqualTo("myfake.uri.csv");
}
/**
 * SQL {@code <=} operator for BIGINT operands.
 */
@ScalarOperator(LESS_THAN_OR_EQUAL)
@SqlType(StandardTypes.BOOLEAN)
public static boolean lessThanOrEqual(@SqlType(StandardTypes.BIGINT) long left, @SqlType(StandardTypes.BIGINT) long right) {
    return left <= right;
}
@Test
public void testLessThanOrEqual() {
    // Covers equal, strictly-less, and strictly-greater operand pairs.
    assertFunction("100000000037 <= 100000000037", BOOLEAN, true);
    assertFunction("100000000037 <= 100000000017", BOOLEAN, false);
    assertFunction("100000000017 <= 100000000037", BOOLEAN, true);
    assertFunction("100000000017 <= 100000000017", BOOLEAN, true);
}
/**
 * Returns the future that completes with the job manager runner's result
 * (success, failure, or initialization failure).
 */
@Override
public CompletableFuture<JobManagerRunnerResult> getResultFuture() {
    return resultFuture;
}
@Test
void testInitializationFailureCompletesResultFuture() {
    final CompletableFuture<JobMasterService> jobMasterServiceFuture = new CompletableFuture<>();
    DefaultJobMasterServiceProcess serviceProcess = createTestInstance(jobMasterServiceFuture);
    // Simulate the JobMaster service failing during initialization.
    final RuntimeException originalCause = new RuntimeException("Init error");
    jobMasterServiceFuture.completeExceptionally(originalCause);
    final JobManagerRunnerResult actualJobManagerResult =
        serviceProcess.getResultFuture().join();
    // The result future must complete normally, flagged as an initialization failure...
    assertThat(actualJobManagerResult.isInitializationFailure()).isTrue();
    // ...wrapping the original cause in a JobInitializationException.
    final Throwable initializationFailure = actualJobManagerResult.getInitializationFailure();
    assertThat(initializationFailure)
        .isInstanceOf(JobInitializationException.class)
        .hasCause(originalCause);
}
/**
 * Returns the current service status as reported by the globally registered
 * status callback.
 */
public static Status getServiceStatus() {
    return getServiceStatus(SERVICE_STATUS_CALLBACK);
}
@Test
public void testMultipleServiceStatusCallback() {
    // Only good should return good
    ServiceStatus.MultipleCallbackServiceStatusCallback onlyGood =
        new ServiceStatus.MultipleCallbackServiceStatusCallback(ImmutableList.of(ALWAYS_GOOD));
    assertEquals(onlyGood.getServiceStatus(), ServiceStatus.Status.GOOD);
    // Only bad should return bad
    ServiceStatus.MultipleCallbackServiceStatusCallback onlyBad =
        new ServiceStatus.MultipleCallbackServiceStatusCallback(ImmutableList.of(ALWAYS_BAD));
    assertEquals(onlyBad.getServiceStatus(), ServiceStatus.Status.BAD);
    // Only starting should return starting
    ServiceStatus.MultipleCallbackServiceStatusCallback onlyStarting =
        new ServiceStatus.MultipleCallbackServiceStatusCallback(ImmutableList.of(ALWAYS_STARTING));
    assertEquals(onlyStarting.getServiceStatus(), ServiceStatus.Status.STARTING);
    // Good + starting = starting: a non-GOOD status anywhere dominates GOOD.
    ServiceStatus.MultipleCallbackServiceStatusCallback goodAndStarting =
        new ServiceStatus.MultipleCallbackServiceStatusCallback(ImmutableList.of(ALWAYS_GOOD, ALWAYS_STARTING));
    assertEquals(goodAndStarting.getServiceStatus(), ServiceStatus.Status.STARTING);
    // Good + starting + bad = starting (check for left-to-right evaluation)
    ServiceStatus.MultipleCallbackServiceStatusCallback goodStartingAndBad =
        new ServiceStatus.MultipleCallbackServiceStatusCallback(
            ImmutableList.of(ALWAYS_GOOD, ALWAYS_STARTING, ALWAYS_BAD));
    assertEquals(goodStartingAndBad.getServiceStatus(), ServiceStatus.Status.STARTING);
}
@SuppressWarnings("unchecked") public static <T> T getProperty(Object bean, String expression) { if (null == bean || StrUtil.isBlank(expression)) { return null; } return (T) BeanPath.create(expression).get(bean); }
@Test
public void getPropertyTest() {
    final SubPerson person = new SubPerson();
    person.setAge(14);
    person.setOpenid("11213232");
    person.setName("测试A11");
    person.setSubName("sub名字");
    // Inherited property resolves through the parent class.
    final Object name = BeanUtil.getProperty(person, "name");
    assertEquals("测试A11", name);
    // Property declared on the subclass itself also resolves.
    final Object subName = BeanUtil.getProperty(person, "subName");
    assertEquals("sub名字", subName);
}
/**
 * Emits the websocket close status: completes with the received/assigned status,
 * or — if the connection terminates first — completes via the termination signal.
 */
@Override
@SuppressWarnings("unchecked")
public Mono<WebSocketCloseStatus> receiveCloseStatus() {
    // Whichever happens first wins: an explicit close status or connection termination.
    return onCloseState.asMono().or((Mono) onTerminate());
}
/**
 * When the server side cancels the websocket inbound, the client must observe an
 * EMPTY close status, the server must record ABNORMAL_CLOSURE locally, no inbound
 * message may be delivered, and all buffers must be released by the cancel handler.
 */
@Test
void testWebSocketServerCancelled() throws InterruptedException {
    try (LogTracker lt = new LogTracker(HttpServerOperations.class, WebsocketServerOperations.INBOUND_CANCEL_LOG)) {
        AtomicReference<WebSocketCloseStatus> clientCloseStatus = new AtomicReference<>();
        AtomicReference<WebSocketCloseStatus> serverCloseStatus = new AtomicReference<>();
        CountDownLatch closeLatch = new CountDownLatch(2);
        CountDownLatch cancelled = new CountDownLatch(1);
        AtomicReference<List<String>> serverMsg = new AtomicReference<>(new ArrayList<>());
        Sinks.Empty<Void> empty = Sinks.empty();
        // Completing `empty` from the cancel handler terminates the zip below, which
        // cancels the server's inbound receive.
        CancelReceiverHandlerTest cancelReceiver = new CancelReceiverHandlerTest(() -> empty.tryEmitEmpty());
        disposableServer =
            createServer()
                .handle((in, out) -> out.sendWebsocket((i, o) -> {
                    i.withConnection(conn -> conn.addHandlerLast(cancelReceiver));
                    i.receiveCloseStatus()
                     .log("server.closestatus")
                     .doOnNext(status -> {
                         serverCloseStatus.set(status);
                         closeLatch.countDown();
                     })
                     .subscribe();
                    Mono<Void> receive =
                        i.receive()
                         .asString()
                         .log("server.receive")
                         .doOnCancel(cancelled::countDown)
                         .doOnNext(s -> serverMsg.get().add(s))
                         .then();
                    return Flux.zip(receive, empty.asMono())
                               .then(Mono.never());
                }))
                .bindNow();
        createClient(disposableServer.port())
            .websocket()
            .uri("/test")
            .handle((in, out) -> {
                in.receiveCloseStatus()
                  .log("client.closestatus")
                  .doOnNext(status -> {
                      clientCloseStatus.set(status);
                      closeLatch.countDown();
                  })
                  .subscribe();
                return out.sendString(Mono.just("PING"))
                          .neverComplete();
            })
            .log("client")
            .subscribe();
        assertThat(closeLatch.await(30, TimeUnit.SECONDS)).isTrue();
        // client received closed without any status code
        assertThat(clientCloseStatus.get()).isNotNull().isEqualTo(WebSocketCloseStatus.EMPTY);
        // server locally closed abnormally
        assertThat(serverCloseStatus.get()).isNotNull().isEqualTo(WebSocketCloseStatus.ABNORMAL_CLOSURE);
        assertThat(lt.latch.await(30, TimeUnit.SECONDS)).isTrue();
        assertThat(cancelled.await(30, TimeUnit.SECONDS)).isTrue();
        // The inbound receive was cancelled before any message was delivered.
        List<String> serverMessages = serverMsg.get();
        assertThat(serverMessages).isNotNull();
        assertThat(serverMessages.size()).isEqualTo(0);
        assertThat(cancelReceiver.awaitAllReleased(30)).as("cancelReceiver").isTrue();
    }
}
/**
 * Asserts the subject collection has exactly {@code expectedSize} elements.
 * A negative expectation is a caller error (IllegalArgumentException), not a test failure.
 */
public final void hasSize(int expectedSize) {
    checkArgument(expectedSize >= 0, "expectedSize(%s) must be >= 0", expectedSize);
    check("size()").that(checkNotNull(actual).size()).isEqualTo(expectedSize);
}
/** An empty multimap must satisfy hasSize(0). */
@Test
public void hasSizeZero() {
    assertThat(ImmutableMultimap.of()).hasSize(0);
}
/** Returns the configuration this hedge instance was created with. */
@Override
public HedgeConfig getHedgeConfig() {
    return hedgeConfig;
}
/** The hedge must expose the config it was constructed with. */
@Test
public void shouldPropagateConfig() {
    then(hedge.getHedgeConfig()).isEqualTo(hedgeConfig);
}
/**
 * Builds a Locale from an underscore-separated code such as "en", "en_US" or
 * "en_US_WIN".
 *
 * @param localeCode the code to parse; empty/null yields null
 * @return the Locale, or null when the code is empty; codes with a token count
 *         other than 2 or 3 are passed verbatim as the language
 */
public static Locale createLocale( String localeCode ) {
    if ( Utils.isEmpty( localeCode ) ) {
        return null;
    }
    final StringTokenizer tokens = new StringTokenizer( localeCode, "_" );
    switch ( tokens.countTokens() ) {
        case 2:
            return new Locale( tokens.nextToken(), tokens.nextToken() );
        case 3:
            return new Locale( tokens.nextToken(), tokens.nextToken(), tokens.nextToken() );
        default:
            // Single token (or unexpected shape): treat the whole code as the language.
            return new Locale( localeCode );
    }
}
/** A language_COUNTRY code must map to the matching JDK Locale constant. */
@Test
public void createLocale_DoubleCode() throws Exception {
    assertEquals( Locale.US, EnvUtil.createLocale( "en_US" ) );
}
/**
 * Aggregate operation that concatenates CharSequence items into a single String.
 * Accumulate and combine both append onto a StringBuilder; export/finish renders it.
 */
public static AggregateOperation1<CharSequence, StringBuilder, String> concatenating() {
    return AggregateOperation
        .withCreate(StringBuilder::new)
        .<CharSequence>andAccumulate(StringBuilder::append)
        .andCombine(StringBuilder::append)
        .andExportFinish(StringBuilder::toString);
}
/** Concatenating without a delimiter: expected partial and final results are plain appends. */
@Test
public void when_concatenating_withoutDelimiter() {
    validateOpWithoutDeduct(
        concatenating(), StringBuilder::toString,
        "A", "B", "A", "AB", "AB"
    );
}
/**
 * Applies per-SPU stock deltas in a single transaction.
 *
 * @param stockIncrCounts map of SPU id to stock increment (may be negative)
 */
@Override
@Transactional(rollbackFor = Exception.class)
public void updateSpuStock(Map<Long, Integer> stockIncrCounts) {
    // One UPDATE per SPU row; the surrounding transaction rolls all back on failure.
    for (Map.Entry<Long, Integer> entry : stockIncrCounts.entrySet()) {
        productSpuMapper.updateStock(entry.getKey(), entry.getValue());
    }
}
/** updateSpuStock must apply a positive delta to SPU 1 and a negative delta to SPU 2. */
@Test
public void testUpdateSpuStock() {
    // Prepare arguments: SPU 1 gains 10, SPU 2 loses 20.
    Map<Long, Integer> stockIncrCounts = MapUtil.builder(1L, 10).put(2L, -20).build();
    // Mock data
    productSpuMapper.insert(randomPojo(ProductSpuDO.class, o ->{
        o.setCategoryId(generateId());
        o.setBrandId(generateId());
        o.setDeliveryTemplateId(generateId());
        o.setSort(RandomUtil.randomInt(1,100)); // constrain sort range
        o.setGiveIntegral(generaInt()); // constrain to positive integers
        o.setVirtualSalesCount(generaInt()); // constrain to positive integers
        o.setPrice(generaInt()); // constrain to positive integers
        o.setMarketPrice(generaInt()); // constrain to positive integers
        o.setCostPrice(generaInt()); // constrain to positive integers
        o.setStock(generaInt()); // constrain to positive integers
        o.setGiveIntegral(generaInt()); // constrain to positive integers
        o.setSalesCount(generaInt()); // constrain to positive integers
        o.setBrowseCount(generaInt()); // constrain to positive integers
        o.setId(1L).setStock(20);
    }));
    productSpuMapper.insert(randomPojo(ProductSpuDO.class, o -> {
        o.setCategoryId(generateId());
        o.setBrandId(generateId());
        o.setDeliveryTemplateId(generateId());
        o.setSort(RandomUtil.randomInt(1,100)); // constrain sort range
        o.setGiveIntegral(generaInt()); // constrain to positive integers
        o.setVirtualSalesCount(generaInt()); // constrain to positive integers
        o.setPrice(generaInt()); // constrain to positive integers
        o.setMarketPrice(generaInt()); // constrain to positive integers
        o.setCostPrice(generaInt()); // constrain to positive integers
        o.setStock(generaInt()); // constrain to positive integers
        o.setGiveIntegral(generaInt()); // constrain to positive integers
        o.setSalesCount(generaInt()); // constrain to positive integers
        o.setBrowseCount(generaInt()); // constrain to positive integers
        o.setId(2L).setStock(30);
    }));
    // Invoke
    productSpuService.updateSpuStock(stockIncrCounts);
    // Assert: 20 + 10 = 30 and 30 - 20 = 10.
    assertEquals(productSpuService.getSpu(1L).getStock(), 30);
    assertEquals(productSpuService.getSpu(2L).getStock(), 10);
}
/**
 * Fetches YARN_CONTAINER timeline entities for the given application.
 *
 * @param appId   the application whose containers are queried
 * @param fields  entity fields to return; null/empty defaults to "INFO"
 * @param filters extra query filters merged into the request
 * @param limit   maximum entities to return; non-positive means no explicit limit
 * @param fromId  pagination cursor; null/empty means start from the beginning
 * @return the entities returned by the timeline service
 * @throws IOException on request failure
 */
@Override
public List<TimelineEntity> getContainerEntities(
    ApplicationId appId, String fields, Map<String, String> filters,
    long limit, String fromId) throws IOException {
  String path = PATH_JOINER.join("clusters", clusterId, "apps", appId,
      "entities", YARN_CONTAINER);
  MultivaluedMap<String, String> params = new MultivaluedMapImpl();
  // Default to the INFO field set when the caller did not ask for specific fields.
  String requestedFields = (fields == null || fields.isEmpty()) ? "INFO" : fields;
  params.add("fields", requestedFields);
  if (limit > 0) {
    params.add("limit", Long.toString(limit));
  }
  if (fromId != null && !fromId.isEmpty()) {
    params.add("fromid", fromId);
  }
  mergeFilters(params, filters);
  ClientResponse response = doGetUri(baseUri, path, params);
  TimelineEntity[] entities = response.getEntity(TimelineEntity[].class);
  return Arrays.asList(entities);
}
/** Container-entity fetch with all-default parameters must return the mocked pair. */
@Test
void testGetContainers() throws Exception {
    ApplicationId appId = ApplicationId.fromString("application_1234_0001");
    // limit=0 means no explicit limit; fields/filters/fromId left defaulted.
    List<TimelineEntity> entities = client.getContainerEntities(appId, null, null, 0, null);
    assertEquals(2, entities.size());
    assertEquals("mockContainer2", entities.get(1).getId());
}
public static void setAll(MemoryBuffer bitmapBuffer, int baseOffset, int valueCount) { final int sizeInBytes = (valueCount + 7) / 8; // If value count is not a multiple of 8, then calculate number of used bits in the last byte final int remainder = valueCount % 8; final int sizeInBytesMinus1 = sizeInBytes - 1; int bytesMinus1EndOffset = baseOffset + sizeInBytesMinus1; for (int i = baseOffset; i < bytesMinus1EndOffset; i++) { bitmapBuffer.putByte(i, (byte) 0xff); } // handling with the last byte // since unsafe putLong use native byte order, maybe not big endian, // see java.nio.DirectByteBuffer.putLong(long, long), we can't use unsafe.putLong // for bit operations, native byte order may be subject to change between machine if (remainder != 0) { // Every byte is set form right to left byte byteValue = (byte) (0xff >>> (8 - remainder)); bitmapBuffer.putByte(baseOffset + sizeInBytesMinus1, byteValue); } }
/** setAll over 10 bits must leave zero null bits and the byte pattern ff03 in an 8-byte buffer. */
@Test
public void testSetAll() {
    int valueCount = 10;
    MemoryBuffer buffer = MemoryUtils.buffer(8);
    BitUtils.setAll(buffer, 0, valueCount);
    assertEquals(BitUtils.getNullCount(buffer, 0, valueCount), 0);
    // 10 bits => first byte 0xff, second byte 0x03 (two low bits), rest untouched zeros.
    assertEquals("ff03000000000000", StringUtils.encodeHexString(buffer.getRemainingBytes()));
}
/**
 * Registers a docstring type, rejecting duplicates for the same
 * (content type, Java type) pair.
 *
 * Improvement: {@code computeIfAbsent} already associates the inner map with the
 * content type, so the original's trailing {@code docStringTypes.put(...)} re-put
 * the same map redundantly and has been removed.
 *
 * @param docStringType the type to register
 * @throws the duplicate-type exception when a registration already exists
 */
public void defineDocStringType(DocStringType docStringType) {
    DocStringType existing = lookupByContentTypeAndType(docStringType.getContentType(), docStringType.getType());
    if (existing != null) {
        throw createDuplicateTypeException(existing, docStringType);
    }
    docStringTypes
        .computeIfAbsent(docStringType.getContentType(), contentType -> new HashMap<>())
        .put(docStringType.getType(), docStringType);
}
/** Registering two docstring types with the same content type AND Java type must fail loudly. */
@Test
void doc_string_types_of_same_content_type_must_have_unique_return_type() {
    registry.defineDocStringType(new DocStringType(
        JsonNode.class,
        "application/json",
        (String s) -> null));
    DocStringType duplicate = new DocStringType(
        JsonNode.class,
        "application/json",
        (String s) -> null);
    CucumberDocStringException exception = assertThrows(
        CucumberDocStringException.class,
        () -> registry.defineDocStringType(duplicate));
    // The message names both the existing and the attempted registration.
    assertThat(exception.getMessage(), is("" +
        "There is already docstring type registered for 'application/json' and com.fasterxml.jackson.databind.JsonNode.\n" +
        "You are trying to add 'application/json' and com.fasterxml.jackson.databind.JsonNode"));
}
/**
 * Reads lines from the terminal, silently consuming CLI-specific commands,
 * and returns the first line that is not one.
 */
public String nextNonCliCommand() {
    while (true) {
        final String line = terminal.readLine();
        // CLI-specific commands are handled in place and never surfaced to the caller.
        if (!maybeHandleCliSpecificCommands(line)) {
            return line;
        }
    }
}
/** A CLI command padded with whitespace is still swallowed; the next real line is returned. */
@Test
public void shouldSwallowCliCommandLinesEvenWithWhiteSpace() {
    // Given:
    when(lineSupplier.get())
        .thenReturn("  \t  " + CLI_CMD_NAME + "  \t  ")
        .thenReturn("not a CLI command;");
    // When:
    final String result = console.nextNonCliCommand();
    // Then:
    assertThat(result, is("not a CLI command;"));
}
/**
 * Stores a revision data entry after validating the key.
 *
 * @throws InvalidSCMRevisionDataException when the key contains characters
 *         other than alphanumerics and underscores
 */
public void addData(String key, String value) throws InvalidSCMRevisionDataException {
    validateDataKey(key);
    data.put(key, value);
}
/** Keys containing characters outside [A-Za-z0-9_] must be rejected with an explanatory message. */
@Test
public void shouldThrowExceptionIfDataKeyContainsCharactersOtherThanAlphaNumericAndUnderScoreCharacters() throws Exception {
    SCMRevision scmRevision = new SCMRevision("rev123", new Date(), "loser", null, null, null);
    try {
        scmRevision.addData("HEL-LO-WORLD", "value");
        fail("should have thrown exception");
    } catch (InvalidSCMRevisionDataException e) {
        assertThat(e.getMessage(), is("Key 'HEL-LO-WORLD' is invalid. Key names should consists of only alphanumeric characters and/or underscores."));
    }
}
public static DiskValidator getInstance(Class<? extends DiskValidator> clazz) { DiskValidator diskValidator; if (INSTANCES.containsKey(clazz)) { diskValidator = INSTANCES.get(clazz); } else { diskValidator = ReflectionUtils.newInstance(clazz, null); // check the return of putIfAbsent() to see if any other thread have put // the instance with the same key into INSTANCES DiskValidator diskValidatorRet = INSTANCES.putIfAbsent(clazz, diskValidator); if (diskValidatorRet != null) { diskValidator = diskValidatorRet; } } return diskValidator; }
/** "basic" must resolve to a BasicDiskValidator and the instance must be cached. */
@Test
public void testGetInstance() throws DiskErrorException {
    DiskValidator diskValidator = DiskValidatorFactory.getInstance("basic");
    assertNotNull("Fail to get the instance.", diskValidator);
    assertEquals("Fail to create the correct instance.", diskValidator.getClass(),
        BasicDiskValidator.class);
    assertNotNull("Fail to cache the object", DiskValidatorFactory.INSTANCES.
        get(BasicDiskValidator.class));
}
// Records a watermark value observed on the given input queue and returns the
// resulting coalesced watermark.
// NOTE(review): exact return semantics (e.g. Long.MIN_VALUE meaning "no new WM")
// are defined by the implementing class — confirm against the concrete coalescer.
public abstract long observeWm(int queueIndex, long wmValue);
/** Two consecutive idle messages on the same queue must both be accepted without a new WM. */
@Test
public void when_duplicateIdleMessage_then_processed() {
    // Duplicate idle messages are possible in this scenario:
    // A source instance emits IDLE_MESSAGE, then an event (not causing a WM) and then another
    // IDLE_MESSAGE again. The IDLE_MESSAGE is broadcast, but the event is not. So a downstream
    // instance can receive two IDLE_MESSAGE-s in a row.
    assertEquals(Long.MIN_VALUE, wc.observeWm(0, IDLE_MESSAGE.timestamp()));
    assertEquals(Long.MIN_VALUE, wc.observeWm(0, IDLE_MESSAGE.timestamp()));
}
/**
 * Associates {@code value} with {@code key}, returning the previous value or null.
 * A null key is held in a dedicated {@code nullEntry} slot outside the hash table.
 */
@Override
public V put(@Nullable final K key, final V value) {
    if (key == null) {
        if (nullEntry == null) {
            _size += 1;
            nullEntry = new Entry<>(null, value);
            return null;
        }
        return nullEntry.setValue(value);
    }
    final Entry<K, V>[] table = this.table;
    final int hash = key.hashCode();
    final int index = HashUtil.indexFor(hash, table.length, mask);
    // Walk the collision chain; replace in place when the key already exists.
    for (Entry<K, V> e = table[index]; e != null; e = e.hashNext) {
        final K entryKey;
        if ((entryKey = e.key) == key || entryKey.equals(key)) {
            return e.setValue(value);
        }
    }
    // No existing mapping: prepend a new entry to the bucket's chain.
    final Entry<K, V> e = new Entry<>(key, value);
    e.hashNext = table[index];
    table[index] = e;
    _size += 1;
    // Grow the table once the entry count exceeds the current capacity.
    if (_size > capacity) {
        rehash(HashUtil.nextCapacity(capacity));
    }
    return null;
}
/** forEachKey/forEachValue must visit every entry and honor a false-returning (early-exit) procedure. */
@Test
public void forEachProcedure() {
    final HashMap<Integer, String> tested = new HashMap<>();
    for (int i = 0; i < 100000; ++i) {
        tested.put(i, Integer.toString(i));
    }
    tested.put(null, "null");
    final int[] ii = {0};
    // Full traversal over keys then values counts each entry twice.
    tested.forEachKey(object -> {
        ii[0]++;
        return true;
    });
    tested.forEachValue(object -> {
        ii[0]++;
        return true;
    });
    Assert.assertEquals(tested.size() * 2, ii[0]);
    ii[0] = 0;
    // Early exit: per the assertion below, the key traversal contributes 502 visits
    // before the predicate stops it; the value traversal runs in full.
    tested.forEachKey(object -> {
        ii[0]++;
        return (object == null) || (object < 500);
    });
    tested.forEachValue(object -> {
        ii[0]++;
        return true;
    });
    Assert.assertEquals(tested.size() + 502, ii[0]);
}
/**
 * Returns a channel over the cached page's file region, or empty when the page is
 * not in the metastore or its backing file is missing (in which case the stale
 * metastore entry is removed best-effort).
 *
 * @throws PageNotFoundException when the cache is in the NOT_IN_USE state
 */
@Override
public Optional<DataFileChannel> getDataFileChannel(
    PageId pageId, int pageOffset, int bytesToRead, CacheContext cacheContext)
    throws PageNotFoundException {
  Preconditions.checkArgument(pageOffset <= mOptions.getPageSize(),
      "Read exceeds page boundary: offset=%s size=%s", pageOffset, mOptions.getPageSize());
  LOG.debug("get({},pageOffset={}) enters", pageId, pageOffset);
  if (mState.get() == NOT_IN_USE) {
    Metrics.GET_NOT_READY_ERRORS.inc();
    Metrics.GET_ERRORS.inc();
    throw new PageNotFoundException(String.format("Page %s could not be found", pageId));
  }
  ReadWriteLock pageLock = getPageLock(pageId);
  long startTime = System.nanoTime();
  try (LockResource r = new LockResource(pageLock.readLock())) {
    PageInfo pageInfo;
    // Metastore read lock nested inside the page read lock.
    try (LockResource r2 = new LockResource(mPageMetaStore.getLock().readLock())) {
      pageInfo = mPageMetaStore.getPageInfo(pageId); //check if page exists and refresh LRU items
    } catch (PageNotFoundException e) {
      LOG.debug("getDataChannel({},pageOffset={}) fails due to page not found in metastore",
          pageId, pageOffset);
      return Optional.empty();
    }
    try {
      DataFileChannel dataFileChannel = pageInfo.getLocalCacheDir().getPageStore()
          .getDataFileChannel(pageInfo.getPageId(), pageOffset, bytesToRead,
              cacheContext.isTemporary());
      // Record the cache hit and bytes read in both metric systems.
      MultiDimensionalMetricsSystem.CACHED_DATA_READ.inc(bytesToRead);
      MetricsSystem.counter(MetricKey.CLIENT_CACHE_HIT_REQUESTS.getName()).inc();
      MetricsSystem.meter(MetricKey.CLIENT_CACHE_BYTES_READ_CACHE.getName()).mark(bytesToRead);
      cacheContext.incrementCounter(MetricKey.CLIENT_CACHE_BYTES_READ_CACHE.getMetricName(),
          BYTE, bytesToRead);
      LOG.debug("getDataChannel({},pageOffset={}) exits", pageId, pageOffset);
      return Optional.of(dataFileChannel);
    } catch (PageNotFoundException e) {
      LOG.debug("getDataChannel({},pageOffset={}) fails due to page file not found",
          pageId, pageOffset);
      Metrics.GET_ERRORS.inc();
      Metrics.GET_STORE_READ_ERRORS.inc();
      // something is wrong to read this page, let's remove it from meta store
      try (LockResource r2 = new LockResource(mPageMetaStore.getLock().writeLock())) {
        mPageMetaStore.removePage(pageId);
        return Optional.empty();
      } catch (PageNotFoundException ex) {
        // best effort to remove this page from meta store and ignore the exception
        Metrics.CLEANUP_GET_ERRORS.inc();
        return Optional.empty();
      }
    }
  } finally {
    // Always account the elapsed read time, success or failure.
    cacheContext.incrementCounter(
        MetricKey.CLIENT_CACHE_PAGE_READ_CACHE_TIME_NS.getMetricName(), NANO,
        System.nanoTime() - startTime);
  }
}
/** A cached page must be readable back byte-for-byte through its DataFileChannel. */
@Test
public void getDataFileChannel() throws Exception {
    mCacheManager = createLocalCacheManager();
    mCacheManager.put(PAGE_ID1, PAGE1);
    CacheContext cacheContext = CacheContext.defaults();
    Optional<DataFileChannel> dataFileChannel =
        mCacheManager.getDataFileChannel(PAGE_ID1, 0, PAGE1.length, cacheContext);
    assertNotNull(dataFileChannel);
    assertEquals(dataFileChannel.isPresent(), true);
    assertEquals(dataFileChannel.get().getNettyOutput() instanceof DefaultFileRegion, true);
    // Transfer the file region into a Netty buffer and compare it to the original page.
    DefaultFileRegion defaultFileRegion = (DefaultFileRegion) dataFileChannel.get().getNettyOutput();
    ByteBuf buf = Unpooled.buffer(PAGE1.length);
    NettyBufTargetBuffer targetBuffer = new NettyBufTargetBuffer(buf);
    long bytesTransferred = defaultFileRegion.transferTo(targetBuffer.byteChannel(), 0);
    assertEquals(bytesTransferred, PAGE1.length);
    byte[] bytes = new byte[PAGE1.length];
    buf.readBytes(bytes);
    assertArrayEquals(PAGE1, bytes);
}
public Map<String, Object> getKsqlStreamConfigProps(final String applicationId) { final Map<String, Object> map = new HashMap<>(getKsqlStreamConfigProps()); map.put( MetricCollectors.RESOURCE_LABEL_PREFIX + StreamsConfig.APPLICATION_ID_CONFIG, applicationId ); // Streams client metrics aren't used in Confluent deployment possiblyConfigureConfluentTelemetry(map); return Collections.unmodifiableMap(map); }
/** A ksql.streams.producer.-prefixed property must surface only under the producer. prefix. */
@Test
public void shouldSetStreamsConfigKsqlProducerPrefixedProperties() {
    final KsqlConfig ksqlConfig = new KsqlConfig(
        Collections.singletonMap(
            KsqlConfig.KSQL_STREAMS_PREFIX + StreamsConfig.PRODUCER_PREFIX
                + ProducerConfig.BUFFER_MEMORY_CONFIG, "1024"));
    // Present (and parsed to a long) under the producer prefix...
    assertThat(ksqlConfig.getKsqlStreamConfigProps()
        .get(StreamsConfig.PRODUCER_PREFIX + ProducerConfig.BUFFER_MEMORY_CONFIG),
        equalTo(1024L));
    // ...but absent under the bare key and the original ksql-prefixed key.
    assertThat(ksqlConfig.getKsqlStreamConfigProps()
        .get(ProducerConfig.BUFFER_MEMORY_CONFIG), is(nullValue()));
    assertThat(ksqlConfig.getKsqlStreamConfigProps()
        .get(KsqlConfig.KSQL_STREAMS_PREFIX + StreamsConfig.PRODUCER_PREFIX
            + ProducerConfig.BUFFER_MEMORY_CONFIG), is(nullValue()));
}
/**
 * Runs a fresh depth-first search over the graph and returns the first cycle
 * found, or an empty (mutable) list when the graph is acyclic.
 */
public List<T> findCycle() {
    resetState();
    for (T start : graph.getVertices()) {
        // Skip vertices already visited by an earlier DFS root.
        if (colors.get(start) != WHITE) {
            continue;
        }
        if (!visitDepthFirst(start, new ArrayList<>(List.of(start)))) {
            continue;
        }
        // visitDepthFirst reported a cycle: it must have populated `cycle`.
        if (cycle == null) {
            throw new IllegalStateException("Null cycle - this should never happen");
        }
        if (cycle.isEmpty()) {
            throw new IllegalStateException("Empty cycle - this should never happen");
        }
        log.log(FINE, () -> "Cycle detected: " + cycle);
        return cycle;
    }
    return new ArrayList<>();
}
/** findCycle must be repeatable: a second call on the same acyclic graph is still empty. */
@Test
void findCycle_is_idempotent_without_cycle() {
    var graph = new Graph<Vertices>();
    graph.edge(A, B);
    var cycleFinder = new CycleFinder<>(graph);
    assertTrue(cycleFinder.findCycle().isEmpty());
    assertTrue(cycleFinder.findCycle().isEmpty());
}
/**
 * Acquires a fake lock over the given region after validating the arguments.
 * The call participates in the interruptible-channel protocol: if end() detects
 * closure by interrupt, it is reported as FileLockInterruptionException.
 */
@Override
public FileLock lock(long position, long size, boolean shared) throws IOException {
    checkLockArguments(position, size, shared);
    // lock is interruptible
    boolean completed = false;
    try {
        begin();
        completed = true;
        return new FakeFileLock(this, position, size, shared);
    } finally {
        try {
            end(completed);
        } catch (ClosedByInterruptException e) {
            // Per FileChannel.lock contract, interruption surfaces as this exception.
            throw new FileLockInterruptionException();
        }
    }
}
/** All lock/tryLock variants on the fake channel succeed, and release() invalidates a lock. */
@Test
public void testLock() throws IOException {
    FileChannel channel = channel(regularFile(10), READ, WRITE);
    assertNotNull(channel.lock());
    assertNotNull(channel.lock(0, 10, false));
    assertNotNull(channel.lock(0, 10, true));
    assertNotNull(channel.tryLock());
    assertNotNull(channel.tryLock(0, 10, false));
    assertNotNull(channel.tryLock(0, 10, true));
    FileLock lock = channel.lock();
    assertTrue(lock.isValid());
    lock.release();
    assertFalse(lock.isValid());
}
/**
 * Validates the login request's captcha when the captcha feature is enabled.
 * Records a login-failure log and raises a business exception on verification failure.
 */
@VisibleForTesting
void validateCaptcha(AuthLoginReqVO reqVO) {
    // If the captcha feature is disabled, skip validation entirely.
    if (!captchaEnable) {
        return;
    }
    // Bean-validate the captcha fields (CodeEnableGroup).
    ValidationUtils.validate(validator, reqVO, AuthLoginReqVO.CodeEnableGroup.class);
    CaptchaVO captchaVO = new CaptchaVO();
    captchaVO.setCaptchaVerification(reqVO.getCaptchaVerification());
    ResponseModel response = captchaService.verification(captchaVO);
    // Verification failed
    if (!response.isSuccess()) {
        // Record a login-failure log (incorrect captcha) before raising.
        createLoginLog(null, reqVO.getUsername(), LoginLogTypeEnum.LOGIN_USERNAME, LoginResultEnum.CAPTCHA_CODE_ERROR);
        throw exception(AUTH_LOGIN_CAPTCHA_CODE_ERROR, response.getRepMsg());
    }
}
/** A null captcha verification must fail bean validation when captcha is enabled. */
@Test
public void testValidateCaptcha_constraintViolationException() {
    // Prepare arguments
    AuthLoginReqVO reqVO = randomPojo(AuthLoginReqVO.class).setCaptchaVerification(null);
    // Mock the captcha feature as enabled
    ReflectUtil.setFieldValue(authService, "captchaEnable", true);
    // Invoke and assert the expected exception
    assertThrows(ConstraintViolationException.class, () -> authService.validateCaptcha(reqVO),
        "验证码不能为空");
}
@Override
public String getMethod() {
    // NOTE(review): returns the PATH constant as the API method name; presumably
    // PATH holds the Bot API endpoint string — confirm the constant's value matches
    // the method-name expectation (see the corresponding test).
    return PATH;
}
/** A default-built request must report the expected API method name and pass validate(). */
@Test
public void testGetMyDefaultAdministratorRightsWithAllSetForGroups() {
    GetMyDefaultAdministratorRights getMyDefaultAdministratorRights = GetMyDefaultAdministratorRights
        .builder()
        .build();
    assertEquals("getMyDefaultAdministratorRights", getMyDefaultAdministratorRights.getMethod());
    assertDoesNotThrow(getMyDefaultAdministratorRights::validate);
}
static boolean isRetryable(Configuration conf, Exception ex) { if(ex instanceof SQLException) { SQLException sqlException = (SQLException)ex; if (MANUAL_RETRY.equalsIgnoreCase(sqlException.getSQLState())) { // Manual retry exception was thrown return true; } if ("08S01".equalsIgnoreCase(sqlException.getSQLState())) { //in MSSQL this means Communication Link Failure return true; } if ("ORA-08176".equalsIgnoreCase(sqlException.getSQLState()) || sqlException.getMessage().contains("consistent read failure; rollback data not available")) { return true; } String regex = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.TXN_RETRYABLE_SQLEX_REGEX); if (regex != null && !regex.isEmpty()) { String[] patterns = regex.split(",(?=\\S)"); String message = getMessage(ex); for (String p : patterns) { if (Pattern.matches(p, message)) { return true; } } } //see also https://issues.apache.org/jira/browse/HIVE-9938 } return false; }
/** Configured retryable regexes — including patterns containing commas — must match messages. */
@Test
public void testRetryableRegex() {
    HiveConf conf = new HiveConf();
    SQLException sqlException = new SQLException("ORA-08177: can't serialize access for this transaction", "72000");
    // Note that we have 3 regex'es below
    conf.setVar(HiveConf.ConfVars.HIVE_TXN_RETRYABLE_SQLEX_REGEX, "^Deadlock detected, roll back,.*08177.*,.*08178.*");
    boolean result = SqlRetryHandler.isRetryable(conf, sqlException);
    Assert.assertTrue("regex should be retryable", result);
    // A message containing a comma must not be broken by the pattern splitting.
    sqlException = new SQLException("This error message, has comma in it");
    conf.setVar(HiveConf.ConfVars.HIVE_TXN_RETRYABLE_SQLEX_REGEX, ".*comma.*");
    result = SqlRetryHandler.isRetryable(conf, sqlException);
    Assert.assertTrue("regex should be retryable", result);
}
/**
 * Anonymous pre-key lookup, authorized either by a group-send token or an
 * unidentified-access key; any other authorization case is INVALID_ARGUMENT.
 */
@Override
public Mono<GetPreKeysResponse> getPreKeys(final GetPreKeysAnonymousRequest request) {
  final ServiceIdentifier serviceIdentifier =
      ServiceIdentifierUtil.fromGrpcServiceIdentifier(request.getRequest().getTargetIdentifier());
  final byte deviceId = DeviceIdUtil.validate(request.getRequest().getDeviceId());
  return switch (request.getAuthorizationCase()) {
    // Group-send token: verify the token covers the target; missing accounts are NOT_FOUND.
    case GROUP_SEND_TOKEN ->
        groupSendTokenUtil.checkGroupSendToken(request.getGroupSendToken(), List.of(serviceIdentifier))
            .then(lookUpAccount(serviceIdentifier, Status.NOT_FOUND))
            .flatMap(targetAccount ->
                KeysGrpcHelper.getPreKeys(targetAccount, serviceIdentifier.identityType(), deviceId, keysManager));
    // UAK: both a missing account and a bad key surface as UNAUTHENTICATED,
    // avoiding an account-existence oracle.
    case UNIDENTIFIED_ACCESS_KEY ->
        lookUpAccount(serviceIdentifier, Status.UNAUTHENTICATED)
            .flatMap(targetAccount ->
                UnidentifiedAccessUtil.checkUnidentifiedAccess(targetAccount, request.getUnidentifiedAccessKey().toByteArray())
                    ? KeysGrpcHelper.getPreKeys(targetAccount, serviceIdentifier.identityType(), deviceId, keysManager)
                    : Mono.error(Status.UNAUTHENTICATED.asException()));
    default -> Mono.error(Status.INVALID_ARGUMENT.asException());
  };
}
/**
 * A valid, unexpired group-send token must authorize an anonymous pre-key fetch,
 * and the response must carry the identity key plus all three pre-key kinds.
 */
@Test
void getPreKeysGroupSendEndorsement() throws Exception {
    final Account targetAccount = mock(Account.class);
    final Device targetDevice = DevicesHelper.createDevice(Device.PRIMARY_ID);
    when(targetAccount.getDevice(Device.PRIMARY_ID)).thenReturn(Optional.of(targetDevice));
    final ECKeyPair identityKeyPair = Curve.generateKeyPair();
    final IdentityKey identityKey = new IdentityKey(identityKeyPair.getPublicKey());
    final UUID uuid = UUID.randomUUID();
    final AciServiceIdentifier identifier = new AciServiceIdentifier(uuid);
    final byte[] unidentifiedAccessKey = TestRandomUtil.nextBytes(UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH);
    when(targetAccount.getUnidentifiedAccessKey()).thenReturn(Optional.of(unidentifiedAccessKey));
    when(targetAccount.getIdentifier(IdentityType.ACI)).thenReturn(uuid);
    when(targetAccount.getIdentityKey(IdentityType.ACI)).thenReturn(identityKey);
    when(accountsManager.getByServiceIdentifierAsync(identifier))
        .thenReturn(CompletableFuture.completedFuture(Optional.of(targetAccount)));
    // One of each pre-key kind for the primary device.
    final ECPreKey ecPreKey = new ECPreKey(1, Curve.generateKeyPair().getPublicKey());
    final ECSignedPreKey ecSignedPreKey = KeysHelper.signedECPreKey(2, identityKeyPair);
    final KEMSignedPreKey kemSignedPreKey = KeysHelper.signedKEMPreKey(3, identityKeyPair);
    when(keysManager.takeEC(uuid, Device.PRIMARY_ID))
        .thenReturn(CompletableFuture.completedFuture(Optional.of(ecPreKey)));
    when(keysManager.takePQ(uuid, Device.PRIMARY_ID))
        .thenReturn(CompletableFuture.completedFuture(Optional.of(kemSignedPreKey)));
    when(keysManager.getEcSignedPreKey(uuid, Device.PRIMARY_ID))
        .thenReturn(CompletableFuture.completedFuture(Optional.of(ecSignedPreKey)));
    // Expirations must be on day boundaries or libsignal will refuse to create or verify the token
    final Instant expiration = Instant.now().truncatedTo(ChronoUnit.DAYS);
    CLOCK.pin(expiration.minus(Duration.ofHours(1))); // set time so the credential isn't expired yet
    final byte[] token = AuthHelper.validGroupSendToken(SERVER_SECRET_PARAMS, List.of(identifier), expiration);
    final GetPreKeysResponse response = unauthenticatedServiceStub().getPreKeys(GetPreKeysAnonymousRequest.newBuilder()
        .setGroupSendToken(ByteString.copyFrom(token))
        .setRequest(GetPreKeysRequest.newBuilder()
            .setTargetIdentifier(ServiceIdentifierUtil.toGrpcServiceIdentifier(identifier))
            .setDeviceId(Device.PRIMARY_ID))
        .build());
    final GetPreKeysResponse expectedResponse = GetPreKeysResponse.newBuilder()
        .setIdentityKey(ByteString.copyFrom(identityKey.serialize()))
        .putPreKeys(Device.PRIMARY_ID, GetPreKeysResponse.PreKeyBundle.newBuilder()
            .setEcOneTimePreKey(toGrpcEcPreKey(ecPreKey))
            .setEcSignedPreKey(toGrpcEcSignedPreKey(ecSignedPreKey))
            .setKemOneTimePreKey(toGrpcKemSignedPreKey(kemSignedPreKey))
            .build())
        .build();
    assertEquals(expectedResponse, response);
}
/** Sets the policy used to authorize and customize broker connections. */
public void setAuthenticationPolicy(AuthenticationPolicy authenticationPolicy) {
    this.authenticationPolicy = authenticationPolicy;
}
/** The factory must hand back exactly the policy instance that was set. */
@Test
public void testSetAuthenticationPolicy() {
    AuthenticationPolicy policy = new AuthenticationPolicy() {
        @Override
        public void customizeSubject(Subject.Builder subjectBuilder, ConnectionReference ref) {
            //To change body of implemented methods use File | Settings | File Templates.
        }
        @Override
        public boolean isAuthenticationRequired(SubjectConnectionReference ref) {
            return false; //To change body of implemented methods use File | Settings | File Templates.
        }
    };
    factory.setAuthenticationPolicy(policy);
    assertSame(policy, factory.getAuthenticationPolicy());
}
/**
 * Null-safe emptiness check.
 *
 * @param collection the collection to test; may be null
 * @return true when the collection is null or contains no elements
 */
public static boolean isEmpty(Collection<?> collection) {
    if (collection == null) {
        return true;
    }
    return collection.isEmpty();
}
/** null, a fresh HashSet, and the shared empty list must all count as empty. */
@Test
void testIsEmpty() {
    assertThat(isEmpty(null), is(true));
    assertThat(isEmpty(new HashSet()), is(true));
    assertThat(isEmpty(emptyList()), is(true));
}
/** Delegates to the single-argument overload; the resource context is deliberately unused. */
@Override
public double cost(Link link, ResourceContext context) {
    // explicitly call a method not depending on LinkResourceService
    return cost(link);
}
/**
 * VALID cost type: per the assertions, link1/link2 cost 1.0 and link3 costs -1.0.
 * NOTE(review): presumably link1/link2 satisfy the tier constraint and link3 does
 * not — confirm against the fixture's link definitions.
 */
@Test
public void testValidCost() {
    TierConstraint constraint = new TierConstraint(true, TierConstraint.CostType.VALID, 2, 1);
    assertThat(constraint.cost(link1, resourceContext), is(1.0));
    assertThat(constraint.cost(link2, resourceContext), is(1.0));
    assertThat(constraint.cost(link3, resourceContext), is(-1.0));
}
/**
 * Optional matcher: delegates to the wrapped matcher but always reports success,
 * regardless of whether the delegate matched.
 */
@Override
public boolean matchToken(TokenQueue tokenQueue, List<Token> matchedTokenList) {
    matcher.matchToken(tokenQueue, matchedTokenList);
    return true;
}
/** OptTokenMatcher always returns true and delegates exactly once, touching nothing else. */
@Test
public void shouldMatch() {
    TokenQueue tokenQueue = spy(new TokenQueue());
    TokenMatcher delegate = mock(TokenMatcher.class);
    OptTokenMatcher matcher = new OptTokenMatcher(delegate);
    List<Token> output = mock(List.class);
    assertThat(matcher.matchToken(tokenQueue, output), is(true));
    // The delegate receives the call; queue and output list remain untouched by the wrapper.
    verify(delegate).matchToken(tokenQueue, output);
    verifyNoMoreInteractions(delegate);
    verifyNoMoreInteractions(tokenQueue);
    verifyNoMoreInteractions(output);
}
/**
 * Sums getDfsUsed() over all volumes, holding a reference to each volume while
 * reading it; volumes whose channel is already closed are silently skipped.
 */
long getDfsUsed() throws IOException {
    long dfsUsed = 0L;
    for (FsVolumeImpl v : volumes) {
        try(FsVolumeReference ref = v.obtainReference()) {
            dfsUsed += v.getDfsUsed();
        } catch (ClosedChannelException e) {
            // ignore: volume was removed/closed concurrently.
        }
    }
    return dfsUsed;
}
/** Non-DFS usage reported by a volume must be the actual non-DFS usage minus the DU reservation. */
@Test
public void testNonDfsUsedMetricForVolume() throws Exception {
    File volDir = new File(baseDir, "volume-0");
    volDir.mkdirs();
    /*
     * Lets have the example.
     * Capacity - 1000
     * Reserved - 100G
     * DfsUsed - 200
     * Actual Non-DfsUsed - 300 -->(expected)
     * ReservedForReplicas - 50
     */
    long diskCapacity = 1000L;
    long duReserved = 100L;
    long dfsUsage = 200L;
    long actualNonDfsUsage = 300L;
    long reservedForReplicas = 50L;
    conf.setLong(DFSConfigKeys.DFS_DATANODE_DU_RESERVED_KEY, duReserved);
    FsVolumeImpl volume = new FsVolumeImplBuilder().setDataset(dataset)
        .setStorageDirectory(
            new StorageDirectory(
                StorageLocation.parse(volDir.getPath())))
        .setStorageID("storage-id")
        .setConf(conf)
        .build();
    FsVolumeImpl spyVolume = Mockito.spy(volume);
    // Set Capacity for testing
    long testCapacity = diskCapacity - duReserved;
    spyVolume.setCapacityForTesting(testCapacity);
    // Mock volume.getDfAvailable()
    long dfAvailable = diskCapacity - dfsUsage - actualNonDfsUsage;
    Mockito.doReturn(dfAvailable).when(spyVolume).getDfAvailable();
    // Mock dfsUsage
    Mockito.doReturn(dfsUsage).when(spyVolume).getDfsUsed();
    // Mock reservedForReplcas
    Mockito.doReturn(reservedForReplicas).when(spyVolume)
        .getReservedForReplicas();
    Mockito.doReturn(actualNonDfsUsage).when(spyVolume)
        .getActualNonDfsUsed();
    // 300 actual non-DFS minus the 100 reserved => 200 reported.
    long expectedNonDfsUsage = actualNonDfsUsage - duReserved;
    assertEquals(expectedNonDfsUsage, spyVolume.getNonDfsUsed());
}
/**
 * Maps a worker's recorded drain-op state to a LongRunningProcessStatus:
 * DrainCompleted -> SUCCESS, DrainInProgress -> RUNNING, DrainNotInProgress -> NOT_RUN,
 * unknown worker -> ERROR with a message, null workerId -> default (empty) status.
 */
public LongRunningProcessStatus getDrainStatus(String workerId) {
    long startTime = System.nanoTime();
    LongRunningProcessStatus status = Optional.ofNullable(workerId).map(id ->
        Optional.ofNullable(drainOpStatusMap.get(id)).map(opStatus ->
            switch (opStatus) {
                case DrainCompleted -> LongRunningProcessStatus.forStatus(LongRunningProcessStatus.Status.SUCCESS);
                case DrainInProgress -> LongRunningProcessStatus.forStatus(LongRunningProcessStatus.Status.RUNNING);
                case DrainNotInProgress -> LongRunningProcessStatus.forStatus(LongRunningProcessStatus.Status.NOT_RUN);
            }).orElse(
                LongRunningProcessStatus.forError("Worker " + id + " not found in drain records")
            )
    ).orElse(
        new LongRunningProcessStatus()
    );
    // Log lookup latency alongside the outcome for operability.
    log.info("Get drain status for worker {} - execution time: {} sec; returning status={}, error={}",
        workerId, NANOSECONDS.toSeconds (System.nanoTime() - startTime), status.status, status.lastError);
    return status;
}
/** Drain-status lookups must map each recorded op state to the matching process status. */
@Test
public void testGetDrainStatus() throws Exception {
    // Clear the drain status map in the SchedulerManager; all other parameters are don't care for a clear.
    callGetDrainStatus(null, DrainOps.ClearDrainMap, SchedulerManager.DrainOpStatus.DrainCompleted);
    // Set up drain status for some fake workers.
    callGetDrainStatus("worker-1", DrainOps.SetDrainStatus, SchedulerManager.DrainOpStatus.DrainCompleted);
    callGetDrainStatus("worker-2", DrainOps.SetDrainStatus, SchedulerManager.DrainOpStatus.DrainInProgress);
    callGetDrainStatus("worker-3", DrainOps.SetDrainStatus, SchedulerManager.DrainOpStatus.DrainNotInProgress);
    // Get status; the status passed in a a don't care.
    LongRunningProcessStatus dStatus;
    // Unknown worker -> ERROR with a "not found" message.
    dStatus = callGetDrainStatus("worker-0", DrainOps.GetDrainStatus, SchedulerManager.DrainOpStatus.DrainNotInProgress);
    Assert.assertTrue(dStatus.status == LongRunningProcessStatus.Status.ERROR);
    Assert.assertTrue(dStatus.lastError.matches("(.)+(not found)(.)+"));
    dStatus = callGetDrainStatus("worker-1", DrainOps.GetDrainStatus, SchedulerManager.DrainOpStatus.DrainNotInProgress);
    Assert.assertTrue(dStatus.status == LongRunningProcessStatus.Status.SUCCESS);
    Assert.assertTrue(dStatus.lastError.isEmpty());
    dStatus = callGetDrainStatus("worker-2", DrainOps.GetDrainStatus, SchedulerManager.DrainOpStatus.DrainNotInProgress);
    Assert.assertTrue(dStatus.status == LongRunningProcessStatus.Status.RUNNING);
    Assert.assertTrue(dStatus.lastError.isEmpty());
    dStatus = callGetDrainStatus("worker-3", DrainOps.GetDrainStatus, SchedulerManager.DrainOpStatus.DrainNotInProgress);
    Assert.assertTrue(dStatus.status == LongRunningProcessStatus.Status.NOT_RUN);
    Assert.assertTrue(dStatus.lastError.isEmpty());
}
/**
 * Cancels any in-flight execution when this operator is closed; interruption is
 * requested so a blocked task can bail out promptly.
 */
@Override
public void close() {
    // Snapshot the field into a local before the null-check / cancel pair.
    var pendingExecution = _executionFuture;
    if (pendingExecution != null) {
        pendingExecution.cancel(true);
    }
}
// Verifies that the leaf-stage operator drains all data blocks produced by multiple
// server requests before emitting the end-of-stream block.
@Test
public void shouldHandleMultipleRequests() {
    // Given: a simple two-column selection query and its matching schema.
    QueryContext queryContext = QueryContextConverterUtils.getQueryContext("SELECT strCol, intCol FROM tbl");
    DataSchema schema = new DataSchema(new String[]{"strCol", "intCol"},
        new DataSchema.ColumnDataType[]{DataSchema.ColumnDataType.STRING, DataSchema.ColumnDataType.INT});
    // Two data blocks with two rows each; the executor is mocked to serve them for
    // every request, followed by a metadata (completion) block.
    List<BaseResultsBlock> dataBlocks = Arrays.asList(
        new SelectionResultsBlock(schema, Arrays.asList(new Object[]{"foo", 1}, new Object[]{"", 2}), queryContext),
        new SelectionResultsBlock(schema, Arrays.asList(new Object[]{"bar", 3}, new Object[]{"foo", 4}), queryContext));
    InstanceResponseBlock metadataBlock = new InstanceResponseBlock(new MetadataResultsBlock());
    QueryExecutor queryExecutor = mockQueryExecutor(dataBlocks, metadataBlock);
    // Two requests x two data blocks each = four data blocks total before EOS.
    LeafStageTransferableBlockOperator operator =
        new LeafStageTransferableBlockOperator(OperatorTestUtil.getTracingContext(), mockQueryRequests(2), schema,
            queryExecutor, _executorService);
    _operatorRef.set(operator);

    // Then: the 5th block should be EOS
    Assert.assertTrue(operator.nextBlock().isDataBlock());
    Assert.assertTrue(operator.nextBlock().isDataBlock());
    Assert.assertTrue(operator.nextBlock().isDataBlock());
    Assert.assertTrue(operator.nextBlock().isDataBlock());
    Assert.assertTrue(operator.nextBlock().isEndOfStreamBlock(), "Expected EOS after reading 5 blocks");
    operator.close();
}
/**
 * Browses the page given by the {@code url} input and answers the optional {@code query}.
 *
 * @param agent the agent on whose behalf the page is fetched (used for logging only here)
 * @param input expects keys "url" (required) and "query" (optional)
 * @return the browsing result, or an error result when the url is missing/empty or browsing fails
 */
@Override
public ActionResult apply(Agent agent, Map<String, String> input) {
    // Look the url up once instead of reading the map twice.
    final String url = input.get("url");
    log.debug("Fetching url {} for agent {}", url, agent.getId());
    if (url == null || url.isEmpty()) {
        return errorResult("An error occurred while attempting to browse a site",
                "The url parameter is missing or has an empty value.");
    }
    try {
        return browsePage(url, input.get("query"));
    } catch (Exception e) {
        log.warn("Browsing error for {}", url, e);
        // Fixed typo "occured" -> "occurred" in the user-facing summary.
        return errorResult(
                String.format("An error occurred while attempting to browse %s", url),
                "Browsing failed, you should try another site.");
    }
}
// A request without the "url" key must fail fast with the validation error,
// never reaching the browser.
@Test
void testApplyWithMissingUrl() {
    Map<String, String> params = new HashMap<>();
    params.put("query", "What is this page about?");

    ActionResult actionResult = playwrightBrowserAction.apply(agent, params);

    assertEquals(ActionResult.Status.FAILURE, actionResult.getStatus());
    assertEquals("An error occurred while attempting to browse a site", actionResult.getSummary());
    assertEquals("The url parameter is missing or has an empty value.", actionResult.getError());
}
@Override @DSTransactional // 多数据源,使用 @DSTransactional 保证本地事务,以及数据源的切换 public void updateTenantPackage(TenantPackageSaveReqVO updateReqVO) { // 校验存在 TenantPackageDO tenantPackage = validateTenantPackageExists(updateReqVO.getId()); // 更新 TenantPackageDO updateObj = BeanUtils.toBean(updateReqVO, TenantPackageDO.class); tenantPackageMapper.updateById(updateObj); // 如果菜单发生变化,则修改每个租户的菜单 if (!CollUtil.isEqualList(tenantPackage.getMenuIds(), updateReqVO.getMenuIds())) { List<TenantDO> tenants = tenantService.getTenantListByPackageId(tenantPackage.getId()); tenants.forEach(tenant -> tenantService.updateTenantRoleMenu(tenant.getId(), updateReqVO.getMenuIds())); } }
@Test public void testUpdateTenantPackage_notExists() { // 准备参数 TenantPackageSaveReqVO reqVO = randomPojo(TenantPackageSaveReqVO.class); // 调用, 并断言异常 assertServiceException(() -> tenantPackageService.updateTenantPackage(reqVO), TENANT_PACKAGE_NOT_EXISTS); }
/**
 * Launches the OS process described by {@code command} and wraps it in the
 * appropriate {@code ManagedProcess} for monitoring.
 *
 * <p>For Elasticsearch, old data is cleaned and config files are written before
 * launch, and the returned wrapper talks to ES over its HTTP endpoint; all other
 * processes are monitored through shared IPC process commands.
 *
 * @throws IllegalStateException if the command type is unsupported or monitor setup fails
 */
public ManagedProcess launch(AbstractCommand command) {
    EsInstallation esInstallation = command.getEsInstallation();
    if (esInstallation != null) {
        // ES needs its data directory cleaned and its config rewritten before each start.
        cleanupOutdatedEsData(esInstallation);
        writeConfFiles(esInstallation);
    }
    Process process;
    if (command instanceof JavaCommand<?> javaCommand) {
        process = launchJava(javaCommand);
    } else {
        throw new IllegalStateException("Unexpected type of command: " + command.getClass());
    }

    ProcessId processId = command.getProcessId();
    try {
        if (processId == ProcessId.ELASTICSEARCH) {
            checkArgument(esInstallation != null, "Incorrect configuration EsInstallation is null");
            // The ES wrapper polls the node over HTTP(S) using the bootstrap credentials
            // and optional keystore configured in the installation.
            EsConnectorImpl esConnector = new EsConnectorImpl(
                singleton(HostAndPort.fromParts(esInstallation.getHost(), esInstallation.getHttpPort())),
                esInstallation.getBootstrapPassword(),
                esInstallation.getHttpKeyStoreLocation(),
                esInstallation.getHttpKeyStorePassword().orElse(null));
            return new EsManagedProcess(process, processId, esConnector);
        } else {
            // Non-ES processes communicate through per-process IPC command files,
            // reset here so stale state from a previous run cannot leak in.
            ProcessCommands commands = allProcessesCommands.createAfterClean(processId.getIpcIndex());
            return new ProcessCommandsManagedProcess(process, processId, commands);
        }
    } catch (Exception e) {
        // just in case — don't leave an orphaned OS process behind if monitor setup failed.
        if (process != null) {
            process.destroyForcibly();
        }
        throw new IllegalStateException(format("Fail to launch monitor of process [%s]", processId.getHumanReadableName()), e);
    }
}
// Verifies that launching an Elasticsearch command with cluster security enabled
// (keystore + truststore configured) copies both stores into the ES conf directory.
@Test
public void enabling_es_security_should_execute_keystore_cli_if_truststore_and_keystore_provided() throws Exception {
    File tempDir = temp.newFolder();
    File truststoreFile = temp.newFile("truststore.pk12");
    File keystoreFile = temp.newFile("keystore.pk12");
    TestProcessBuilder processBuilder = new TestProcessBuilder();
    ProcessLauncher underTest = new ProcessLauncherImpl(tempDir, commands, () -> processBuilder);
    // ES installation with cluster mode on and both SSL stores pointing at the temp files above.
    EsInstallation esInstallation = createEsInstallation(new Props(new Properties())
        .set("sonar.cluster.enabled", "true")
        .set("sonar.cluster.search.password", "bootstrap-password")
        .set("sonar.cluster.es.ssl.keystore", keystoreFile.getAbsolutePath())
        .set("sonar.cluster.es.ssl.keystorePassword", "keystore-password")
        .set("sonar.cluster.es.ssl.truststore", truststoreFile.getAbsolutePath())
        .set("sonar.cluster.es.ssl.truststorePassword", "truststore-password"));
    // Minimal but complete ES JavaCommand; the concrete values are don't-cares for this test.
    JavaCommand<JvmOptions> command = new JavaCommand<>(ProcessId.ELASTICSEARCH, temp.newFolder());
    command.addClasspath("lib/*.class");
    command.addClasspath("lib/*.jar");
    command.setArgument("foo", "bar");
    command.setClassName("org.sonarqube.Main");
    command.setEnvVariable("VAR1", "valueOfVar1");
    command.setJvmOptions(new JvmOptions<>()
        .add("-Dfoo=bar")
        .add("-Dfoo2=bar2"));
    command.setEsInstallation(esInstallation);

    ManagedProcess monitor = underTest.launch(command);

    assertThat(monitor).isNotNull();
    // Both stores must have been copied into the ES conf directory during launch.
    assertThat(Paths.get(esInstallation.getConfDirectory().getAbsolutePath(), "truststore.pk12")).exists();
    assertThat(Paths.get(esInstallation.getConfDirectory().getAbsolutePath(), "keystore.pk12")).exists();
}