focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Authorization entry point: maps the request to a resource/action pair, runs the
// access check, and either passes the request through (empty result) or produces a
// 403/401 error response. Accepted/rejected metrics are emitted on every path.
@Override public Optional<ErrorResponse> filter(DiscFilterRequest request) {
    try {
        Optional<ResourceNameAndAction> resourceMapping = requestResourceMapper.getResourceNameAndAction(request);
        log.log(Level.FINE, () -> String.format("Resource mapping for '%s': %s", request, resourceMapping));
        if (resourceMapping.isEmpty()) {
            // No resource mapping -> authorization not required; accept without a check result.
            incrementAcceptedMetrics(request, false, Optional.empty());
            return Optional.empty();
        }
        Result result = checkAccessAllowed(request, resourceMapping.get());
        AuthorizationResult.Type resultType = result.zpeResult.type();
        // Expose the raw authorization result type to downstream consumers via a request attribute.
        setAttribute(request, RESULT_ATTRIBUTE, resultType.name());
        if (resultType == AuthorizationResult.Type.ALLOW) {
            populateRequestWithResult(request, result);
            incrementAcceptedMetrics(request, true, Optional.of(result));
            return Optional.empty();
        }
        log.log(Level.FINE, () -> String.format("Forbidden (403) for '%s': %s", request, resultType.name()));
        incrementRejectedMetrics(request, FORBIDDEN, resultType.name(), Optional.of(result));
        return Optional.of(new ErrorResponse(FORBIDDEN, "Access forbidden: " + resultType.getDescription()));
    } catch (IllegalArgumentException e) {
        // NOTE(review): IllegalArgumentException is mapped to 401 — presumably raised while
        // extracting/validating credentials; confirm against the resource mapper and checker.
        log.log(Level.FINE, () -> String.format("Unauthorized (401) for '%s': %s", request, e.getMessage()));
        incrementRejectedMetrics(request, UNAUTHORIZED, "Unauthorized", Optional.empty());
        return Optional.of(new ErrorResponse(UNAUTHORIZED, e.getMessage()));
    }
}
// An allowed request through the filter must be reported as accepted, with the
// "authz-required" dimension set to true.
@Test
void reports_metrics_for_accepted_requests() {
    MetricMock metricMock = new MetricMock();
    MockResponseHandler handler = new MockResponseHandler();
    AthenzAuthorizationFilter filter =
            createFilter(new AllowingZpe(), List.of(EnabledCredentials.ACCESS_TOKEN), metricMock, null);
    DiscFilterRequest acceptedRequest = createRequest(null, ACCESS_TOKEN, USER_IDENTITY_CERTIFICATE);
    filter.filter(acceptedRequest, handler);
    assertMetrics(metricMock, ACCEPTED_METRIC_NAME, Map.of("authz-required", "true"));
}
// Convenience overload: delegates to the named variant with an empty name so a
// single implementation carries the aggregation logic.
@Override public <VR> KTable<K, VR> aggregate(final Initializer<VR> initializer, final Aggregator<? super K, ? super V, VR> aggregator, final Materialized<K, VR, KeyValueStore<Bytes, byte[]>> materialized) {
    return aggregate(initializer, aggregator, NamedInternal.empty(), materialized);
}
// aggregate() must reject an invalid store name supplied via Materialized with a TopologyException.
@Test public void shouldNotHaveInvalidStoreNameOnAggregate() {
    assertThrows(TopologyException.class, () -> groupedStream.aggregate(
            MockInitializer.STRING_INIT,
            MockAggregator.TOSTRING_ADDER,
            Materialized.as(INVALID_STORE_NAME)));
}
/**
 * Builds a {@link DateTimeFormatter} from a Teradata-style format string
 * (e.g. "yyyy/mm/dd hh24:mi:ss"). Unrecognized tokens cause INVALID_FUNCTION_ARGUMENT.
 * Time fields absent from the format default to 0 so a bare date still parses as a datetime.
 */
public static DateTimeFormatter createDateTimeFormatter(String format, Mode mode) {
    DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
    boolean formatContainsHourOfAMPM = false;
    for (Token token : tokenize(format)) {
        switch (token.getType()) {
            case DateFormat.TEXT:
                builder.appendLiteral(token.getText());
                break;
            case DateFormat.DD:
                builder.appendValue(DAY_OF_MONTH, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                break;
            case DateFormat.HH24:
                builder.appendValue(HOUR_OF_DAY, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                break;
            case DateFormat.HH:
                // 12-hour field; remember it so AM/PM can be defaulted below.
                builder.appendValue(HOUR_OF_AMPM, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                formatContainsHourOfAMPM = true;
                break;
            case DateFormat.MI:
                builder.appendValue(MINUTE_OF_HOUR, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                break;
            case DateFormat.MM:
                builder.appendValue(MONTH_OF_YEAR, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                break;
            case DateFormat.SS:
                builder.appendValue(SECOND_OF_MINUTE, mode.getMinTwoPositionFieldWidth(), 2, NOT_NEGATIVE);
                break;
            case DateFormat.YY:
                // Two-digit years are interpreted relative to base 2000 (i.e. 2000-2099).
                builder.appendValueReduced(YEAR, 2, 2, 2000);
                break;
            case DateFormat.YYYY:
                builder.appendValue(YEAR, 4);
                break;
            case DateFormat.UNRECOGNIZED:
            default:
                throw new PrestoException(
                        StandardErrorCode.INVALID_FUNCTION_ARGUMENT,
                        String.format("Failed to tokenize string [%s] at offset [%d]", token.getText(), token.getCharPositionInLine()));
        }
    }
    try {
        // Append default values(0) for time fields(HH24, HH, MI, SS) because JSR-310 does not accept bare Date value as DateTime
        if (formatContainsHourOfAMPM) {
            // At the moment format does not allow to include AM/PM token, thus it was never possible to specify PM hours using 'HH' token in format
            // Keep existing behaviour by defaulting to 0(AM) for AMPM_OF_DAY if format string contains 'HH'
            builder.parseDefaulting(HOUR_OF_AMPM, 0)
                    .parseDefaulting(AMPM_OF_DAY, 0);
        } else {
            builder.parseDefaulting(HOUR_OF_DAY, 0);
        }
        return builder.parseDefaulting(MINUTE_OF_HOUR, 0)
                .parseDefaulting(SECOND_OF_MINUTE, 0)
                .toFormatter();
    } catch (UnsupportedOperationException e) {
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e);
    }
}
// Parsing a bare date with a yyyy/mm/dd pattern must yield midnight of that day.
@Test
public void testCreateDateTimeParser() {
    DateTimeFormatter dateOnlyFormatter = DateFormatParser.createDateTimeFormatter("yyyy/mm/dd", PARSER);
    LocalDateTime parsed = LocalDateTime.parse("1988/04/08", dateOnlyFormatter);
    assertEquals(parsed, LocalDateTime.of(1988, 4, 8, 0, 0));
}
// Changes the invisible duration of an in-flight message. Validation or decode failures
// complete the locally created future exceptionally; on the happy path the future returned
// by messagingProcessor.changeInvisibleTime(...) is returned instead (the local one is unused).
public CompletableFuture<ChangeInvisibleDurationResponse> changeInvisibleDuration(ProxyContext ctx, ChangeInvisibleDurationRequest request) {
    CompletableFuture<ChangeInvisibleDurationResponse> future = new CompletableFuture<>();
    try {
        validateTopicAndConsumerGroup(request.getTopic(), request.getGroup());
        validateInvisibleTime(Durations.toMillis(request.getInvisibleDuration()));
        ReceiptHandle receiptHandle = ReceiptHandle.decode(request.getReceiptHandle());
        String group = request.getGroup().getName();
        // Remove the cached handle for this message; if one existed, prefer its
        // (possibly renewed) receipt handle over the one sent by the client.
        MessageReceiptHandle messageReceiptHandle = messagingProcessor.removeReceiptHandle(ctx, grpcChannelManager.getChannel(ctx.getClientID()), group, request.getMessageId(), receiptHandle.getReceiptHandle());
        if (messageReceiptHandle != null) {
            receiptHandle = ReceiptHandle.decode(messageReceiptHandle.getReceiptHandleStr());
        }
        return this.messagingProcessor.changeInvisibleTime(
            ctx,
            receiptHandle,
            request.getMessageId(),
            group,
            request.getTopic().getName(),
            Durations.toMillis(request.getInvisibleDuration())
        ).thenApply(ackResult -> convertToChangeInvisibleDurationResponse(ctx, request, ackResult));
    } catch (Throwable t) {
        future.completeExceptionally(t);
    }
    return future;
}
// A NO_EXIST ack from the processor must surface as INTERNAL_SERVER_ERROR, and the
// requested 3s duration must be forwarded to changeInvisibleTime() in milliseconds.
@Test
public void testChangeInvisibleDurationActivityFailed() throws Throwable {
    ArgumentCaptor<Long> invisibleTimeArgumentCaptor = ArgumentCaptor.forClass(Long.class);
    AckResult ackResult = new AckResult();
    ackResult.setStatus(AckStatus.NO_EXIST);
    when(this.messagingProcessor.changeInvisibleTime(
        any(), any(), anyString(), anyString(), anyString(), invisibleTimeArgumentCaptor.capture()
    )).thenReturn(CompletableFuture.completedFuture(ackResult));
    ChangeInvisibleDurationResponse response = this.changeInvisibleDurationActivity.changeInvisibleDuration(
        createContext(),
        ChangeInvisibleDurationRequest.newBuilder()
            .setInvisibleDuration(Durations.fromSeconds(3))
            .setTopic(Resource.newBuilder().setName(TOPIC).build())
            .setGroup(Resource.newBuilder().setName(CONSUMER_GROUP).build())
            .setMessageId("msgId")
            .setReceiptHandle(buildReceiptHandle(TOPIC, System.currentTimeMillis(), 3000))
            .build()
    ).get();
    assertEquals(Code.INTERNAL_SERVER_ERROR, response.getStatus().getCode());
    assertEquals(TimeUnit.SECONDS.toMillis(3), invisibleTimeArgumentCaptor.getValue().longValue());
}
// Aggregate operation that sums the long values extracted from items by getLongValueFn.
// Supports combine (add) and deduct (subtract), so it can be used in sliding windows.
@Nonnull
public static <T> AggregateOperation1<T, LongAccumulator, Long> summingLong(
        @Nonnull ToLongFunctionEx<? super T> getLongValueFn
) {
    // Fail fast if the extractor cannot be serialized for distribution.
    checkSerializable(getLongValueFn, "getLongValueFn");
    return AggregateOperation
            .withCreate(LongAccumulator::new)
            .andAccumulate((LongAccumulator a, T item) -> a.add(getLongValueFn.applyAsLong(item)))
            .andCombine(LongAccumulator::add)
            .andDeduct(LongAccumulator::subtract)
            .andExportFinish(LongAccumulator::get);
}
// summingLong over values 1 and 2 must accumulate to 3 (expected partials 1, 3, 3).
@Test
public void when_summingLong() {
    validateOp(summingLong(Long::longValue), LongAccumulator::get, 1L, 2L, 1L, 3L, 3L);
}
// Delegates TRACE-level logging straight to the wrapped logger.
@Override public void trace(String msg) {
    logger.trace(msg);
}
// A marker + two-argument trace call must be forwarded verbatim to the underlying SLF4J logger.
@Test
void testMarkerTraceWithFormat2() {
    jobRunrDashboardLogger.trace(marker, "trace with {} {}", "format1", "format2");
    verify(slfLogger).trace(marker, "trace with {} {}", "format1", "format2");
}
/**
 * Splits text at the first line feed into a trimmed (left, right) pair.
 * A null input yields a pair of empty strings; without a line feed the whole
 * trimmed text becomes the left element and the right element is empty.
 */
public static StringUtils.Pair splitByFirstLineFeed(String text) {
    if (text == null) {
        return StringUtils.pair("", "");
    }
    int lineFeedPos = text.indexOf('\n');
    if (lineFeedPos == -1) {
        return StringUtils.pair(text.trim(), "");
    }
    String head = text.substring(0, lineFeedPos).trim();
    String tail = text.substring(lineFeedPos).trim();
    return StringUtils.pair(head, tail);
}
// Covers the three splitByFirstLineFeed cases: null input, no line feed, one split point.
@Test
void testSplitByFirstLineFeed() {
    assertEquals(new Pair("", ""), StringUtils.splitByFirstLineFeed(null));
    assertEquals(new Pair("foo", ""), StringUtils.splitByFirstLineFeed("foo"));
    assertEquals(new Pair("foo", "bar"), StringUtils.splitByFirstLineFeed("foo\nbar"));
}
// Convenience overload: delegates with Long.MAX_VALUE as the second argument
// (presumably meaning "no limit" — confirm against the two-arg overload).
public static ConfigurableResource parseResourceConfigValue(String value)
        throws AllocationConfigurationException {
    return parseResourceConfigValue(value, Long.MAX_VALUE);
}
// A duplicated memory percentage ("40% 80%") must be rejected in both old-style
// and new-style resource definitions.
@Test
public void testParseNewStyleDuplicateMemoryDefinitionPercentage() throws Exception {
    expectInvalidResourcePercentageNewStyle("40% 80%");
    parseResourceConfigValue("vcores = 75%, memory-mb = 40% 80%");
}
// Writes one dex encoded value in smali text form, dispatching on its value type.
// Numeric integral types are rendered as hex; float/double/boolean as their decimal
// string; composite/reference types delegate to the dedicated write helpers.
public void writeEncodedValue(EncodedValue encodedValue) throws IOException {
    switch (encodedValue.getValueType()) {
        case ValueType.BOOLEAN:
            writer.write(Boolean.toString(((BooleanEncodedValue) encodedValue).getValue()));
            break;
        case ValueType.BYTE:
            writer.write(String.format("0x%x", ((ByteEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.CHAR:
            // Chars are written as their hex code point, not as a character literal.
            writer.write(String.format("0x%x", (int)((CharEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.SHORT:
            writer.write(String.format("0x%x", ((ShortEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.INT:
            writer.write(String.format("0x%x", ((IntEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.LONG:
            writer.write(String.format("0x%x", ((LongEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.FLOAT:
            writer.write(Float.toString(((FloatEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.DOUBLE:
            writer.write(Double.toString(((DoubleEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.ANNOTATION:
            writeAnnotation((AnnotationEncodedValue)encodedValue);
            break;
        case ValueType.ARRAY:
            writeArray((ArrayEncodedValue)encodedValue);
            break;
        case ValueType.STRING:
            writeQuotedString(((StringEncodedValue)encodedValue).getValue());
            break;
        case ValueType.FIELD:
            writeFieldDescriptor(((FieldEncodedValue)encodedValue).getValue());
            break;
        case ValueType.ENUM:
            // Enum constants are written via their field descriptor.
            writeFieldDescriptor(((EnumEncodedValue)encodedValue).getValue());
            break;
        case ValueType.METHOD:
            writeMethodDescriptor(((MethodEncodedValue)encodedValue).getValue());
            break;
        case ValueType.TYPE:
            writeType(((TypeEncodedValue)encodedValue).getValue());
            break;
        case ValueType.METHOD_TYPE:
            writeMethodProtoDescriptor(((MethodTypeEncodedValue)encodedValue).getValue());
            break;
        case ValueType.METHOD_HANDLE:
            writeMethodHandle(((MethodHandleEncodedValue)encodedValue).getValue());
            break;
        case ValueType.NULL:
            writer.write("null");
            break;
        default:
            throw new IllegalArgumentException("Unknown encoded value type");
    }
}
// A NULL encoded value must be rendered as the literal "null".
@Test
public void testWriteEncodedValue_null() throws IOException {
    DexFormattedWriter formattedWriter = new DexFormattedWriter(output);
    formattedWriter.writeEncodedValue(ImmutableNullEncodedValue.INSTANCE);
    Assert.assertEquals("null", output.toString());
}
// Swaps in a fresh DeferrableCoordinator and asynchronously closes the old one.
// Calls arriving after the swap are buffered by the new coordinator and replayed
// once the old coordinator has finished closing and the new internal coordinator exists.
@Override
public void resetToCheckpoint(final long checkpointId, @Nullable final byte[] checkpointData) {
    // First bump up the coordinator epoch to fence out the active coordinator.
    LOG.info("Resetting coordinator to checkpoint.");
    // Replace the coordinator variable with a new DeferrableCoordinator instance.
    // At this point the internal coordinator of the new coordinator has not been created.
    // After this point all the subsequent calls will be made to the new coordinator.
    final DeferrableCoordinator oldCoordinator = coordinator;
    final DeferrableCoordinator newCoordinator = new DeferrableCoordinator(context.getOperatorId());
    coordinator = newCoordinator;
    // Close the old coordinator asynchronously in a separate closing thread.
    // The future will be completed when the old coordinator closes.
    CompletableFuture<Void> closingFuture = oldCoordinator.closeAsync(closingTimeoutMs);
    // Create and possibly start the coordinator and apply all meanwhile deferred calls
    // capture the status whether the coordinator was started when this method was called
    final boolean wasStarted = this.started;
    closingFuture.whenComplete(
            (ignored, e) -> {
                if (e != null) {
                    // Close failure (e.g. timeout) is logged; the reset still proceeds below.
                    LOG.warn(
                            String.format(
                                    "Received exception when closing "
                                            + "operator coordinator for %s.",
                                    oldCoordinator.operatorId),
                            e);
                }
                if (!closed) {
                    // The previous coordinator has closed. Create a new one.
                    newCoordinator.createNewInternalCoordinator(context, provider);
                    newCoordinator.resetAndStart(checkpointId, checkpointData, wasStarted);
                    newCoordinator.processPendingCalls();
                }
            });
}
// With a 1ms closing timeout and a user coordinator blocked on close, resetToCheckpoint()
// must eventually fail the job rather than hang.
@Test
void testResetToCheckpointTimeout() throws Exception {
    final long closingTimeoutMs = 1L;
    // Let the user coordinator block on close.
    TestingCoordinatorProvider provider = new TestingCoordinatorProvider(new CountDownLatch(1));
    MockOperatorCoordinatorContext context = new MockOperatorCoordinatorContext(OPERATOR_ID, NUM_SUBTASKS);
    RecreateOnResetOperatorCoordinator coordinator =
            (RecreateOnResetOperatorCoordinator) provider.create(context, closingTimeoutMs);
    coordinator.resetToCheckpoint(2L, new byte[0]);
    CommonTestUtils.waitUtil(
            context::isJobFailed,
            Duration.ofSeconds(5),
            "The job should fail due to resetToCheckpoint() timeout.");
}
// Loads a logback configuration from the given URL, choosing the new or old
// doConfigure API depending on the logback version on the classpath.
// Caching is disabled so a changed remote config is always re-read.
public void configure(URL url) throws Exception {
    InputStream in = null;
    try {
        URLConnection urlConnection = url.openConnection();
        urlConnection.setUseCaches(false);
        in = urlConnection.getInputStream();
        if (hasNewDoConfigureApi()) {
            doConfigure(in, url.toExternalForm());
        } else {
            // adapter old version of logback below 1.1.10
            doConfigure(in);
        }
    } catch (IOException ioe) {
        String errMsg = "Could not open URL [" + url + "].";
        addError(errMsg, ioe);
        throw new JoranException(errMsg, ioe);
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (IOException ioe) {
                // NOTE(review): throwing from finally can mask an earlier failure;
                // appears intentional — the close error is reported via addError and
                // surfaced as its own JoranException.
                String errMsg = "Could not close input stream";
                addError(errMsg, ioe);
                throw new JoranException(errMsg, ioe);
            }
        }
    }
}
// When closing the config stream fails, configure() must record an ErrorStatus
// with the "Could not close input stream" message and throw JoranException.
@Test
void testConfigureWithError() throws Exception {
    doThrow(new IOException("test")).when(inputStream).close();
    try {
        nacosLogbackConfiguratorAdapter.configure(url);
    } catch (JoranException e) {
        List<Status> statusList = context.getStatusManager().getCopyOfStatusList();
        assertFalse(statusList.isEmpty());
        assertTrue(statusList.get(statusList.size() - 1) instanceof ErrorStatus);
        assertEquals("Could not close input stream", statusList.get(statusList.size() - 1).getMessage());
    }
}
// Inflates a keyboard page for the pager: wires up theme, popup-shown handling and
// the action listener, then builds (or reuses) the popup keyboard for this position.
// Position 0 (history) is always rebuilt since its contents change between shows.
@NonNull
@Override
public Object instantiateItem(@NonNull ViewGroup container, int position) {
    View root = mLayoutInflater.inflate(R.layout.quick_text_popup_autorowkeyboard_view, container, false);
    ScrollViewWithDisable scrollViewWithDisable = root.findViewById(R.id.scroll_root_for_quick_test_keyboard);
    // Extend the existing bottom padding so content clears the bottom inset.
    scrollViewWithDisable.setPadding(
            scrollViewWithDisable.getPaddingLeft(),
            scrollViewWithDisable.getPaddingTop(),
            scrollViewWithDisable.getPaddingRight(),
            scrollViewWithDisable.getPaddingBottom() + mBottomPadding);
    container.addView(root);
    final QuickKeysKeyboardView keyboardView = root.findViewById(R.id.keys_container);
    keyboardView.setKeyboardTheme(mKeyboardTheme);
    keyboardView.setOnPopupShownListener(
            new PopupKeyboardShownHandler(mViewPager, scrollViewWithDisable));
    keyboardView.setOnKeyboardActionListener(mKeyboardActionListener);
    QuickTextKey addOn = mAddOns[position];
    AnyPopupKeyboard keyboard = mPopupKeyboards[position];
    if (keyboard == null || position == 0 /*ALWAYS re-create history, in case it has changed*/) {
        if (addOn.isPopupKeyboardUsed()) {
            keyboard = new AnyPopupKeyboard(
                    addOn,
                    mContext,
                    addOn.getPopupKeyboardResId(),
                    keyboardView.getThemedKeyboardDimens(),
                    addOn.getName(),
                    mDefaultSkinTonePrefTracker.getDefaultSkinTone(),
                    mDefaultGenderPrefTracker.getDefaultGender());
        } else {
            keyboard = new PopupListKeyboard(
                    mDefaultLocalAddOn,
                    mContext,
                    keyboardView.getThemedKeyboardDimens(),
                    addOn.getPopupListNames(),
                    addOn.getPopupListValues(),
                    addOn.getName());
        }
        mPopupKeyboards[position] = keyboard;
        final int keyboardViewMaxWidth = keyboardView.getThemedKeyboardDimens().getKeyboardMaxWidth();
        mIsAutoFitKeyboards[position] =
                keyboard.getMinWidth() > keyboardViewMaxWidth || addOn instanceof HistoryQuickTextKey;
        if (mIsAutoFitKeyboards[position]) {
            // fixing up the keyboard, so it will fit nicely in the width
            int currentY = 0;
            int xSub = 0;
            for (Keyboard.Key key : keyboard.getKeys()) {
                key.y = currentY;
                key.x -= xSub;
                // Wrap this key onto a new row when it would overflow the view width.
                if (Keyboard.Key.getEndX(key) > keyboardViewMaxWidth) {
                    currentY += key.height;
                    xSub += key.x;
                    key.y = currentY;
                    key.x = 0;
                }
            }
            keyboard.resetDimensions();
        }
    }
    keyboardView.setKeyboard(keyboard);
    return root;
}
// The popup-shown listener must disable the pager while a popup keyboard is showing
// and re-enable it when the popup is dismissed — and touch the pager only then.
@Test
@Config(shadows = ShadowAnyKeyboardViewWithMiniKeyboard.class)
public void testPopupListenerDisable() throws Exception {
    ViewGroup container = new LinearLayout(getApplicationContext());
    Object instance0 = mUnderTest.instantiateItem(container, 0);
    final QuickKeysKeyboardView keyboardView0 = ((View) instance0).findViewById(R.id.keys_container);
    ShadowAnyKeyboardViewWithMiniKeyboard shadow = Shadow.extract(keyboardView0);
    Assert.assertNotNull(shadow.mPopupShownListener);
    Mockito.verify(mViewPager, Mockito.never()).setEnabled(Mockito.anyBoolean());
    shadow.mPopupShownListener.onPopupKeyboardShowingChanged(true);
    Mockito.verify(mViewPager).setEnabled(false);
    Mockito.verifyNoMoreInteractions(mViewPager);
    Mockito.reset(mViewPager);
    shadow.mPopupShownListener.onPopupKeyboardShowingChanged(false);
    Mockito.verify(mViewPager).setEnabled(true);
    Mockito.verifyNoMoreInteractions(mViewPager);
}
// Stops the server runner and then this appender. Previously super.stop() was only
// reached when runner.stop() succeeded, so an IOException left the appender
// permanently marked as started; super.stop() now always runs via finally.
@Override
public void stop() {
    if (!isStarted()) return;
    try {
        runner.stop();
    } catch (IOException ex) {
        // Best-effort shutdown: report the failure but still mark the appender stopped.
        addError("server shutdown error: " + ex, ex);
    } finally {
        super.stop();
    }
}
// Stopping an appender that was never started must be a no-op for the runner.
@Test
public void testStopWhenNotStarted() throws Exception {
    appender.stop();
    final int runnerStarts = runner.getStartCount();
    Assertions.assertEquals(0, runnerStarts);
}
@Nonnull public static Number shiftRight(@Nonnull Number value, @Nonnull Number shift) { // Check for widest types first, go down the type list to narrower types until reaching int. if (value instanceof Long) { return value.longValue() >> shift.longValue(); } else { return value.intValue() >> shift.intValue(); } }
// shiftRight must match the language's >> for both int and long operands.
@Test
void testShiftRight() {
    assertEquals(16 >> 1, NumberUtil.shiftRight(16, 1));
    assertEquals(16L >> 1, NumberUtil.shiftRight(16L, 1));
}
// Builds a Write transform that publishes each input string as a UTF-8 encoded
// PubsubMessage with no attributes; dynamic destinations are disabled.
public static Write<String> writeStrings() {
    return Write.newBuilder(
            (ValueInSingleWindow<String> stringAndWindow) ->
                new PubsubMessage(
                    stringAndWindow.getValue().getBytes(StandardCharsets.UTF_8), ImmutableMap.of()))
        .setDynamicDestinations(false)
        .build();
}
// Topic, timestamp attribute and id attribute configured on the write must all
// be reflected in its display data (with the gRPC client factory selected).
@Test
public void testWriteWithPubsubGrpcClientFactory() {
    String topic = "projects/project/topics/topic";
    PubsubIO.Write<?> write =
        PubsubIO.writeStrings()
            .to(topic)
            .withClientFactory(PubsubGrpcClient.FACTORY)
            .withTimestampAttribute("myTimestamp")
            .withIdAttribute("myId");
    DisplayData displayData = DisplayData.from(write);
    assertThat(displayData, hasDisplayItem("topic", topic));
    assertThat(displayData, hasDisplayItem("timestampAttribute", "myTimestamp"));
    assertThat(displayData, hasDisplayItem("idAttribute", "myId"));
}
// SQL scalar function cos(x) for DOUBLE; a thin wrapper over Math.cos.
@Description("cosine")
@ScalarFunction
@SqlType(StandardTypes.DOUBLE)
public static double cos(@SqlType(StandardTypes.DOUBLE) double num) {
    return Math.cos(num);
}
// cos() must match Math.cos for DOUBLE and REAL inputs, and return NULL for NULL.
@Test
public void testCos() {
    for (double doubleValue : DOUBLE_VALUES) {
        assertFunction("cos(" + doubleValue + ")", DOUBLE, Math.cos(doubleValue));
        // REAL arguments are widened to float precision before the cosine is taken.
        assertFunction("cos(REAL '" + (float) doubleValue + "')", DOUBLE, Math.cos((float) doubleValue));
    }
    assertFunction("cos(NULL)", DOUBLE, null);
}
/**
 * Loads index metadata for the given table via DatabaseMetaData.getIndexInfo.
 * Rows with a null index name are skipped; columns belonging to the same index
 * are accumulated into a single IndexMetaData (uniqueness taken from the first
 * row). Oracle's "view not appropriate" vendor error is tolerated silently;
 * any other SQLException propagates.
 */
@SuppressWarnings("CollectionWithoutInitialCapacity")
public static Collection<IndexMetaData> load(final Connection connection, final String table) throws SQLException {
    Map<String, IndexMetaData> result = new HashMap<>();
    try (ResultSet resultSet = connection.getMetaData().getIndexInfo(connection.getCatalog(), connection.getSchema(), table, false, false)) {
        while (resultSet.next()) {
            String indexName = resultSet.getString(INDEX_NAME);
            if (null == indexName) {
                continue;
            }
            IndexMetaData indexMetaData = result.get(indexName);
            if (null == indexMetaData) {
                // First row for this index: record its uniqueness and register it.
                indexMetaData = new IndexMetaData(indexName);
                indexMetaData.setUnique(!resultSet.getBoolean("NON_UNIQUE"));
                result.put(indexName, indexMetaData);
            }
            indexMetaData.getColumns().add(resultSet.getString("COLUMN_NAME"));
        }
    } catch (final SQLException ex) {
        if (ORACLE_VIEW_NOT_APPROPRIATE_VENDOR_CODE != ex.getErrorCode()) {
            throw ex;
        }
    }
    return result.values();
}
// Loading against the mocked connection must yield exactly one index named "my_index".
@Test
void assertLoad() throws SQLException {
    Collection<IndexMetaData> actual = IndexMetaDataLoader.load(mockConnection(), "tbl");
    assertThat(actual.size(), is(1));
    IndexMetaData indexMetaData = actual.iterator().next();
    assertThat(indexMetaData.getName(), is("my_index"));
}
// Collector that indexes elements into an ImmutableSetMultimap: the key function
// names the bucket and the value function flattens each element into a stream of
// values for that bucket. Neither function may return null; both are verified up front.
public static <K, E, V> Collector<E, ImmutableSetMultimap.Builder<K, V>, ImmutableSetMultimap<K, V>> unorderedFlattenIndex(
        Function<? super E, K> keyFunction, Function<? super E, Stream<V>> valueFunction) {
    verifyKeyAndValueFunctions(keyFunction, valueFunction);
    BiConsumer<ImmutableSetMultimap.Builder<K, V>, E> accumulator =
        (map, element) -> {
            K key = requireNonNull(keyFunction.apply(element), KEY_FUNCTION_CANT_RETURN_NULL_MESSAGE);
            Stream<V> valueStream = requireNonNull(valueFunction.apply(element), VALUE_FUNCTION_CANT_RETURN_NULL_MESSAGE);
            valueStream.forEach(value -> map.put(key, value));
        };
    // Parallel merge: drain the second builder's entries into the first.
    BinaryOperator<ImmutableSetMultimap.Builder<K, V>> merger =
        (m1, m2) -> {
            for (Map.Entry<K, V> entry : m2.build().entries()) {
                m1.put(entry.getKey(), entry.getValue());
            }
            return m1;
        };
    return Collector.of(
        ImmutableSetMultimap::builder, accumulator, merger, ImmutableSetMultimap.Builder::build);
}
// A null key function must be rejected eagerly with a descriptive NullPointerException.
@Test
public void unorderedFlattenIndex_with_valueFunction_fails_if_key_function_is_null() {
    assertThatThrownBy(() -> unorderedFlattenIndex(null, MyObj2::getTexts))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("Key function can't be null");
}
// Explodes the WAR into targetExplodedWarRoot and partitions its contents into up to
// four layers: non-snapshot WEB-INF/lib jars, SNAPSHOT jars, resources, and classes
// (appended in that order; empty layers are omitted).
@Override
public List<FileEntriesLayer> createLayers() throws IOException {
    // Clear the exploded-artifact root first
    if (Files.exists(targetExplodedWarRoot)) {
        MoreFiles.deleteRecursively(targetExplodedWarRoot, RecursiveDeleteOption.ALLOW_INSECURE);
    }
    ZipUtil.unzip(warPath, targetExplodedWarRoot, true);
    Predicate<Path> isFile = Files::isRegularFile;
    Predicate<Path> isInWebInfLib =
        path -> path.startsWith(targetExplodedWarRoot.resolve("WEB-INF").resolve("lib"));
    // Snapshot detection is by file name convention ("SNAPSHOT" substring).
    Predicate<Path> isSnapshot = path -> path.getFileName().toString().contains("SNAPSHOT");
    // Non-snapshot layer
    Predicate<Path> isInWebInfLibAndIsNotSnapshot = isInWebInfLib.and(isSnapshot.negate());
    FileEntriesLayer nonSnapshotLayer =
        ArtifactLayers.getDirectoryContentsAsLayer(
            ArtifactLayers.DEPENDENCIES,
            targetExplodedWarRoot,
            isFile.and(isInWebInfLibAndIsNotSnapshot),
            appRoot);
    // Snapshot layer
    Predicate<Path> isInWebInfLibAndIsSnapshot = isInWebInfLib.and(isSnapshot);
    FileEntriesLayer snapshotLayer =
        ArtifactLayers.getDirectoryContentsAsLayer(
            ArtifactLayers.SNAPSHOT_DEPENDENCIES,
            targetExplodedWarRoot,
            isFile.and(isInWebInfLibAndIsSnapshot),
            appRoot);
    // Classes layer.
    Predicate<Path> isClass = path -> path.getFileName().toString().endsWith(".class");
    Predicate<Path> isInWebInfClasses =
        path -> path.startsWith(targetExplodedWarRoot.resolve("WEB-INF").resolve("classes"));
    Predicate<Path> classesPredicate = isInWebInfClasses.and(isClass);
    FileEntriesLayer classesLayer =
        ArtifactLayers.getDirectoryContentsAsLayer(
            ArtifactLayers.CLASSES, targetExplodedWarRoot, classesPredicate, appRoot);
    // Resources layer: everything that is neither a lib jar nor a .class file.
    Predicate<Path> resourcesPredicate = isInWebInfLib.or(isClass).negate();
    FileEntriesLayer resourcesLayer =
        ArtifactLayers.getDirectoryContentsAsLayer(
            ArtifactLayers.RESOURCES,
            targetExplodedWarRoot,
            isFile.and(resourcesPredicate),
            appRoot);
    ArrayList<FileEntriesLayer> layers = new ArrayList<>();
    if (!nonSnapshotLayer.getEntries().isEmpty()) {
        layers.add(nonSnapshotLayer);
    }
    if (!snapshotLayer.getEntries().isEmpty()) {
        layers.add(snapshotLayer);
    }
    if (!resourcesLayer.getEntries().isEmpty()) {
        layers.add(resourcesLayer);
    }
    if (!classesLayer.getEntries().isEmpty()) {
        layers.add(classesLayer);
    }
    return layers;
}
// A WAR containing all four content kinds must produce four layers, in order
// (non-snapshot deps, snapshot deps, resources, classes), each rooted under APP_ROOT.
@Test
public void testCreateLayers_allLayers_correctExtractionPaths() throws IOException, URISyntaxException {
    // Prepare war file for test
    Path tempDirectory = temporaryFolder.getRoot().toPath();
    Path warContents = Paths.get(Resources.getResource("war/standard/allLayers").toURI());
    Path standardWar = zipUpDirectory(warContents, tempDirectory.resolve("standardWar.war"));
    Path explodedWarDestination = temporaryFolder.newFolder("exploded-war").toPath();
    StandardWarExplodedProcessor processor =
        new StandardWarExplodedProcessor(standardWar, explodedWarDestination, APP_ROOT);
    List<FileEntriesLayer> layers = processor.createLayers();
    assertThat(layers.size()).isEqualTo(4);
    FileEntriesLayer nonSnapshotLayer = layers.get(0);
    FileEntriesLayer snapshotLayer = layers.get(1);
    FileEntriesLayer resourcesLayer = layers.get(2);
    FileEntriesLayer classesLayer = layers.get(3);
    assertThat(nonSnapshotLayer.getEntries())
        .comparingElementsUsing(EXTRACTION_PATH_OF)
        .containsExactly("/my/app/WEB-INF/lib/dependency-1.0.0.jar");
    assertThat(snapshotLayer.getEntries())
        .comparingElementsUsing(EXTRACTION_PATH_OF)
        .containsExactly("/my/app/WEB-INF/lib/dependencyX-1.0.0-SNAPSHOT.jar");
    assertThat(resourcesLayer.getEntries())
        .comparingElementsUsing(EXTRACTION_PATH_OF)
        .containsExactly(
            "/my/app/META-INF/context.xml",
            "/my/app/Test.jsp",
            "/my/app/WEB-INF/web.xml",
            "/my/app/WEB-INF/classes/package/test.properties");
    assertThat(classesLayer.getEntries())
        .comparingElementsUsing(EXTRACTION_PATH_OF)
        .containsExactly(
            "/my/app/WEB-INF/classes/MyClass2.class",
            "/my/app/WEB-INF/classes/package/MyClass.class");
}
/** Streams all matching hits; with no posting lists the stream is empty. */
public Stream<Hit> stream() {
    return nPostingLists == 0
            ? Stream.empty()
            : StreamSupport.stream(new PredicateSpliterator(), false);
}
// A posting list that ends earlier than its sibling must not truncate the result:
// all three documents are still produced as hits.
@Test
void requireThatShorterPostingListEndingIsOk() {
    PredicateSearch search = createPredicateSearch(
            new byte[]{1, 1, 1},
            postingList(SubqueryBitmap.ALL_SUBQUERIES, entry(0, 0x000100ff), entry(1, 0x000100ff)),
            postingList(SubqueryBitmap.ALL_SUBQUERIES, entry(2, 0x000100ff)));
    assertEquals(List.of(new Hit(0), new Hit(1), new Hit(2)).toString(), search.stream().toList().toString());
}
/**
 * Resolves the raw flag value: rules are consulted in declaration order against
 * the given fetch vector merged onto the defaults, and the first matching rule
 * supplies the value (empty when no rule matches or the rule carries no value).
 */
public Optional<RawFlag> resolve(FetchVector fetchVector) {
    for (Rule rule : rules) {
        if (rule.match(defaultFetchVector.with(fetchVector))) {
            return rule.getValueToApply();
        }
    }
    return Optional.empty();
}
// Exercises rule resolution across dimension combinations, including unsetting
// a dimension with null and overriding the zone to an unknown value.
@Test
void testResolve() {
    // Second rule matches with the default zone matching
    verify(Optional.of("false"), vector);
    // First rule matches only if both conditions match
    verify(Optional.of("false"), vector
            .with(Dimension.HOSTNAME, "host1")
            .with(Dimension.INSTANCE_ID, "app2"));
    verify(Optional.of("true"), vector
            .with(Dimension.HOSTNAME, "host1")
            .with(Dimension.INSTANCE_ID, "app3"));
    // Verify unsetting a dimension with null works.
    verify(Optional.of("true"), vector
            .with(Dimension.HOSTNAME, "host1")
            .with(Dimension.INSTANCE_ID, "app3")
            .with(Dimension.INSTANCE_ID, null));
    // No rules apply if zone is overridden to an unknown zone
    verify(Optional.empty(), vector.with(Dimension.ZONE_ID, "unknown zone"));
}
public static ServerHealthState error(String message, String description, HealthStateType type) { return new ServerHealthState(HealthStateLevel.ERROR, type, escapeHtml4(message), escapeHtml4(description)); }
// Two error states built from identical message/description/scope must be equal.
@Test
public void shouldUnderstandEquality() {
    ServerHealthState fooError = ServerHealthState.error("my message", "my description", HealthStateType.general(HealthStateScope.forPipeline("foo")));
    ServerHealthState fooErrorCopy = ServerHealthState.error("my message", "my description", HealthStateType.general(HealthStateScope.forPipeline("foo")));
    assertThat(fooError, is(fooErrorCopy));
}
// Batch-imports config entries, applying the given SameConfigPolicy when an insert
// fails (uniqueness conflict or other failure): ABORT stops and reports the failed
// item plus all remaining items as skipped; SKIP records the item and continues;
// OVERWRITE updates the existing config and counts it as a success.
// Returns succCount/skipCount plus optional failData/skipData lists.
@Override
public Map<String, Object> batchInsertOrUpdate(List<ConfigAllInfo> configInfoList, String srcUser, String srcIp,
        Map<String, Object> configAdvanceInfo, SameConfigPolicy policy) throws NacosException {
    int succCount = 0;
    int skipCount = 0;
    List<Map<String, String>> failData = null;
    List<Map<String, String>> skipData = null;
    for (int i = 0; i < configInfoList.size(); i++) {
        ConfigAllInfo configInfo = configInfoList.get(i);
        try {
            // Parameter validation failure aborts the whole batch.
            ParamUtils.checkParam(configInfo.getDataId(), configInfo.getGroup(), "datumId", configInfo.getContent());
        } catch (NacosException e) {
            LogUtil.DEFAULT_LOG.error("data verification failed", e);
            throw e;
        }
        ConfigInfo configInfo2Save = new ConfigInfo(configInfo.getDataId(), configInfo.getGroup(), configInfo.getTenant(), configInfo.getAppName(), configInfo.getContent());
        configInfo2Save.setEncryptedDataKey(
                configInfo.getEncryptedDataKey() == null ? StringUtils.EMPTY : configInfo.getEncryptedDataKey());
        String type = configInfo.getType();
        if (StringUtils.isBlank(type)) {
            // simple judgment of file type based on suffix
            if (configInfo.getDataId().contains(SPOT)) {
                String extName = configInfo.getDataId().substring(configInfo.getDataId().lastIndexOf(SPOT) + 1);
                FileTypeEnum fileTypeEnum = FileTypeEnum.getFileTypeEnumByFileExtensionOrFileType(extName);
                type = fileTypeEnum.getFileType();
            } else {
                type = FileTypeEnum.getFileTypeEnumByFileExtensionOrFileType(null).getFileType();
            }
        }
        if (configAdvanceInfo == null) {
            configAdvanceInfo = new HashMap<>(16);
        }
        configAdvanceInfo.put("type", type);
        configAdvanceInfo.put("desc", configInfo.getDesc());
        boolean success;
        try {
            ConfigOperateResult configOperateResult = addConfigInfo(srcIp, srcUser, configInfo2Save, configAdvanceInfo);
            success = configOperateResult.isSuccess();
        } catch (DataIntegrityViolationException ive) {
            // Treated the same as a reported failure; resolved by the policy below.
            success = false;
        }
        if (success) {
            succCount++;
        } else {
            // uniqueness constraint conflict or add config info fail.
            if (SameConfigPolicy.ABORT.equals(policy)) {
                failData = new ArrayList<>();
                skipData = new ArrayList<>();
                Map<String, String> faileditem = new HashMap<>(2);
                faileditem.put("dataId", configInfo2Save.getDataId());
                faileditem.put("group", configInfo2Save.getGroup());
                failData.add(faileditem);
                // All remaining entries are reported as skipped.
                for (int j = (i + 1); j < configInfoList.size(); j++) {
                    ConfigInfo skipConfigInfo = configInfoList.get(j);
                    Map<String, String> skipitem = new HashMap<>(2);
                    skipitem.put("dataId", skipConfigInfo.getDataId());
                    skipitem.put("group", skipConfigInfo.getGroup());
                    skipData.add(skipitem);
                    skipCount++;
                }
                break;
            } else if (SameConfigPolicy.SKIP.equals(policy)) {
                skipCount++;
                if (skipData == null) {
                    skipData = new ArrayList<>();
                }
                Map<String, String> skipitem = new HashMap<>(2);
                skipitem.put("dataId", configInfo2Save.getDataId());
                skipitem.put("group", configInfo2Save.getGroup());
                skipData.add(skipitem);
            } else if (SameConfigPolicy.OVERWRITE.equals(policy)) {
                succCount++;
                updateConfigInfo(configInfo2Save, srcIp, srcUser, configAdvanceInfo);
            }
        }
    }
    Map<String, Object> result = new HashMap<>(4);
    result.put("succCount", succCount);
    result.put("skipCount", skipCount);
    if (failData != null && !failData.isEmpty()) {
        result.put("failData", failData);
    }
    if (skipData != null && !skipData.isEmpty()) {
        result.put("skipData", skipData);
    }
    return result;
}
// ABORT policy: first insert succeeds, second fails — the second entry is reported
// in failData, the third is never attempted and appears in skipData.
@Test
void testBatchInsertOrUpdateAbort() throws NacosException {
    List<ConfigAllInfo> configInfoList = new ArrayList<>();
    //insert direct
    configInfoList.add(createMockConfigAllInfo(0));
    //exist config and overwrite
    configInfoList.add(createMockConfigAllInfo(1));
    //insert direct
    configInfoList.add(createMockConfigAllInfo(2));
    String srcUser = "srcUser1324";
    String srcIp = "srcIp1243";
    Map<String, Object> configAdvanceInfo = new HashMap<>();
    //mock transactionTemplate and replace
    TransactionTemplate transactionTemplateCurrent = Mockito.mock(TransactionTemplate.class);
    ReflectionTestUtils.setField(externalConfigInfoPersistService, "tjt", transactionTemplateCurrent);
    //mock add config 1 success,config 2 fail and abort,config 3 not operated
    Mockito.when(transactionTemplateCurrent.execute(any())).thenReturn(new ConfigOperateResult(true), new ConfigOperateResult(false));
    Map<String, Object> stringObjectMap = externalConfigInfoPersistService.batchInsertOrUpdate(configInfoList, srcUser, srcIp, configAdvanceInfo, SameConfigPolicy.ABORT);
    assertEquals(1, stringObjectMap.get("succCount"));
    assertEquals(1, stringObjectMap.get("skipCount"));
    // config 2 failed
    assertEquals(configInfoList.get(1).getDataId(), ((List<Map<String, String>>) stringObjectMap.get("failData")).get(0).get("dataId"));
    //skip config 3
    assertEquals(configInfoList.get(2).getDataId(), ((List<Map<String, String>>) stringObjectMap.get("skipData")).get(0).get("dataId"));
}
/**
 * Emits the {@code cp-subsystem} section of the configuration XML for the given
 * {@link Config}, covering the top-level CP settings, the Raft algorithm tuning
 * parameters, and the per-structure semaphore, fenced-lock and CP-map configs.
 * The element order mirrors the config schema; do not reorder the {@code node} calls.
 */
private static void cpSubsystemConfig(XmlGenerator gen, Config config) {
    CPSubsystemConfig cpSubsystemConfig = config.getCPSubsystemConfig();
    gen.open("cp-subsystem")
            .node("cp-member-count", cpSubsystemConfig.getCPMemberCount())
            .node("group-size", cpSubsystemConfig.getGroupSize())
            .node("session-time-to-live-seconds", cpSubsystemConfig.getSessionTimeToLiveSeconds())
            .node("session-heartbeat-interval-seconds", cpSubsystemConfig.getSessionHeartbeatIntervalSeconds())
            .node("missing-cp-member-auto-removal-seconds", cpSubsystemConfig.getMissingCPMemberAutoRemovalSeconds())
            .node("fail-on-indeterminate-operation-state", cpSubsystemConfig.isFailOnIndeterminateOperationState())
            .node("persistence-enabled", cpSubsystemConfig.isPersistenceEnabled())
            .node("base-dir", cpSubsystemConfig.getBaseDir().getAbsolutePath())
            .node("data-load-timeout-seconds", cpSubsystemConfig.getDataLoadTimeoutSeconds())
            .node("cp-member-priority", cpSubsystemConfig.getCPMemberPriority())
            .node("map-limit", cpSubsystemConfig.getCPMapLimit());

    // Raft tuning parameters live in their own nested element.
    RaftAlgorithmConfig raftAlgorithmConfig = cpSubsystemConfig.getRaftAlgorithmConfig();
    gen.open("raft-algorithm")
            .node("leader-election-timeout-in-millis", raftAlgorithmConfig.getLeaderElectionTimeoutInMillis())
            .node("leader-heartbeat-period-in-millis", raftAlgorithmConfig.getLeaderHeartbeatPeriodInMillis())
            .node("max-missed-leader-heartbeat-count", raftAlgorithmConfig.getMaxMissedLeaderHeartbeatCount())
            .node("append-request-max-entry-count", raftAlgorithmConfig.getAppendRequestMaxEntryCount())
            .node("commit-index-advance-count-to-snapshot", raftAlgorithmConfig.getCommitIndexAdvanceCountToSnapshot())
            .node("uncommitted-entry-count-to-reject-new-appends",
                    raftAlgorithmConfig.getUncommittedEntryCountToRejectNewAppends())
            .node("append-request-backoff-timeout-in-millis", raftAlgorithmConfig.getAppendRequestBackoffTimeoutInMillis())
            .close();

    // One element per named semaphore config.
    gen.open("semaphores");
    for (SemaphoreConfig semaphoreConfig : cpSubsystemConfig.getSemaphoreConfigs().values()) {
        gen.open("semaphore")
                .node("name", semaphoreConfig.getName())
                .node("jdk-compatible", semaphoreConfig.isJDKCompatible())
                .node("initial-permits", semaphoreConfig.getInitialPermits())
                .close();
    }
    // One element per named fenced-lock config.
    gen.close().open("locks");
    for (FencedLockConfig lockConfig : cpSubsystemConfig.getLockConfigs().values()) {
        gen.open("fenced-lock")
                .node("name", lockConfig.getName())
                .node("lock-acquire-limit", lockConfig.getLockAcquireLimit())
                .close();
    }
    // One element per named CP-map config.
    gen.close().open("maps");
    for (CPMapConfig cpMapConfig : cpSubsystemConfig.getCpMapConfigs().values()) {
        gen.open("map")
                .node("name", cpMapConfig.getName())
                .node("max-size-mb", cpMapConfig.getMaxSizeMb())
                .close();
    }
    // Close "maps" and then "cp-subsystem".
    gen.close().close();
}
@Test
public void testCPSubsystemConfig() {
    // Populate every CP-subsystem setting with a non-default value so that the
    // generator round-trip is forced to preserve each of them.
    Config config = new Config();
    config.getCPSubsystemConfig()
            .setCPMemberCount(10)
            .setGroupSize(5)
            .setSessionTimeToLiveSeconds(15)
            .setSessionHeartbeatIntervalSeconds(3)
            .setMissingCPMemberAutoRemovalSeconds(120)
            .setFailOnIndeterminateOperationState(true)
            .setPersistenceEnabled(true)
            .setBaseDir(new File("/custom-dir"))
            .setCPMemberPriority(-1);
    config.getCPSubsystemConfig()
            .getRaftAlgorithmConfig()
            .setLeaderElectionTimeoutInMillis(500)
            .setLeaderHeartbeatPeriodInMillis(100)
            .setMaxMissedLeaderHeartbeatCount(10)
            .setAppendRequestMaxEntryCount(25)
            // Was a duplicate setAppendRequestMaxEntryCount(250), which silently
            // overwrote the previous line and left this field untested.
            .setCommitIndexAdvanceCountToSnapshot(250)
            .setUncommittedEntryCountToRejectNewAppends(75)
            .setAppendRequestBackoffTimeoutInMillis(50);
    config.getCPSubsystemConfig()
            .addSemaphoreConfig(new SemaphoreConfig("sem1", true, 1))
            .addSemaphoreConfig(new SemaphoreConfig("sem2", false, 2));
    // Use two distinct lock names; a repeated "lock1" would just replace the first entry.
    config.getCPSubsystemConfig()
            .addLockConfig(new FencedLockConfig("lock1", 1))
            .addLockConfig(new FencedLockConfig("lock2", 2));
    config.getCPSubsystemConfig()
            .addCPMapConfig(new CPMapConfig("map1", 50))
            .addCPMapConfig(new CPMapConfig("map2", 25));
    config.getCPSubsystemConfig().setCPMapLimit(30);

    // Round-trip through the XML generator and verify equivalence.
    CPSubsystemConfig generatedConfig = getNewConfigViaXMLGenerator(config).getCPSubsystemConfig();
    assertTrue(generatedConfig + " should be compatible with " + config.getCPSubsystemConfig(),
            new CPSubsystemConfigChecker().check(config.getCPSubsystemConfig(), generatedConfig));
}
/**
 * Delegates hashing to the wrapped {@code value}.
 * NOTE(review): presumably {@code equals} also compares by {@code value} so the
 * equals/hashCode contract holds — confirm against the class's equals implementation.
 */
@Override
public int hashCode() {
    return value.hashCode();
}
@Test public void testHashCode() { // Verify that the hashCode is equal when i==j, and usually not equal otherwise. int collisions = 0; for (int i = 0; i < TEST_KEYS.length; ++i) { int left = TEST_KEYS[i].hashCode(); int leftClone = ByteKey.copyFrom(TEST_KEYS[i].getValue()).hashCode(); assertEquals( String.format("Expected same hash code for %s and a copy of itself", TEST_KEYS[i]), left, leftClone); for (int j = i + 1; j < TEST_KEYS.length; ++j) { int right = TEST_KEYS[j].hashCode(); if (left == right) { ++collisions; } } } int totalUnequalTests = TEST_KEYS.length * (TEST_KEYS.length - 1) / 2; assertThat("Too many hash collisions", collisions, lessThan(totalUnequalTests / 2)); }
/**
 * Decodes a percent-encoded URI component using the default charset.
 * Delegates to {@code decodeComponent(String, Charset)} with
 * {@code HttpConstants.DEFAULT_CHARSET}.
 *
 * @param s the percent-encoded string to decode
 * @return the decoded string
 */
public static String decodeComponent(final String s) {
    return decodeComponent(s, HttpConstants.DEFAULT_CHARSET);
}
@Test public void testUrlDecoding() throws Exception { final String caffe = new String( // "Caffé" but instead of putting the literal E-acute in the // source file, we directly use the UTF-8 encoding so as to // not rely on the platform's default encoding (not portable). new byte[] {'C', 'a', 'f', 'f', (byte) 0xC3, (byte) 0xA9}, "UTF-8"); final String[] tests = { // Encoded -> Decoded or error message substring "", "", "foo", "foo", "f+o", "f o", "f++", "f ", "fo%", "unterminated escape sequence at index 2 of: fo%", "%42", "B", "%5f", "_", "f%4", "unterminated escape sequence at index 1 of: f%4", "%x2", "invalid hex byte 'x2' at index 1 of '%x2'", "%4x", "invalid hex byte '4x' at index 1 of '%4x'", "Caff%C3%A9", caffe, "случайный праздник", "случайный праздник", "случайный%20праздник", "случайный праздник", "случайный%20праздник%20%E2%98%BA", "случайный праздник ☺", }; for (int i = 0; i < tests.length; i += 2) { final String encoded = tests[i]; final String expected = tests[i + 1]; try { final String decoded = QueryStringDecoder.decodeComponent(encoded); assertEquals(expected, decoded); } catch (IllegalArgumentException e) { assertEquals(expected, e.getMessage()); } } }
/**
 * Returns an iterator over the page indexes in {@code [0, pageCount)} that
 * satisfy {@code filter}. The identity index mapping ({@code i -> i}) is used,
 * so the iterator yields the raw page indexes themselves.
 */
public static PrimitiveIterator.OfInt filter(int pageCount, IntPredicate filter) {
    return new IndexIterator(0, pageCount, filter, i -> i);
}
@Test
public void testFilter() {
    // Pages 0..29 filtered down to the multiples of 3.
    assertEquals(IndexIterator.filter(30, value -> value % 3 == 0),
        0, 3, 6, 9, 12, 15, 18, 21, 24, 27);
}
/**
 * Rolls back the current transaction for this connection session.
 * <p>
 * {@code beforeRollback} hooks run unconditionally; the actual rollback (local
 * or distributed, chosen from the transaction context) happens only when the
 * session is inside a transaction. The {@code finally} block guarantees that
 * {@code afterRollback} hooks fire, savepoints are cleared, and the session's
 * transaction state is reset even if the rollback itself throws.
 * NOTE(review): beforeRollback also runs when no transaction is active, while
 * afterRollback does not — confirm this asymmetry is intentional.
 *
 * @throws SQLException if the underlying rollback fails
 */
@Override
public void rollback() throws SQLException {
    for (TransactionHook each : transactionHooks) {
        each.beforeRollback(connection.getCachedConnections().values(), getTransactionContext());
    }
    if (connection.getConnectionSession().getTransactionStatus().isInTransaction()) {
        try {
            // LOCAL transactions (or a missing distributed manager) roll back locally;
            // otherwise delegate to the distributed transaction manager.
            if (TransactionType.LOCAL == TransactionUtils.getTransactionType(getTransactionContext())
                    || null == distributionTransactionManager) {
                localTransactionManager.rollback();
            } else {
                distributionTransactionManager.rollback();
            }
        } finally {
            for (TransactionHook each : transactionHooks) {
                each.afterRollback(connection.getCachedConnections().values(), getTransactionContext());
            }
            // Release savepoint bookkeeping for every cached physical connection.
            for (Connection each : connection.getCachedConnections().values()) {
                ConnectionSavepointManager.getInstance().transactionFinished(each);
            }
            connection.getConnectionSession().getTransactionStatus().setInTransaction(false);
            connection.getConnectionSession().getConnectionContext().close();
        }
    }
}
@Test
void assertRollbackForLocalTransaction() throws SQLException {
    // Arrange a LOCAL-transaction context with an active transaction.
    ContextManager contextManager = mockContextManager(TransactionType.LOCAL);
    when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager);
    newBackendTransactionManager(TransactionType.LOCAL, true);

    backendTransactionManager.rollback();

    // The local manager performs the rollback and the session leaves the transaction.
    verify(localTransactionManager).rollback();
    verify(transactionStatus).setInTransaction(false);
}
/**
 * Converts the given raw value to a {@link Long} according to this value
 * meta's declared type and storage type.
 * <p>
 * Binary-stored values are first decoded via
 * {@code convertBinaryStringToNativeType}, indexed values are looked up in the
 * {@code index} array, and the native value is then converted with the
 * type-specific {@code convertXxxToInteger} helper or a direct cast/rounding.
 * Deprecated {@code new Long(...)} boxing constructors were replaced with
 * {@code Long.valueOf(...)}; the resulting values are identical.
 *
 * @param object the raw value (may be null)
 * @return the value as a Long, or null if the value is null
 * @throws KettleValueException if the type or storage type is unknown, the
 *         type cannot be converted to an integer, or any conversion fails
 */
@Override
public Long getInteger( Object object ) throws KettleValueException {
  try {
    if ( isNull( object ) ) {
      return null;
    }
    switch ( type ) {
      case TYPE_INTEGER:
        switch ( storageType ) {
          case STORAGE_TYPE_NORMAL:
            return (Long) object;
          case STORAGE_TYPE_BINARY_STRING:
            return (Long) convertBinaryStringToNativeType( (byte[]) object );
          case STORAGE_TYPE_INDEXED:
            return (Long) index[( (Integer) object ).intValue()];
          default:
            throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
        }
      case TYPE_STRING:
        switch ( storageType ) {
          case STORAGE_TYPE_NORMAL:
            return convertStringToInteger( (String) object );
          case STORAGE_TYPE_BINARY_STRING:
            return convertStringToInteger( (String) convertBinaryStringToNativeType( (byte[]) object ) );
          case STORAGE_TYPE_INDEXED:
            return convertStringToInteger( (String) index[( (Integer) object ).intValue()] );
          default:
            throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
        }
      case TYPE_NUMBER:
        // Numbers are rounded (not truncated) to the nearest long.
        switch ( storageType ) {
          case STORAGE_TYPE_NORMAL:
            return Long.valueOf( Math.round( ( (Double) object ).doubleValue() ) );
          case STORAGE_TYPE_BINARY_STRING:
            return Long.valueOf( Math.round( ( (Double) convertBinaryStringToNativeType( (byte[]) object ) )
              .doubleValue() ) );
          case STORAGE_TYPE_INDEXED:
            return Long.valueOf( Math.round( ( (Double) index[( (Integer) object ).intValue()] ).doubleValue() ) );
          default:
            throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
        }
      case TYPE_DATE:
        switch ( storageType ) {
          case STORAGE_TYPE_NORMAL:
            return convertDateToInteger( (Date) object );
          case STORAGE_TYPE_BINARY_STRING:
            // NOTE(review): uses getTime() directly instead of convertDateToInteger()
            // like the other branches — confirm both yield the same epoch-millis value.
            return Long.valueOf( ( (Date) convertBinaryStringToNativeType( (byte[]) object ) ).getTime() );
          case STORAGE_TYPE_INDEXED:
            return convertDateToInteger( (Date) index[( (Integer) object ).intValue()] );
          default:
            throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
        }
      case TYPE_BIGNUMBER:
        // BigDecimal conversion truncates the fractional part (longValue semantics).
        switch ( storageType ) {
          case STORAGE_TYPE_NORMAL:
            return Long.valueOf( ( (BigDecimal) object ).longValue() );
          case STORAGE_TYPE_BINARY_STRING:
            return Long.valueOf( ( (BigDecimal) convertBinaryStringToNativeType( (byte[]) object ) ).longValue() );
          case STORAGE_TYPE_INDEXED:
            return Long.valueOf( ( (BigDecimal) index[( (Integer) object ).intValue()] ).longValue() );
          default:
            throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
        }
      case TYPE_BOOLEAN:
        switch ( storageType ) {
          case STORAGE_TYPE_NORMAL:
            return convertBooleanToInteger( (Boolean) object );
          case STORAGE_TYPE_BINARY_STRING:
            return convertBooleanToInteger( (Boolean) convertBinaryStringToNativeType( (byte[]) object ) );
          case STORAGE_TYPE_INDEXED:
            return convertBooleanToInteger( (Boolean) index[( (Integer) object ).intValue()] );
          default:
            throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
        }
      case TYPE_BINARY:
        throw new KettleValueException( toString() + " : I don't know how to convert binary values to integers." );
      case TYPE_SERIALIZABLE:
        throw new KettleValueException( toString() + " : I don't know how to convert serializable values to integers." );
      default:
        throw new KettleValueException( toString() + " : Unknown type " + type + " specified." );
    }
  } catch ( Exception e ) {
    // Wrap any unexpected failure (e.g. a ClassCastException from a mistyped
    // payload) in a KettleValueException with conversion context.
    throw new KettleValueException( "Unexpected conversion error while converting value [" + toString()
      + "] to an Integer", e );
  }
}
@Test( expected = KettleValueException.class )
public void testGetIntegerThrowsKettleValueException() throws KettleValueException {
    // A String payload on an INTEGER-typed meta fails the internal cast, and
    // getInteger() wraps that failure in a KettleValueException.
    new ValueMetaInteger().getInteger( "1234567890" );
}
/**
 * Builds a BootstrapMetadata from the given records. Every record is scanned
 * and the last FeatureLevelRecord carrying metadata.version wins; the absence
 * of any such record is an error.
 */
public static BootstrapMetadata fromRecords(List<ApiMessageAndVersion> records, String source) {
    Optional<MetadataVersion> latestVersion = Optional.empty();
    for (ApiMessageAndVersion record : records) {
        Optional<MetadataVersion> candidate = recordToMetadataVersion(record.message());
        if (candidate.isPresent()) {
            latestVersion = candidate;
        }
    }
    MetadataVersion metadataVersion = latestVersion.orElseThrow(() ->
        new RuntimeException("No FeatureLevelRecord for " + MetadataVersion.FEATURE_NAME +
            " was found in the bootstrap metadata from " + source));
    return new BootstrapMetadata(records, metadataVersion, source);
}
@Test
public void testFromRecordsListWithoutMetadataVersion() {
    // An empty record list has no FeatureLevelRecord, so construction must fail
    // with a message naming the metadata source.
    RuntimeException exception = assertThrows(RuntimeException.class,
        () -> BootstrapMetadata.fromRecords(emptyList(), "quux"));
    assertEquals("No FeatureLevelRecord for metadata.version was found in the bootstrap " +
        "metadata from quux", exception.getMessage());
}
/**
 * Routes a pull query: locates the hosts able to serve each required partition,
 * fails fast if any partition has no eligible host, and then executes the
 * query rounds asynchronously on the coordinator executor.
 *
 * @param serviceContext       service context for issuing remote requests
 * @param pullPhysicalPlan     the physical plan (keys, materialization, plan type)
 * @param statement            the configured pull-query statement
 * @param routingOptions       options controlling host filtering/routing
 * @param pullQueryQueue       sink that receives the streamed result rows
 * @param shouldCancelRequests future used to signal request cancellation
 * @return a future completed when routing/execution finishes, or completed
 *         exceptionally on failure (including MaterializationException when a
 *         partition has no valid host)
 */
public CompletableFuture<Void> handlePullQuery(
    final ServiceContext serviceContext,
    final PullPhysicalPlan pullPhysicalPlan,
    final ConfiguredStatement<Query> statement,
    final RoutingOptions routingOptions,
    final PullQueryWriteStream pullQueryQueue,
    final CompletableFuture<Void> shouldCancelRequests
) {
    // Ask the materialization's locator which hosts can serve each partition.
    final List<KsqlPartitionLocation> allLocations = pullPhysicalPlan.getMaterialization().locator()
        .locate(
            pullPhysicalPlan.getKeys(),
            routingOptions,
            routingFilterFactory,
            pullPhysicalPlan.getPlanType() == PullPhysicalPlanType.RANGE_SCAN
        );

    // Partitions where no candidate host passed the routing filters.
    final Map<Integer, List<Host>> emptyPartitions = allLocations.stream()
        .filter(loc -> loc.getNodes().stream().noneMatch(node -> node.getHost().isSelected()))
        .collect(Collectors.toMap(
            KsqlPartitionLocation::getPartition,
            loc -> loc.getNodes().stream().map(KsqlNode::getHost).collect(Collectors.toList())));

    if (!emptyPartitions.isEmpty()) {
        // Any unservable partition makes the whole query fail.
        final MaterializationException materializationException = new MaterializationException(
            "Unable to execute pull query. "
                + emptyPartitions.entrySet()
                .stream()
                .map(kv -> String.format(
                    "Partition %s failed to find valid host. Hosts scanned: %s",
                    kv.getKey(), kv.getValue()))
                .collect(Collectors.joining(", ", "[", "]")));

        LOG.debug(materializationException.getMessage());
        throw materializationException;
    }

    // at this point we should filter out the hosts that we should not route to
    final List<KsqlPartitionLocation> locations = allLocations
        .stream()
        .map(KsqlPartitionLocation::removeFilteredHosts)
        .collect(Collectors.toList());

    // Execute asynchronously; the returned future reflects success or failure
    // of the whole round-based execution.
    final CompletableFuture<Void> completableFuture = new CompletableFuture<>();
    coordinatorExecutorService.submit(() -> {
        try {
            executeRounds(serviceContext, pullPhysicalPlan, statement, routingOptions,
                locations, pullQueryQueue, shouldCancelRequests);
            completableFuture.complete(null);
        } catch (Throwable t) {
            completableFuture.completeExceptionally(t);
        }
    });

    return completableFuture;
}
@Test
public void shouldCallRouteQuery_allStandbysFail() {
    // Given: local execution of partitions 1,3 fails with a StandbyFallbackException,
    // and the remote standby (node2) succeeds once for partitions 2,4 but then
    // fails when retried for 1,3 — exhausting all hosts for those partitions.
    locate(location1, location2, location3, location4);
    doAnswer(i -> {
        throw new StandbyFallbackException("Error1!");
    }).when(pullPhysicalPlan).execute(eq(ImmutableList.of(location1, location3)), any(), any());
    when(ksqlClient.makeQueryRequest(eq(node2.location()), any(), any(), any(), any(), any(), any()))
        .thenAnswer(new Answer() {
            // Counts invocations: the first call serves partitions 2,4; the
            // second is the fallback attempt for 1,3, which is made to fail.
            private int count = 0;

            public Object answer(InvocationOnMock i) {
                Map<String, ?> requestProperties = i.getArgument(3);
                PullQueryWriteStream rowConsumer = i.getArgument(4);
                if (++count == 1) {
                    assertThat(requestProperties.get(
                        KsqlRequestConfig.KSQL_REQUEST_QUERY_PULL_PARTITIONS), is("2,4"));
                    rowConsumer.write(
                        ImmutableList.of(
                            StreamedRow.header(queryId, logicalSchema),
                            StreamedRow.pullRow(GenericRow.fromList(ROW2), Optional.empty())));
                } else {
                    assertThat(requestProperties.get(
                        KsqlRequestConfig.KSQL_REQUEST_QUERY_PULL_PARTITIONS), is("1,3"));
                    throw new RuntimeException("Error2!");
                }
                return RestResponse.successful(200, 2);
            }
        }
    );

    // When: the routing future must complete exceptionally.
    final Exception e = assertThrows(
        ExecutionException.class,
        () -> {
            CompletableFuture<Void> future = haRouting.handlePullQuery(serviceContext,
                pullPhysicalPlan, statement, routingOptions, pullQueryQueue, disconnect);
            future.get();
        }
    );

    // Then: local execution was attempted once, the standby was tried twice,
    // and the failure reports that all standby hosts were exhausted.
    verify(pullPhysicalPlan).execute(eq(ImmutableList.of(location1, location3)), any(), any());
    verify(ksqlClient, times(2)).makeQueryRequest(eq(node2.location()), any(), any(), any(), any(), any(), any());
    assertThat(e.getCause().getMessage(), containsString("Exhausted standby hosts to try."));
}
/**
 * Adds (or replaces) a replica in the per-block-pool replica map.
 *
 * @param bpid        block pool id; validated by checkBlockPool
 * @param replicaInfo replica to store; validated by checkBlock (null rejected)
 * @return the previous replica for the same block, or null if there was none
 */
ReplicaInfo add(String bpid, ReplicaInfo replicaInfo) {
    checkBlockPool(bpid);
    checkBlock(replicaInfo);
    // NOTE(review): a read-level lock is taken for a mutating operation — this
    // looks intentional (per-block-pool fine-grained locking with a thread-safe
    // GSet), but confirm against the lock manager's level semantics.
    try (AutoCloseDataSetLock l = lockManager.readLock(LockLevel.BLOCK_POOl, bpid)) {
        LightWeightResizableGSet<Block, ReplicaInfo> m = map.get(bpid);
        if (m == null) {
            // Add an entry for block pool if it does not exist already
            map.putIfAbsent(bpid, new LightWeightResizableGSet<Block, ReplicaInfo>());
            m = map.get(bpid);
        }
        return m.put(replicaInfo);
    }
}
@Test
public void testAdd() {
    // Test 1: null argument throws invalid argument exception
    try {
        map.add(bpid, null);
        fail("Expected exception not thrown");
    } catch (IllegalArgumentException expected) {
        // expected: checkBlock rejects a null replica
    }
}
/**
 * Returns every project visible to the logged-in user.
 * Thin controller endpoint: delegates directly to
 * {@code projectService.queryAllProjectList}.
 *
 * @param loginUser the session user injected from the request attribute
 * @return service result wrapping the project list
 */
@Operation(summary = "queryAllProjectList", description = "QUERY_ALL_PROJECT_LIST_NOTES")
@GetMapping(value = "/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR)
public Result queryAllProjectList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
    return projectService.queryAllProjectList(loginUser);
}
@Test
public void testQueryAllProjectList() {
    // Stub the service to return a SUCCESS result for this user.
    User loginUser = new User();
    loginUser.setId(0);
    Result expected = new Result();
    putMsg(expected, Status.SUCCESS);
    Mockito.when(projectService.queryAllProjectList(loginUser)).thenReturn(expected);

    Result actual = projectController.queryAllProjectList(loginUser);

    Assertions.assertEquals(Status.SUCCESS.getCode(), actual.getCode().intValue());
}
/**
 * Parses an SLS node file (a stream of JSON objects, one per rack) into a set
 * of {@link NodeDetails}.
 * <p>
 * Each rack object contributes one node per entry in its {@code nodes} list;
 * hostnames are built as {@code /rack/node}. Per-node resource values override
 * the supplied NodeManager defaults, and an optional {@code labels} string is
 * parsed into node labels.
 *
 * @param nodeFile          path to the JSON node file
 * @param nmDefaultResource default resource assigned to nodes without overrides
 * @return the parsed node details
 * @throws IOException if the file cannot be read or parsed
 */
public static Set<NodeDetails> parseNodesFromNodeFile(
    String nodeFile, Resource nmDefaultResource) throws IOException {
  Set<NodeDetails> nodeSet = new HashSet<>();
  ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
  // try-with-resources guarantees the reader is closed even if parsing fails.
  try (Reader input = new InputStreamReader(
      new FileInputStream(nodeFile), StandardCharsets.UTF_8)) {
    Iterator<Map> i = mapper.readValues(mapper.createParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      String rack = "/" + jsonE.get("rack");
      List tasks = (List) jsonE.get("nodes");
      for (Object o : tasks) {
        Map jsonNode = (Map) o;
        NodeDetails nodeDetails = new NodeDetails(
            rack + "/" + jsonNode.get("node"));
        // Start from the NM defaults, then apply any per-node overrides.
        Resource nodeResource = Resources.clone(nmDefaultResource);
        ResourceInformation[] infors = ResourceUtils.getResourceTypesArray();
        for (ResourceInformation info : infors) {
          Object overrideValue = jsonNode.get(info.getName());
          if (overrideValue != null) {
            // Parse as long so resource values beyond Integer.MAX_VALUE are accepted;
            // resource values are long-typed in YARN.
            nodeResource.setResourceValue(info.getName(),
                Long.parseLong(overrideValue.toString()));
          }
        }
        nodeDetails.setNodeResource(nodeResource);
        if (jsonNode.get("labels") != null) {
          Set<NodeLabel> nodeLabels = new HashSet<>(
              YarnClientUtils.buildNodeLabelsFromStr(
                  jsonNode.get("labels").toString()));
          nodeDetails.setLabels(nodeLabels);
        }
        nodeSet.add(nodeDetails);
      }
    }
  }
  return nodeSet;
}
@Test
public void testParseNodesFromNodeFile() throws Exception {
    // Plain node file: 20 nodes, all on default resources.
    String nodeFile = "src/test/resources/nodes.json";
    Set<NodeDetails> nodeDetails = SLSUtils.parseNodesFromNodeFile(
        nodeFile, Resources.createResource(1024, 2));
    Assert.assertEquals(20, nodeDetails.size());

    // Node file with per-node resource and label overrides.
    nodeFile = "src/test/resources/nodes-with-resources.json";
    nodeDetails = SLSUtils.parseNodesFromNodeFile(
        nodeFile, Resources.createResource(1024, 2));
    Assert.assertEquals(4, nodeDetails.size());
    for (NodeDetails detail : nodeDetails) {
      switch (detail.getHostname()) {
      case "/rack1/node1":
        // Fully overridden resources.
        Assert.assertEquals(2048, detail.getNodeResource().getMemorySize());
        Assert.assertEquals(6, detail.getNodeResource().getVirtualCores());
        break;
      case "/rack1/node2":
        // Defaults, no labels.
        Assert.assertEquals(1024, detail.getNodeResource().getMemorySize());
        Assert.assertEquals(2, detail.getNodeResource().getVirtualCores());
        Assert.assertNull(detail.getLabels());
        break;
      case "/rack1/node3":
        // Defaults plus one exclusive and one non-exclusive label.
        Assert.assertEquals(1024, detail.getNodeResource().getMemorySize());
        Assert.assertEquals(2, detail.getNodeResource().getVirtualCores());
        Assert.assertEquals(2, detail.getLabels().size());
        for (NodeLabel nodeLabel : detail.getLabels()) {
          if (nodeLabel.getName().equals("label1")) {
            Assert.assertTrue(nodeLabel.isExclusive());
          } else if (nodeLabel.getName().equals("label2")) {
            Assert.assertFalse(nodeLabel.isExclusive());
          } else {
            Assert.fail("Unexpected label");
          }
        }
        break;
      case "/rack1/node4":
        // Overridden resources plus two labels.
        Assert.assertEquals(6144, detail.getNodeResource().getMemorySize());
        Assert.assertEquals(12, detail.getNodeResource().getVirtualCores());
        Assert.assertEquals(2, detail.getLabels().size());
        break;
      default:
        break;
      }
    }
}
/**
 * Handles the ETH_PROTOCOL_VERSION operation: queries the node's protocol
 * version and, when the call succeeds, places it on the message body.
 */
@InvokeOnHeader(Web3jConstants.ETH_PROTOCOL_VERSION)
void ethProtocolVersion(Message message) throws IOException {
    Request<?, EthProtocolVersion> versionRequest = web3j.ethProtocolVersion();
    setRequestId(message, versionRequest);
    EthProtocolVersion versionResponse = versionRequest.send();
    // checkForError copies any error onto the message; only a clean response
    // yields a body.
    if (!checkForError(message, versionResponse)) {
        message.setBody(versionResponse.getProtocolVersion());
    }
}
@Test
public void ethProtocolVersionTest() throws Exception {
    // Stub the web3j call chain so the node reports protocol version "123".
    EthProtocolVersion response = Mockito.mock(EthProtocolVersion.class);
    Mockito.when(mockWeb3j.ethProtocolVersion()).thenReturn(request);
    Mockito.when(request.send()).thenReturn(response);
    Mockito.when(response.getProtocolVersion()).thenReturn("123");

    Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.ETH_PROTOCOL_VERSION);
    template.send(exchange);

    // The handler must copy the protocol version onto the message body.
    assertEquals("123", exchange.getIn().getBody(String.class));
}
/**
 * Returns the canonical class names of every conversion the given schema
 * relies on, de-duplicated via a HashSet.
 */
public Collection<String> getUsedConversionClasses(Schema schema) {
    final Collection<String> classNames = new HashSet<>();
    for (final Conversion<?> usedConversion : getUsedConversions(schema)) {
        classNames.add(usedConversion.getClass().getCanonicalName());
    }
    return classNames;
}
@Test
void getUsedConversionClassesForNullableTimestamps() throws Exception {
    SpecificCompiler compiler = createCompiler();

    // timestamp-millis and timestamp-micros used to cause collisions when both were
    // present or added as converters (AVRO-2481).
    final Schema tsMillis = LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
    final Schema tsMicros = LogicalTypes.timestampMicros().addToSchema(Schema.create(Schema.Type.LONG));
    final Schema tsNanos = LogicalTypes.timestampNanos().addToSchema(Schema.create(Schema.Type.LONG));

    // A record with each timestamp flavor both required and as a nullable union,
    // so the compiler must resolve conversions through union branches too.
    final Collection<String> conversions = compiler.getUsedConversionClasses(SchemaBuilder.record("WithTimestamps")
        .fields().name("tsMillis").type(tsMillis).noDefault().name("tsMillisOpt").type().unionOf().nullType().and()
        .type(tsMillis).endUnion().noDefault().name("tsMicros").type(tsMicros).noDefault().name("tsMicrosOpt").type()
        .unionOf().nullType().and().type(tsMicros).endUnion().noDefault().name("tsNanos").type(tsNanos).noDefault()
        .name("tsNanosOpt").type().unionOf().nullType().and().type(tsNanos).endUnion().noDefault().endRecord());

    // Exactly one conversion class per timestamp precision — no duplicates.
    assertEquals(3, conversions.size());
    assertThat(conversions, hasItem("org.apache.avro.data.TimeConversions.TimestampMillisConversion"));
    assertThat(conversions, hasItem("org.apache.avro.data.TimeConversions.TimestampMicrosConversion"));
    assertThat(conversions, hasItem("org.apache.avro.data.TimeConversions.TimestampNanosConversion"));
}
/**
 * Triggers SAML metadata collection for the connection identified by
 * {@code id} (or all connections, per the service's contract) and returns the
 * collection summary. Thin endpoint: delegates to the metadata processor service.
 *
 * @param id connection identifier from the path
 * @return summary map produced by the metadata collection
 * @throws CollectSamlMetadataException if collection fails
 */
@Operation(summary = "Start collecting of Metadata for one or all connections ")
@GetMapping(value = "collect_metadata/{id}", produces = "application/json")
public Map<String, String> collectMetadata(@PathVariable("id") String id) throws CollectSamlMetadataException {
    return metadataProcessorService.collectSamlMetadata(id);
}
@Test
public void collectMetadata() throws CollectSamlMetadataException {
    // Stub the service to report one collected connection.
    Map<String, String> collectMetadata = new HashMap<>();
    collectMetadata.put("count", "1");
    when(metadataProcessorServiceMock.collectSamlMetadata(anyString())).thenReturn(collectMetadata);

    Map<String, String> result = controllerMock.collectMetadata("id");

    verify(metadataProcessorServiceMock, times(1)).collectSamlMetadata(anyString());
    // Check non-null before dereferencing, and use JUnit's (expected, actual)
    // argument order — the original had both reversed.
    assertNotNull(result);
    assertEquals(collectMetadata.size(), result.size());
}
/**
 * Returns a page of API error logs matching the given filter criteria.
 * Thin delegation to the mapper's paged select.
 *
 * @param pageReqVO paging and filter parameters
 * @return the matching page of error-log records
 */
@Override
public PageResult<ApiErrorLogDO> getApiErrorLogPage(ApiErrorLogPageReqVO pageReqVO) {
    return apiErrorLogMapper.selectPage(pageReqVO);
}
@Test
public void testGetApiErrorLogPage() {
    // mock data: one record matching every filter criterion
    ApiErrorLogDO apiErrorLogDO = randomPojo(ApiErrorLogDO.class, o -> {
        o.setUserId(2233L);
        o.setUserType(UserTypeEnum.ADMIN.getValue());
        o.setApplicationName("yudao-test");
        o.setRequestUrl("foo");
        o.setExceptionTime(buildTime(2021, 3, 13));
        o.setProcessStatus(ApiErrorLogProcessStatusEnum.INIT.getStatus());
    });
    apiErrorLogMapper.insert(apiErrorLogDO);
    // record whose userId does not match
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, o -> o.setUserId(3344L)));
    // record whose userType does not match
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, o -> o.setUserType(UserTypeEnum.MEMBER.getValue())));
    // record whose applicationName does not match
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, o -> o.setApplicationName("test")));
    // record whose requestUrl does not match
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, o -> o.setRequestUrl("bar")));
    // record whose exceptionTime does not match: an earlier time, 2021-02-06 00:00:00
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO, o -> o.setExceptionTime(buildTime(2021, 2, 6))));
    // record whose processStatus does not match
    apiErrorLogMapper.insert(cloneIgnoreId(apiErrorLogDO,
        logDO -> logDO.setProcessStatus(ApiErrorLogProcessStatusEnum.DONE.getStatus())));

    // build the query parameters
    ApiErrorLogPageReqVO reqVO = new ApiErrorLogPageReqVO();
    reqVO.setUserId(2233L);
    reqVO.setUserType(UserTypeEnum.ADMIN.getValue());
    reqVO.setApplicationName("yudao-test");
    reqVO.setRequestUrl("foo");
    reqVO.setExceptionTime(buildBetweenTime(2021, 3, 1, 2021, 3, 31));
    reqVO.setProcessStatus(ApiErrorLogProcessStatusEnum.INIT.getStatus());

    // invoke
    PageResult<ApiErrorLogDO> pageResult = apiErrorLogService.getApiErrorLogPage(reqVO);

    // assert: only the single matching record is returned
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(apiErrorLogDO, pageResult.getList().get(0));
}
/**
 * Removes grants whose grantee is a user that no longer exists: collects all
 * user-type grantees, subtracts the set of existing users, and deletes grants
 * for the remainder.
 */
@Subscribe
public void handleUserDeletedEvent(UserDeletedEvent event) {
    // All user-type grantees currently referenced by grants.
    final Set<GRN> userGrantees;
    try (final Stream<GrantDTO> grants = grantService.streamAll()) {
        userGrantees = grants
                .map(GrantDTO::grantee)
                .filter(grantee -> grantee.grnType().equals(GRNTypes.USER))
                .collect(Collectors.toSet());
    }

    // GRNs of all users that still exist.
    final Set<GRN> existingUsers;
    try (final Stream<UserOverviewDTO> allUsers = userService.streamAll()) {
        existingUsers = allUsers
                .map(user -> grnRegistry.newGRN(GRNTypes.USER.type(), user.id()))
                .collect(Collectors.toSet());
    }

    // Grantees without a backing user are stale; purge their grants.
    final Sets.SetView<GRN> staleGrantees = Sets.difference(userGrantees, existingUsers);
    if (!staleGrantees.isEmpty()) {
        log.debug("Clearing grants for {} grantees ({}).", staleGrantees.size(), staleGrantees);
        staleGrantees.forEach(grantService::deleteForGrantee);
    }
}
@Test
void noGrants() {
    // With no grants at all, the cleanup must not delete anything even though
    // a user exists.
    when(grantService.streamAll()).thenReturn(Stream.empty());
    when(userService.streamAll()).thenReturn(Stream.of(userA));

    cleanupListener.handleUserDeletedEvent(mock(UserDeletedEvent.class));

    verify(grantService, never()).deleteForGrantee(any());
}
/**
 * Validates the expression in the context of the given clause and returns the
 * set of source names it references.
 */
Set<SourceName> analyzeExpression(
    final Expression expression,
    final String clauseType
) {
    final Validator validator = new Validator(clauseType);
    validator.process(expression, null);
    return validator.referencedSources;
}
@Test public void shouldThrowOnPossibleSyntheticKeyColumnIfNotValidSyntheticKeyColumnName() { // Given: when(sourceSchemas.isJoin()).thenReturn(true); final Expression notSyntheticKey = new UnqualifiedColumnReferenceExp( ColumnName.of("NOT_ROWKEY") ); // When: final Exception e = assertThrows( UnknownColumnException.class, () -> analyzer.analyzeExpression(notSyntheticKey, "SELECT") ); // Then: assertThat(e.getMessage(), containsString( "SELECT column 'NOT_ROWKEY' cannot be resolved.")); }
/**
 * Collects table statistics for the fully-qualified table name, or returns
 * null when the stats collector's file system cannot be initialized.
 */
public IcebergTableStats collectTableStats(String fqtn) {
    final Table table = getTable(fqtn);
    final TableStatsCollector statsCollector;
    try {
        statsCollector = new TableStatsCollector(fs(), spark, fqtn, table);
    } catch (IOException e) {
        // Best-effort: stats are optional, so report the failure and bail out.
        log.error("Unable to initialize file system for table stats collection", e);
        return null;
    }
    return statsCollector.collectTableStats();
}
@Test
public void testCollectTableStats() throws Exception {
    final String tableName = "db.test_collect_table_stats";
    final int numInserts = 3;

    try (Operations ops = Operations.withCatalog(getSparkSession(), meter)) {
        prepareTable(ops, tableName);
        IcebergTableStats stats = ops.collectTableStats(tableName);

        // Validate empty data files case
        Assertions.assertEquals(stats.getNumReferencedDataFiles(), 0);
        Assertions.assertEquals(stats.getNumExistingMetadataJsonFiles(), 1);

        // A single insert must produce one data file and bump the table's
        // last-updated timestamp past the time captured just before the write.
        long modifiedTimeStamp = System.currentTimeMillis();
        populateTable(ops, tableName, 1);
        stats = ops.collectTableStats(tableName);
        Assertions.assertEquals(stats.getNumReferencedDataFiles(), 1);
        Assertions.assertTrue(stats.getTableLastUpdatedTimestamp() >= modifiedTimeStamp);

        // Capture first snapshot timestamp
        Table table = ops.getTable(tableName);
        long oldestSnapshot = table.currentSnapshot().timestampMillis();

        // Add more records and validate other stats
        populateTable(ops, tableName, numInserts);
        table = ops.getTable(tableName);
        log.info("Loaded table {}, location {}", table.name(), table.location());
        stats = ops.collectTableStats(tableName);

        // Snapshot identity and file counts must track the inserts:
        // one data file per insert plus the initial one, and one metadata json
        // per commit plus the initial two.
        Assertions.assertEquals(stats.getCurrentSnapshotId(), table.currentSnapshot().snapshotId());
        Assertions.assertEquals(stats.getNumReferencedDataFiles(), numInserts + 1);
        Assertions.assertEquals(stats.getNumExistingMetadataJsonFiles(), numInserts + 2);
        Assertions.assertEquals(
            stats.getCurrentSnapshotTimestamp(), table.currentSnapshot().timestampMillis());
        Assertions.assertEquals(stats.getOldestSnapshotTimestamp(), oldestSnapshot);
        // The directory object count must be fully explained by the referenced
        // data/metadata/manifest files (no stray objects).
        Assertions.assertEquals(
            stats.getNumObjectsInDirectory(),
            stats.getNumReferencedDataFiles()
                + stats.getNumExistingMetadataJsonFiles()
                + stats.getNumReferencedManifestFiles()
                + stats.getNumReferencedManifestLists());
    }
}
/**
 * Synchronously touches the given keys, blocking on the async variant.
 *
 * @param names keys to touch
 * @return the number of the named keys that actually exist
 */
@Override
public long touch(String... names) {
    return commandExecutor.get(touchAsync(names));
}
@Test
public void testTouch() {
    redisson.getSet("test").add("1");
    redisson.getSet("test10").add("1");

    // touch returns how many of the named keys actually exist.
    assertThat(redisson.getKeys().touch("test")).isEqualTo(1);
    assertThat(redisson.getKeys().touch("test", "test2")).isEqualTo(1);
    assertThat(redisson.getKeys().touch("test3", "test2")).isEqualTo(0);
    assertThat(redisson.getKeys().touch("test3", "test10", "test")).isEqualTo(2);
}
/**
 * Registers the callback to be notified of the target's size, delegating to
 * the internal size determiner (which may answer immediately or once layout
 * completes).
 */
@Override
public final void getSize(@NonNull SizeReadyCallback cb) {
    sizeDeterminer.getSize(cb);
}
@Test
public void testDoesNotNotifyCallbackTwiceIfAddedTwice() {
    // Register the same callback twice before the view has a size.
    target.getSize(cb);
    target.getSize(cb);

    // Give the view a concrete size and let the pre-draw pass fire.
    view.setLayoutParams(new FrameLayout.LayoutParams(100, 100));
    activity.visible();
    view.getViewTreeObserver().dispatchOnPreDraw();

    // The duplicate registration must still yield exactly one notification.
    verify(cb, times(1)).onSizeReady(anyInt(), anyInt());
}
/**
 * Returns this plugin's registered name (the web-client plugin identifier).
 */
@Override
public String named() {
    return PluginEnum.WEB_CLIENT.getName();
}
@Test
public void testNamed() {
    // The plugin must report the canonical web-client plugin name.
    assertEquals(PluginEnum.WEB_CLIENT.getName(), webClientPlugin.named());
}
/**
 * Ensures the given value is present, throwing {@code NotFoundException} with
 * the formatted message otherwise.
 *
 * @return the (non-null) value
 */
public static <T> T checkFound(@Nullable T value, String message, Object... messageArguments) {
    if (value != null) {
        return value;
    }
    throw new NotFoundException(format(message, messageArguments));
}
@Test
public void checkFound_type_throws_NotFoundException_if_parameter_is_null_and_formats_message() {
    // A null value must raise NotFoundException with the arguments substituted
    // into the message template.
    assertThatExceptionOfType(NotFoundException.class)
        .isThrownBy(() -> checkFound(null, "foo %s", "bar"))
        .withMessage("foo bar");
}
/**
 * gRPC entry point for topic-route queries. Builds a proxy context, submits
 * the work to the dedicated route thread pool, and writes the (possibly
 * error-status) response back through the observer. Any failure — including
 * rejection by the executor — is converted into a status-bearing response
 * rather than an unhandled exception.
 */
@Override
public void queryRoute(QueryRouteRequest request, StreamObserver<QueryRouteResponse> responseObserver) {
    // Maps an error Status to a response so failures always produce a reply.
    Function<Status, QueryRouteResponse> statusResponseCreator =
        status -> QueryRouteResponse.newBuilder().setStatus(status).build();
    ProxyContext context = createContext();
    try {
        this.addExecutor(this.routeThreadPoolExecutor,
            context,
            request,
            () -> grpcMessingActivity.queryRoute(context, request)
                .whenComplete((response, throwable) ->
                    writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
            responseObserver,
            statusResponseCreator);
    } catch (Throwable t) {
        // Executor rejection or context failure: still answer the client.
        writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
    }
}
@Test
public void testQueryRouteWithBadClientID() {
    // Attach gRPC metadata that lacks a CLIENT_ID entry; the request must be
    // rejected with CLIENT_ID_REQUIRED rather than processed.
    Metadata metadata = new Metadata();
    metadata.put(GrpcConstants.LANGUAGE, JAVA);
    metadata.put(GrpcConstants.REMOTE_ADDRESS, REMOTE_ADDR);
    metadata.put(GrpcConstants.LOCAL_ADDRESS, LOCAL_ADDR);
    Assert.assertNotNull(Context.current()
        .withValue(GrpcConstants.METADATA, metadata)
        .attach());

    QueryRouteRequest request = QueryRouteRequest.newBuilder()
        .setEndpoints(grpcEndpoints)
        .setTopic(Resource.newBuilder().setName(TOPIC).build())
        .build();
    grpcMessagingApplication.queryRoute(request, queryRouteResponseStreamObserver);

    // The response is written asynchronously; wait for the observer callback.
    ArgumentCaptor<QueryRouteResponse> responseArgumentCaptor =
        ArgumentCaptor.forClass(QueryRouteResponse.class);
    await().untilAsserted(() -> {
        Mockito.verify(queryRouteResponseStreamObserver, Mockito.times(1))
            .onNext(responseArgumentCaptor.capture());
    });
    assertEquals(Code.CLIENT_ID_REQUIRED, responseArgumentCaptor.getValue().getStatus().getCode());
}
/**
 * Checks whether the given string is a landline-style phone number:
 * an optional "+"-prefixed country code, an optional 3-4 digit area code
 * (optionally followed by a dash), then 7-8 digits.
 */
public static boolean isPhone(String phone) {
    final String landlinePattern = "(\\+\\d+)?(\\d{3,4}\\-?)?\\d{7,8}$";
    return isMatch(landlinePattern, phone);
}
// Exercises PatternKit.isPhone with valid landline formats (3-4 digit area code,
// dash optional) and with inputs that must be rejected (space separator,
// too-short and too-long local parts).
@Test
public void testPhone() {
    Assert.assertTrue(PatternKit.isPhone("033-88888888"));
    Assert.assertTrue(PatternKit.isPhone("033-7777777"));
    Assert.assertTrue(PatternKit.isPhone("0444-88888888"));
    Assert.assertTrue(PatternKit.isPhone("0444-7777777"));
    Assert.assertTrue(PatternKit.isPhone("04447777777"));
    Assert.assertFalse(PatternKit.isPhone("133 88888888"));
    Assert.assertFalse(PatternKit.isPhone("033-666666"));
    Assert.assertFalse(PatternKit.isPhone("0444-999999999"));
}
/**
 * Maps an ActiveMQ (OpenWire) message to an encoded AMQP 1.0 message.
 * AMQP sections (Header, DeliveryAnnotations, MessageAnnotations, Properties,
 * ApplicationProperties, body Section, Footer) are created lazily: each one is
 * only instantiated and encoded when the source message actually contributes
 * a value to it.
 *
 * @param message the OpenWire message to convert; may be null
 * @return the AMQP-encoded message, or null when {@code message} is null
 * @throws Exception on body conversion or property access failures
 */
@Override
public EncodedMessage transform(ActiveMQMessage message) throws Exception {
    if (message == null) {
        return null;
    }

    long messageFormat = 0;
    // All sections start out null and are allocated on first use.
    Header header = null;
    Properties properties = null;
    Map<Symbol, Object> daMap = null;
    Map<Symbol, Object> maMap = null;
    Map<String,Object> apMap = null;
    Map<Object, Object> footerMap = null;

    Section body = convertBody(message);

    // JMS delivery mode PERSISTENT maps to the AMQP durable header flag.
    if (message.isPersistent()) {
        if (header == null) {
            header = new Header();
        }
        header.setDurable(true);
    }

    // Only encode priority when it deviates from the JMS default.
    byte priority = message.getPriority();
    if (priority != Message.DEFAULT_PRIORITY) {
        if (header == null) {
            header = new Header();
        }
        header.setPriority(UnsignedByte.valueOf(priority));
    }

    // JMSType maps to the AMQP subject field.
    String type = message.getType();
    if (type != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setSubject(type);
    }

    MessageId messageId = message.getMessageId();
    if (messageId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setMessageId(getOriginalMessageId(message));
    }

    ActiveMQDestination destination = message.getDestination();
    if (destination != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setTo(destination.getQualifiedName());
        // Destination kind (queue/topic/temp) is carried as a message annotation.
        if (maMap == null) {
            maMap = new HashMap<>();
        }
        maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination));
    }

    ActiveMQDestination replyTo = message.getReplyTo();
    if (replyTo != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setReplyTo(replyTo.getQualifiedName());
        if (maMap == null) {
            maMap = new HashMap<>();
        }
        maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo));
    }

    String correlationId = message.getCorrelationId();
    if (correlationId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        try {
            // Prefer a typed AMQP id (ulong/uuid/binary) when the string encodes one.
            properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId));
        } catch (AmqpProtocolException e) {
            // Fall back to the raw string when it is not a recognized typed id.
            properties.setCorrelationId(correlationId);
        }
    }

    long expiration = message.getExpiration();
    if (expiration != 0) {
        // Derive remaining TTL from the absolute expiration; clamp already-expired
        // messages to 1ms since AMQP TTL must be positive.
        long ttl = expiration - System.currentTimeMillis();
        if (ttl < 0) {
            ttl = 1;
        }
        if (header == null) {
            header = new Header();
        }
        header.setTtl(new UnsignedInteger((int) ttl));
        if (properties == null) {
            properties = new Properties();
        }
        properties.setAbsoluteExpiryTime(new Date(expiration));
    }

    long timeStamp = message.getTimestamp();
    if (timeStamp != 0) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setCreationTime(new Date(timeStamp));
    }

    // JMSX Message Properties
    int deliveryCount = message.getRedeliveryCounter();
    if (deliveryCount > 0) {
        if (header == null) {
            header = new Header();
        }
        header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount));
    }

    String userId = message.getUserID();
    if (userId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8)));
    }

    String groupId = message.getGroupID();
    if (groupId != null) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setGroupId(groupId);
    }

    int groupSequence = message.getGroupSequence();
    if (groupSequence > 0) {
        if (properties == null) {
            properties = new Properties();
        }
        properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence));
    }

    final Map<String, Object> entries;
    try {
        entries = message.getProperties();
    } catch (IOException e) {
        throw JMSExceptionSupport.create(e);
    }

    // Route each custom property either into a dedicated AMQP slot (keys with the
    // JMS_AMQP_ prefix) or into the ApplicationProperties section.
    for (Map.Entry<String, Object> entry : entries.entrySet()) {
        String key = entry.getKey();
        Object value = entry.getValue();
        if (key.startsWith(JMS_AMQP_PREFIX)) {
            if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) {
                // skip transformer appended properties
                continue;
            } else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
                // skip transformer appended properties
                continue;
            } else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) {
                messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class);
                continue;
            } else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) {
                // Marker only: forces a Header section to be emitted.
                if (header == null) {
                    header = new Header();
                }
                continue;
            } else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) {
                // Marker only: forces a Properties section to be emitted.
                if (properties == null) {
                    properties = new Properties();
                }
                continue;
            } else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
                if (maMap == null) {
                    maMap = new HashMap<>();
                }
                String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length());
                maMap.put(Symbol.valueOf(name), value);
                continue;
            } else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) {
                if (header == null) {
                    header = new Header();
                }
                header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class));
                continue;
            } else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) {
                if (properties == null) {
                    properties = new Properties();
                }
                properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
                continue;
            } else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) {
                if (properties == null) {
                    properties = new Properties();
                }
                properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class)));
                continue;
            } else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) {
                if (properties == null) {
                    properties = new Properties();
                }
                properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class));
                continue;
            } else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
                if (daMap == null) {
                    daMap = new HashMap<>();
                }
                String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length());
                daMap.put(Symbol.valueOf(name), value);
                continue;
            } else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) {
                if (footerMap == null) {
                    footerMap = new HashMap<>();
                }
                String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length());
                footerMap.put(Symbol.valueOf(name), value);
                continue;
            }
        } else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX )) {
            // strip off the scheduled message properties
            continue;
        }

        // The property didn't map into any other slot so we store it in the
        // Application Properties section of the message.
        if (apMap == null) {
            apMap = new HashMap<>();
        }
        apMap.put(key, value);

        // NOTE(review): this data-structure-type annotation is computed inside the
        // property loop, so it is only added when at least one application property
        // exists and is re-put on every iteration — presumably intentional for
        // advisory messages, but worth confirming.
        int messageType = message.getDataStructureType();
        if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) {
            // Type of command to recognize advisory message
            Object data = message.getDataStructure();
            if(data != null) {
                apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName());
            }
        }
    }

    // Encode the populated sections in the order mandated by the AMQP 1.0 spec:
    // header, delivery-annotations, message-annotations, properties,
    // application-properties, body, footer.
    final AmqpWritableBuffer buffer = new AmqpWritableBuffer();
    encoder.setByteBuffer(buffer);
    if (header != null) {
        encoder.writeObject(header);
    }
    if (daMap != null) {
        encoder.writeObject(new DeliveryAnnotations(daMap));
    }
    if (maMap != null) {
        encoder.writeObject(new MessageAnnotations(maMap));
    }
    if (properties != null) {
        encoder.writeObject(properties);
    }
    if (apMap != null) {
        encoder.writeObject(new ApplicationProperties(apMap));
    }
    if (body != null) {
        encoder.writeObject(body);
    }
    if (footerMap != null) {
        encoder.writeObject(new Footer(footerMap));
    }

    return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength());
}
// An empty JMS BytesMessage must convert to an AMQP message whose body is a
// Data section holding a zero-length Binary (not a null body).
@Test
public void testConvertEmptyBytesMessageToAmqpMessageWithDataBody() throws Exception {
    ActiveMQBytesMessage outbound = createBytesMessage();
    outbound.onSend();
    outbound.storeContent();
    JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer();
    EncodedMessage encoded = transformer.transform(outbound);
    assertNotNull(encoded);
    Message amqp = encoded.decode();
    assertNotNull(amqp.getBody());
    assertTrue(amqp.getBody() instanceof Data);
    assertTrue(((Data) amqp.getBody()).getValue() instanceof Binary);
    assertEquals(0, ((Data) amqp.getBody()).getValue().getLength());
}
/**
 * Delivers the naming event to the wrapped listener. When the listener is an
 * {@code AbstractEventListener} that supplies its own executor, the callback is
 * dispatched on that executor; otherwise it runs on the calling thread.
 */
@Override
public void invoke(NamingEvent event) {
    logInvoke(event);
    if (!(listener instanceof AbstractEventListener)
            || ((AbstractEventListener) listener).getExecutor() == null) {
        // No dedicated executor configured: notify synchronously.
        listener.onEvent(event);
        return;
    }
    ((AbstractEventListener) listener).getExecutor().execute(() -> listener.onEvent(event));
}
// A plain EventListener (no executor) must be notified synchronously on the
// invoking thread, so verify() can run immediately after invoke().
@Test
public void testEventListener() {
    EventListener listener = mock(EventListener.class);
    NamingListenerInvoker listenerInvoker = new NamingListenerInvoker(listener);
    NamingEvent event = new NamingEvent("serviceName", Collections.emptyList());
    listenerInvoker.invoke(event);
    verify(listener).onEvent(event);
}
/** Returns the preferred block size decoded from the packed {@code header} field. */
@Override
public long getPreferredBlockSize() {
    return HeaderFormat.getPreferredBlockSize(header);
}
// The maximum allowed block size must survive the header round trip unchanged.
@Test
public void testPreferredBlockSizeUpperBound () {
    replication = 3;
    preferredBlockSize = BLKSIZE_MAXVALUE;
    INodeFile inf = createINodeFile(replication, preferredBlockSize);
    assertEquals("True has to be returned in this case", BLKSIZE_MAXVALUE,
        inf.getPreferredBlockSize());
}
/**
 * Clears the handler state that corresponds to the removed network config.
 * Indirect/default server configs are mutually exclusive (if/else-if), while
 * the ignore-VLAN config is checked independently.
 */
protected void removeConfig(Config config) {
    if (config instanceof IndirectDhcpRelayConfig) {
        v4Handler.setIndirectDhcpServerConfigs(Collections.emptyList());
        v6Handler.setIndirectDhcpServerConfigs(Collections.emptyList());
    } else if (config instanceof DefaultDhcpRelayConfig) {
        v4Handler.setDefaultDhcpServerConfigs(Collections.emptyList());
        v6Handler.setDefaultDhcpServerConfigs(Collections.emptyList());
    }
    if (config instanceof IgnoreDhcpConfig) {
        // null config signals the handlers to drop all ignored-VLAN rules.
        v4Handler.updateIgnoreVlanConfig(null);
        v6Handler.updateIgnoreVlanConfig(null);
    }
}
// Removing the IgnoreDhcpConfig must install REMOVE forwarding objectives for every
// DHCP selector on each device and, once the objective contexts report success,
// clear the ignored-VLAN maps of both the v4 and v6 handlers.
@Test
public void testRemoveIgnoreVlan() {
    v4Handler.ignoredVlans.put(DEV_1_ID, IGNORED_VLAN);
    v4Handler.ignoredVlans.put(DEV_2_ID, IGNORED_VLAN);
    v6Handler.ignoredVlans.put(DEV_1_ID, IGNORED_VLAN);
    v6Handler.ignoredVlans.put(DEV_2_ID, IGNORED_VLAN);
    IgnoreDhcpConfig config = new IgnoreDhcpConfig();
    // Capture every objective applied to each device.
    Capture<Objective> capturedFromDev1 = newCapture(CaptureType.ALL);
    flowObjectiveService.apply(eq(DEV_1_ID), capture(capturedFromDev1));
    expectLastCall().times(DHCP_SELECTORS.size());
    Capture<Objective> capturedFromDev2 = newCapture(CaptureType.ALL);
    flowObjectiveService.apply(eq(DEV_2_ID), capture(capturedFromDev2));
    expectLastCall().times(DHCP_SELECTORS.size());
    replay(flowObjectiveService);
    manager.removeConfig(config);
    verify(flowObjectiveService);
    List<Objective> objectivesFromDev1 = capturedFromDev1.getValues();
    List<Objective> objectivesFromDev2 = capturedFromDev2.getValues();
    // Both devices must receive the same set of objectives.
    assertTrue(objectivesFromDev1.containsAll(objectivesFromDev2));
    assertTrue(objectivesFromDev2.containsAll(objectivesFromDev1));
    for (int index = 0; index < objectivesFromDev1.size(); index++) {
        TrafficSelector selector = DefaultTrafficSelector.builder(DHCP_SELECTORS.get(index))
            .matchVlanId(IGNORED_VLAN)
            .build();
        ForwardingObjective fwd = (ForwardingObjective) objectivesFromDev1.get(index);
        assertEquals(selector, fwd.selector());
        assertEquals(DefaultTrafficTreatment.emptyTreatment(), fwd.treatment());
        assertEquals(IGNORE_CONTROL_PRIORITY, fwd.priority());
        assertEquals(ForwardingObjective.Flag.VERSATILE, fwd.flag());
        assertEquals(Objective.Operation.REMOVE, fwd.op());
        // Simulate the data plane acknowledging the removal.
        fwd.context().ifPresent(ctx -> {
            ctx.onSuccess(fwd);
        });
    }
    objectivesFromDev2.forEach(obj -> obj.context().ifPresent(ctx -> ctx.onSuccess(obj)));
    // Successful removal empties the handlers' ignored-VLAN bookkeeping.
    assertEquals(0, v4Handler.ignoredVlans.size());
    assertEquals(0, v6Handler.ignoredVlans.size());
}
/**
 * Callback from the HA backend when the leader information of a single component
 * changed. Performs the comparison against our confirmed leader information under
 * {@code lock} so it cannot race with leadership grant/revoke handling.
 */
@Override
public void onLeaderInformationChange(String componentId, LeaderInformation leaderInformation) {
    synchronized (lock) {
        notifyLeaderInformationChangeInternal(
                componentId,
                leaderInformation,
                // Empty leader information is used when we never confirmed this component.
                confirmedLeaderInformation.forComponentIdOrEmpty(componentId));
    }
}
// A leader-information change that only concerns an unregistered componentId must be
// ignored by the service: the HA backend's stored register must remain untouched
// (same instance, no corrective write-back).
@Test
void testAllLeaderInformationChangeEventWithUnknownComponentId() throws Exception {
    final AtomicReference<LeaderInformationRegister> storedLeaderInformation = new AtomicReference<>();
    new Context(storedLeaderInformation) {
        {
            runTestWithSynchronousEventHandling(
                    () -> {
                        final UUID leaderSessionID = UUID.randomUUID();
                        grantLeadership(leaderSessionID);
                        final LeaderInformationRegister correctLeaderInformationRegister =
                                storedLeaderInformation.get();
                        assertThat(correctLeaderInformationRegister.getRegisteredComponentIds())
                                .containsExactlyInAnyOrder(
                                        contenderContext0.componentId,
                                        contenderContext1.componentId);
                        // change LeaderInformation only affects an unregistered componentId
                        final String unknownComponentId = createRandomComponentId();
                        final LeaderInformationRegister partiallyChangedLeaderInformationRegister =
                                LeaderInformationRegister.merge(
                                        correctLeaderInformationRegister,
                                        unknownComponentId,
                                        LeaderInformation.known(
                                                UUID.randomUUID(),
                                                "address-for-" + unknownComponentId));
                        storedLeaderInformation.set(partiallyChangedLeaderInformationRegister);
                        leaderElectionService.onLeaderInformationChange(
                                partiallyChangedLeaderInformationRegister);
                        assertThat(storedLeaderInformation.get())
                                .as(
                                        "The HA backend shouldn't have been touched by the service.")
                                .isSameAs(partiallyChangedLeaderInformationRegister);
                    });
        }
    };
}
/**
 * Computes the scalar projection factor t of point "From" onto the line through
 * A and B, such that the projection is A + t * (B - A). Returns 0 when A and B
 * coincide (degenerate line, projection undefined).
 */
public static double getProjectionFactorToLine(
        final double pFromX, final double pFromY,
        final double pAX, final double pAY,
        final double pBX, final double pBY
) {
    final boolean degenerateSegment = pAX == pBX && pAY == pBY;
    if (degenerateSegment) {
        return 0;
    }
    final double dot = dotProduct(pAX, pAY, pBX, pBY, pFromX, pFromY);
    final double squaredLength = getSquaredDistanceToPoint(pAX, pAY, pBX, pBY);
    return dot / squaredLength;
}
// Covers degenerate segments (factor 0), projections before A (t < 0),
// beyond B (t > 1), exactly at B (t == 1) and strictly inside ]0,1[.
@Test
public void test_getProjectionFactorToLine() {
    final int xA = 100;
    final int yA = 200;
    Assert.assertEquals(0, Distance.getProjectionFactorToLine(xA, yA, xA, yA, xA, yA), mDelta);
    Assert.assertEquals(0, Distance.getProjectionFactorToLine(xA, yA, xA + 10, yA, xA + 10, yA), mDelta);
    Assert.assertEquals(0, Distance.getProjectionFactorToLine(xA, yA, xA, yA + 20, xA, yA + 20), mDelta);
    Assert.assertEquals(0, Distance.getProjectionFactorToLine(xA, yA + 20, xA, yA, xA + 100, yA), mDelta);
    Assert.assertEquals(-10. / 100, // < 0
        Distance.getProjectionFactorToLine(xA - 10, yA - 30, xA, yA, xA + 100, yA), mDelta);
    Assert.assertEquals(2, // > 1
        Distance.getProjectionFactorToLine(xA + 200, yA - 70, xA, yA, xA + 100, yA), mDelta);
    Assert.assertEquals(1, // 1
        Distance.getProjectionFactorToLine(xA + 200, yA - 7000, xA, yA, xA + 200, yA), mDelta);
    Assert.assertEquals(.2, // ]0,1[
        Distance.getProjectionFactorToLine(xA + 200, yA - 7000, xA, yA, xA + 1000, yA), mDelta);
}
/** Issues a blocking SENTINEL REMOVE for the given master's name. */
@Override
public void remove(NamedNode master) {
    connection.sync(RedisCommands.SENTINEL_REMOVE, master.getName());
}
// Smoke test: removing the first monitored master must not throw.
@Test
public void testRemove() {
    Collection<RedisServer> masters = connection.masters();
    connection.remove(masters.iterator().next());
}
/** Builds a Statement AST from the parse tree, resolving the statement's data sources first. */
public Statement buildStatement(final ParserRuleContext parseTree) {
    return build(Optional.of(getSources(parseTree)), parseTree);
}
// "SELECT T1.*" over a join must resolve to an AllColumns node qualified by the
// alias of the left join source.
@Test
public void shouldHandleAliasQualifiedSelectStarOnLeftJoinSource() {
    // Given:
    final SingleStatementContext stmt =
        givenQuery("SELECT T1.* FROM TEST1 T1 JOIN TEST2 WITHIN 1 SECOND ON T1.ID = TEST2.ID;");
    // When:
    final Query result = (Query) builder.buildStatement(stmt);
    // Then:
    assertThat(result.getSelect(),
        is(new Select(ImmutableList.of(new AllColumns(Optional.of(SourceName.of("T1")))))));
}
/**
 * Always returns the fixed {@code satisfied} flag, regardless of index or
 * trading record; the trace call only logs the evaluation.
 */
@Override
public boolean isSatisfied(int index, TradingRecord tradingRecord) {
    traceIsSatisfied(index, satisfied);
    return satisfied;
}
// Fixed rules must return their configured value for any index.
@Test
public void isSatisfied() {
    assertTrue(satisfiedRule.isSatisfied(0));
    assertTrue(satisfiedRule.isSatisfied(1));
    assertTrue(satisfiedRule.isSatisfied(2));
    assertTrue(satisfiedRule.isSatisfied(10));
    assertFalse(unsatisfiedRule.isSatisfied(0));
    assertFalse(unsatisfiedRule.isSatisfied(1));
    assertFalse(unsatisfiedRule.isSatisfied(2));
    assertFalse(unsatisfiedRule.isSatisfied(10));
}
/**
 * Launches a YARN service from a local JSON app definition via the REST API.
 *
 * @return EXIT_SUCCESS (or the API's mapped exit code) on success;
 *         EXIT_EXCEPTION_THROWN when any step fails (the error is logged,
 *         not rethrown, despite the declared checked exceptions).
 */
@Override
public int actionLaunch(String fileName, String appName, Long lifetime, String queue)
    throws IOException, YarnException {
    int result = EXIT_SUCCESS;
    try {
        Service service = loadAppJsonFromLocalFS(fileName, appName, lifetime, queue);
        String buffer = jsonSerDeser.toJson(service);
        ClientResponse response = getApiClient()
            .post(ClientResponse.class, buffer);
        result = processResponse(response);
    } catch (Exception e) {
        // Intentionally swallow and convert to an exit code for CLI callers.
        LOG.error("Fail to launch application: ", e);
        result = EXIT_EXCEPTION_THROWN;
    }
    return result;
}
// Launching the example app definition must succeed without exceptions.
@Test
void testLaunch() {
    String fileName = "target/test-classes/example-app.json";
    String appName = "example-app";
    long lifetime = 3600L;
    String queue = "default";
    try {
        int result = asc.actionLaunch(fileName, appName, lifetime, queue);
        assertEquals(EXIT_SUCCESS, result);
    } catch (IOException | YarnException e) {
        fail();
    }
}
/**
 * Validates the readwrite-splitting rule configuration: data source groups first
 * (existence/consistency against the actual data sources and built rules), then
 * the configured load balancer.
 */
@Override
public void check(final String databaseName, final ReadwriteSplittingRuleConfiguration ruleConfig,
                  final Map<String, DataSource> dataSourceMap, final Collection<ShardingSphereRule> builtRules) {
    checkDataSources(databaseName, ruleConfig.getDataSourceGroups(), dataSourceMap, builtRules);
    checkLoadBalancer(databaseName, ruleConfig);
}
// An invalid configuration (missing actual data sources) must make the SPI-loaded
// checker throw MissingRequiredReadwriteSplittingActualDataSourceException.
@SuppressWarnings({"rawtypes", "unchecked"})
@Test
void assertInvalidCheck() {
    ReadwriteSplittingRuleConfiguration config = createInvalidConfiguration();
    RuleConfigurationChecker checker = OrderedSPILoader.getServicesByClass(
        RuleConfigurationChecker.class, Collections.singleton(config.getClass())).get(config.getClass());
    assertThrows(MissingRequiredReadwriteSplittingActualDataSourceException.class,
        () -> checker.check("test", config, Collections.emptyMap(), Collections.emptyList()));
}
public String[] dir() throws KettleJobException { try { Vector<ChannelSftp.LsEntry> entries = channel.ls( "." ); if ( entries == null ) { return null; } List<String> files = entries.stream() .filter( lse -> lse != null && !lse.getAttrs().isDir() ) .map( ChannelSftp.LsEntry::getFilename ) .collect( Collectors.toList() ); // uses depend on being null when empty return files.isEmpty() ? null : files.toArray( new String[ files.size() ] ); } catch ( SftpException e ) { throw new KettleJobException( e ); } }
// dir() must return only regular files, filtering out directory entries.
@Test
public void testDir() throws Exception {
    Vector<ChannelSftp.LsEntry> files = new Vector<>();
    files.add( mockLsEntry("file_1", false ) );
    files.add( mockLsEntry("a_dir", true ) );
    when( channel.ls( anyString() ) ).thenReturn( files );
    SFTPClient client = spyClient();
    client.login( password );
    String[] dirs = client.dir();
    assertEquals( 1, dirs.length );
    assertEquals( "file_1", dirs[0] );
}
/** Returns whether deferred (two-phase) cluster state activation is enabled for this bundle. */
public boolean deferredActivation() {
    return this.deferredActivation;
}
// A freshly built bundle must report deferred activation as disabled.
@Test
void deferred_activation_is_disabled_by_default() {
    ClusterStateBundle bundle = createTestBundle();
    assertFalse(bundle.deferredActivation());
}
/**
 * Dispatches the predicate expression to the matching operator generator
 * (comparison, IN, BETWEEN). Returns empty when the predicate type is not
 * supported for sharding condition extraction.
 */
public static Optional<ShardingConditionValue> generate(final ExpressionSegment predicate, final Column column,
                                                        final List<Object> params, final TimestampServiceRule timestampServiceRule) {
    if (predicate instanceof BinaryOperationExpression) {
        return COMPARE_OPERATOR_GENERATOR.generate((BinaryOperationExpression) predicate, column, params, timestampServiceRule);
    }
    if (predicate instanceof InExpression) {
        return IN_OPERATOR_GENERATOR.generate((InExpression) predicate, column, params, timestampServiceRule);
    }
    if (predicate instanceof BetweenExpression) {
        return BETWEEN_OPERATOR_GENERATOR.generate((BetweenExpression) predicate, column, params, timestampServiceRule);
    }
    return Optional.empty();
}
// The factory dispatch for a BETWEEN predicate must produce the same condition
// value (column/table) as invoking the BETWEEN generator directly.
@Test
void assertGenerateBetweenExpression() {
    ConditionValueBetweenOperatorGenerator conditionValueBetweenOperatorGenerator = new ConditionValueBetweenOperatorGenerator();
    ExpressionSegment betweenSegment = new LiteralExpressionSegment(0, 0, 1);
    ExpressionSegment andSegment = new LiteralExpressionSegment(0, 0, 2);
    Optional<ShardingConditionValue> actual = conditionValueBetweenOperatorGenerator.generate(
        new BetweenExpression(0, 0, null, betweenSegment, andSegment, false), column, new LinkedList<>(), mock(TimestampServiceRule.class));
    Optional<ShardingConditionValue> expected = ConditionValueGeneratorFactory.generate(
        new BetweenExpression(0, 0, null, betweenSegment, andSegment, false), column, new LinkedList<>(), mock(TimestampServiceRule.class));
    assertTrue(actual.isPresent() && expected.isPresent());
    assertThat(actual.get().getColumnName(), is(expected.get().getColumnName()));
    assertThat(actual.get().getTableName(), is(expected.get().getTableName()));
}
/** Translates a Hive SearchArgument into an Iceberg filter Expression. */
public static Expression generateFilterExpression(SearchArgument sarg) {
    return translate(sarg.getExpression(), sarg.getLeaves());
}
// A Hive DECIMAL equality predicate must translate to an Iceberg equal()
// predicate holding the same BigDecimal value.
@Test
public void testDecimalType() {
    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
    SearchArgument arg = builder.startAnd()
        .equals("decimal", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("20.12")).end().build();
    UnboundPredicate expected = Expressions.equal("decimal", new BigDecimal("20.12"));
    UnboundPredicate actual = (UnboundPredicate) HiveIcebergFilterFactory.generateFilterExpression(arg);
    assertPredicatesMatch(expected, actual);
}
/** Returns this logger's configured name. */
@Override
public String name() {
    return name;
}
// name() must echo back the name the logger was created with.
@Test
public void testName() {
    assertEquals(loggerName, logger.name());
}
/**
 * Resolves the candidate users for the given comma-separated department ids:
 * parses the ids, fetches the users of those departments, and returns their ids.
 */
@Override
public Set<Long> calculateUsers(DelegateExecution execution, String param) {
    Set<Long> deptIds = StrUtils.splitToLongSet(param);
    List<AdminUserRespDTO> deptUsers = adminUserApi.getUserListByDeptIds(deptIds).getCheckedData();
    return convertSet(deptUsers, AdminUserRespDTO::getId);
}
// Department ids "11,22" must resolve to the ids of the users returned by the admin API.
@Test
public void testCalculateUsers() {
    // Prepare parameters
    String param = "11,22";
    // Mock the API call
    List<AdminUserRespDTO> users = convertList(asSet(11L, 22L), id -> new AdminUserRespDTO().setId(id));
    when(adminUserApi.getUserListByDeptIds(eq(asSet(11L, 22L)))).thenReturn(success(users));
    // Invoke
    Set<Long> results = strategy.calculateUsers(null, param);
    // Assert
    assertEquals(asSet(11L, 22L), results);
}
/**
 * Structural equality for beta nodes: same concrete class, same constraints,
 * same passive-right-input flag, same listened properties, and the same left and
 * right input node ids. The hashCode short-circuit relies on hashCode being
 * consistent with this definition.
 */
@Override
public boolean equals(Object object) {
    if (this == object) {
        return true;
    }
    // Fast rejection: non-beta network nodes or differing hash codes can never be equal.
    if (!NodeTypeEnums.isBetaNode((NetworkNode)object) || this.hashCode() != object.hashCode()) {
        return false;
    }
    BetaNode other = (BetaNode) object;
    return this.getClass() == other.getClass() && this.constraints.equals( other.constraints ) &&
        this.rightInputIsPassive == other.rightInputIsPassive &&
        Objects.equals(this.leftListenedProperties, other.leftListenedProperties) &&
        Objects.equals(this.rightListenedProperties, other.rightListenedProperties) &&
        this.leftInput.getId() == other.leftInput.getId() &&
        this.rightInput.getId() == other.rightInput.getId();
}
// Nodes with the same inputs and constraints are equal within the same concrete
// class (JoinNode==JoinNode, NotNode==NotNode) but never across classes.
@Test
public void testEqualsObject() {
    InternalRuleBase kBase = RuleBaseFactory.newRuleBase();
    BuildContext buildContext = new BuildContext( kBase, Collections.emptyList() );
    final LeftTupleSource ts = new MockTupleSource( 1, buildContext );
    final ObjectSource os = new MockObjectSource( 2, buildContext );
    final BetaNode j1 = new JoinNode( 1, ts, os, EmptyBetaConstraints.getInstance(), buildContext );
    final BetaNode j2 = new JoinNode( 2, ts, os, EmptyBetaConstraints.getInstance(), buildContext );
    final BetaNode n1 = new NotNode( 3, ts, os, EmptyBetaConstraints.getInstance(), buildContext );
    final BetaNode n2 = new NotNode( 4, ts, os, EmptyBetaConstraints.getInstance(), buildContext );
    assertThat(j1).isEqualTo(j1);
    assertThat(j2).isEqualTo(j2);
    assertThat(j2).isEqualTo(j1);
    assertThat(n1).isEqualTo(n1);
    assertThat(n2).isEqualTo(n2);
    assertThat(n2).isEqualTo(n1);
    assertThat(j1.equals(n1)).isFalse();
    assertThat(j1.equals(n2)).isFalse();
    assertThat(n1.equals(j1)).isFalse();
    assertThat(n1.equals(j2)).isFalse();
}
/**
 * Splits the symmetric difference of two snapshot lists by origin: the left of
 * the returned pair holds snapshots only present in {@code first}, the right
 * holds snapshots only present in {@code second}. Membership is decided by
 * snapshot id; input ordering is preserved.
 */
public static Pair<List<Snapshot>, List<Snapshot>> symmetricDifferenceSplit(
    List<Snapshot> first, List<Snapshot> second) {
  Set<Long> idsInFirst =
      first.stream().map(Snapshot::snapshotId).collect(Collectors.toSet());
  Set<Long> idsInSecond =
      second.stream().map(Snapshot::snapshotId).collect(Collectors.toSet());
  List<Snapshot> onlyInFirst =
      first.stream()
          .filter(snapshot -> !idsInSecond.contains(snapshot.snapshotId()))
          .collect(Collectors.toList());
  List<Snapshot> onlyInSecond =
      second.stream()
          .filter(snapshot -> !idsInFirst.contains(snapshot.snapshotId()))
          .collect(Collectors.toList());
  return Pair.of(onlyInFirst, onlyInSecond);
}
// Covers identical lists (both sides empty), one empty input (all snapshots on the
// other side), and overlapping sublists (only the non-shared ends remain).
@Test
public void testSymmetricDifference() {
    Assertions.assertEquals(
        Pair.of(Collections.emptyList(), Collections.emptyList()),
        SnapshotsUtil.symmetricDifferenceSplit(testSnapshots, testSnapshots));
    Assertions.assertEquals(
        Pair.of(Collections.emptyList(), testSnapshots),
        SnapshotsUtil.symmetricDifferenceSplit(Collections.emptyList(), testSnapshots));
    Assertions.assertEquals(
        Pair.of(testSnapshots, Collections.emptyList()),
        SnapshotsUtil.symmetricDifferenceSplit(testSnapshots, Collections.emptyList()));
    Assertions.assertEquals(
        Pair.of(testSnapshots.subList(0, 1), testSnapshots.subList(3, 4)),
        SnapshotsUtil.symmetricDifferenceSplit(
            testSnapshots.subList(0, 3), testSnapshots.subList(1, 4)));
}
/** Combine step for the aggregation framework: folds {@code other}'s histogram state into {@code state}. */
@CombineFunction
public static void merge(@AggregationState DoubleHistogramAggregation.State state,
                         @AggregationState DoubleHistogramAggregation.State other) {
    DoubleHistogramAggregation.merge(state, other);
}
// Merging the same intermediate state twice must double every histogram weight
// relative to a single-pass aggregation over the same input.
@Test
public void testMerge() {
    Accumulator singleStep = factory.createAccumulator(UpdateMemory.NOOP);
    singleStep.addInput(input);
    Block singleStepResult = getFinalBlock(singleStep);
    Accumulator partialStep = factory.createAccumulator(UpdateMemory.NOOP);
    partialStep.addInput(input);
    Block intermediate = getIntermediateBlock(partialStep);
    Accumulator finalStep = factory.createAccumulator(UpdateMemory.NOOP);
    // Add the same intermediate twice to simulate combining two identical partials.
    finalStep.addIntermediate(intermediate);
    finalStep.addIntermediate(intermediate);
    Block actual = getFinalBlock(finalStep);
    Map<Float, Float> expected = Maps.transformValues(extractSingleValue(singleStepResult), value -> value * 2);
    assertEquals(extractSingleValue(actual), expected);
}
/**
 * Resolves the --target option into concrete TargetInfo entries.
 * Without the option, all roles (master, job master, workers, job workers)
 * are targeted.
 *
 * @throws IOException when the option value is blank or target resolution fails
 */
public static List<TargetInfo> parseOptTarget(CommandLine cmd, AlluxioConfiguration conf)
    throws IOException {
    String[] targets;
    if (cmd.hasOption(TARGET_OPTION_NAME)) {
        String argTarget = cmd.getOptionValue(TARGET_OPTION_NAME);
        if (StringUtils.isBlank(argTarget)) {
            throw new IOException("Option " + TARGET_OPTION_NAME + " can not be blank.");
        } else if (argTarget.contains(TARGET_SEPARATOR)) {
            targets = argTarget.split(TARGET_SEPARATOR);
        } else {
            targets = new String[]{argTarget};
        }
    } else {
        // By default we set on all targets (master/workers/job_master/job_workers)
        targets = new String[]{ROLE_MASTER, ROLE_JOB_MASTER, ROLE_WORKERS, ROLE_JOB_WORKERS};
    }
    return getTargetInfos(targets, conf);
}
// "--target job_workers" must expand to one TargetInfo per healthy job worker,
// using the hostnames reported by the (mocked) job master client.
@Test
public void parseJobWorkerTargets() throws Exception {
    CommandLine mockCommandLine = mock(CommandLine.class);
    String[] mockArgs = new String[]{"--target", "job_workers"};
    when(mockCommandLine.getArgs()).thenReturn(mockArgs);
    when(mockCommandLine.hasOption(LogLevel.TARGET_OPTION_NAME)).thenReturn(true);
    when(mockCommandLine.getOptionValue(LogLevel.TARGET_OPTION_NAME)).thenReturn(mockArgs[1]);
    // Prepare a list of job workers
    List<JobWorkerHealth> jobWorkers = new ArrayList<>();
    jobWorkers.add(new JobWorkerHealth(0, new ArrayList<>(), 10, 0, 0, "workers-1"));
    jobWorkers.add(new JobWorkerHealth(1, new ArrayList<>(), 10, 0, 0, "workers-2"));
    // Static-mock the client factory so no real RPC connection is attempted.
    try (MockedStatic<JobMasterClient.Factory> mockFactory = mockStatic(JobMasterClient.Factory.class)) {
        JobMasterClient mockJobClient = mock(JobMasterClient.class);
        when(mockJobClient.getAllWorkerHealth()).thenReturn(jobWorkers);
        mockFactory.when(() -> JobMasterClient.Factory.create(any())).thenReturn(mockJobClient);
        List<LogLevel.TargetInfo> targets = LogLevel.parseOptTarget(mockCommandLine, mConf);
        assertEquals(2, targets.size());
        assertEquals(new LogLevel.TargetInfo("workers-1", JOB_WORKER_WEB_PORT, "job_worker"), targets.get(0));
        assertEquals(new LogLevel.TargetInfo("workers-2", JOB_WORKER_WEB_PORT, "job_worker"), targets.get(1));
    }
}
/**
 * Drives the client side of the MySQL authentication handshake as a state machine
 * keyed on the inbound packet type: handshake -> handshake response,
 * auth-switch -> switched auth response, auth-more-data -> caching_sha2 flow,
 * OK -> success (handler removed from pipeline), anything else is treated as an
 * error packet and fails the connection.
 */
@SneakyThrows(NoSuchAlgorithmException.class)
@Override
public void channelRead(final ChannelHandlerContext ctx, final Object msg) {
    if (msg instanceof MySQLHandshakePacket) {
        MySQLHandshakePacket handshake = (MySQLHandshakePacket) msg;
        MySQLHandshakeResponse41Packet handshakeResponsePacket = new MySQLHandshakeResponse41Packet(MAX_PACKET_SIZE, CHARACTER_SET, username);
        handshakeResponsePacket.setAuthResponse(generateAuthResponse(handshake.getAuthPluginData().getAuthenticationPluginData()));
        handshakeResponsePacket.setCapabilityFlags(generateClientCapability());
        handshakeResponsePacket.setAuthPluginName(MySQLAuthenticationMethod.NATIVE);
        ctx.channel().writeAndFlush(handshakeResponsePacket);
        serverVersion = new MySQLServerVersion(handshake.getServerVersion());
        return;
    }
    if (msg instanceof MySQLAuthSwitchRequestPacket) {
        MySQLAuthSwitchRequestPacket authSwitchRequest = (MySQLAuthSwitchRequestPacket) msg;
        ctx.channel().writeAndFlush(new MySQLAuthSwitchResponsePacket(getAuthPluginResponse(authSwitchRequest)));
        // Remember the new seed for any subsequent caching_sha2 exchange.
        seed = authSwitchRequest.getAuthPluginData().getAuthenticationPluginData();
        return;
    }
    if (msg instanceof MySQLAuthMoreDataPacket) {
        MySQLAuthMoreDataPacket authMoreData = (MySQLAuthMoreDataPacket) msg;
        handleCachingSha2Auth(ctx, authMoreData);
        return;
    }
    if (msg instanceof MySQLOKPacket) {
        // Authentication finished: this handler's job is done.
        ctx.channel().pipeline().remove(this);
        authResultCallback.setSuccess(serverVersion);
        return;
    }
    // Any remaining packet type is expected to be an error packet.
    MySQLErrPacket error = (MySQLErrPacket) msg;
    ctx.channel().close();
    throw new PipelineInternalException(error.getErrorMessage());
}
// An inbound MySQL error packet must make channelRead throw (PipelineInternalException,
// asserted via its RuntimeException supertype) and abort the negotiation.
@Test
void assertChannelReadErrorPacket() {
    MySQLErrPacket errorPacket = new MySQLErrPacket(
        new SQLException(MySQLVendorError.ER_NO_DB_ERROR.getReason(),
            MySQLVendorError.ER_NO_DB_ERROR.getSqlState().getValue(), MySQLVendorError.ER_NO_DB_ERROR.getVendorCode()));
    assertThrows(RuntimeException.class, () -> mysqlNegotiateHandler.channelRead(channelHandlerContext, errorPacket));
}
/**
 * Looks up the pluggable artifact config with the given id.
 *
 * @return the matching config, or null when no config has that id
 */
public PluggableArtifactConfig findByArtifactId(String artifactId) {
    return getPluggableArtifactConfigs().stream()
            .filter(artifact -> artifact.getId().equals(artifactId))
            .findFirst()
            .orElse(null);
}
// Lookup by id must return the config registered under that id.
@Test
public void findByArtifactId_shouldReturnPluggableArtifactConfigs() {
    ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs();
    allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3"));
    allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker"));
    final PluggableArtifactConfig s3 = allConfigs.findByArtifactId("s3");
    assertThat(s3, is(new PluggableArtifactConfig("s3", "cd.go.s3")));
}
/** Creates a Matches transform for the regex, emitting group 0 (the whole match). */
public static Matches matches(String regex) {
    return matches(regex, 0);
}
// Regex.matches with an explicit group index must emit only the captured group
// for matching elements and drop non-matching ones ("a").
@Test
@Category(NeedsRunner.class)
public void testMatchesGroup() {
    PCollection<String> output =
        p.apply(Create.of("a", "x xxx", "x yyy", "x zzz")).apply(Regex.matches("x ([xyz]*)", 1));
    PAssert.that(output).containsInAnyOrder("xxx", "yyy", "zzz");
    p.run();
}
/** Static factory for an empty Inner transform builder. */
public static <T> Inner<T> create() {
    return new Inner<>();
}
// AddFields with a default value must extend both the schema (new INT32 field)
// and every row (the default 42 appended).
@Test
@Category(NeedsRunner.class)
public void addSimpleFieldsDefaultValue() {
    Schema schema = Schema.builder().addStringField("field1").build();
    PCollection<Row> added =
        pipeline
            .apply(
                Create.of(Row.withSchema(schema).addValue("value").build()).withRowSchema(schema))
            .apply(AddFields.<Row>create().field("field2", Schema.FieldType.INT32, 42));
    Schema expectedSchema =
        Schema.builder()
            .addStringField("field1")
            .addField("field2", Schema.FieldType.INT32)
            .build();
    assertEquals(expectedSchema, added.getSchema());
    Row expected = Row.withSchema(expectedSchema).addValues("value", 42).build();
    PAssert.that(added).containsInAnyOrder(expected);
    pipeline.run();
}
/**
 * Fetches a parameter value from another step by running {@code fromStep} on the
 * dedicated executor with a timeout. Any failure (timeout, interruption, or an
 * exception from fromStep itself) is wrapped in MaestroInternalError with the
 * step/param identifiers for diagnostics.
 */
Object getFromStep(String stepId, String paramName) {
    try {
        return executor
            .submit(() -> fromStep(stepId, paramName))
            .get(TIMEOUT_IN_MILLIS, TimeUnit.MILLISECONDS);
    } catch (Exception e) {
        throw new MaestroInternalError(
            e,
            "getFromStep throws an exception for stepId=[%s], paramName=[%s]",
            stepId,
            paramName);
    }
}
// Calling getFromStep from a workflow-parameter context is invalid and must be
// surfaced as a MaestroInternalError.
@Test
public void testGetFromStepThrowsErrorWhenInvokedFromWorkflowExecContext() {
    when(instanceWrapper.isWorkflowParam()).thenReturn(true);
    AssertHelper.assertThrows(
        "Invalid field",
        MaestroInternalError.class,
        "getFromStep throws an exception fieldName=[step_id]",
        () -> paramExtension.getFromStep(Constants.STEP_ID_PARAM));
}
/** Two-argument substring overload: delegates with a null length, i.e. "to end of string". */
public FEELFnResult<String> invoke(@ParameterName("string") String string,
                                   @ParameterName("start position") Number start) {
    return invoke(string, start, null);
}
// Verifies that a length extending past the end of the string is clamped, for both
// positive and negative (from-end) start positions.
@Test void invokeLengthOutOfListBounds() { FunctionTestUtil.assertResult(substringFunction.invoke("test", 2, 3), "est"); FunctionTestUtil.assertResult(substringFunction.invoke("test", -3, 3), "est"); }
/**
 * Consuming is unsupported: the Dynamic Router endpoint can only produce.
 *
 * @throws IllegalStateException always, to fail fast on misconfigured routes
 */
@Override public Consumer createConsumer(final Processor processor) { throw new IllegalStateException("Dynamic Router is a producer-only component"); }
// Verifies that attempting to create a consumer on the producer-only endpoint throws.
@Test void createConsumerError() { assertThrows(IllegalStateException.class, () -> endpoint.createConsumer(processor)); }
/**
 * Builds runtime options from the @CucumberOptions annotations on {@code clazz} and
 * its superclasses, walking up the hierarchy so subclass options are added first and
 * inherited options accumulate. Falls back to a default feature path and glue derived
 * from the class when none were specified anywhere in the hierarchy.
 */
public RuntimeOptionsBuilder parse(Class<?> clazz) { RuntimeOptionsBuilder args = new RuntimeOptionsBuilder(); for (Class<?> classWithOptions = clazz; hasSuperClass( classWithOptions); classWithOptions = classWithOptions.getSuperclass()) { CucumberOptions options = requireNonNull(optionsProvider).getOptions(classWithOptions); if (options != null) { addDryRun(options, args); addMonochrome(options, args); addTags(classWithOptions, options, args); addPlugins(options, args); addPublish(options, args); addName(options, args); addSnippets(options, args); addGlue(options, args); addFeatures(options, args); addObjectFactory(options, args); addUuidGenerator(options, args); } } addDefaultFeaturePathIfNoFeaturePathIsSpecified(args, clazz); addDefaultGlueIfNoOverridingGlueIsSpecified(args, clazz); return args; }
// Verifies that plugins declared on a superclass are inherited: the parsed options for
// the subclass yield both the subclass's and the base class's formatters.
@Test void inherit_plugin_from_baseclass() { RuntimeOptions runtimeOptions = parser().parse(SubClassWithFormatter.class).build(); Plugins plugins = new Plugins(new PluginFactory(), runtimeOptions); plugins.setEventBusOnEventListenerPlugins(new TimeServiceEventBus(Clock.systemUTC(), UUID::randomUUID)); List<Plugin> pluginList = plugins.getPlugins(); assertAll( () -> assertPluginExists(pluginList, HtmlFormatter.class.getName()), () -> assertPluginExists(pluginList, PrettyFormatter.class.getName())); }
/**
 * Loads all keys for MapLoader initialization by running the configured key query.
 * Returns an empty list when key loading is disabled via properties. The returned
 * Iterable produces a closing iterator so the underlying SqlResult is released when
 * iteration completes, per the loadAllKeys contract.
 */
@Override public Iterable<K> loadAllKeys() { // If loadAllKeys property is disabled, don't load anything if (!genericMapStoreProperties.loadAllKeys) { return Collections.emptyList(); } awaitSuccessfulInit(); String sql = queries.loadAllKeys(); SqlResult keysResult = sqlService.execute(sql); // The contract for loadAllKeys says that if iterator implements Closable // then it will be closed when the iteration is over return () -> new MappingClosingIterator<>( keysResult.iterator(), (SqlRow row) -> row.getObject(genericMapStoreProperties.idColumn), keysResult::close ); }
// Verifies that loadAllKeys surfaces the id column of an inserted row (id 0).
@Test public void givenRow_whenLoadAllKeys_thenReturnKeys() { ObjectSpec spec = objectProvider.createObject(mapName, false); mapLoader = createMapLoader(); objectProvider.insertItems(spec, 1); List<Integer> ids = newArrayList(mapLoader.loadAllKeys()); assertThat(ids).contains(0); }
/**
 * Renders this component change as a JSON-like audit string. String-valued fields are
 * quoted/escaped by addField (third arg true); booleans are emitted raw (false), with
 * null booleans collapsing to an empty value via Objects.toString.
 */
@Override public String toString() { StringBuilder sb = new StringBuilder("{"); addField(sb, "\"componentUuid\": ", this.componentUuid, true); addField(sb, "\"componentKey\": ", this.componentKey, true); addField(sb, "\"componentName\": ", this.componentName, true); addField(sb, "\"qualifier\": ", getQualifier(qualifier), true); addField(sb, "\"description\": ", this.description, true); addField(sb, "\"path\": ", this.path, true); addField(sb, "\"isPrivate\": ", Objects.toString(this.isPrivate, ""), false); addField(sb, "\"isEnabled\": ", Objects.toString(this.isEnabled, ""), false); endString(sb); return sb.toString(); }
// Verifies the audit toString output: quoted string fields with embedded quotes escaped,
// qualifier translated to "project", and the boolean rendered unquoted.
@Test void toString_project_uuid_and_name_and_isPrivate_withEscapedQuotes() { ComponentNewValue newValue = new ComponentNewValue("uuid", "the \"best\" name", "key", true, "TRK"); assertThat(newValue.toString()) .contains("\"componentUuid\": \"uuid\"") .contains("\"componentKey\": \"key\"") .contains("\"componentName\": \"the \\\"best\\\" name\"") .contains("\"qualifier\": \"project\"") .contains("\"isPrivate\": true"); }
/**
 * Generates the C++ codec stubs for every message in the IR: first the message header
 * and shared type stubs, then one class per message. For each message the body tokens
 * are split (in order) into fields, groups, and var-data sections, and the emitted
 * class contains flyweight code, field/group/var-data accessors, display helpers,
 * message-length computation, and (when a field-precedence model applies) access-order
 * checking tables. Output is written via the configured OutputManager.
 *
 * @throws IOException if writing a generated source file fails
 */
public void generate() throws IOException { generateMessageHeaderStub(); final List<String> typesToInclude = generateTypeStubs(); for (final List<Token> tokens : ir.messages()) { final Token msgToken = tokens.get(0); final String className = formatClassName(msgToken.name()); final String stateClassName = className + "::CodecState"; final FieldPrecedenceModel fieldPrecedenceModel = precedenceChecks.createCodecModel(stateClassName, tokens); try (Writer out = outputManager.createOutput(className)) { final List<Token> messageBody = tokens.subList(1, tokens.size() - 1); int i = 0; final List<Token> fields = new ArrayList<>(); i = collectFields(messageBody, i, fields); final List<Token> groups = new ArrayList<>(); i = collectGroups(messageBody, i, groups); final List<Token> varData = new ArrayList<>(); collectVarData(messageBody, i, varData); out.append(generateFileHeader(ir.namespaces(), className, typesToInclude)); out.append(generateClassDeclaration(className)); out.append(generateMessageFlyweightCode(className, msgToken, fieldPrecedenceModel)); out.append(generateFullyEncodedCheck(fieldPrecedenceModel)); final StringBuilder sb = new StringBuilder(); generateFields(sb, className, fields, fieldPrecedenceModel, BASE_INDENT); generateGroups(sb, groups, fieldPrecedenceModel, BASE_INDENT); generateVarData(sb, className, varData, fieldPrecedenceModel, BASE_INDENT); generateDisplay(sb, msgToken.name(), fields, groups, varData); sb.append(generateMessageLength(groups, varData, BASE_INDENT)); sb.append("};\n"); generateLookupTableDefinitions(sb, className, fieldPrecedenceModel); sb.append(CppUtil.closingBraces(ir.namespaces().length)).append("#endif\n"); out.append(sb); } } }
// Regression test for issue 827: bitset code must shift a 64-bit literal
// (UINT64_C(0x1) <<) rather than the 32-bit "1u <<", which would overflow for
// high bit positions.
@Test void shouldUseGeneratedLiteralForConstantOneWhenGeneratingBitsetCode() throws Exception { try (InputStream in = Tests.getLocalResource("issue827.xml")) { final ParserOptions options = ParserOptions.builder().stopOnError(true).build(); final MessageSchema schema = parse(in, options); final IrGenerator irg = new IrGenerator(); final Ir ir = irg.generate(schema); final StringWriterOutputManager outputManager = new StringWriterOutputManager(); outputManager.setPackageName(ir.applicableNamespace()); final CppGenerator generator = new CppGenerator(ir, false, outputManager); generator.generate(); final String source = outputManager.getSource("issue827.FlagsSet").toString(); assertThat(source, not(containsString("1u << "))); assertThat(source, containsString("UINT64_C(0x1) << ")); } }
/**
 * Returns {@code true} if {@code x} is a positive power of two (1, 2, 4, 8, ...).
 *
 * <p>Uses the classic bit trick: {@code x & (x - 1)} clears the lowest set bit,
 * so the result is zero exactly when {@code x} has at most one bit set. The
 * explicit {@code x > 0} guard excludes 0 and negative inputs, for which the
 * bare trick would incorrectly report {@code true} (e.g. {@code 0 & -1 == 0},
 * and {@code Long.MIN_VALUE & Long.MAX_VALUE == 0}).
 *
 * @param x the value to test
 * @return {@code true} if {@code x} is a power of two, {@code false} otherwise
 */
public static boolean isPowerOfTwo(long x) {
    return x > 0 && (x & (x - 1)) == 0;
}
// Verifies isPowerOfTwo for small powers, non-powers, and Integer.MAX_VALUE (all
// bits set in the low 31 bits, hence not a power of two). Note: 0 is not covered.
@Test public void testIsPowerOfTwo() { assertTrue(QuickMath.isPowerOfTwo(1)); assertTrue(QuickMath.isPowerOfTwo(2)); assertFalse(QuickMath.isPowerOfTwo(3)); assertTrue(QuickMath.isPowerOfTwo(1024)); assertFalse(QuickMath.isPowerOfTwo(1023)); assertFalse(QuickMath.isPowerOfTwo(Integer.MAX_VALUE)); }
/**
 * Runs the configured formulas over the whole report component tree, crawling from
 * the root with a path-aware visitor that records measures via the measure repository.
 */
public void execute() { new PathAwareCrawler<>( FormulaExecutorComponentVisitor.newBuilder(metricRepository, measureRepository).buildFor(formulas)) .visit(treeRootHolder.getReportTreeRoot()); }
// Verifies duplicated-line counting: lines covered by the original block and an
// inner duplicate are counted once (expected total 16 for these overlapping ranges).
@Test public void compute_duplicated_lines_counts_lines_from_original_and_InnerDuplicate_only_once() { TextBlock original = new TextBlock(1, 12); duplicationRepository.addDuplication(FILE_1_REF, original, new TextBlock(10, 11), new TextBlock(11, 15)); duplicationRepository.addDuplication(FILE_1_REF, new TextBlock(2, 2), new TextBlock(96, 96)); underTest.execute(); assertRawMeasureValue(FILE_1_REF, DUPLICATED_LINES_KEY, 16); }
/**
 * Tokenizes the conversion pattern by driving a character-at-a-time state machine
 * (literal / format-modifier / option / keyword / right-parenthesis states). After
 * the input is exhausted, the final state is flushed into a last token; ending inside
 * a format modifier or option is a syntax error.
 *
 * @return the token list for the pattern
 * @throws ScanException if the pattern ends in an incomplete modifier or option
 */
List<Token> tokenize() throws ScanException { List<Token> tokenList = new ArrayList<Token>(); StringBuffer buf = new StringBuffer(); while (pointer < patternLength) { char c = pattern.charAt(pointer); pointer++; switch (state) { case LITERAL_STATE: handleLiteralState(c, tokenList, buf); break; case FORMAT_MODIFIER_STATE: handleFormatModifierState(c, tokenList, buf); break; case OPTION_STATE: processOption(c, tokenList, buf); break; case KEYWORD_STATE: handleKeywordState(c, tokenList, buf); break; case RIGHT_PARENTHESIS_STATE: handleRightParenthesisState(c, tokenList, buf); break; default: } } // EOS switch (state) { case LITERAL_STATE: addValuedToken(Token.LITERAL, buf, tokenList); break; case KEYWORD_STATE: tokenList.add(new Token(Token.SIMPLE_KEYWORD, buf.toString())); break; case RIGHT_PARENTHESIS_STATE: tokenList.add(Token.RIGHT_PARENTHESIS_TOKEN); break; case FORMAT_MODIFIER_STATE: case OPTION_STATE: throw new ScanException("Unexpected end of pattern string"); } return tokenList; }
// Verifies tokenization of a composite pattern with a format modifier, a bare
// composite keyword, a nested keyword with an option list, and a closing parenthesis.
@Test public void testSimpleP2() throws ScanException { List<Token> tl = new TokenStream("X %a %-12.550(hello %class{.4?})").tokenize(); List<Token> witness = new ArrayList<Token>(); witness.add(new Token(Token.LITERAL, "X ")); witness.add(Token.PERCENT_TOKEN); witness.add(new Token(Token.SIMPLE_KEYWORD, "a")); witness.add(new Token(Token.LITERAL, " ")); witness.add(Token.PERCENT_TOKEN); witness.add(new Token(Token.FORMAT_MODIFIER, "-12.550")); witness.add(Token.BARE_COMPOSITE_KEYWORD_TOKEN); witness.add(new Token(Token.LITERAL, "hello ")); witness.add(Token.PERCENT_TOKEN); witness.add(new Token(Token.SIMPLE_KEYWORD, "class")); List<String> ol = new ArrayList<String>(); ol.add(".4?"); witness.add(new Token(Token.OPTION, ol)); witness.add(Token.RIGHT_PARENTHESIS_TOKEN); assertEquals(witness, tl); }
/**
 * Lists entries at {@code path} with no filter applied; delegates to the
 * two-argument overload with a null predicate.
 */
@Override public List<String> ls(String path) { return ls(path, null); }
// Disabled integration smoke test: lists the SFTP root and prints entries; requires a
// live SSH/SFTP server, hence @Disabled. No assertions — manual inspection only.
@Test @Disabled public void lsTest() { List<String> files = sshjSftp.ls("/"); if (files != null && !files.isEmpty()) { files.forEach(System.out::print); } }
/**
 * Validates an extension version's metadata fields (version, target platform, display
 * name, description, categories, keywords, license, URLs, markdown/gallery/Q&A enums,
 * and field sizes) and returns the accumulated list of issues — empty when valid.
 * Execution is wrapped in a Micrometer Observation for timing/tracing.
 */
public List<Issue> validateMetadata(ExtensionVersion extVersion) { return Observation.createNotStarted("ExtensionValidator#validateMetadata", observations).observe(() -> { var issues = new ArrayList<Issue>(); checkVersion(extVersion.getVersion(), issues); checkTargetPlatform(extVersion.getTargetPlatform(), issues); checkCharacters(extVersion.getDisplayName(), "displayName", issues); checkFieldSize(extVersion.getDisplayName(), DEFAULT_STRING_SIZE, "displayName", issues); checkCharacters(extVersion.getDescription(), "description", issues); checkFieldSize(extVersion.getDescription(), DESCRIPTION_SIZE, "description", issues); checkCharacters(extVersion.getCategories(), "categories", issues); checkFieldSize(extVersion.getCategories(), DEFAULT_STRING_SIZE, "categories", issues); checkCharacters(extVersion.getTags(), "keywords", issues); checkFieldSize(extVersion.getTags(), DEFAULT_STRING_SIZE, "keywords", issues); checkCharacters(extVersion.getLicense(), "license", issues); checkFieldSize(extVersion.getLicense(), DEFAULT_STRING_SIZE, "license", issues); checkURL(extVersion.getHomepage(), "homepage", issues); checkFieldSize(extVersion.getHomepage(), DEFAULT_STRING_SIZE, "homepage", issues); checkURL(extVersion.getRepository(), "repository", issues); checkFieldSize(extVersion.getRepository(), DEFAULT_STRING_SIZE, "repository", issues); checkURL(extVersion.getBugs(), "bugs", issues); checkFieldSize(extVersion.getBugs(), DEFAULT_STRING_SIZE, "bugs", issues); checkInvalid(extVersion.getMarkdown(), s -> !MARKDOWN_VALUES.contains(s), "markdown", issues, MARKDOWN_VALUES.toString()); checkCharacters(extVersion.getGalleryColor(), "galleryBanner.color", issues); checkFieldSize(extVersion.getGalleryColor(), GALLERY_COLOR_SIZE, "galleryBanner.color", issues); checkInvalid(extVersion.getGalleryTheme(), s -> !GALLERY_THEME_VALUES.contains(s), "galleryBanner.theme", issues, GALLERY_THEME_VALUES.toString()); checkFieldSize(extVersion.getLocalizedLanguages(), DEFAULT_STRING_SIZE, 
"localizedLanguages", issues); checkInvalid(extVersion.getQna(), s -> !QNA_VALUES.contains(s) && isInvalidURL(s), "qna", issues, QNA_VALUES.toString() + " or a URL"); checkFieldSize(extVersion.getQna(), DEFAULT_STRING_SIZE, "qna", issues); return issues; }); }
// Verifies that a schema-only repository URL ("http://") is rejected with exactly one
// issue naming the field and the offending value.
@Test public void testInvalidURL3() { var extension = new ExtensionVersion(); extension.setTargetPlatform(TargetPlatform.NAME_UNIVERSAL); extension.setVersion("1.0.0"); extension.setRepository("http://"); var issues = validator.validateMetadata(extension); assertThat(issues).hasSize(1); assertThat(issues.get(0)) .isEqualTo(new ExtensionValidator.Issue("Invalid URL in field 'repository': http://")); }
/**
 * Convenience entry point for closure cleaning: delegates to the four-argument
 * overload with a fresh identity-based visited set, so each top-level call starts
 * its own cycle-detection scope.
 */
public static void clean( Object func, ExecutionConfig.ClosureCleanerLevel level, boolean checkSerializable) { clean(func, level, checkSerializable, Collections.newSetFromMap(new IdentityHashMap<>())); }
// Verifies that cleaning a plain (non-serializable) Object with serializability
// checking enabled fails with InvalidProgramException.
@Test void testCleanObject() { assertThatExceptionOfType(InvalidProgramException.class) .isThrownBy( () -> { ClosureCleaner.clean( new Object(), ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true); }); }
/**
 * Creates an S3 client builder from pipeline options; delegates to the two-argument
 * overload seeded with a default {@code S3Client.builder()}.
 */
@Override public S3ClientBuilder createBuilder(S3Options s3Options) { return createBuilder(S3Client.builder(), s3Options); }
// Verifies that when a proxy configuration is present in the options, the factory
// configures the builder's HTTP client (and touches nothing else on the builder).
@Test public void testSetProxyConfiguration() { when(s3Options.getProxyConfiguration()).thenReturn(ProxyConfiguration.builder().build()); DefaultS3ClientBuilderFactory.createBuilder(builder, s3Options); verify(builder).httpClientBuilder(any(ApacheHttpClient.Builder.class)); verifyNoMoreInteractions(builder); }
/**
 * Executes an HTTP request asynchronously. On completion the raw response is handed to
 * the ResponseHandler and the parsed result (or any handler exception) is delivered to
 * the callback; the response is always closed. Failure and cancellation are forwarded
 * to the callback. If submission fails with IllegalStateException (typically a dead
 * I/O reactor), the reactor's audit log is dumped to the error log before rethrowing,
 * to aid diagnosing the reactor shutdown.
 */
@Override public <T> void execute(URI uri, String httpMethod, RequestHttpEntity requestHttpEntity, final ResponseHandler<T> responseHandler, final Callback<T> callback) throws Exception { HttpRequestBase httpRequestBase = DefaultHttpClientRequest.build(uri, httpMethod, requestHttpEntity, defaultConfig); try { asyncClient.execute(httpRequestBase, new FutureCallback<HttpResponse>() { @Override public void completed(HttpResponse result) { DefaultClientHttpResponse response = new DefaultClientHttpResponse(result); try { HttpRestResult<T> httpRestResult = responseHandler.handle(response); callback.onReceive(httpRestResult); } catch (Exception e) { callback.onError(e); } finally { HttpClientUtils.closeQuietly(result); } } @Override public void failed(Exception ex) { callback.onError(ex); } @Override public void cancelled() { callback.onCancel(); } }); } catch (IllegalStateException e) { final List<ExceptionEvent> events = ioreactor.getAuditLog(); if (events != null) { for (ExceptionEvent event : events) { if (event != null) { LOGGER.error("[DefaultAsyncHttpClientRequest] IllegalStateException! I/O Reactor error time: {}", event.getTimestamp(), event.getCause()); } } } throw e; } }
// Verifies that an IllegalStateException from the async client is rethrown to the
// caller after the reactor audit log (stubbed with one event) has been consulted.
@Test void testExecuteException() throws Exception { Header header = Header.newInstance(); Map<String, String> body = new HashMap<>(); body.put("test", "test"); RequestHttpEntity httpEntity = new RequestHttpEntity(header, Query.EMPTY, body); IllegalStateException exception = new IllegalStateException("test"); when(client.execute(any(), any())).thenThrow(exception); when(ioReactor.getAuditLog()).thenReturn(Collections.singletonList(new ExceptionEvent(exception, new Date()))); try { httpClientRequest.execute(uri, "PUT", httpEntity, responseHandler, callback); } catch (Exception e) { assertEquals(exception, e); } }
/**
 * Peeks at the stream: reads up to MIN_LENGTH characters, pushes them back so the
 * stream position is preserved for later reads, and returns how many were read.
 * Best-effort by design — an IOException is logged at WARN and reported as 0 rather
 * than propagated.
 */
public int length() { try { final StringBuilder s = read(MIN_LENGTH); pushBack.append(s); return s.length(); } catch (IOException ex) { LOGGER.warn("Oops ", ex); } return 0; }
// Verifies length(): 0 for an empty stream, positive for non-empty input.
@Test public void testLength() { String data = ""; InputStream stream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8)); XmlInputStream instance = new XmlInputStream(stream); int expResult = 0; int result = instance.length(); assertEquals(expResult, result); data = "Input data"; stream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8)); instance = new XmlInputStream(stream); result = instance.length(); assertTrue(result > 0); }
/**
 * Evaluates the staining rules against the exchange and returns the merged labels of
 * every rule whose conditions all match. For each rule the condition keys are resolved
 * against the request (headers/query via SpEL-style label resolution); non-matching
 * rules are skipped. Later rules overwrite earlier ones on label-key collisions
 * (HashMap.putAll). Null or empty rule sets yield an empty map.
 */
Map<String, String> execute(ServerWebExchange exchange, StainingRule stainingRule) { if (stainingRule == null) { return Collections.emptyMap(); } List<StainingRule.Rule> rules = stainingRule.getRules(); if (CollectionUtils.isEmpty(rules)) { return Collections.emptyMap(); } Map<String, String> parsedLabels = new HashMap<>(); for (StainingRule.Rule rule : rules) { List<Condition> conditions = rule.getConditions(); Set<String> keys = new HashSet<>(); conditions.forEach(condition -> keys.add(condition.getKey())); Map<String, String> actualValues = SpringWebExpressionLabelUtils.resolve(exchange, keys); if (!ConditionUtils.match(actualValues, conditions)) { continue; } parsedLabels.putAll(KVPairUtils.toMap(rule.getLabels())); } return parsedLabels; }
// Verifies label merging across rules: rules 1 and 2 match the request (uid header and
// source query param) contributing env/label1/label2; rule 3's condition on a missing
// query param does not match, so label3 is absent — three labels in total.
@Test public void testMatchTwoRulesAndNotMatchOneRule() { Condition condition1 = new Condition(); condition1.setKey("${http.header.uid}"); condition1.setOperation(Operation.EQUALS.toString()); condition1.setValues(Collections.singletonList("1000")); Condition condition2 = new Condition(); condition2.setKey("${http.query.source}"); condition2.setOperation(Operation.IN.toString()); condition2.setValues(Collections.singletonList("wx")); // rule1 matched StainingRule.Rule rule1 = new StainingRule.Rule(); rule1.setConditions(Arrays.asList(condition1, condition2)); KVPair kvPair = new KVPair(); kvPair.setKey("env"); kvPair.setValue("blue"); rule1.setLabels(Collections.singletonList(kvPair)); // rule2 matched StainingRule.Rule rule2 = new StainingRule.Rule(); rule2.setConditions(Collections.singletonList(condition1)); KVPair kvPair2 = new KVPair(); kvPair2.setKey("label1"); kvPair2.setValue("value1"); KVPair kvPair3 = new KVPair(); kvPair3.setKey("label2"); kvPair3.setValue("value2"); rule2.setLabels(Arrays.asList(kvPair2, kvPair3)); // rule3 not matched Condition condition3 = new Condition(); condition3.setKey("${http.query.type}"); condition3.setOperation(Operation.IN.toString()); condition3.setValues(Collections.singletonList("wx")); StainingRule.Rule rule3 = new StainingRule.Rule(); rule3.setConditions(Collections.singletonList(condition3)); KVPair kvPair4 = new KVPair(); kvPair4.setKey("label3"); kvPair4.setValue("value3"); rule3.setLabels(Collections.singletonList(kvPair4)); StainingRule stainingRule = new StainingRule(); stainingRule.setRules(Arrays.asList(rule1, rule2, rule3)); MockServerHttpRequest request = MockServerHttpRequest.get("/users") .queryParam("source", "wx") .header("uid", "1000").build(); MockServerWebExchange exchange = new MockServerWebExchange.Builder(request).build(); RuleStainingExecutor executor = new RuleStainingExecutor(); Map<String, String> stainedLabels = executor.execute(exchange, stainingRule); assertThat(stainedLabels).isNotNull(); 
assertThat(stainedLabels.size()).isEqualTo(3); assertThat(stainedLabels.get("env")).isEqualTo("blue"); assertThat(stainedLabels.get("label1")).isEqualTo("value1"); assertThat(stainedLabels.get("label2")).isEqualTo("value2"); }
/**
 * Prints one line per supported feature (sorted by name) with its supported min/max
 * versions, the finalized version level (0 when the feature is not finalized), and
 * the finalized-features epoch ("-" when absent). Blocks on the Admin describe call.
 *
 * @throws ExecutionException if the describeFeatures call fails
 * @throws InterruptedException if the wait for the result is interrupted
 */
static void handleDescribe(Admin adminClient) throws ExecutionException, InterruptedException { FeatureMetadata featureMetadata = adminClient.describeFeatures().featureMetadata().get(); featureMetadata.supportedFeatures().keySet().stream().sorted().forEach(feature -> { short finalizedLevel = (featureMetadata.finalizedFeatures().get(feature) == null) ? 0 : featureMetadata.finalizedFeatures().get(feature).maxVersionLevel(); SupportedVersionRange range = featureMetadata.supportedFeatures().get(feature); System.out.printf("Feature: %s\tSupportedMinVersion: %s\tSupportedMaxVersion: %s\tFinalizedVersionLevel: %s\tEpoch: %s%n", feature, levelToString(feature, range.minVersion()), levelToString(feature, range.maxVersion()), levelToString(feature, finalizedLevel), (featureMetadata.finalizedFeaturesEpoch().isPresent()) ? featureMetadata.finalizedFeaturesEpoch().get().toString() : "-"); }); }
// Verifies the exact stdout format of handleDescribe: one sorted line per feature,
// with metadata.version levels rendered as IV strings and the epoch appended.
@Test public void testHandleDescribe() { String describeResult = ToolsTestUtils.captureStandardOut(() -> { try { FeatureCommand.handleDescribe(buildAdminClient()); } catch (Exception e) { throw new RuntimeException(e); } }); assertEquals(format("Feature: foo.bar\tSupportedMinVersion: 0\tSupportedMaxVersion: 10\tFinalizedVersionLevel: 5\tEpoch: 123%n" + "Feature: metadata.version\tSupportedMinVersion: 3.3-IV0\tSupportedMaxVersion: 3.3-IV3\tFinalizedVersionLevel: 3.3-IV2\tEpoch: 123"), describeResult); }
/**
 * REST endpoint: restarts the application identified by {@code id}. Looks up the app
 * entry in Solr, marks its YARN service STARTED, and asks the YARN service client to
 * restart it. Returns 200 on success, 400 if the service payload cannot be serialized.
 */
@Path("restart/{id}") @POST @Produces(MediaType.APPLICATION_JSON) public Response restartApp(@PathParam("id") String id) { AppCatalogSolrClient sc = new AppCatalogSolrClient(); AppEntry app = sc.findAppEntry(id); Service yarnApp = app.getYarnfile(); yarnApp.setState(ServiceState.STARTED); try { YarnServiceClient yc = new YarnServiceClient(); yc.restartApp(yarnApp); } catch (JsonProcessingException e) { return Response.status(Status.BAD_REQUEST).build(); } return Response.ok().build(); }
// NOTE(review): this stubs restartApp on a mock and then calls the mock — it checks the
// stubbed status only and does not exercise the real controller logic.
@Test void testRestartApp() throws Exception { String id = "application 1"; AppDetailsController ac = Mockito.mock(AppDetailsController.class); Service yarnfile = new Service(); Component comp = new Component(); Container c = new Container(); c.setId("container-1"); List<Container> containers = new ArrayList<Container>(); containers.add(c); comp.setContainers(containers); yarnfile.addComponent(comp); Response expected = Response.ok().build(); when(ac.restartApp(id)).thenReturn(Response.ok().build()); final Response actual = ac.restartApp(id); assertEquals(expected.getStatus(), actual.getStatus()); }
/**
 * Removes the field-type profile from each given index set, optionally cycling
 * (rotating) an index set immediately after its config is updated. Index-set ids that
 * do not resolve are skipped silently; any other failure is logged with the offending
 * id and rethrown, aborting the remaining updates.
 */
public void removeProfileFromIndexSets(final Set<String> indexSetsIds, final boolean rotateImmediately) { for (String indexSetId : indexSetsIds) { try { indexSetService.get(indexSetId).ifPresent(indexSetConfig -> { var updatedIndexSetConfig = removeProfileFromIndexSet(indexSetConfig); if (rotateImmediately) { updatedIndexSetConfig.ifPresent(this::cycleIndexSet); } }); } catch (Exception ex) { LOG.error("Failed to update field type in index set : " + indexSetId, ex); throw ex; } } }
// Verifies that removing a profile persists the config with the profile nulled out,
// and that no rotation happens when rotateImmediately is false.
@Test void testRemovesProfile() { existingIndexSet = existingIndexSet.toBuilder() .fieldTypeProfile("000000000000000000000042") .build(); doReturn(Optional.of(existingIndexSet)).when(indexSetService).get("existing_index_set"); toTest.removeProfileFromIndexSets(Set.of(existingIndexSet.id()), false); verify(mongoIndexSetService).save( existingIndexSet.toBuilder() .fieldTypeProfile(null) .build()); verifyNoInteractions(existingMongoIndexSet); }
/**
 * Thrift entry point for marking partitions immutable. Delegates to the internal
 * implementation; any Throwable is caught and converted into a RUNTIME_ERROR result
 * carrying the transaction id and the error message, so the RPC never propagates an
 * exception to the client. Request and result are logged at INFO.
 */
@Override public TImmutablePartitionResult updateImmutablePartition(TImmutablePartitionRequest request) throws TException { LOG.info("Receive update immutable partition: {}", request); TImmutablePartitionResult result; try { result = updateImmutablePartitionInternal(request); } catch (Throwable t) { LOG.warn(t.getMessage(), t); result = new TImmutablePartitionResult(); TStatus errorStatus = new TStatus(RUNTIME_ERROR); errorStatus.setError_msgs(Lists.newArrayList(String.format("txn_id=%d failed. %s", request.getTxn_id(), t.getMessage()))); result.setStatus(errorStatus); } LOG.info("Finish update immutable partition: {}", result); return result; }
// Exercises updateImmutablePartition with progressively more complete requests:
// missing db/table/partition ids yield RUNTIME_ERROR results (never a thrown
// exception), while requests with a valid table and partition id list return OK.
@Test public void testImmutablePartitionException() throws TException { Database db = GlobalStateMgr.getCurrentState().getDb("test"); OlapTable table = (OlapTable) db.getTable("site_access_exception"); List<Long> partitionIds = Lists.newArrayList(); FrontendServiceImpl impl = new FrontendServiceImpl(exeEnv); TImmutablePartitionRequest request = new TImmutablePartitionRequest(); TImmutablePartitionResult partition = impl.updateImmutablePartition(request); Table t = db.getTable("v"); Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.RUNTIME_ERROR); request.setDb_id(db.getId()); partition = impl.updateImmutablePartition(request); Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.RUNTIME_ERROR); request.setTable_id(t.getId()); partition = impl.updateImmutablePartition(request); Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.RUNTIME_ERROR); request.setTable_id(table.getId()); partition = impl.updateImmutablePartition(request); Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.RUNTIME_ERROR); request.setPartition_ids(partitionIds); partition = impl.updateImmutablePartition(request); Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.OK); partitionIds.add(1L); request.setPartition_ids(partitionIds); partition = impl.updateImmutablePartition(request); Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.OK); partitionIds = table.getPhysicalPartitions().stream() .map(PhysicalPartition::getId).collect(Collectors.toList()); request.setPartition_ids(partitionIds); partition = impl.updateImmutablePartition(request); Assert.assertEquals(partition.getStatus().getStatus_code(), TStatusCode.OK); }
/**
 * Returns this extension's type, constrained to the KeyboardExtensionType IntDef
 * values (e.g. TYPE_BOTTOM).
 */
@KeyboardExtension.KeyboardExtensionType public int getExtensionType() { return mExtensionType; }
// Verifies that enabling a specific bottom-row add-on makes it the enabled extension,
// with the expected id, TYPE_BOTTOM type, and keyboard layout resource.
@Test public void testGetCurrentKeyboardExtensionBottomChanged() throws Exception { AnyApplication.getBottomRowFactory(getApplicationContext()) .setAddOnEnabled("3659b9e0-dee2-11e0-9572-0800200c9a55", true); KeyboardExtension extension = AnyApplication.getBottomRowFactory(getApplicationContext()).getEnabledAddOn(); Assert.assertNotNull(extension); Assert.assertEquals("3659b9e0-dee2-11e0-9572-0800200c9a55", extension.getId()); Assert.assertEquals(KeyboardExtension.TYPE_BOTTOM, extension.getExtensionType()); Assert.assertEquals(R.xml.ext_kbd_bottom_row_iphone, extension.getKeyboardResId()); }
/**
 * Test hook: flags that partially-emitted buffered data must be discarded before the
 * next poll, simulating the state after a downstream failover.
 */
@VisibleForTesting void setIsPartialBufferCleanupRequired() { isPartialBufferCleanupRequired = true; }
// Verifies that after flagging partial-buffer cleanup, the subpartition skips the
// remaining chunks of the long first record and resumes at the next full record.
@TestTemplate void testSkipPartialDataLongRecordEndWithBuffer() throws Exception { final BufferWritingResultPartition writer = createResultPartition(); final PipelinedApproximateSubpartition subpartition = getPipelinedApproximateSubpartition(writer); writer.emitRecord(toByteBuffer(0, 1, 2, 3, 4, 5, 6, 42), 0); writer.emitRecord(toByteBuffer(100, 101, 102), 0); assertContent(requireNonNull(subpartition.pollBuffer()).buffer(), null, 0, 1, 2, 3); subpartition.setIsPartialBufferCleanupRequired(); assertContent(requireNonNull(subpartition.pollBuffer()).buffer(), null, 100, 101, 102); }
/**
 * Searches the caller's GitLab projects (non-archived, membership-only, name-sorted),
 * optionally filtered by name and paginated. Pagination metadata is read from the
 * X-Page / X-Per-Page / X-Total response headers (total may be absent). A non-JSON
 * body is reported as IllegalArgumentException; I/O failures are logged with the URL
 * and wrapped in IllegalStateException preserving the cause.
 */
public ProjectList searchProjects(String gitlabUrl, String personalAccessToken, @Nullable String projectName, @Nullable Integer pageNumber, @Nullable Integer pageSize) { String url = format("%s/projects?archived=false&simple=true&membership=true&order_by=name&sort=asc&search=%s%s%s", gitlabUrl, projectName == null ? "" : urlEncode(projectName), pageNumber == null ? "" : format("&page=%d", pageNumber), pageSize == null ? "" : format("&per_page=%d", pageSize) ); LOG.debug("get projects : [{}]", url); Request request = new Request.Builder() .addHeader(PRIVATE_TOKEN, personalAccessToken) .url(url) .get() .build(); try (Response response = client.newCall(request).execute()) { Headers headers = response.headers(); checkResponseIsSuccessful(response, "Could not get projects from GitLab instance"); List<Project> projectList = Project.parseJsonArray(response.body().string()); int returnedPageNumber = parseAndGetIntegerHeader(headers.get("X-Page")); int returnedPageSize = parseAndGetIntegerHeader(headers.get("X-Per-Page")); String xtotal = headers.get("X-Total"); Integer totalProjects = Strings.isEmpty(xtotal) ? null : parseAndGetIntegerHeader(xtotal); return new ProjectList(projectList, returnedPageNumber, returnedPageSize, totalProjects); } catch (JsonSyntaxException e) { throw new IllegalArgumentException("Could not parse GitLab answer to search projects. Got a non-json payload as result."); } catch (IOException e) { logException(url, e); throw new IllegalStateException(e.getMessage(), e); } }
// Verifies that a connection failure (server shut down) surfaces as IllegalStateException
// and that the failing URL and cause are logged at INFO for diagnosis.
@Test public void fail_search_projects_with_unexpected_io_exception_with_detailed_log() throws IOException { server.shutdown(); assertThatThrownBy(() -> underTest.searchProjects(gitlabUrl, "token", null, 1, 1)) .isInstanceOf(IllegalStateException.class) .hasMessageContaining("Failed to connect to"); assertThat(logTester.logs(Level.INFO).get(0)) .contains( "Gitlab API call to [" + server.url("/projects?archived=false&simple=true&membership=true&order_by=name&sort=asc&search=&page=1&per_page=1") + "] " + "failed with error message : [Failed to connect to " + server.getHostName()); }
/**
 * Builds a JAAS configuration entry of the form
 * {@code "<moduleName> required key1=\"value1\" key2=\"value2\";"}.
 *
 * <p>The module name must be non-empty and must not contain {@code '='} or
 * {@code ';'}; each option key must not contain {@code '='} or {@code ';'}
 * (either would corrupt the JAAS syntax). The module-name check is performed
 * up front, before iterating the options, so an invalid module name is
 * rejected even when {@code options} is empty — the previous loop-local check
 * silently skipped validation in that case.
 *
 * @param moduleName the JAAS login module name
 * @param options    option key/value pairs, appended as {@code key="value"}
 * @return the assembled JAAS configuration entry, terminated with {@code ';'}
 * @throws IllegalArgumentException if the module name or any option key is invalid
 * @throws NullPointerException     if any option key or value is null
 */
public static String jaasConfig(String moduleName, Map<String, String> options) {
    // Validate the module name once, independent of the options content.
    if (moduleName.isEmpty() || moduleName.contains(";") || moduleName.contains("=")) {
        throw new IllegalArgumentException("module name must be not empty and must not contain '=' or ';'");
    }
    StringJoiner joiner = new StringJoiner(" ");
    for (Entry<String, String> entry : options.entrySet()) {
        String key = Objects.requireNonNull(entry.getKey());
        String value = Objects.requireNonNull(entry.getValue());
        if (key.contains("=") || key.contains(";")) {
            throw new IllegalArgumentException("Keys must not contain '=' or ';'");
        }
        joiner.add(key + "=\"" + value + "\"");
    }
    return moduleName + " required " + joiner + ";";
}
// Verifies that an option key containing '=' is rejected when building JAAS config.
@Test public void testKeyContainsEqualSign() { Map<String, String> options = new HashMap<>(); options.put("key1=", "value1"); String moduleName = "Module"; assertThrows(IllegalArgumentException.class, () -> AuthenticationUtils.jaasConfig(moduleName, options)); }
/**
 * Synchronous put: blocks on the async variant and returns whether the multimap
 * was modified.
 */
@Override public boolean put(K key, V value) { return get(putAsync(key, value)); }
// Verifies containsKey on a list multimap: true for an inserted key, false otherwise.
@Test public void testContainsKey() { RListMultimap<SimpleKey, SimpleValue> map = redisson.getListMultimap("test1"); map.put(new SimpleKey("0"), new SimpleValue("1")); assertThat(map.containsKey(new SimpleKey("0"))).isTrue(); assertThat(map.containsKey(new SimpleKey("1"))).isFalse(); }
/**
 * This view is read-only: clearing is not supported and always throws the shared
 * modification-attempt error.
 */
@Override public void clear() { throw MODIFICATION_ATTEMPT_ERROR; }
// Verifies that the read-only list state still exposes its contents but rejects
// clear() with UnsupportedOperationException.
@Test void testClear() throws Exception { List<Long> list = getStateContents(); assertThat(list).containsExactly(42L); assertThatThrownBy(() -> listState.clear()) .isInstanceOf(UnsupportedOperationException.class); }
/**
 * Formats {@code input} into its string representation for the given locale.
 * Implementations define the concrete formatting rules; both arguments are non-null.
 *
 * @param input  the value to format
 * @param locale the locale governing locale-sensitive formatting
 * @return the formatted string
 */
public abstract String format(@Nonnull Object input, @Nonnull Locale locale);
// Documents a DecimalFormat pitfall: even with RoundingMode.HALF_UP, format(0.15) is
// not "0.2" because the binary double nearest 0.15 is slightly below it.
@Test public void testDecimalFormat() { DecimalFormat df = new DecimalFormat("#.#"); df.setRoundingMode(RoundingMode.HALF_UP); assertNotEquals("0.2", df.format(0.15)); }