focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Adds the given delta to this wrapper's value and marks it as explicitly set;
// returns this so calls can be chained.
public IntValue increment(int increment) { this.value += increment; this.set = true; return this; }
// Verifies that the IntValue-accepting overload adds the wrapped value of its argument.
// NOTE(review): relies on an increment(IntValue) overload and verifySetValue helper not shown here.
@Test public void increment_IntValue_increments_by_the_value_of_the_arg() { IntValue source = new IntValue().increment(10); IntValue target = new IntValue().increment(source); verifySetValue(target, 10); }
// Capability flag: this database metadata implementation reports no support
// for SQL FULL OUTER JOIN.
@Override public boolean supportsFullOuterJoins() { return false; }
// Confirms the capability flag is reported as false.
@Test void assertSupportsFullOuterJoins() { assertFalse(metaData.supportsFullOuterJoins()); }
// Null-safe convenience overload: compiles (and caches, via PatternPool) the
// regex with DOTALL so '.' also matches line terminators, then delegates to the
// Pattern-based overload. Returns null when either argument is null.
public static String get(String regex, CharSequence content, int groupIndex) { if (null == content || null == regex) { return null; } final Pattern pattern = PatternPool.get(regex, Pattern.DOTALL); return get(pattern, content, groupIndex); }
// Extracts group 0 (the whole match) of the first two-word-character run.
@Test public void getTest() { final String resultGet = ReUtil.get("\\w{2}", content, 0); assertEquals("ZZ", resultGet); }
// Read-only view: any mutation attempt fails fast with
// UnsupportedOperationException rather than silently ignoring the write.
@Override public T set(K name, V value) { throw new UnsupportedOperationException("read only"); }
// Verifies that writing to the read-only headers throws.
@Test public void testSetStringValues() { assertThrows(UnsupportedOperationException.class, new Executable() { @Override public void execute() { HEADERS.set("name", "value1", "value2"); } }); }
// Describes the given topics via the admin client, retrying on retryable errors
// (ExecutorUtil.RetryBehaviour.ON_RETRYABLE). Error mapping, in catch order:
// ExecutionException -> KafkaResponseGetFailedException wrapping the *cause*;
// TopicAuthorizationException -> KsqlTopicAuthorizationException (DESCRIBE);
// anything else -> KafkaResponseGetFailedException wrapping the exception itself.
// includeAuthorizedOperations(true) so ACL info is part of each description.
@Override public Map<String, TopicDescription> describeTopics(final Collection<String> topicNames) { try { return ExecutorUtil.executeWithRetries( () -> adminClient.get().describeTopics( topicNames, new DescribeTopicsOptions().includeAuthorizedOperations(true) ).allTopicNames().get(), ExecutorUtil.RetryBehaviour.ON_RETRYABLE); } catch (final ExecutionException e) { throw new KafkaResponseGetFailedException( "Failed to Describe Kafka Topic(s): " + topicNames, e.getCause()); } catch (final TopicAuthorizationException e) { throw new KsqlTopicAuthorizationException( AclOperation.DESCRIBE, topicNames); } catch (final Exception e) { throw new KafkaResponseGetFailedException( "Failed to Describe Kafka Topic(s): " + topicNames, e); } }
// Five consecutive retryable failures exhaust the retry budget and surface as
// KafkaResponseGetFailedException.
@Test public void shouldThrowOnDescribeTopicsWhenRetriesExpire() { // Given: when(adminClient.describeTopics(anyCollection(), any())) .thenAnswer(describeTopicsResult(new UnknownTopicOrPartitionException("meh"))) .thenAnswer(describeTopicsResult(new UnknownTopicOrPartitionException("meh"))) .thenAnswer(describeTopicsResult(new UnknownTopicOrPartitionException("meh"))) .thenAnswer(describeTopicsResult(new UnknownTopicOrPartitionException("meh"))) .thenAnswer(describeTopicsResult(new UnknownTopicOrPartitionException("meh"))); // When: assertThrows( KafkaResponseGetFailedException.class, () -> kafkaTopicClient.describeTopics(Collections.singleton("aTopic")) ); }
// Finishes this operator wrapper and then propagates finish() down the chain.
// For non-head operators under DRAIN, input 1 is explicitly ended first (all
// non-head operators on the chain are one-input operators, per the inline note).
public void finish(StreamTaskActionExecutor actionExecutor, StopMode stopMode) throws Exception { if (!isHead && stopMode == StopMode.DRAIN) { // NOTE: This only do for the case where the operator is one-input operator. At present, // any non-head operator on the operator chain is one-input operator. actionExecutor.runThrowing(() -> endOperatorInput(1)); } quiesceTimeServiceAndFinishOperator(actionExecutor, stopMode); // propagate the close operation to the next wrapper if (next != null) { next.finish(actionExecutor, stopMode); } }
// Drives finish() from the head wrapper and checks the ordered lifecycle output;
// the subList(2, ...) skips the head operator's first two expected entries,
// since the head does not receive the explicit end-of-input in finish().
@Test void testFinish() throws Exception { output.clear(); operatorWrappers.get(0).finish(containingTask.getActionExecutor(), StopMode.DRAIN); List<Object> expected = new ArrayList<>(); for (int i = 0; i < operatorWrappers.size(); i++) { String prefix = "[" + "Operator" + i + "]"; Collections.addAll( expected, prefix + ": End of input", prefix + ": Timer that was in mailbox before closing operator", prefix + ": Bye", prefix + ": Mail to put in mailbox when finishing operator"); } assertThat(output) .as("Output was not correct.") .containsExactlyElementsOf(expected.subList(2, expected.size())); }
// Applies GSUB features in FEATURES_IN_ORDER after reph adjustment and glyph
// repositioning. When a feature is missing, one special case applies: a missing
// RKRF feature is synthesized from the VATU feature if that one is supported.
// NOTE(review): the "feature not found" debug line is also logged in that
// synthesized-RKRF case, even though a substitute was applied — possibly
// misleading, though harmless.
@Override public List<Integer> applyTransforms(List<Integer> originalGlyphIds) { List<Integer> intermediateGlyphsFromGsub = adjustRephPosition(originalGlyphIds); intermediateGlyphsFromGsub = repositionGlyphs(intermediateGlyphsFromGsub); for (String feature : FEATURES_IN_ORDER) { if (!gsubData.isFeatureSupported(feature)) { if (feature.equals(RKRF_FEATURE) && gsubData.isFeatureSupported(VATU_FEATURE)) { // Create your own rkrf feature from vatu feature intermediateGlyphsFromGsub = applyRKRFFeature( gsubData.getFeature(VATU_FEATURE), intermediateGlyphsFromGsub); } LOG.debug("the feature {} was not found", feature); continue; } LOG.debug("applying the feature {}", feature); ScriptFeature scriptFeature = gsubData.getFeature(feature); intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature, intermediateGlyphsFromGsub); } return Collections.unmodifiableList(intermediateGlyphsFromGsub); }
// Gujarati rkrf substitution: the input text must produce the expected glyph ids.
@Test void testApplyTransforms_rkrf() { // given List<Integer> glyphsAfterGsub = Arrays.asList(242,228,250); // when List<Integer> result = gsubWorkerForGujarati.applyTransforms(getGlyphIds("પ્રક્રવ્ર")); // then assertEquals(glyphsAfterGsub, result); }
// Publishes the exchange. If the body is a List of Exchange (a grouped
// exchange), each member is sent individually; a List of anything else, or a
// non-list body, is sent as a single exchange.
@Override public void process(Exchange exchange) throws Exception { if (logger.isDebugEnabled()) { logger.debug("uploader thread/id: {} / {}. api call completed.", Thread.currentThread().getId(), exchange.getExchangeId()); } if (exchange.getIn().getBody() instanceof List) { boolean groupedExchanges = false; for (Object body : exchange.getIn().getBody(List.class)) { if (body instanceof Exchange) { send((Exchange) body); groupedExchanges = true; } } if (!groupedExchanges) { send(exchange); } } else { send(exchange); } }
// Single (non-grouped) exchange path: exactly one publish call is expected.
@Test public void testProcess() throws Exception { GooglePubsubLiteProducer producer = new GooglePubsubLiteProducer(mockEndpoint); String testPayload = "Test Payload"; when(mockExchange.getIn()).thenReturn(mockMessage); when(mockEndpoint.getProjectId()).thenReturn(123456789012L); when(mockEndpoint.getLocation()).thenReturn("europe-west3"); when(mockEndpoint.getDestinationName()).thenReturn("testDestination"); when(mockEndpoint.getComponent()).thenReturn(mock(GooglePubsubLiteComponent.class)); when(mockEndpoint.getComponent().getPublisher(any(), any())).thenReturn(mockPublisher); when(mockExchange.getMessage()).thenReturn(mockMessage); when(mockMessage.getBody()).thenReturn(testPayload.getBytes()); when(mockExchange.getMessage().getHeader(GooglePubsubLiteConstants.ATTRIBUTES, Map.class)).thenReturn(null); when(mockExchange.getMessage().getHeader(GooglePubsubLiteConstants.ORDERING_KEY, String.class)).thenReturn(null); when(mockPublisher.publish(any())).thenReturn(ApiFutures.immediateFuture("messageId")); producer.process(mockExchange); verify(mockPublisher, times(1)).publish(any()); }
// This logging adapter never requires its configuration to be reloaded.
@Override public boolean isNeedReloadConfiguration() { return false; }
// Confirms the adapter reports no reload requirement.
@Test void testIsNeedReloadConfiguration() { assertFalse(logbackNacosLoggingAdapter.isNeedReloadConfiguration()); }
// Looks up a clue: rawText matching a clue's returnText (case-insensitive)
// means the challenge was completed; text matching rawChallenge exactly means
// it was issued but not completed. Returns null when nothing matches.
// NOTE(review): this mutates shared state on the matched clue
// (setChallengeCompleted) from a static lookup — not thread-safe and the
// matched clue instance is shared across callers; confirm this is intended.
public static SkillChallengeClue forText(String text, String rawText) { for (SkillChallengeClue clue : CLUES) { if (rawText.equalsIgnoreCase(clue.returnText)) { clue.setChallengeCompleted(true); return clue; } else if (text.equals(clue.rawChallenge)) { clue.setChallengeCompleted(false); return clue; } } return null; }
// Checks the item requirements attached to the Barrows-teleport-tablet clue.
@Test public void itemRequirementsCreateABarrowsTeleportTablet() { String clueText = "Create a Barrows teleport tablet."; SkillChallengeClue barrowsClue = SkillChallengeClue.forText(clueText.toLowerCase(), clueText.toLowerCase()); ItemRequirement[] requirements = barrowsClue.getItemRequirements(); assertEquals(4, requirements.length); ItemRequirement darkEssenceBlock = requirements[0]; ItemRequirement bloodRune = requirements[1]; ItemRequirement lawRune = requirements[2]; ItemRequirement soulRune = requirements[3]; assertTrue("Dark Essence Block", darkEssenceBlock.fulfilledBy(DARK_ESSENCE_BLOCK)); assertTrue("Blood Rune x1", bloodRune.fulfilledBy(BLOOD_RUNE)); assertTrue("Law Rune x2", lawRune.fulfilledBy(LAW_RUNE)); assertTrue("Soul Rune x2", soulRune.fulfilledBy(SOUL_RUNE)); }
// Builds a FieldScope that excludes the given field numbers from comparison.
// The (int, int...) signature forces at least one field number at compile time;
// NOTE(review): asList here appears to be a project helper combining the first
// element with the varargs tail — not java.util.Arrays.asList.
public static FieldScope ignoringFields(int firstFieldNumber, int... rest) { return FieldScopeImpl.createIgnoringFields(asList(firstFieldNumber, rest)); }
// Ignoring the differing field makes unequal messages compare equal, and the
// failure messages report the modified/ignored paths as expected.
@Test public void testIgnoringTopLevelField_ignoringField() { expectThat(ignoringFieldDiffMessage) .ignoringFields(goodFieldNumber) .isNotEqualTo(ignoringFieldMessage); expectThat(ignoringFieldDiffMessage) .ignoringFields(badFieldNumber) .isEqualTo(ignoringFieldMessage); expectFailureWhenTesting() .that(ignoringFieldDiffMessage) .ignoringFields(goodFieldNumber) .isEqualTo(ignoringFieldMessage); expectIsEqualToFailed(); expectThatFailure().hasMessageThat().contains("modified: r_string[0]: \"foo\" -> \"bar\""); expectFailureWhenTesting() .that(ignoringFieldDiffMessage) .ignoringFields(badFieldNumber) .isNotEqualTo(ignoringFieldMessage); expectIsNotEqualToFailed(); expectThatFailure().hasMessageThat().contains("ignored: r_string"); }
// Builds the JVM memory gauge set: totals (heap + non-heap), per-area
// init/used/max/committed/usage, and per-memory-pool gauges. For total.max and
// the usage ratios, a max of -1 (undefined) is handled specially: total.max
// reports -1 outright, while the usage ratios fall back to committed as the
// denominator. Returned map is unmodifiable.
@Override public Map<String, Metric> getMetrics() { final Map<String, Metric> gauges = new HashMap<>(); gauges.put("total.init", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getInit() + mxBean.getNonHeapMemoryUsage().getInit()); gauges.put("total.used", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getUsed() + mxBean.getNonHeapMemoryUsage().getUsed()); gauges.put("total.max", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getMax() == -1 ? -1 : mxBean.getHeapMemoryUsage().getMax() + mxBean.getNonHeapMemoryUsage().getMax()); gauges.put("total.committed", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getCommitted() + mxBean.getNonHeapMemoryUsage().getCommitted()); gauges.put("heap.init", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getInit()); gauges.put("heap.used", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getUsed()); gauges.put("heap.max", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getMax()); gauges.put("heap.committed", (Gauge<Long>) () -> mxBean.getHeapMemoryUsage().getCommitted()); gauges.put("heap.usage", new RatioGauge() { @Override protected Ratio getRatio() { final MemoryUsage usage = mxBean.getHeapMemoryUsage(); return Ratio.of(usage.getUsed(), usage.getMax()); } }); gauges.put("non-heap.init", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getInit()); gauges.put("non-heap.used", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getUsed()); gauges.put("non-heap.max", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getMax()); gauges.put("non-heap.committed", (Gauge<Long>) () -> mxBean.getNonHeapMemoryUsage().getCommitted()); gauges.put("non-heap.usage", new RatioGauge() { @Override protected Ratio getRatio() { final MemoryUsage usage = mxBean.getNonHeapMemoryUsage(); return Ratio.of(usage.getUsed(), usage.getMax() == -1 ? 
usage.getCommitted() : usage.getMax()); } }); for (final MemoryPoolMXBean pool : memoryPools) { final String poolName = name("pools", WHITESPACE.matcher(pool.getName()).replaceAll("-")); gauges.put(name(poolName, "usage"), new RatioGauge() { @Override protected Ratio getRatio() { MemoryUsage usage = pool.getUsage(); return Ratio.of(usage.getUsed(), usage.getMax() == -1 ? usage.getCommitted() : usage.getMax()); } }); gauges.put(name(poolName, "max"), (Gauge<Long>) () -> pool.getUsage().getMax()); gauges.put(name(poolName, "used"), (Gauge<Long>) () -> pool.getUsage().getUsed()); gauges.put(name(poolName, "committed"), (Gauge<Long>) () -> pool.getUsage().getCommitted()); // Only register GC usage metrics if the memory pool supports usage statistics. if (pool.getCollectionUsage() != null) { gauges.put(name(poolName, "used-after-gc"), (Gauge<Long>) () -> pool.getCollectionUsage().getUsed()); } gauges.put(name(poolName, "init"), (Gauge<Long>) () -> pool.getUsage().getInit()); } return Collections.unmodifiableMap(gauges); }
// total.max must be -1 (undefined) whenever the non-heap max is undefined.
@Test public void hasAGaugeForTotalMaxWhenNonHeapMaxUndefined() { when(nonHeap.getMax()).thenReturn(-1L); final Gauge gauge = (Gauge) gauges.getMetrics().get("total.max"); assertThat(gauge.getValue()) .isEqualTo(-1L); }
// Computes the tile-index rectangle covering a bounding box at a zoom level.
// Because tile X/Y indices wrap modulo 2^zoom (e.g. a box crossing the
// antimeridian), a non-positive width/height is corrected by adding the tile
// count; the returned Rect's right/bottom may therefore exceed 2^zoom - 1 and
// must be interpreted modulo the tile bound by callers.
public static Rect getTilesRect(final BoundingBox pBB, final int pZoomLevel) { final int mapTileUpperBound = 1 << pZoomLevel; final int right = MapView.getTileSystem().getTileXFromLongitude(pBB.getLonEast(), pZoomLevel); final int bottom = MapView.getTileSystem().getTileYFromLatitude(pBB.getLatSouth(), pZoomLevel); final int left = MapView.getTileSystem().getTileXFromLongitude(pBB.getLonWest(), pZoomLevel); final int top = MapView.getTileSystem().getTileYFromLatitude(pBB.getLatNorth(), pZoomLevel); int width = right - left + 1; // handling the modulo if (width <= 0) { width += mapTileUpperBound; } int height = bottom - top + 1; // handling the modulo if (height <= 0) { height += mapTileUpperBound; } return new Rect(left, top, left + width - 1, top + height - 1); }
// A degenerate (single-point) bounding box must yield a single tile at every zoom.
@Test public void testGetTilesRectSingleTile() { final TileSystem tileSystem = MapView.getTileSystem(); final BoundingBox box = new BoundingBox(); for (int zoom = 0; zoom <= TileSystem.getMaximumZoomLevel(); zoom++) { final double longitude = tileSystem.getRandomLongitude(mRandom.nextDouble()); final double latitude = tileSystem.getRandomLatitude(mRandom.nextDouble()); box.set(latitude, longitude, latitude, longitude); // single point final Rect rect = CacheManager.getTilesRect(box, zoom); Assert.assertEquals(rect.left, rect.right); // single tile expected Assert.assertEquals(rect.top, rect.bottom); // single tile expected } }
/**
 * Tells whether the given field is a public, non-static, non-final,
 * non-synthetic instance field (i.e. a plain, writable property field).
 *
 * @param field the reflected field to inspect; must not be null
 * @return true only when every one of the four conditions holds
 */
public static boolean isPublicInstanceField(Field field) {
    final int mods = field.getModifiers();
    // Must be publicly accessible at all.
    if (!Modifier.isPublic(mods)) {
        return false;
    }
    // Instance fields only, and they must be assignable.
    if (Modifier.isStatic(mods) || Modifier.isFinal(mods)) {
        return false;
    }
    // Compiler-generated fields (e.g. outer-class references) don't count.
    return !field.isSynthetic();
}
// Positive case: EmptyClass.set is a public instance field; negative case:
// EmptyClass.property is not (modifiers per the fixture class, not shown here).
@Test void testIsPublicInstanceField() throws Exception { Field field = EmptyClass.class.getDeclaredField("set"); assertTrue(ReflectUtils.isPublicInstanceField(field)); field = EmptyClass.class.getDeclaredField("property"); assertFalse(ReflectUtils.isPublicInstanceField(field)); }
// Router-side reservation submission. Flow: (1) reject null context /
// reservation id / definition / queue with 400; (2) validate the reservation-id
// format, rethrowing IllegalArgumentException after metrics/audit; (3) retry
// the submission against active sub-clusters (bounded by numSubmitRetries),
// keeping a blacklist of failed sub-clusters across attempts; (4) on success,
// record latency and return the response; otherwise return 503. Every failure
// path increments the failure metric and writes an audit log entry.
@Override public Response submitReservation(ReservationSubmissionRequestInfo resContext, HttpServletRequest hsr) throws AuthorizationException, IOException, InterruptedException { long startTime = clock.getTime(); if (resContext == null || resContext.getReservationId() == null || resContext.getReservationDefinition() == null || resContext.getQueue() == null) { routerMetrics.incrSubmitReservationFailedRetrieved(); String errMsg = "Missing submitReservation resContext or reservationId " + "or reservation definition or queue."; RouterAuditLogger.logFailure(getUser().getShortUserName(), SUBMIT_RESERVATION, UNKNOWN, TARGET_WEB_SERVICE, errMsg); return Response.status(Status.BAD_REQUEST).entity(errMsg).build(); } // Check that the resId format is accurate String resId = resContext.getReservationId(); try { RouterServerUtil.validateReservationId(resId); } catch (IllegalArgumentException e) { routerMetrics.incrSubmitReservationFailedRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), SUBMIT_RESERVATION, UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage()); throw e; } List<SubClusterId> blackList = new ArrayList<>(); try { int activeSubClustersCount = federationFacade.getActiveSubClustersCount(); int actualRetryNums = Math.min(activeSubClustersCount, numSubmitRetries); Response response = ((FederationActionRetry<Response>) (retryCount) -> invokeSubmitReservation(resContext, blackList, hsr, retryCount)). 
runWithRetries(actualRetryNums, submitIntervalTime); if (response != null) { long stopTime = clock.getTime(); RouterAuditLogger.logSuccess(getUser().getShortUserName(), SUBMIT_RESERVATION, TARGET_WEB_SERVICE); routerMetrics.succeededSubmitReservationRetrieved(stopTime - startTime); return response; } } catch (Exception e) { routerMetrics.incrSubmitReservationFailedRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), SUBMIT_RESERVATION, UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage()); return Response.status(Status.SERVICE_UNAVAILABLE).entity(e.getLocalizedMessage()).build(); } routerMetrics.incrSubmitReservationFailedRetrieved(); String msg = String.format("Reservation %s failed to be submitted.", resId); RouterAuditLogger.logFailure(getUser().getShortUserName(), SUBMIT_RESERVATION, UNKNOWN, TARGET_WEB_SERVICE, msg); return Response.status(Status.SERVICE_UNAVAILABLE).entity(msg).build(); }
// Round-trip: submit a reservation, then list it back and verify exactly one
// reservation with the same id is returned.
@Test public void testSubmitReservation() throws Exception { // submit reservation ReservationId reservationId = ReservationId.newInstance(Time.now(), 2); Response response = submitReservation(reservationId); Assert.assertNotNull(response); Assert.assertEquals(Status.ACCEPTED.getStatusCode(), response.getStatus()); String applyReservationId = reservationId.toString(); Response reservationResponse = interceptor.listReservation( QUEUE_DEDICATED_FULL, applyReservationId, -1, -1, false, null); Assert.assertNotNull(reservationResponse); Object entity = reservationResponse.getEntity(); Assert.assertNotNull(entity); Assert.assertNotNull(entity instanceof ReservationListInfo); Assert.assertTrue(entity instanceof ReservationListInfo); ReservationListInfo listInfo = (ReservationListInfo) entity; Assert.assertNotNull(listInfo); List<ReservationInfo> reservationInfos = listInfo.getReservations(); Assert.assertNotNull(reservationInfos); Assert.assertEquals(1, reservationInfos.size()); ReservationInfo reservationInfo = reservationInfos.get(0); Assert.assertNotNull(reservationInfo); Assert.assertEquals(reservationInfo.getReservationId(), applyReservationId); }
/**
 * Converts a sub-range of a byte array to its lowercase hexadecimal string
 * representation (two hex digits per byte, high nybble first).
 *
 * <p>Bug fix: the loop previously ran {@code for (x = start; x < length; ...)},
 * treating {@code length} as an exclusive end index — any call with
 * {@code start > 0} silently dropped the tail bytes. It now converts exactly
 * {@code length} bytes beginning at {@code start}.
 *
 * @param in     source bytes; may be {@code null}
 * @param start  index of the first byte to convert
 * @param length number of bytes to convert
 * @return hex string of the requested range, or {@code null} when {@code in}
 *         is {@code null}
 */
public static String byteArrayToHexString(byte[] in, int start, int length) {
    if (in == null) {
        return null;
    }
    // Size for the bytes actually converted, not the whole array.
    StringBuilder out = new StringBuilder(length * 2);
    int end = start + length;
    for (int x = start; x < end; x++) {
        // Character.forDigit emits lowercase digits, matching the lowercase
        // hex table the original code indexed into.
        out.append(Character.forDigit((in[x] >> 4) & 0x0F, 16));
        out.append(Character.forDigit(in[x] & 0x0F, 16));
    }
    return out.toString();
}
// NOTE(review): exercises a single-argument byteArrayToHexString(byte[])
// overload (not the (byte[], int, int) variant above); expected output shows
// the hex table is lowercase.
@Test void testByteArrayToHex() { assertEquals("0123456789abcdef", byteArrayToHexString(array)); }
// Tears down the registry: drops all notify listeners and cached domain data,
// then shuts down the refresh scheduler if one was created (null-safe because
// the scheduler is only created lazily).
@Override public void destroy() { notifyListeners.clear(); domainCache.clear(); if (scheduledExecutorService != null) { scheduledExecutorService.shutdown(); } }
// After subscribe the scheduler runs and a listener is registered; after
// destroy the scheduler is stopped and the listener map is empty.
@Test public void testDestroy() { domainRegistry.init(); ConsumerConfig<Object> config1 = new ConsumerConfig<>(); String direct1 = "2"; config1.setDirectUrl(direct1); domainRegistry.subscribe(config1); assertTrue(domainRegistry.scheduledExecutorService.isStarted()); assertTrue(domainRegistry.notifyListeners.containsKey(direct1)); domainRegistry.destroy(); assertFalse(domainRegistry.scheduledExecutorService.isStarted()); assertEquals(0, domainRegistry.notifyListeners.size()); }
/**
 * Merges consumer-side parameters into the provider URL by delegating to a
 * {@link ProviderURLMergeProcessor} extension.
 *
 * <p>The processor is chosen by the consumer's URL_MERGE_PROCESSOR_KEY
 * parameter when present and non-empty; otherwise the "default" extension
 * is used.
 *
 * @param remoteUrl the provider URL
 * @param localMap  the consumer-side parameters
 * @return the merged URL produced by the selected processor
 */
public URL mergeUrl(URL remoteUrl, Map<String, String> localMap) {
    final String configuredName = localMap.get(URL_MERGE_PROCESSOR_KEY);
    final String extensionName = StringUtils.isNotEmpty(configuredName) ? configuredName : "default";
    final ProviderURLMergeProcessor processor = applicationModel
            .getExtensionLoader(ProviderURLMergeProcessor.class)
            .getExtension(extensionName);
    return processor.mergeUrl(remoteUrl, localMap);
}
// Covers both the default merge processor (thread-pool-related provider keys
// must be dropped, consumer values win, filters are concatenated, TAG comes
// from the provider) and a custom "tag" processor selected via
// URL_MERGE_PROCESSOR_KEY (TAG comes from the consumer instead).
@Test void testMergeUrl() { URL providerURL = URL.valueOf("dubbo://localhost:55555"); providerURL = providerURL.setPath("path").setUsername("username").setPassword("password"); providerURL = URLBuilder.from(providerURL) .addParameter(GROUP_KEY, "dubbo") .addParameter(VERSION_KEY, "1.2.3") .addParameter(DUBBO_VERSION_KEY, "2.3.7") .addParameter(THREADPOOL_KEY, "fixed") .addParameter(THREADS_KEY, Integer.MAX_VALUE) .addParameter(THREAD_NAME_KEY, "test") .addParameter(CORE_THREADS_KEY, Integer.MAX_VALUE) .addParameter(QUEUES_KEY, Integer.MAX_VALUE) .addParameter(ALIVE_KEY, Integer.MAX_VALUE) .addParameter(DEFAULT_KEY_PREFIX + THREADS_KEY, Integer.MAX_VALUE) .addParameter(DEFAULT_KEY_PREFIX + THREADPOOL_KEY, "fixed") .addParameter(DEFAULT_KEY_PREFIX + CORE_THREADS_KEY, Integer.MAX_VALUE) .addParameter(DEFAULT_KEY_PREFIX + QUEUES_KEY, Integer.MAX_VALUE) .addParameter(DEFAULT_KEY_PREFIX + ALIVE_KEY, Integer.MAX_VALUE) .addParameter(DEFAULT_KEY_PREFIX + THREAD_NAME_KEY, "test") .addParameter(APPLICATION_KEY, "provider") .addParameter(REFERENCE_FILTER_KEY, "filter1,filter2") .addParameter(TAG_KEY, "TTT") .build(); // Verify default ProviderURLMergeProcessor URL consumerURL = new URLBuilder(DUBBO_PROTOCOL, "localhost", 55555) .addParameter(PID_KEY, "1234") .addParameter(THREADPOOL_KEY, "foo") .addParameter(APPLICATION_KEY, "consumer") .addParameter(REFERENCE_FILTER_KEY, "filter3") .addParameter(TAG_KEY, "UUU") .build(); URL url = clusterUtils.mergeUrl(providerURL, consumerURL.getParameters()); Assertions.assertFalse(url.hasParameter(THREADS_KEY)); Assertions.assertFalse(url.hasParameter(DEFAULT_KEY_PREFIX + THREADS_KEY)); Assertions.assertFalse(url.hasParameter(DEFAULT_KEY_PREFIX + THREADPOOL_KEY)); Assertions.assertFalse(url.hasParameter(CORE_THREADS_KEY)); Assertions.assertFalse(url.hasParameter(DEFAULT_KEY_PREFIX + CORE_THREADS_KEY)); Assertions.assertFalse(url.hasParameter(QUEUES_KEY)); Assertions.assertFalse(url.hasParameter(DEFAULT_KEY_PREFIX + QUEUES_KEY)); 
Assertions.assertFalse(url.hasParameter(ALIVE_KEY)); Assertions.assertFalse(url.hasParameter(DEFAULT_KEY_PREFIX + ALIVE_KEY)); Assertions.assertFalse(url.hasParameter(THREAD_NAME_KEY)); Assertions.assertFalse(url.hasParameter(DEFAULT_KEY_PREFIX + THREAD_NAME_KEY)); Assertions.assertEquals("path", url.getPath()); Assertions.assertEquals("username", url.getUsername()); Assertions.assertEquals("password", url.getPassword()); Assertions.assertEquals("1234", url.getParameter(PID_KEY)); Assertions.assertEquals("foo", url.getParameter(THREADPOOL_KEY)); Assertions.assertEquals("consumer", url.getApplication()); Assertions.assertEquals("provider", url.getRemoteApplication()); Assertions.assertEquals("filter1,filter2,filter3", url.getParameter(REFERENCE_FILTER_KEY)); Assertions.assertEquals("TTT", url.getParameter(TAG_KEY)); // Verify custom ProviderURLMergeProcessor URL consumerUrlForTag = new URLBuilder(DUBBO_PROTOCOL, "localhost", 55555) .addParameter(PID_KEY, "1234") .addParameter(THREADPOOL_KEY, "foo") .addParameter(APPLICATION_KEY, "consumer") .addParameter(REFERENCE_FILTER_KEY, "filter3") .addParameter(TAG_KEY, "UUU") .addParameter(URL_MERGE_PROCESSOR_KEY, "tag") .build(); URL urlForTag = clusterUtils.mergeUrl(providerURL, consumerUrlForTag.getParameters()); Assertions.assertEquals("UUU", urlForTag.getParameter(TAG_KEY)); }
// Delegating cache-writer delete that records the call latency into
// deleteProbe; the finally block guarantees the probe is updated even when the
// delegate throws.
@Override public void delete(Object o) throws CacheWriterException { long startNanos = Timer.nanos(); try { delegate.get().delete(o); } finally { deleteProbe.recordValue(Timer.nanosElapsed(startNanos)); } }
// Verifies pass-through to the delegate and a single probe recording.
@Test public void delete() { Cache.Entry<Integer, String> entry = new CacheEntry<>(1, "peter"); cacheWriter.delete(entry); verify(delegate).delete(entry); assertProbeCalledOnce("delete"); }
/**
 * Builds a repeated-key query string such as {@code id=1&id=2&id=3} from the
 * given key and values (each value rendered via String concatenation).
 *
 * @param key    the query parameter name, repeated once per value
 * @param values the values; an empty iterable yields an empty string
 * @return the {@code &}-joined key=value pairs
 */
public static String buildMultiValueQuery(String key, Iterable<Object> values) {
    final StringJoiner joiner = new StringJoiner("&");
    for (Object value : values) {
        joiner.add(key + "=" + value);
    }
    return joiner.toString();
}
// Covers empty, single-value, multi-value and mixed-type inputs.
@Test public void testBuildMultiValueQuery() { List<Object> list = new ArrayList<>(); assertEquals("", URISupport.buildMultiValueQuery("id", list)); list = List.of("hello"); assertEquals("id=hello", URISupport.buildMultiValueQuery("id", list)); list = List.of(1, 2, 3); assertEquals("id=1&id=2&id=3", URISupport.buildMultiValueQuery("id", list)); list = List.of("foo", "bar", 3, true, "baz"); assertEquals("hey=foo&hey=bar&hey=3&hey=true&hey=baz", URISupport.buildMultiValueQuery("hey", list)); }
// Facade delete: removes the pattern from the underlying service by its id.
@Override public void delete(GrokPattern nativeEntity) { grokPatternService.delete(nativeEntity.id()); }
// Only the deleted pattern is removed; the other saved pattern survives.
@Test public void delete() throws ValidationException { final GrokPattern grokPattern = grokPatternService.save(GrokPattern.create("Test1", "[a-z]+")); grokPatternService.save(GrokPattern.create("Test2", "[a-z]+")); assertThat(grokPatternService.loadAll()).hasSize(2); facade.delete(grokPattern); assertThat(grokPatternService.loadAll()).hasSize(1); }
// Compiles every queued schema into an output file, plus the protocol
// interface when a protocol is set; list is pre-sized for queue + 1.
Collection<OutputFile> compile() { List<OutputFile> out = new ArrayList<>(queue.size() + 1); for (Schema schema : queue) { out.add(compile(schema)); } if (protocol != null) { out.add(compileInterface(protocol)); } return out; }
// Generated sources for union/fixed field schemas must compile with javac.
@Test void unionAndFixedFields() throws Exception { Schema unionTypesWithMultipleFields = new Schema.Parser() .parse(new File("src/test/resources/union_and_fixed_fields.avsc")); assertCompilesWithJavaCompiler(new File(this.outputFile, "testUnionAndFixedFields"), new SpecificCompiler(unionTypesWithMultipleFields).compile()); }
/**
 * Decides whether a column should be reported as existing after encrypt
 * rewriting: cipher columns always exist, while the derived assisted-query
 * and like-query columns are internal and therefore hidden.
 *
 * @param originalName the column name to check
 * @return true when the column is visible to callers
 */
@Override
public boolean isExisted(final String originalName) {
    // Cipher columns are always exposed.
    if (encryptTable.isCipherColumn(originalName)) {
        return true;
    }
    // Everything else exists unless it is one of the derived helper columns.
    return !encryptTable.isAssistedQueryColumn(originalName) && !encryptTable.isLikeQueryColumn(originalName);
}
// An assisted-query column (and not a cipher column) must be reported as
// non-existent.
@Test void assertIsExistedWithAssistedQueryColumn() { EncryptTable encryptTable = mock(EncryptTable.class); when(encryptTable.isAssistedQueryColumn("assisted_query_column")).thenReturn(true); EncryptColumnExistedReviser reviser = new EncryptColumnExistedReviser(encryptTable); assertFalse(reviser.isExisted("assisted_query_column")); }
// Thin wrapper: parses the JSON string into a JSONObject via the constructor.
public static JSONObject parseObj(String jsonStr) { return new JSONObject(jsonStr); }
// NOTE(review): this test exercises a different parseObj(Object, JSONConfig)
// overload (non-string input with ignoreError) — a number yields an empty object.
@Test public void parseNumberTest2() { final JSONObject json = JSONUtil.parseObj(123L, JSONConfig.create().setIgnoreError(true)); assertEquals(new JSONObject(), json); }
// Strict version comparison: true only when this view's version is greater
// than the other's (equal versions are NOT "later").
public boolean isLaterThan(MembersView other) { return version > other.version; }
// Exhaustively checks strictness and asymmetry across three versions.
@Test public void isLaterThan() { MembersView view1 = MembersView.createNew(1, Arrays.asList(MemberMapTest.newMembers(5))); MembersView view2 = MembersView.createNew(3, Arrays.asList(MemberMapTest.newMembers(5))); MembersView view3 = MembersView.createNew(5, Arrays.asList(MemberMapTest.newMembers(5))); assertTrue(view2.isLaterThan(view1)); assertTrue(view3.isLaterThan(view1)); assertTrue(view3.isLaterThan(view2)); assertFalse(view1.isLaterThan(view1)); assertFalse(view1.isLaterThan(view2)); assertFalse(view1.isLaterThan(view3)); assertFalse(view2.isLaterThan(view2)); assertFalse(view2.isLaterThan(view3)); assertFalse(view3.isLaterThan(view3)); }
// Template method: rejects null input with IllegalArgumentException (via
// Guava-style checkArgument) before delegating to the subclass hook doApply.
// Declared final so subclasses cannot bypass the null check.
@Override public final T apply(@Nullable F input) { checkArgument(input != null, "Null inputs are not allowed in this function"); return doApply(input); }
// Happy path through the subclass under test.
@Test public void apply() { assertThat(underTest.apply("foo")).isEqualTo(3); }
public static String generatePartitionMetadataTableName(String databaseId) { // There are 11 characters in the name format. // Maximum Spanner database ID length is 30 characters. // UUID always generates a String with 36 characters. // Since the Postgres table name length is 63, we may need to truncate the table name depending // on the database length. String fullString = String.format(PARTITION_METADATA_TABLE_NAME_FORMAT, databaseId, UUID.randomUUID()) .replaceAll("-", "_"); if (fullString.length() < MAX_TABLE_NAME_LENGTH) { return fullString; } return fullString.substring(0, MAX_TABLE_NAME_LENGTH); }
// A maximum-length (30-char) database id must still yield a name within the
// Postgres 63-character identifier limit.
@Test public void testGenerateMetadataTableNameIsShorterThan64Characters() { final String tableName = NameGenerator.generatePartitionMetadataTableName("my-database-id1-maximum-length"); assertTrue(tableName.length() <= MAXIMUM_POSTGRES_TABLE_NAME_LENGTH); }
// Stops the first numOfServicesStarted child services in reverse start order.
// The stop policy: STARTED services are always stopped; INITED ones only when
// stopOnlyStartedServices is false. Stop failures are collected quietly and
// only the FIRST exception is rethrown (wrapped) after all services have been
// given a chance to stop.
private void stop(int numOfServicesStarted, boolean stopOnlyStartedServices) { // stop in reverse order of start Exception firstException = null; List<Service> services = getServices(); for (int i = numOfServicesStarted - 1; i >= 0; i--) { Service service = services.get(i); if (LOG.isDebugEnabled()) { LOG.debug("Stopping service #" + i + ": " + service); } STATE state = service.getServiceState(); //depending on the stop police if (state == STATE.STARTED || (!stopOnlyStartedServices && state == STATE.INITED)) { Exception ex = ServiceOperations.stopQuietly(LOG, service); if (ex != null && firstException == null) { firstException = ex; } } } //after stopping all services, rethrow the first exception raised if (firstException != null) { throw ServiceStateException.convert(firstException); } }
// Adding an already-stopped child during parent stop must not fail.
@Test(timeout = 10000) public void testAddStoppedChildInStop() throws Throwable { CompositeService parent = new CompositeService("parent"); BreakableService child = new BreakableService(); child.init(new Configuration()); child.start(); child.stop(); parent.init(new Configuration()); parent.start(); parent.stop(); AddSiblingService.addChildToService(parent, child); }
public static boolean regionMatches(final CharSequence cs, final boolean ignoreCase, final int thisStart, final CharSequence substring, final int start, final int length) { if (cs instanceof String && substring instanceof String) { return ((String) cs).regionMatches(ignoreCase, thisStart, (String) substring, start, length); } int index1 = thisStart; int index2 = start; int tmpLen = length; while (tmpLen-- > 0) { final char c1 = cs.charAt(index1++); final char c2 = substring.charAt(index2++); if (c1 == c2) { continue; } if (!ignoreCase) { return false; } // The same check as in String.regionMatches(): if (Character.toUpperCase(c1) != Character.toUpperCase(c2) && Character.toLowerCase(c1) != Character .toLowerCase(c2)) { return false; } } return true; }
// Non-String CharSequence input must take the character-by-character path and
// still match case-sensitively.
@Test void testRegionMatchesEqualsCaseSensitiveForNonString() { assertTrue(StringUtils.regionMatches(new StringBuilder("abc"), false, 0, "xabc", 1, 3)); }
// Validates user-supplied offsets for this connector. Null offset values
// (tombstones) are always accepted so users can clean up garbage via the REST
// API; null partitions are rejected; otherwise the partition must carry valid
// source/target cluster alias strings and the offset must pass format
// validation. The offsets are not consumed by the task, so validation is all
// that's required — hence the unconditional true on success.
@Override public boolean alterOffsets(Map<String, String> config, Map<Map<String, ?>, Map<String, ?>> offsets) { for (Map.Entry<Map<String, ?>, Map<String, ?>> offsetEntry : offsets.entrySet()) { Map<String, ?> sourceOffset = offsetEntry.getValue(); if (sourceOffset == null) { // We allow tombstones for anything; if there's garbage in the offsets for the connector, we don't // want to prevent users from being able to clean it up using the REST API continue; } Map<String, ?> sourcePartition = offsetEntry.getKey(); if (sourcePartition == null) { throw new ConnectException("Source partitions may not be null"); } MirrorUtils.validateSourcePartitionString(sourcePartition, SOURCE_CLUSTER_ALIAS_KEY); MirrorUtils.validateSourcePartitionString(sourcePartition, TARGET_CLUSTER_ALIAS_KEY); MirrorUtils.validateSourceOffset(sourcePartition, sourceOffset, true); } // We don't actually use these offsets in the task class, so no additional effort is required beyond just validating // the format of the user-supplied offsets return true; }
// An offset map with an unexpected key must be rejected with ConnectException.
@Test public void testAlterOffsetsIncorrectOffsetKey() { MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector(); Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap( sourcePartition("primary", "backup"), Collections.singletonMap("unused_offset_key", 0) ); assertThrows(ConnectException.class, () -> connector.alterOffsets(null, offsets)); }
// Resolves the named model (requireModel fails on unknown names) and returns a
// fresh evaluator for the requested function names.
public FunctionEvaluator evaluatorOf(String modelName, String ... names) { return requireModel(modelName).evaluatorOf(names); }
// Each evaluatorOf call yields an independent evaluator: missing inputs are
// NaN by default, setMissingValue supplies a default, and explicit bind
// overrides it.
@Test public void testSettingMissingValue() { ModelsEvaluator models = createModels(); { FunctionEvaluator function = models.evaluatorOf("macros", "secondphase"); assertTrue(Double.isNaN(function.evaluate().asDouble())); } { FunctionEvaluator function = models.evaluatorOf("macros", "secondphase"); function.setMissingValue(5); assertEquals(40.0, function.evaluate().asDouble(), delta); } { FunctionEvaluator function = models.evaluatorOf("macros", "secondphase"); function.setMissingValue(5); function.bind("match", 3); assertEquals(32.0, function.evaluate().asDouble(), delta); } }
// Pumps the stream into the output consumer line by line (no line prefix),
// decoding with the configured console log charset.
@Override public void readOutputOf(InputStream in) { StreamPumper.pump(in, outputConsumer, "", consoleLogCharset); }
// Seven input lines must produce seven stdOutput callbacks (async pump, hence
// the verification timeout).
@Test public void shouldReadOutputOfAGiveStream() { InputStream in = new ByteArrayInputStream((""" Lorem ipsum dolor sit amet, consectetur adipisicing elit,\s used do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi\s ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit\s in voluptate velit esse cillum dolore eu fugiat nulla pariatur.\s Excepteur sint occaecat cupidatat non proident, sunt in culpa qui\s officia deserunt mollit anim id est laborum.""").getBytes()); doNothing().when(safeOutputStreamConsumer).stdOutput(anyString()); console.readOutputOf(in); verify(safeOutputStreamConsumer, timeout(10000).times(7)).stdOutput(anyString()); }
/**
 * Turns a menu entry into a UI element: entries explicitly marked as not allowed are
 * skipped, entries containing a submenu become menus, everything else becomes a
 * plain action item.
 */
@Override
public void visit(Entry entry) {
    // An "allowed" attribute of Boolean.FALSE vetoes the entry entirely.
    final boolean vetoed = Boolean.FALSE.equals(entry.getAttribute("allowed"));
    if (vetoed) {
        return;
    }
    if (containsSubmenu(entry)) {
        addSubmenu(entry);
    } else {
        addActionItem(entry);
    }
}
// Verifies that showing a built menu's popup notifies the registered popup listener
// about the group entry's children. Skipped on macOS where menu behavior differs.
@Test
public void whenPopupMenuBecomesVisible_itsChildGroupPopupListenerIsCalled() {
    if(Compat.isMacOsX())
        return;
    menuEntry.addChild(groupEntry);
    menuActionGroupBuilder.visit(menuEntry);
    JMenu menu = (JMenu) new EntryAccessor().getComponent(menuEntry);
    menu.getPopupMenu().setVisible(true);
    verify(popupListener).childEntriesWillBecomeVisible(groupEntry);
}
/**
 * Iterates over the readable bytes in descending order (writerIndex-1 down to
 * readerIndex), passing each byte to the processor.
 *
 * @return the index where the processor stopped, or -1 if the whole range was processed
 */
@Override
public int forEachByteDesc(ByteProcessor processor) {
    // Throws IllegalReferenceCountException if the buffer was already released.
    ensureAccessible();
    try {
        return forEachByteDesc0(writerIndex - 1, readerIndex, processor);
    } catch (Exception e) {
        // Rethrows checked exceptions without declaring them; the return below
        // is unreachable but required by the compiler.
        PlatformDependent.throwException(e);
        return -1;
    }
}
// Calling the ranged forEachByteDesc on a released buffer must fail with
// IllegalReferenceCountException (reference-count safety check).
@Test
public void testForEachByteDescAfterRelease1() {
    assertThrows(IllegalReferenceCountException.class, new Executable() {
        @Override
        public void execute() {
            releasedBuffer().forEachByteDesc(0, 1, new TestByteProcessor());
        }
    });
}
/**
 * Stops the consumer: parts every configured IRC channel and removes the event
 * listener before delegating to the superclass. No-op on the connection if one was
 * never established.
 */
@Override
protected void doStop() throws Exception {
    if (connection != null) {
        for (IrcChannel channel : endpoint.getConfiguration().getChannelList()) {
            LOG.debug("Parting: {}", channel);
            connection.doPart(channel.getName());
        }
        connection.removeIRCEventListener(listener);
    }
    super.doStop();
}
// On stop, the consumer must part each configured channel and deregister its listener.
@Test
public void doStopTest() throws Exception {
    consumer.doStop();
    verify(connection).doPart("#chan1");
    verify(connection).doPart("#chan2");
    verify(connection).removeIRCEventListener(listener);
}
/**
 * Sets the outer-join semantics (e.g. LEFT, RIGHT or FULL) used by this operator.
 *
 * @param outerJoinType the join type to apply
 */
public void setOuterJoinType(OuterJoinType outerJoinType) {
    this.outerJoinType = outerJoinType;
}
// FULL outer join with partially overlapping keys: matched keys pair up, unmatched
// keys from either side appear with "null" on the other side.
@Test
void testFullOuterJoinWithPartialMatchingKeys() throws Exception {
    final List<String> leftInput = Arrays.asList("foo", "bar", "foobar");
    final List<String> rightInput = Arrays.asList("bar", "foo", "barfoo");
    baseOperator.setOuterJoinType(OuterJoinOperatorBase.OuterJoinType.FULL);
    List<String> expected = Arrays.asList("bar,bar", "null,barfoo", "foo,foo", "foobar,null");
    testOuterJoin(leftInput, rightInput, expected);
}
/**
 * Decodes ABI-encoded function return data into typed values.
 *
 * @param rawInput         hex string of the raw return data
 * @param outputParameters the expected output types, in declaration order
 * @return the decoded values matching {@code outputParameters}
 */
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) {
    return decoder.decodeFunctionResult(rawInput, outputParameters);
}
// Decodes a dynamic struct (Qux) that embeds a static struct (Bar) plus a dynamic
// string, and checks both components round-trip from the raw ABI hex payload.
@Test
@SuppressWarnings("unchecked")
public void testDecodeDynamicStructWithStaticStruct() {
    String rawInput =
            "0x0000000000000000000000000000000000000000000000000000000000000020"
                    + "0000000000000000000000000000000000000000000000000000000000000001"
                    + "000000000000000000000000000000000000000000000000000000000000000a"
                    + "0000000000000000000000000000000000000000000000000000000000000060"
                    + "0000000000000000000000000000000000000000000000000000000000000004"
                    + "6461746100000000000000000000000000000000000000000000000000000000";
    assertEquals(
            FunctionReturnDecoder.decode(
                    rawInput, AbiV2TestFixture.getQuxFunction.getOutputParameters()),
            Arrays.asList(
                    new AbiV2TestFixture.Qux(
                            new AbiV2TestFixture.Bar(BigInteger.ONE, BigInteger.TEN),
                            "data")));
}
/**
 * Extracts the port from a web interface URL of the form {@code host:port}.
 * Returns -1 when the URL is null, contains no colon, or the text after the last
 * colon is not a valid integer.
 *
 * @param webInterfaceUrl the URL to inspect, may be null
 * @return the parsed port, or -1 when it cannot be determined
 */
public static Integer parseRestBindPortFromWebInterfaceUrl(String webInterfaceUrl) {
    if (webInterfaceUrl == null) {
        return -1;
    }
    final int separatorIndex = webInterfaceUrl.lastIndexOf(':');
    if (separatorIndex < 0) {
        // No colon at all: no port component present.
        return -1;
    }
    try {
        return Integer.parseInt(webInterfaceUrl.substring(separatorIndex + 1));
    } catch (NumberFormatException ignored) {
        // Trailing segment is not numeric (e.g. a path or garbage).
        return -1;
    }
}
// An empty URL has no port component, so parsing must return the sentinel -1.
@Test
void testParseRestBindPortFromWebInterfaceUrlWithEmptyUrl() {
    assertThat(ResourceManagerUtils.parseRestBindPortFromWebInterfaceUrl("")).isEqualTo(-1);
}
/**
 * Computes the CRC-16/MCRF4XX checksum over the given byte range: polynomial
 * 0x1021, initial value 0xFFFF, input and output reflected, no final XOR.
 *
 * @param data   the source bytes
 * @param offset index of the first byte to include
 * @param length number of bytes to include
 * @return the 16-bit checksum as an int
 */
public static int MCRF4XX(@NonNull final byte[] data, final int offset, final int length) {
    return CRC(0x1021, 0xFFFF, data, offset, length, true, true, 0x0000);
}
// Standard CRC check value: CRC-16/MCRF4XX of the ASCII string "123456789" is 0x6F91.
@Test
public void MCRF4XX_123456789() {
    final byte[] data = "123456789".getBytes();
    assertEquals(0x6F91, CRC16.MCRF4XX(data, 0, 9));
}
/**
 * Builds the PROVIDED storage mapping from configuration. When provided storage is
 * disabled, all provided-related fields are set to null and the map stays inert;
 * otherwise the shared provided descriptor/storage and the alias map are created.
 */
ProvidedStorageMap(RwLock lock, BlockManager bm, Configuration conf) {
    storageId = conf.get(DFSConfigKeys.DFS_PROVIDER_STORAGEUUID,
        DFSConfigKeys.DFS_PROVIDER_STORAGEUUID_DEFAULT);

    providedEnabled = conf.getBoolean(
        DFSConfigKeys.DFS_NAMENODE_PROVIDED_ENABLED,
        DFSConfigKeys.DFS_NAMENODE_PROVIDED_ENABLED_DEFAULT);

    if (!providedEnabled) {
        // disable mapping
        aliasMap = null;
        providedDescriptor = null;
        providedStorageInfo = null;
        return;
    }

    // A single DatanodeStorage instance represents PROVIDED storage for all datanodes.
    DatanodeStorage ds = new DatanodeStorage(
        storageId, State.NORMAL, StorageType.PROVIDED);
    providedDescriptor = new ProvidedDescriptor();
    providedStorageInfo = providedDescriptor.createProvidedStorage(ds);
    this.defaultReplication = conf.getInt(DFSConfigKeys.DFS_REPLICATION_KEY,
        DFSConfigKeys.DFS_REPLICATION_DEFAULT);

    this.bm = bm;
    this.lock = lock;

    // load block reader into storage
    Class<? extends BlockAliasMap> aliasMapClass = conf.getClass(
        DFSConfigKeys.DFS_PROVIDED_ALIASMAP_CLASS,
        TextFileRegionAliasMap.class, BlockAliasMap.class);
    aliasMap = ReflectionUtils.newInstance(aliasMapClass, conf);

    LOG.info("Loaded alias map class: " +
        aliasMap.getClass() + " storage: " + providedStorageInfo);
}
// Exercises ProvidedStorageMap.getStorage: all datanodes reporting a PROVIDED storage
// share the single provided DatanodeStorageInfo, while DISK storages must be
// registered (injected) individually before the map returns them.
@Test
public void testProvidedStorageMap() throws IOException {
    ProvidedStorageMap providedMap = new ProvidedStorageMap(
        nameSystemLock, bm, conf);
    DatanodeStorageInfo providedMapStorage =
        providedMap.getProvidedStorageInfo();
    // the provided storage cannot be null
    assertNotNull(providedMapStorage);

    // create a datanode
    DatanodeDescriptor dn1 = createDatanodeDescriptor(5000);

    // associate two storages to the datanode
    DatanodeStorage dn1ProvidedStorage = new DatanodeStorage(
        providedStorageID,
        DatanodeStorage.State.NORMAL,
        StorageType.PROVIDED);
    DatanodeStorage dn1DiskStorage = new DatanodeStorage(
        "sid-1", DatanodeStorage.State.NORMAL, StorageType.DISK);

    when(nameSystemLock.hasWriteLock()).thenReturn(true);
    DatanodeStorageInfo dns1Provided =
        providedMap.getStorage(dn1, dn1ProvidedStorage);
    DatanodeStorageInfo dns1Disk = providedMap.getStorage(dn1, dn1DiskStorage);

    assertTrue("The provided storages should be equal",
        dns1Provided == providedMapStorage);
    assertTrue("Disk storage has not yet been registered with block manager",
        dns1Disk == null);
    // add the disk storage to the datanode.
    DatanodeStorageInfo dnsDisk = new DatanodeStorageInfo(dn1, dn1DiskStorage);
    dn1.injectStorage(dnsDisk);
    assertTrue("Disk storage must match the injected storage info",
        dnsDisk == providedMap.getStorage(dn1, dn1DiskStorage));

    // create a 2nd datanode
    DatanodeDescriptor dn2 = createDatanodeDescriptor(5010);
    // associate a provided storage with the datanode
    DatanodeStorage dn2ProvidedStorage = new DatanodeStorage(
        providedStorageID,
        DatanodeStorage.State.NORMAL,
        StorageType.PROVIDED);

    DatanodeStorageInfo dns2Provided = providedMap.getStorage(
        dn2, dn2ProvidedStorage);
    assertTrue("The provided storages should be equal",
        dns2Provided == providedMapStorage);
    assertTrue("The DatanodeDescriptor should contain the provided storage",
        dn2.getStorageInfo(providedStorageID) == providedMapStorage);
}
/**
 * Parses a date-range string of the form "from-to" or a single date into a
 * DateRange. Returns null for blank input, malformed input (more than one '-'
 * separator segment pair), or a range the DateRange constructor rejects.
 *
 * @throws ParseException if an individual date string cannot be parsed
 */
DateRange getRange(String dateRangeString) throws ParseException {
    if (dateRangeString == null || dateRangeString.isEmpty())
        return null;

    String[] dateArr = dateRangeString.split("-");
    if (dateArr.length > 2 || dateArr.length < 1)
        return null;
        // throw new IllegalArgumentException("Only Strings containing two Date separated by a '-' or a single Date are allowed");

    ParsedCalendar from = parseDateString(dateArr[0]);
    ParsedCalendar to;
    if (dateArr.length == 2)
        to = parseDateString(dateArr[1]);
    else
        // A single date means a range of that date to itself; re-parse rather than clone.
        // faster and safe?
        // to = new ParsedCalendar(from.parseType, (Calendar) from.parsedCalendar.clone());
        to = parseDateString(dateArr[0]);

    try {
        return new DateRange(from, to);
    } catch (IllegalArgumentException ex) {
        // An inverted or otherwise invalid range is reported as "no range".
        return null;
    }
}
// Parses "2014 Aug 10-2014 Aug 14" and checks the range is inclusive on both ends:
// days inside [10, 14] match, days immediately outside do not.
@Test
public void testParseSimpleDateRange() throws ParseException {
    DateRange dateRange = dateRangeParser.getRange("2014 Aug 10-2014 Aug 14");
    assertFalse(dateRange.isInRange(getCalendar(2014, Calendar.AUGUST, 9)));
    assertTrue(dateRange.isInRange(getCalendar(2014, Calendar.AUGUST, 10)));
    assertTrue(dateRange.isInRange(getCalendar(2014, Calendar.AUGUST, 12)));
    assertTrue(dateRange.isInRange(getCalendar(2014, Calendar.AUGUST, 14)));
    assertFalse(dateRange.isInRange(getCalendar(2014, Calendar.AUGUST, 15)));
}
/**
 * Builds a filtered top aggregation containing a terms sub-aggregation for the given
 * top-aggregation definition, plus any additional sub-aggregations supplied by the
 * caller.
 *
 * @param numberOfTerms number of terms to return, or null for the default
 */
public FilterAggregationBuilder buildTermTopAggregation(
    String topAggregationName,
    TopAggregationDefinition<?> topAggregation,
    @Nullable Integer numberOfTerms,
    Consumer<BoolQueryBuilder> extraFilters,
    Consumer<FilterAggregationBuilder> otherSubAggregations
) {
    // Attach the terms sub-aggregation first, then let the caller add theirs.
    Consumer<FilterAggregationBuilder> termsSubAggregation =
        agg -> agg.subAggregation(
            subAggregationHelper.buildTermsAggregation(topAggregationName, topAggregation, numberOfTerms));
    return buildTopAggregation(
        topAggregationName, topAggregation, extraFilters,
        termsSubAggregation.andThen(otherSubAggregations));
}
// When the FiltersComputer has no filter for the target top-aggregation, the built
// aggregation must carry an empty bool filter — not the filter of another aggregation.
@Test
public void buildTermTopAggregation_has_empty_filter_when_FiltersComputer_returns_empty_for_TopAggregation() {
    SimpleFieldTopAggregationDefinition topAggregation = new SimpleFieldTopAggregationDefinition("bar", false);
    SimpleFieldTopAggregationDefinition otherTopAggregation = new SimpleFieldTopAggregationDefinition("acme", false);
    BoolQueryBuilder otherFilter = boolQuery();
    when(filtersComputer.getTopAggregationFilter(topAggregation)).thenReturn(Optional.empty());
    when(filtersComputer.getTopAggregationFilter(otherTopAggregation)).thenReturn(Optional.of(otherFilter));
    String topAggregationName = randomAlphabetic(10);
    TermsAggregationBuilder termSubAgg = AggregationBuilders.terms("foo");
    when(subAggregationHelper.buildTermsAggregation(topAggregationName, topAggregation, null)).thenReturn(termSubAgg);

    FilterAggregationBuilder aggregationBuilder = underTest.buildTermTopAggregation(
        topAggregationName, topAggregation, null, NO_EXTRA_FILTER, NO_OTHER_SUBAGGREGATION);

    assertThat(aggregationBuilder.getName()).isEqualTo(topAggregationName);
    assertThat(aggregationBuilder.getFilter()).isEqualTo(boolQuery()).isNotSameAs(otherFilter);
}
/**
 * Fetches Kafka offsets for a batch of proxy requests via the shared proxy API.
 *
 * @throws UserException if the proxy call fails
 */
public static List<PKafkaOffsetProxyResult> getBatchOffsets(List<PKafkaOffsetProxyRequest> requests) throws UserException {
    return PROXY_API.getBatchOffsets(requests);
}
// When the RPC client fails with "Unable to validate object", the proxy API must
// surface a LoadException reporting that the BE is not alive.
@Test
public void testGetInfoValidateObjectException() throws UserException, RpcException {
    Backend backend = new Backend(1L, "127.0.0.1", 9050);
    backend.setBeRpcPort(8060);
    backend.setAlive(true);
    new Expectations() {
        {
            service.getBackendOrComputeNode(anyLong);
            result = backend;

            client.getInfo((TNetworkAddress) any, (PProxyRequest) any);
            result = new RpcException("Unable to validate object");
        }
    };

    KafkaUtil.ProxyAPI api = new KafkaUtil.ProxyAPI();
    LoadException e = Assert.assertThrows(LoadException.class, () -> api.getBatchOffsets(null));
    Assert.assertTrue(e.getMessage().contains("err: BE is not alive"));
}
/**
 * Sends an HTTP GET request for {@code url} with the given request configuration.
 *
 * @throws IOException if the call fails
 */
public Response get(URL url, Request request) throws IOException {
    return call(HttpMethods.GET, url, request);
}
// When the insecure client strips the Authorization header for a plain-HTTP call,
// the caller's original Request object must keep its Authorization value intact.
@Test
public void testGet_originalRequestHeaderUntouchedWhenClearingHeader() throws IOException {
    FailoverHttpClient insecureHttpClient = newHttpClient(true, false);
    Request request = fakeRequest(null);
    try (Response response = insecureHttpClient.get(new URL("http://plain.http"), request)) {
        // intentionally empty
    }

    Assert.assertEquals(1, urlCaptor.getAllValues().size());
    Assert.assertEquals(1, httpHeadersCaptor.getAllValues().size());
    // The header sent on the wire was cleared...
    Assert.assertNull(httpHeadersCaptor.getValue().getAuthorization());
    // ...but the original request still carries it.
    Assert.assertEquals(
        "Basic ZmFrZS11c2VybmFtZTpmYWtlLXNlY3JldA==", request.getHeaders().getAuthorization());
}
/**
 * Deprecated overload that accepts the old-style ProcessorSupplier; wraps it in the
 * new typed API by adapting the raw processor it produces, then delegates to the
 * non-deprecated addProcessor.
 *
 * @deprecated use the {@code api.ProcessorSupplier} overload instead
 */
@SuppressWarnings("rawtypes")
@Deprecated
public synchronized Topology addProcessor(final String name,
                                          final org.apache.kafka.streams.processor.ProcessorSupplier supplier,
                                          final String... parentNames) {
    return addProcessor(
        name,
        new ProcessorSupplier<Object, Object, Object, Object>() {
            @Override
            public Set<StoreBuilder<?>> stores() {
                // Preserve any state stores the legacy supplier declares.
                return supplier.stores();
            }

            @Override
            public org.apache.kafka.streams.processor.api.Processor<Object, Object, Object, Object> get() {
                return ProcessorAdapter.adaptRaw(supplier.get());
            }
        },
        parentNames
    );
}
// Adding a processor with a null name must be rejected with NullPointerException.
@Test
public void shouldNotAllowNullNameWhenAddingProcessor() {
    assertThrows(NullPointerException.class, () -> topology.addProcessor(null, () -> new MockApiProcessorSupplier<>().get()));
}
/**
 * Asserts that the subject is equal to {@code expected}; delegates to the shared
 * standard equality check.
 */
public void isEqualTo(@Nullable Object expected) {
    standardIsEqualTo(expected);
}
// isEqualTo(null) against an object whose equals() throws on null must still produce
// a proper assertion failure rather than propagating the broken equals' exception.
@Test
public void isEqualToNullBadEqualsImplementation() {
    expectFailure.whenTesting().that(new ThrowsOnEqualsNull()).isEqualTo(null);
}
/**
 * Generates a unique state-store name: the given prefix followed by the shared
 * store-name marker and a zero-padded, monotonically increasing counter.
 */
@Override
public String newStoreName(final String prefix) {
    final String numberedSuffix =
        String.format(KTableImpl.STATE_STORE_NAME + "%010d", index.getAndIncrement());
    return prefix + numberedSuffix;
}
// Store names combine the prefix with a per-builder counter; a fresh builder starts
// its counter again at zero.
@Test
public void testNewStoreName() {
    assertEquals("X-STATE-STORE-0000000000", builder.newStoreName("X-"));
    assertEquals("Y-STATE-STORE-0000000001", builder.newStoreName("Y-"));
    assertEquals("Z-STATE-STORE-0000000002", builder.newStoreName("Z-"));
    final InternalStreamsBuilder newBuilder = new InternalStreamsBuilder(new InternalTopologyBuilder());
    assertEquals("X-STATE-STORE-0000000000", newBuilder.newStoreName("X-"));
    assertEquals("Y-STATE-STORE-0000000001", newBuilder.newStoreName("Y-"));
    assertEquals("Z-STATE-STORE-0000000002", newBuilder.newStoreName("Z-"));
}
/**
 * Performs a database-backed extension search: loads all active extensions, applies
 * the option filters (excluded namespaces, target platform, category, free text),
 * sorts by the requested criterion, applies paging and wraps the page as
 * Elasticsearch-style SearchHits. Results are cached under CACHE_DATABASE_SEARCH and
 * the average-review-rating cache is evicted on every call.
 */
@Transactional
@Cacheable(CACHE_DATABASE_SEARCH)
@CacheEvict(value = CACHE_AVERAGE_REVIEW_RATING, allEntries = true)
public SearchHits<ExtensionSearch> search(ISearchService.Options options) {
    // grab all extensions
    var matchingExtensions = repositories.findAllActiveExtensions();

    // no extensions in the database
    if (matchingExtensions.isEmpty()) {
        return new SearchHitsImpl<>(0,TotalHitsRelation.OFF, 0f, null, null, Collections.emptyList(), null, null);
    }

    // exlude namespaces
    if(options.namespacesToExclude != null) {
        for(var namespaceToExclude : options.namespacesToExclude) {
            matchingExtensions = matchingExtensions.filter(extension -> !extension.getNamespace().getName().equals(namespaceToExclude));
        }
    }

    // filter target platform
    if(TargetPlatform.isValid(options.targetPlatform)) {
        matchingExtensions = matchingExtensions.filter(extension -> extension.getVersions().stream().anyMatch(ev -> ev.getTargetPlatform().equals(options.targetPlatform)));
    }

    // filter category
    if (options.category != null) {
        matchingExtensions = matchingExtensions.filter(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            return latest.getCategories().stream().anyMatch(category -> category.equalsIgnoreCase(options.category));
        });
    }

    // filter text (matches name, namespace, description or display name, case-insensitively)
    if (options.queryString != null) {
        matchingExtensions = matchingExtensions.filter(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            return extension.getName().toLowerCase().contains(options.queryString.toLowerCase())
                    || extension.getNamespace().getName().contains(options.queryString.toLowerCase())
                    || (latest.getDescription() != null && latest.getDescription()
                            .toLowerCase().contains(options.queryString.toLowerCase()))
                    || (latest.getDisplayName() != null && latest.getDisplayName()
                            .toLowerCase().contains(options.queryString.toLowerCase()));
        });
    }

    // need to perform the sortBy ()
    // 'relevance' | 'timestamp' | 'rating' | 'downloadCount';
    Stream<ExtensionSearch> searchEntries;
    if("relevance".equals(options.sortBy) || "rating".equals(options.sortBy)) {
        // relevance/rating scoring needs aggregate stats over the repository
        var searchStats = new SearchStats(repositories);
        searchEntries = matchingExtensions.stream().map(extension -> relevanceService.toSearchEntry(extension, searchStats));
    } else {
        searchEntries = matchingExtensions.stream().map(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            var targetPlatforms = repositories.findExtensionTargetPlatforms(extension);
            return extension.toSearch(latest, targetPlatforms);
        });
    }

    // Comparator per supported sort criterion; unknown criteria leave the order as-is.
    var comparators = new HashMap<>(Map.of(
            "relevance", new RelevanceComparator(),
            "timestamp", new TimestampComparator(),
            "rating", new RatingComparator(),
            "downloadCount", new DownloadedCountComparator()
    ));

    var comparator = comparators.get(options.sortBy);
    if(comparator != null) {
        searchEntries = searchEntries.sorted(comparator);
    }

    var sortedExtensions = searchEntries.collect(Collectors.toList());

    // need to do sortOrder
    // 'asc' | 'desc';
    if ("desc".equals(options.sortOrder)) {
        // reverse the order
        Collections.reverse(sortedExtensions);
    }

    // Paging
    var totalHits = sortedExtensions.size();
    var endIndex = Math.min(sortedExtensions.size(), options.requestedOffset + options.requestedSize);
    var startIndex = Math.min(endIndex, options.requestedOffset);
    sortedExtensions = sortedExtensions.subList(startIndex, endIndex);

    List<SearchHit<ExtensionSearch>> searchHits;
    if (sortedExtensions.isEmpty()) {
        searchHits = Collections.emptyList();
    } else {
        // client is interested only in the extension IDs
        searchHits = sortedExtensions.stream().map(extensionSearch -> new SearchHit<>(null, null, null, 0.0f, null, null, null, null, null, null, extensionSearch)).collect(Collectors.toList());
    }

    return new SearchHitsImpl<>(totalHits, TotalHitsRelation.OFF, 0f, null, null, searchHits, null, null);
}
// Seven matching extensions but a page size of five: total hits reports 7 while the
// returned page contains only the first five extensions in order.
@Test
public void testSimplePageSize() {
    var ext1 = mockExtension("ext1", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext2 = mockExtension("ext2", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext3 = mockExtension("ext3", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext4 = mockExtension("ext4", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext5 = mockExtension("ext5", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext6 = mockExtension("ext6", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    var ext7 = mockExtension("ext7", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    Mockito.when(repositories.findAllActiveExtensions()).thenReturn(Streamable.of(List.of(ext1, ext2, ext3, ext4, ext5, ext6, ext7)));

    var pageSizeItems = 5;
    var searchOptions = new ISearchService.Options(null, null, TargetPlatform.NAME_UNIVERSAL, pageSizeItems, 0, null, null, false);
    var result = search.search(searchOptions);
    // 7 total hits
    assertThat(result.getTotalHits()).isEqualTo(7);
    // but as we limit the page size it should only contains 5
    var hits = result.getSearchHits();
    assertThat(hits.size()).isEqualTo(pageSizeItems);
    assertThat(getIdFromExtensionHits(hits, 0)).isEqualTo(getIdFromExtensionName("ext1"));
    assertThat(getIdFromExtensionHits(hits, 1)).isEqualTo(getIdFromExtensionName("ext2"));
    assertThat(getIdFromExtensionHits(hits, 2)).isEqualTo(getIdFromExtensionName("ext3"));
    assertThat(getIdFromExtensionHits(hits, 3)).isEqualTo(getIdFromExtensionName("ext4"));
    assertThat(getIdFromExtensionHits(hits, 4)).isEqualTo(getIdFromExtensionName("ext5"));
}
/**
 * Returns a usable connection to the given namenode for the given user/protocol,
 * lazily creating the per-(user, namenode, protocol) pool under a write lock
 * (double-checked) on first use. If no usable connection is available, the pool is
 * queued for asynchronous connection creation; closed connections are discarded and
 * null is returned so the caller can retry.
 */
public ConnectionContext getConnection(UserGroupInformation ugi,
    String nnAddress, Class<?> protocol, String nsId) throws IOException {

    // Check if the manager is shutdown
    if (!this.running) {
        LOG.error(
            "Cannot get a connection to {} because the manager isn't running",
            nnAddress);
        return null;
    }

    // Try to get the pool if created
    ConnectionPoolId connectionId =
        new ConnectionPoolId(ugi, nnAddress, protocol);
    ConnectionPool pool = null;
    readLock.lock();
    try {
        pool = this.pools.get(connectionId);
    } finally {
        readLock.unlock();
    }

    // Create the pool if not created before (re-check under the write lock to
    // avoid racing with a concurrent creator)
    if (pool == null) {
        writeLock.lock();
        try {
            pool = this.pools.get(connectionId);
            if (pool == null) {
                pool = new ConnectionPool(
                    this.conf, nnAddress, ugi, this.minSize, this.maxSize,
                    this.minActiveRatio, protocol,
                    new PoolAlignmentContext(this.routerStateIdContext, nsId));
                this.pools.put(connectionId, pool);
            }
        } finally {
            writeLock.unlock();
        }
    }

    // Propagate the client's observed state id so reads see its own writes.
    long clientStateId = RouterStateIdContext.getClientStateIdFromCurrentCall(nsId);
    pool.getPoolAlignmentContext().advanceClientStateId(clientStateId);

    ConnectionContext conn = pool.getConnection();

    // Add a new connection to the pool if it wasn't usable
    if (conn == null || !conn.isUsable()) {
        if (!this.creatorQueue.contains(pool) && !this.creatorQueue.offer(pool)) {
            LOG.error("Cannot add more than {} connections at the same time",
                this.creatorQueueMaxSize);
        }
    }

    if (conn != null && conn.isClosed()) {
        LOG.error("We got a closed connection from {}", pool);
        conn = null;
    }

    return conn;
}
// Even with out-of-range client indices (negative or beyond pool size), the pool
// must always hand back a usable, non-null connection.
@Test
public void testValidClientIndex() throws Exception {
    ConnectionPool pool = new ConnectionPool(conf, TEST_NN_ADDRESS, TEST_USER1, 2, 2, 0.5f, ClientProtocol.class, null);
    for(int i = -3; i <= 3; i++) {
        pool.getClientIndex().set(i);
        ConnectionContext conn = pool.getConnection();
        assertNotNull(conn);
        assertTrue(conn.isUsable());
    }
}
/**
 * Produces a "sane" fallback result for a failed MySQL statement so the client
 * connection stays healthy: parse errors yield no result; SELECTs and SHOW-style
 * statements get synthetic query results; SET statements get an empty update result.
 */
@Override
public Optional<ExecuteResult> getSaneQueryResult(final SQLStatement sqlStatement, final SQLException ex) {
    if (ex.getErrorCode() == ER_PARSE_ERROR) {
        // A parse error cannot be masked with a fake result.
        return Optional.empty();
    }
    if (sqlStatement instanceof SelectStatement) {
        return createQueryResult((SelectStatement) sqlStatement);
    }
    if (sqlStatement instanceof MySQLShowOtherStatement) {
        return Optional.of(createQueryResult());
    }
    return sqlStatement instanceof MySQLSetStatement
            ? Optional.of(new UpdateResult(0, 0L))
            : Optional.empty();
}
// A SELECT with a FROM clause gets no synthetic sane result (Optional.empty).
@Test
void assertGetSaneQueryResultForSelectStatementWithFrom() {
    MySQLSelectStatement selectStatement = new MySQLSelectStatement();
    selectStatement.setFrom(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t"))));
    assertThat(new MySQLDialectSaneQueryResultEngine().getSaneQueryResult(selectStatement, new SQLException("")), is(Optional.empty()));
}
// Runs the CLI with the given arguments via JCommander and returns the exit status.
// Package-private and exposed for tests.
@VisibleForTesting
int execute(String[] args) {
    return commander.execute(args);
}
// End-to-end CLI check: generate a base64 secret key, create a JWT with custom
// headers via the CLI, then parse the token and verify the headers survived.
// System.out is captured to read the CLI output and restored in finally.
@Test
public void testCreateToken() {
    PrintStream oldStream = System.out;
    try {
        ByteArrayOutputStream baoStream = new ByteArrayOutputStream();
        System.setOut(new PrintStream(baoStream));
        new TokensCliUtils().execute(new String[]{"create-secret-key", "--base64"});
        String secretKey = baoStream.toString();
        baoStream.reset();
        String[] command = {"create", "--secret-key",
                "data:;base64," + secretKey,
                "--subject", "test",
                "--headers", "kid=test",
                "--headers", "my-k=my-v"
        };
        new TokensCliUtils().execute(command);
        String token = baoStream.toString();
        Jwt<?, ?> jwt = Jwts.parserBuilder()
                .setSigningKey(Decoders.BASE64.decode(secretKey))
                .build()
                .parseClaimsJws(token);
        JwsHeader header = (JwsHeader) jwt.getHeader();
        String keyId = header.getKeyId();
        assertEquals(keyId, "test");
        assertEquals(header.get("my-k"), "my-v");
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        System.setOut(oldStream);
    }
}
/**
 * Asserts that the subject equals at least one of the given values; collects the
 * varargs into a single list and delegates to isIn.
 */
public void isAnyOf(
    @Nullable Object first, @Nullable Object second, @Nullable Object @Nullable ... rest) {
    isIn(accumulate(first, second, rest));
}
// A non-null subject matching one candidate passes even when the candidate list
// also contains null.
@Test
public void isAnyOfNonnullInListWithNull() {
    assertThat("b").isAnyOf("a", "b", (String) null);
}
/**
 * Unsupported: this collection cannot be modified.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public void clear() {
    throw new UnsupportedOperationException();
}
// clear() on the immutable set must throw UnsupportedOperationException.
@Test(expected = UnsupportedOperationException.class)
public void test_clear() {
    set.clear();
}
/**
 * Renders comment text, replacing every regex match with a dynamic link built from
 * the configured link template. Blank text renders as empty; if no regex/link is
 * configured the text is only HTML-escaped; an invalid regex is logged and the raw
 * text returned unchanged.
 */
@Override
public String render(String text) {
    if (StringUtils.isBlank(text)) {
        return "";
    }
    if (regex.isEmpty() || link.isEmpty()) {
        // Nothing to linkify: escape and return as-is.
        Comment comment = new Comment();
        comment.escapeAndAdd(text);
        return comment.render();
    }

    try {
        Matcher matcher = Pattern.compile(regex).matcher(text);
        int start = 0;
        Comment comment = new Comment();
        // Alternate between escaped plain-text segments and generated links.
        while (hasMatch(matcher)) {
            comment.escapeAndAdd(text.substring(start, matcher.start()));
            comment.add(dynamicLink(matcher));
            start = matcher.end();
        }
        comment.escapeAndAdd(text.substring(start));
        return comment.render();
    } catch (PatternSyntaxException e) {
        LOGGER.warn("Illegal regular expression: {} - {}", regex, e.getMessage());
    }
    return text;
}
// If the regex has alternatives/groups but none materialize a usable match, the
// text is rendered unchanged (no link substitution).
@Test
public void shouldRenderStringWithoutSpecifiedRegexAndLinkIfHasGroupsAndNoneMaterialize() throws Exception {
    String link = "http://mingle05/projects/cce/cards/${ID}";
    String regex = "evo-(\\d+)|evo-";
    trackingTool = new DefaultCommentRenderer(link, regex);

    String result = trackingTool.render("evo-abc: checkin message");
    assertThat(result, is("evo-abc: checkin message"));
}
/**
 * Writes one byte at the current stream position (or at the current end of file in
 * append mode), advancing the position and refreshing the file's last-modified time.
 * The write is performed under the file's write lock.
 *
 * @throws IOException if the stream is closed
 */
@Override
public synchronized void write(int b) throws IOException {
    checkNotClosed();
    file.writeLock().lock();
    try {
        if (append) {
            // In append mode every write lands at the current end of the file.
            pos = file.sizeWithoutLocking();
        }
        file.write(pos++, (byte) b);

        file.setLastModifiedTime(fileSystemState.now());
    } finally {
        file.writeLock().unlock();
    }
}
// Non-append stream positioned at 0: writing a 3-byte slice (offset 1) of the array
// overwrites the first three stored bytes and leaves the rest intact.
@Test
public void testWrite_partialArray_overwriting() throws IOException {
    JimfsOutputStream out = newOutputStream(false);
    addBytesToStore(out, 9, 8, 7, 6, 5, 4, 3);
    out.write(new byte[] {1, 2, 3, 4, 5, 6}, 1, 3);
    assertStoreContains(out, 2, 3, 4, 6, 5, 4, 3);
}
/**
 * Builds a GcsUri for the given bucket and path. Relative paths are normalized to
 * absolute first, because the underlying URI requires an absolute path component.
 */
public static GcsUri createGcsUri(String bucket, String path) {
    return new GcsUri(createUri(bucket, toAbsolutePath(path)));
}
@Test
public void testNonAbsolutePath() {
    // Relative path must be normalized to absolute path for gcs uri
    // This is because the URI must have an absolute path component,
    // ex. new URI("gs", "bucket", "/dir/file", null, null)
    GcsUri gcsUri = createGcsUri("bucket", "dir/file");
    assertEquals(gcsUri, createGcsUri("bucket", "/dir/file"));
}
/**
 * Serializes a delegation token to its JSON string form, keyed by the Token class.
 *
 * @throws IOException if encoding the token fails
 */
public static String toJsonString(final Token<? extends TokenIdentifier> token
    ) throws IOException {
    return toJsonString(Token.class, toJsonMap(token));
}
// Builds an AclStatus (owner, group, sticky bit, two ACL entries) and checks its
// JSON serialization matches the expected wire format exactly.
@Test
public void testToJsonFromAclStatus() {
    String jsonString = "{\"AclStatus\":{\"entries\":[\"user:user1:rwx\",\"group::rw-\"],\"group\":\"supergroup\",\"owner\":\"testuser\",\"stickyBit\":false}}";
    AclStatus.Builder aclStatusBuilder = new AclStatus.Builder();
    aclStatusBuilder.owner("testuser");
    aclStatusBuilder.group("supergroup");
    aclStatusBuilder.stickyBit(false);

    List<AclEntry> aclSpec =
        Lists.newArrayList(aclEntry(ACCESS, USER,"user1", ALL),
            aclEntry(ACCESS, GROUP, READ_WRITE));

    aclStatusBuilder.addEntries(aclSpec);
    Assert.assertEquals(jsonString, JsonUtil.toJsonString(aclStatusBuilder.build()));
}
/**
 * Locates and parses all schema sources from the given paths (directories, JARs or
 * individual files), grouping files by extension and dispatching each group to the
 * parser registered for that extension.
 *
 * @param rawSources file, directory or JAR paths to scan
 * @return the combined parse result across all file formats
 * @throws IOException if reading any source fails
 */
public DataSchemaParser.ParseResult parseSources(String[] rawSources) throws IOException {
    Set<String> fileExtensions = _parserByFileExtension.keySet();
    Map<String, List<String>> byExtension = new HashMap<>(fileExtensions.size());
    for (String fileExtension : fileExtensions) {
        byExtension.put(fileExtension, new ArrayList<>());
    }

    // Sort so the parse order is deterministic regardless of the caller's ordering.
    String[] sortedSources = Arrays.copyOf(rawSources, rawSources.length);
    Arrays.sort(sortedSources);

    // Extract all schema files from the given source paths and group by extension (JARs are handled specially)
    for (String source : sortedSources) {
        final File sourceFile = new File(source);
        if (sourceFile.exists()) {
            if (sourceFile.isDirectory()) {
                // Source path is a directory, so recursively find all schema files contained therein
                final FileExtensionFilter filter = new FileExtensionFilter(fileExtensions);
                final List<File> sourceFilesInDirectory = FileUtil.listFiles(sourceFile, filter);
                // Add each schema to the corresponding extension's source list
                for (File f : sourceFilesInDirectory) {
                    String ext = FilenameUtils.getExtension(f.getName());
                    List<String> filesForExtension = byExtension.get(ext);
                    if (filesForExtension != null) {
                        filesForExtension.add(f.getAbsolutePath());
                    }
                }
            } else if (sourceFile.getName().endsWith(".jar")) {
                // Source path is a JAR, so add it to each extension's source list.
                // The file-based parser for each extension will extract the JAR and process only files matching the extension
                byExtension.values().forEach(files -> files.add(sourceFile.getAbsolutePath()));
            } else {
                // Source path is a non-JAR file, so add it to the corresponding extension's source list
                String ext = FilenameUtils.getExtension(sourceFile.getName());
                List<String> filesForExtension = byExtension.get(ext);
                if (filesForExtension != null) {
                    filesForExtension.add(sourceFile.getAbsolutePath());
                }
            }
        }
    }

    // Parse all schema files and JARs using the appropriate file format parser
    final ParseResult result = new ParseResult();
    for (Map.Entry<String, List<String>> entry : byExtension.entrySet()) {
        String ext = entry.getKey();
        List<String> files = entry.getValue();
        _parserByFileExtension.get(ext).parseSources(files.toArray(new String[files.size()]), result);
    }

    return result;
}
// Builds a JAR containing a PDL schema under the pegasus path plus a translated PDSC
// copy under "legacyPegasusSchemas"; the parser must pick up only the PDL schema
// (and its inline record) and ignore the legacy translated copy.
@Test
public void testParseFromJarFileWithTranslatedSchemas() throws Exception {
    String tempDirectoryPath = _tempDir.getAbsolutePath();
    String jarFile = tempDirectoryPath + "/testWithTranslatedSchemas.jar";
    Map<String, String> jarFiles = new HashMap<>();
    // Add the source PDL file to the pegasus directory
    String pdlFile = TEST_RESOURCES_DIR + FS + "WithoutResolverExamplePdl.pdl";
    String pdlJarDestination = SCHEMA_PATH_PREFIX + "WithoutResolverExamplePdl.pdl";
    jarFiles.put(pdlFile, pdlJarDestination);
    // Translated PDSC files go to "legacyPegasusSchemas", which should be ignored by parser.
    String translatedPegasusFile = TEST_RESOURCES_DIR + FS + "WithoutResolverExample.pdsc";
    String translatedFileDestination = "legacyPegasusSchemas/WithoutResolverExample.pdsc";
    jarFiles.put(translatedPegasusFile, translatedFileDestination);
    createTempJarFile(jarFiles, jarFile);

    DataSchemaParser parser = new DataSchemaParser.Builder(tempDirectoryPath).build();
    DataSchemaParser.ParseResult parseResult = parser.parseSources(new String[]{jarFile});
    // Two schemas, WithoutResolverExample and InlineRecord (defined inline in WithoutResolverExample)
    assertEquals(parseResult.getSchemaAndLocations().size(), 2);
    Set<String> schemaNames = parseResult.getSchemaAndLocations().keySet().stream().map(DataSchema::getUnionMemberKey).collect(
        Collectors.toSet());
    assertTrue(schemaNames.contains("WithoutResolverExamplePdl"));
    assertTrue(schemaNames.contains("InlineRecord"));
    parseResult.getSchemaAndLocations().values().forEach(loc -> assertEquals(loc.getSourceFile().getAbsolutePath(), jarFile));
}
/**
 * @return this action's unique identifier, "output_message"
 */
@Override
public String getName() {
    return "output_message";
}
// The action must report its fixed identifier "output_message".
@Test
void testGetName() {
    assertEquals("output_message", outputMessageAction.getName());
}
/**
 * Asserts that the actual multimap contains every entry of {@code expectedMultimap}.
 * On success returns an Ordered for an optional in-order check (extra entries
 * allowed); on failure reports the missing entries and fails immediately.
 */
@CanIgnoreReturnValue
public final Ordered containsAtLeastEntriesIn(Multimap<?, ?> expectedMultimap) {
    checkNotNull(expectedMultimap, "expectedMultimap");
    checkNotNull(actual);
    // Entries expected but absent from the actual multimap.
    ListMultimap<?, ?> missing = difference(expectedMultimap, actual);

    // TODO(kak): Possible enhancement: Include "[1 copy]" if the element does appear in
    // the subject but not enough times. Similarly for unexpected extra items.
    if (!missing.isEmpty()) {
        failWithActual(
            fact("missing", countDuplicatesMultimap(annotateEmptyStringsMultimap(missing))),
            simpleFact("---"),
            fact("expected to contain at least", annotateEmptyStringsMultimap(expectedMultimap)));
        return ALREADY_FAILED;
    }
    return new MultimapInOrder(/* allowUnexpected = */ true, expectedMultimap);
}
// Any multimap trivially contains all entries of an empty multimap, in order.
@Test
public void containsAtLeastEmpty() {
    ImmutableListMultimap<Integer, String> actual = ImmutableListMultimap.of(3, "one");
    ImmutableSetMultimap<Integer, String> expected = ImmutableSetMultimap.of();
    assertThat(actual).containsAtLeastEntriesIn(expected);
    assertThat(actual).containsAtLeastEntriesIn(expected).inOrder();
}
/**
 * Renders an item to a BufferedImage sprite. Returns null when the item could not
 * be rendered to pixels.
 *
 * @throws IOException if loading the required assets fails
 */
public static BufferedImage createSprite(ItemProvider itemProvider, ModelProvider modelProvider,
                                         SpriteProvider spriteProvider, TextureProvider textureProvider,
                                         int itemId, int quantity, int border, int shadowColor,
                                         boolean noted) throws IOException {
    // First render the item to raw sprite pixels, then convert to an image.
    SpritePixels pixels = createSpritePixels(itemProvider, modelProvider, spriteProvider,
        textureProvider, itemId, quantity, border, shadowColor, noted);
    if (pixels == null) {
        return null;
    }
    return pixels.toBufferedImage();
}
// Manual/ignored bulk dump: loads the full cache store, renders every named item to
// a sprite and writes it as PNG into a temp folder. Individual item failures are
// logged and skipped so one bad item does not abort the dump.
@Test
@Ignore
public void test() throws IOException {
    File base = StoreLocation.LOCATION, outDir = folder.newFolder();
    int count = 0;
    try (Store store = new Store(base)) {
        store.load();
        ItemManager itemManager = new ItemManager(store);
        itemManager.load();
        itemManager.link();
        // Loads model definitions straight from the MODELS index on demand.
        ModelProvider modelProvider = new ModelProvider() {
            @Override
            public ModelDefinition provide(int modelId) throws IOException {
                Index models = store.getIndex(IndexType.MODELS);
                Archive archive = models.getArchive(modelId);
                byte[] data = archive.decompress(store.getStorage().loadArchive(archive));
                ModelDefinition inventoryModel = new ModelLoader().load(modelId, data);
                return inventoryModel;
            }
        };
        SpriteManager spriteManager = new SpriteManager(store);
        spriteManager.load();
        TextureManager textureManager = new TextureManager(store);
        textureManager.load();
        for (ItemDefinition itemDef : itemManager.getItems()) {
            // Skip placeholder/unnamed items.
            if (itemDef.name == null || itemDef.name.equalsIgnoreCase("null")) {
                continue;
            }
            try {
                BufferedImage sprite = ItemSpriteFactory.createSprite(itemManager, modelProvider, spriteManager, textureManager, itemDef.id, 1, 1, 3153952, false);
                File out = new File(outDir, itemDef.id + ".png");
                BufferedImage img = sprite;
                ImageIO.write(img, "PNG", out);
                ++count;
            } catch (Exception ex) {
                log.warn("error dumping item {}", itemDef.id, ex);
            }
        }
    }
    log.info("Dumped {} item images to {}", count, outDir);
}
/**
 * Resolves the failover-strategy factory named by
 * {@link JobManagerOptions#EXECUTION_FAILOVER_STRATEGY} (case-insensitive).
 *
 * @throws IllegalConfigurationException if the configured name is unknown
 */
public static FailoverStrategy.Factory loadFailoverStrategyFactory(final Configuration config) {
    checkNotNull(config);
    final String strategyParam = config.get(JobManagerOptions.EXECUTION_FAILOVER_STRATEGY);
    final String normalized = strategyParam.toLowerCase();
    if (FULL_RESTART_STRATEGY_NAME.equals(normalized)) {
        return new RestartAllFailoverStrategy.Factory();
    }
    if (PIPELINED_REGION_RESTART_STRATEGY_NAME.equals(normalized)) {
        return new RestartPipelinedRegionFailoverStrategy.Factory();
    }
    throw new IllegalConfigurationException("Unknown failover strategy: " + strategyParam);
}
// An unrecognized strategy name must be rejected with IllegalConfigurationException.
@Test
void testLoadFromInvalidConfiguration() {
    final Configuration config = new Configuration();
    config.set(JobManagerOptions.EXECUTION_FAILOVER_STRATEGY, "invalidStrategy");
    assertThatThrownBy(() -> FailoverStrategyFactoryLoader.loadFailoverStrategyFactory(config))
        .isInstanceOf(IllegalConfigurationException.class);
}
protected static Long parseTimestampAsMsSinceEpoch(String timestamp) { if (timestamp.isEmpty()) { throw new IllegalArgumentException("Empty timestamp."); } try { // Try parsing as milliseconds since epoch. Note there is no way to parse a // string in RFC 3339 format here. // Expected IllegalArgumentException if parsing fails; we use that to fall back // to RFC 3339. return Long.parseLong(timestamp); } catch (IllegalArgumentException e1) { // Try parsing as RFC3339 string. DateTime.parseRfc3339 will throw an // IllegalArgumentException if parsing fails, and the caller should handle. return DateTime.parseRfc3339(timestamp).getValue(); } }
// A string that is neither a long nor an RFC 3339 date must fail; the RFC 3339
// fallback parser reports this as a NumberFormatException.
@Test
public void noTimestampAttributeAndInvalidPubsubPublishThrowsError() {
    thrown.expect(NumberFormatException.class);
    PubsubClient.parseTimestampAsMsSinceEpoch("not-a-date");
}
// Returns whether the given path is currently registered in the delete-on-exit set.
public static boolean checkFileExist(String file) {
    return FILES.contains(file);
}
// The backing file is tracked while the HTTP data is live and deregistered on release().
@Test
public void testAfterHttpDataReleaseCheckFileExist() throws IOException {
    String filePath = fu.getFile().getPath();
    assertTrue(DeleteFileOnExitHook.checkFileExist(filePath));
    fu.release();
    assertFalse(DeleteFileOnExitHook.checkFileExist(filePath));
}
/**
 * Converts a raw syslog PRI value to its severity level.
 *
 * Null/empty input and non-numeric input are returned unchanged; a numeric
 * priority is mapped through SyslogPriUtilities.levelFromPriority.
 */
@Override
public Object convert(String value) {
    if (value == null || value.isEmpty()) {
        return value;
    }
    final Integer parsedPriority = Ints.tryParse(value);
    // Non-numeric values pass through untouched rather than failing the pipeline.
    return parsedPriority == null ? value : SyslogPriUtilities.levelFromPriority(parsedPriority);
}
// Null/empty/non-numeric inputs pass through; numeric PRI values map to their
// severity level (PRI mod 8 per syslog convention — e.g. 14 -> 6, 87 -> 7).
@Test
public void testConvert() throws Exception {
    Converter hc = new SyslogPriLevelConverter(new HashMap<String, Object>());
    assertNull(hc.convert(null));
    assertEquals("", hc.convert(""));
    assertEquals("lol no number", hc.convert("lol no number"));
    assertEquals(6, hc.convert("14")); // info
    assertEquals(4, hc.convert("12")); // warning
    assertEquals(7, hc.convert("7")); // debug
    assertEquals(7, hc.convert("87")); // debug
    assertEquals(5, hc.convert("5")); // notice
}
// Adapts a Spliterator into a Traverser that yields the spliterator's items.
@Nonnull
public static <T> Traverser<T> traverseSpliterator(@Nonnull Spliterator<T> spliterator) {
    return new SpliteratorTraverser<>(spliterator);
}
// Traversing a spliterator must yield every element of the source stream.
@Test
public void when_traverseSpliterator_then_seeAllItems() {
    validateTraversal(traverseSpliterator(Stream.of(1, 2).spliterator()));
}
/**
 * Program entry point: runs NUMBER_OF_THREADS concurrent workers against a shared
 * Bank and waits for all of them to finish.
 *
 * Fix: the executor is now shut down after the latch releases. The original left
 * the fixed pool's non-daemon threads alive, which keeps the JVM from exiting
 * once main returns.
 *
 * @throws InterruptedException if the awaiting thread is interrupted
 */
public static void main(String[] args) throws InterruptedException {
    var bank = new Bank(ACCOUNT_NUM, BASE_AMOUNT);
    var latch = new CountDownLatch(NUMBER_OF_THREADS);
    var executorService = Executors.newFixedThreadPool(NUMBER_OF_THREADS);
    try {
        for (int i = 0; i < NUMBER_OF_THREADS; i++) {
            executorService.execute(() -> runner(bank, latch));
        }
        // Each runner counts the latch down when done.
        latch.await();
    } finally {
        // Release the pool threads so the JVM can terminate cleanly.
        executorService.shutdown();
    }
}
// Smoke test: the application entry point must run to completion without throwing.
@Test
void shouldExecuteApplicationWithoutException() {
    assertDoesNotThrow(() -> Main.main(new String[] {}));
}
/**
 * Runs all spec checks and collects warnings as Conditions.
 *
 * Common replication/storage checks always run; the remaining checks differ by
 * cluster mode: KRaft clusters get controller/metadata-version checks, while
 * ZooKeeper clusters get log-format/protocol-version and KRaft-metadata checks.
 *
 * @param useKRaft whether the cluster runs in KRaft mode
 * @return the accumulated warning conditions (possibly empty)
 */
List<Condition> run(boolean useKRaft) {
    List<Condition> warnings = new ArrayList<>();
    checkKafkaReplicationConfig(warnings);
    checkKafkaBrokersStorage(warnings);
    if (useKRaft) {
        // Additional checks done for KRaft clusters
        checkKRaftControllerStorage(warnings);
        checkKRaftControllerCount(warnings);
        checkKafkaMetadataVersion(warnings);
        checkInterBrokerProtocolVersionInKRaft(warnings);
        checkLogMessageFormatVersionInKRaft(warnings);
    } else {
        // Additional checks done for ZooKeeper-based clusters
        checkKafkaLogMessageFormatVersion(warnings);
        checkKafkaInterBrokerProtocolVersion(warnings);
        checkKRaftMetadataStorageConfiguredForZooBasedCLuster(warnings);
    }
    return warnings;
}
// An even number of mixed (broker+controller) KRaft nodes must produce exactly one
// warning recommending an odd controller-quorum size; replication settings are
// pinned so no unrelated warnings are raised.
@Test
public void testKRaftWithEvenNumberOfMixedNodes() {
    Kafka kafka = new KafkaBuilder(KAFKA)
        .editSpec()
            .editKafka()
                .withConfig(Map.of(
                    // We want to avoid unrelated warnings
                    KafkaConfiguration.DEFAULT_REPLICATION_FACTOR, 1,
                    KafkaConfiguration.MIN_INSYNC_REPLICAS, 1
                ))
            .endKafka()
        .endSpec()
        .build();
    KafkaNodePool mixed = new KafkaNodePoolBuilder(MIXED)
        .editSpec()
            .withReplicas(4)
        .endSpec()
        .build();
    KafkaSpecChecker checker = generateChecker(kafka, List.of(mixed), KafkaVersionTestUtils.DEFAULT_KRAFT_VERSION_CHANGE);
    List<Condition> warnings = checker.run(true);
    assertThat(warnings, hasSize(1));
    assertThat(warnings.get(0).getReason(), is("KafkaKRaftControllerNodeCount"));
    assertThat(warnings.get(0).getMessage(), is("Running KRaft controller quorum with an odd number of nodes is recommended."));
}
// Asserts the subject contains exactly the given elements (order checked only via
// the returned Ordered). A null varargs array is treated as containsExactly((Object) null).
// The boolean flags the single-Iterable-argument case — presumably so the failure
// message can suggest containsExactlyElementsIn; confirm against that overload.
@CanIgnoreReturnValue
public final Ordered containsExactly(@Nullable Object @Nullable ... varargs) {
    List<@Nullable Object> expected =
        (varargs == null) ? newArrayList((@Nullable Object) null) : asList(varargs);
    return containsExactlyElementsIn(
        expected, varargs != null && varargs.length == 1 && varargs[0] instanceof Iterable);
}
// containsExactly (without inOrder) must accept a null element in any position.
@Test
public void iterableContainsExactlyWithNullOutOfOrder() {
    assertThat(asList(1, null, 3)).containsExactly(1, 3, (Integer) null);
}
/**
 * Applies the "required" schema keyword to the generated member.
 *
 * Required members get a "(Required)" javadoc note plus @NotNull (JSR-303,
 * jakarta or javax per config) and @Nonnull (JSR-305) on fields when enabled;
 * non-required fields get @Nullable when JSR-305 annotations are enabled.
 * Always returns the passed-in generatable type.
 */
@Override
public JDocCommentable apply(String nodeName, JsonNode node, JsonNode parent, JDocCommentable generatableType, Schema schema) {
    final boolean isField = generatableType instanceof JFieldVar;
    if (!node.asBoolean()) {
        // Not required: only the JSR-305 nullability hint applies.
        if (isField && ruleFactory.getGenerationConfig().isIncludeJsr305Annotations()) {
            ((JFieldVar) generatableType).annotate(Nullable.class);
        }
        return generatableType;
    }
    generatableType.javadoc().append("\n(Required)");
    if (isField) {
        if (ruleFactory.getGenerationConfig().isIncludeJsr303Annotations()) {
            // Pick the validation flavour (jakarta vs javax) from the config.
            final Class<? extends Annotation> notNullClass =
                ruleFactory.getGenerationConfig().isUseJakartaValidation()
                    ? NotNull.class
                    : javax.validation.constraints.NotNull.class;
            ((JFieldVar) generatableType).annotate(notNullClass);
        }
        if (ruleFactory.getGenerationConfig().isIncludeJsr305Annotations()) {
            ((JFieldVar) generatableType).annotate(Nonnull.class);
        }
    }
    return generatableType;
}
// When "required" is false, the rule must leave the javadoc untouched (no "(Required)" text).
@Test
public void applySkipsTextWhenNotRequired() throws JClassAlreadyExistsException {
    JDefinedClass jclass = new JCodeModel()._class(TARGET_CLASS_NAME);
    ObjectMapper mapper = new ObjectMapper();
    BooleanNode descriptionNode = mapper.createObjectNode().booleanNode(false);
    JDocCommentable result = rule.apply("fooBar", descriptionNode, null, jclass, null);
    assertThat(result.javadoc(), sameInstance(jclass.javadoc()));
    assertThat(result.javadoc().size(), is(0));
}
// Convenience overload: parses a config-qualifier string into {@code out} with
// the third flag defaulted to true (see the three-argument overload for its meaning).
public static boolean parse(final String str, ResTable_config out) {
    return parse(str, out, true);
}
// "mcc310-mnc004" must populate the numeric mcc/mnc fields (leading zeros dropped).
@Test
public void parse_mcc_mnc_upperCase() {
    ResTable_config config = new ResTable_config();
    ConfigDescription.parse("mcc310-mnc004", config);
    assertThat(config.mcc).isEqualTo(310);
    assertThat(config.mnc).isEqualTo(4);
}
// Prefix lookup is deliberately unsupported by this namespace context.
@Override
public String getPrefix(String namespaceURI) {
    throw new UnsupportedOperationException("Operation not supported");
}
// getPrefix is documented as unsupported; verify it throws.
@Test
public void testGetPrefix() {
    try {
        camelSpringNamespace.getPrefix(Constants.XML_SCHEMA_NAMESPACE_URI);
        fail("UnsupportedOperationException expected");
    } catch (UnsupportedOperationException e) {
        // Expected.
    }
}
// Exposes the "shenyu.register" configuration properties as a Spring bean.
@Bean
@ConfigurationProperties(prefix = "shenyu.register")
public ShenyuRegisterCenterConfig shenyuRegisterCenterConfig() {
    return new ShenyuRegisterCenterConfig();
}
// The bean method must produce a ShenyuRegisterCenterConfig instance.
@Test
public void testShenyuRegisterCenterConfig() {
    assertEquals(ShenyuRegisterCenterConfig.class, registerCenterConfiguration.shenyuRegisterCenterConfig().getClass());
}
/**
 * Fetches the value for {@code key} at {@code time}, probing each underlying
 * window store in turn and returning the first non-null hit; null if no store has it.
 *
 * @throws NullPointerException if key is null
 * @throws InvalidStateStoreException if an underlying store is no longer available
 *         (e.g. migrated to another instance)
 */
@Override
public V fetch(final K key, final long time) {
    Objects.requireNonNull(key, "key can't be null");
    final List<ReadOnlyWindowStore<K, V>> stores = provider.stores(storeName, windowStoreType);
    for (final ReadOnlyWindowStore<K, V> windowStore : stores) {
        try {
            final V result = windowStore.fetch(key, time);
            if (result != null) {
                return result;
            }
        } catch (final InvalidStateStoreException e) {
            // NOTE(review): the original exception is dropped here (no cause) — the
            // rethrow intentionally replaces the message; confirm whether chaining is wanted.
            throw new InvalidStateStoreException(
                "State store is not available anymore and may have been migrated to another instance; "
                    + "please re-discover its location from the state metadata.");
        }
    }
    return null;
}
// Range fetch over the composite store must return both windowed values, keyed by
// their window start timestamps.
@Test
public void shouldFetchValuesFromWindowStore() {
    underlyingWindowStore.put("my-key", "my-value", 0L);
    underlyingWindowStore.put("my-key", "my-later-value", 10L);
    assertEquals(
        asList(new KeyValue<>(0L, "my-value"), new KeyValue<>(10L, "my-later-value")),
        StreamsTestUtils.toList(windowStore.fetch("my-key", ofEpochMilli(0L), ofEpochMilli(25L)))
    );
}
// Builds the JVM-state section from the platform's live MemoryMXBean;
// the bean-taking overload exists for testability.
@Override
public ProtobufSystemInfo.Section toProtobuf() {
    return toProtobuf(ManagementFactory.getMemoryMXBean());
}
// A memory value of -1 (unavailable) must be omitted from the section rather than
// rendered as a bogus attribute.
@Test
public void should_hide_attributes_without_values() {
    MemoryMXBean memoryBean = mock(MemoryMXBean.class, Mockito.RETURNS_DEEP_STUBS);
    when(memoryBean.getHeapMemoryUsage().getCommitted()).thenReturn(-1L);
    JvmStateSection underTest = new JvmStateSection(PROCESS_NAME);
    ProtobufSystemInfo.Section section = underTest.toProtobuf(memoryBean);
    assertThat(section.getAttributesList())
        .extracting("key")
        .isNotEmpty()
        .doesNotContain("Heap Committed (MB)");
}
/**
 * Registers a partition with the current transaction, if transactional.
 *
 * No-op for non-transactional producers and for partitions already added or
 * pending. Fails fast when called before initTransactions (no producer id) or
 * outside an active transaction, and surfaces any previously recorded error.
 *
 * @throws IllegalStateException if transactional preconditions are not met
 */
public synchronized void maybeAddPartition(TopicPartition topicPartition) {
    maybeFailWithError();
    throwIfPendingState("send");
    if (isTransactional()) {
        if (!hasProducerId()) {
            throw new IllegalStateException("Cannot add partition " + topicPartition +
                " to transaction before completing a call to initTransactions");
        } else if (currentState != State.IN_TRANSACTION) {
            throw new IllegalStateException("Cannot add partition " + topicPartition +
                " to transaction while in state " + currentState);
        } else if (isPartitionAdded(topicPartition) || isPartitionPendingAdd(topicPartition)) {
            return;
        } else {
            log.debug("Begin adding new partition {} to transaction", topicPartition);
            txnPartitionMap.getOrCreate(topicPartition);
            newPartitionsInTransaction.add(topicPartition);
        }
    }
}
// After initTransactions but before beginTransaction, adding a partition must fail.
@Test
public void testNotReadyForSendBeforeBeginTransaction() {
    doInitTransactions();
    assertThrows(IllegalStateException.class, () -> transactionManager.maybeAddPartition(tp0));
}
/**
 * Dispatches a list sub-command read from the wire protocol.
 *
 * The first character of the next protocol line selects the operation
 * (slice/concat/mult/imult/count); anything else falls through to the generic
 * collections-method handler. The resulting protocol response is written and flushed.
 *
 * @throws Py4JException on protocol-level failures
 * @throws IOException on stream failures
 */
@Override
public void execute(String commandName, BufferedReader reader, BufferedWriter writer) throws Py4JException, IOException {
    char subCommand = safeReadLine(reader).charAt(0);
    String returnCommand = null;
    if (subCommand == LIST_SLICE_SUB_COMMAND_NAME) {
        returnCommand = slice_list(reader);
    } else if (subCommand == LIST_CONCAT_SUB_COMMAND_NAME) {
        returnCommand = concat_list(reader);
    } else if (subCommand == LIST_MULT_SUB_COMMAND_NAME) {
        returnCommand = mult_list(reader);
    } else if (subCommand == LIST_IMULT_SUB_COMMAND_NAME) {
        returnCommand = imult_list(reader);
    } else if (subCommand == LIST_COUNT_SUB_COMMAND_NAME) {
        returnCommand = count_list(reader);
    } else {
        // Unknown sub-command chars are treated as generic collection method calls.
        returnCommand = call_collections_method(reader, subCommand);
    }
    logger.finest("Returning command: " + returnCommand);
    writer.write(returnCommand);
    writer.flush();
}
// A failing "max" sub-command must produce the protocol error response "!x\n".
@Test
public void testMaxException() {
    String inputCommand = ListCommand.LIST_MAX_SUB_COMMAND_NAME + "\n" + target2 + "\ne\n";
    try {
        command.execute("l", new BufferedReader(new StringReader(inputCommand)), writer);
        assertEquals("!x\n", sWriter.toString());
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}
/**
 * Lists the contents of an SMB directory.
 *
 * Opens (and always releases) the share for the directory, queries the raw SMB
 * directory entries, skips "." and "..", and maps each entry to a Path with
 * directory/file type and timestamps/size attributes. The listener is notified
 * after each entry is added.
 *
 * @throws BackgroundException wrapping any SMB runtime failure
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    final AttributedList<Path> result = new AttributedList<>();
    try {
        final SMBSession.DiskShareWrapper share = session.openShare(directory);
        final List<FileIdBothDirectoryInformation> info;
        try {
            info = share.get().list(new SMBPathContainerService(session).getKey(directory));
        } finally {
            // Always return the share to the session pool, even on failure.
            session.releaseShare(share);
        }
        for (FileIdBothDirectoryInformation f : info) {
            final String filename = f.getFileName();
            if (filename.equals(".") || filename.equals("..")) {
                if (log.isDebugEnabled()) {
                    log.debug(String.format("Skip %s", f.getFileName()));
                }
                continue;
            }
            final EnumSet<Type> type = EnumSet.noneOf(Type.class);
            long fileAttributes = f.getFileAttributes();
            // check for all relevant file types and add them to the EnumSet
            if ((fileAttributes & FileAttributes.FILE_ATTRIBUTE_DIRECTORY.getValue()) != 0) {
                type.add(Type.directory);
            } else {
                type.add(Type.file);
            }
            final PathAttributes attr = new PathAttributes();
            attr.setAccessedDate(f.getLastAccessTime().toEpochMillis());
            attr.setModificationDate(f.getLastWriteTime().toEpochMillis());
            attr.setCreationDate(f.getCreationTime().toEpochMillis());
            attr.setSize(f.getEndOfFile());
            attr.setDisplayname(f.getFileName());
            result.add(new Path(directory, filename, type, attr));
            listener.chunk(directory, result);
        }
    } catch (SMBRuntimeException e) {
        throw new SMBExceptionMappingService().map("Listing directory {0} failed", e, directory);
    }
    return result;
}
// Listing a freshly created (empty) SMB folder must yield zero entries; the folder
// is cleaned up afterwards.
@Test
public void testListEmptyFolder() throws Exception {
    final Path home = new DefaultHomeFinderService(session).find();
    final Path emptyFolder = new SMBDirectoryFeature(session).mkdir(
        new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final AttributedList<Path> result = new SMBListService(session).list(emptyFolder, new DisabledListProgressListener());
    assertEquals(0, result.size());
    new SMBDeleteFeature(session).delete(Collections.singletonList(emptyFolder), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Converts a CallExpression to a RexNode by trying each conversion rule in order
 * (the rule chain depends on batch vs. streaming mode) and returning the first match.
 *
 * @throws RuntimeException if no rule can convert the call
 */
@Override
public RexNode visit(CallExpression call) {
    boolean isBatchMode = unwrapContext(relBuilder).isBatchMode();
    for (CallExpressionConvertRule rule : getFunctionConvertChain(isBatchMode)) {
        Optional<RexNode> converted = rule.convert(call, newFunctionContext());
        if (converted.isPresent()) {
            return converted.get();
        }
    }
    throw new RuntimeException("Unknown call expression: " + call);
}
// A symbol literal (TimePointUnit) must convert to a SYMBOL-typed RexLiteral
// carrying the matching Calcite TimeUnit.
@Test
void testSymbolLiteral() {
    RexNode rex = converter.visit(valueLiteral(TimePointUnit.MICROSECOND));
    assertThat(((RexLiteral) rex).getValueAs(TimeUnit.class)).isEqualTo(TimeUnit.MICROSECOND);
    assertThat(rex.getType().getSqlTypeName()).isEqualTo(SqlTypeName.SYMBOL);
}
/**
 * Returns the number of elements in the given container.
 *
 * Supports Map, Collection, object arrays, Iterator, Enumeration, and (via
 * reflection) primitive arrays. Iterators and Enumerations are consumed by the
 * count. Fixes vs. the original: null is rejected up front (fail-fast) and the
 * raw types are replaced with wildcards; behavior is otherwise unchanged.
 *
 * @param object the container to measure
 * @return the element count
 * @throws IllegalArgumentException if object is null or of an unsupported type
 */
public static int size(Object object) {
    if (object == null) {
        throw new IllegalArgumentException("Unsupported object type: null");
    }
    if (object instanceof Map) {
        return ((Map<?, ?>) object).size();
    }
    if (object instanceof Collection) {
        return ((Collection<?>) object).size();
    }
    if (object instanceof Object[]) {
        return ((Object[]) object).length;
    }
    if (object instanceof Iterator) {
        int total = 0;
        for (Iterator<?> it = (Iterator<?>) object; it.hasNext(); it.next()) {
            total++;
        }
        return total;
    }
    if (object instanceof Enumeration) {
        int total = 0;
        for (Enumeration<?> en = (Enumeration<?>) object; en.hasMoreElements(); en.nextElement()) {
            total++;
        }
        return total;
    }
    try {
        // Covers primitive arrays (int[], long[], ...), which are not Object[].
        return Array.getLength(object);
    } catch (IllegalArgumentException ex) {
        throw new IllegalArgumentException("Unsupported object type: " + object.getClass().getName());
    }
}
// size(null) must be rejected with IllegalArgumentException.
@Test
void testSize1() {
    assertThrows(IllegalArgumentException.class, () -> {
        CollectionUtils.size(null);
    });
}
// Acquires an outbound entry for the named resource with a weight of 1 and no args.
// @throws BlockException if the resource is blocked by flow/degrade rules
public static Entry entry(String name) throws BlockException {
    return Env.sph.entry(name, EntryType.OUT, 1, OBJECTS0);
}
// Method-based entry with an explicit count: the resource name is derived from the
// method signature, the type defaults to OUT, and exit must use the same count.
@Test
public void testMethodEntryCount() throws BlockException, NoSuchMethodException, SecurityException {
    Method method = SphUTest.class.getMethod("testMethodEntryNormal");
    Entry e = SphU.entry(method, 2);
    assertNotNull(e);
    assertTrue(StringUtil
        .equalsIgnoreCase(e.resourceWrapper.getName(), "com.alibaba.csp.sentinel.SphUTest:testMethodEntryNormal()"));
    assertEquals(e.resourceWrapper.getEntryType(), EntryType.OUT);
    e.exit(2);
}
/**
 * Creates and registers a reserved logical segment backed by the shared physical store.
 *
 * Reserved segments must use negative ids (positive ids are for regular segments),
 * and each reserved id may only be created once.
 *
 * @throws IllegalArgumentException if segmentId is non-negative
 * @throws IllegalStateException if a reserved segment with this id already exists
 */
LogicalKeyValueSegment createReservedSegment(final long segmentId, final String segmentName) {
    if (segmentId >= 0) {
        throw new IllegalArgumentException("segmentId for a reserved segment must be negative");
    }
    final LogicalKeyValueSegment segment = new LogicalKeyValueSegment(segmentId, segmentName, physicalStore);
    final LogicalKeyValueSegment previous = reservedSegments.put(segmentId, segment);
    if (previous != null) {
        throw new IllegalStateException("LogicalKeyValueSegment already exists.");
    }
    return segment;
}
// segments(from, to) must return only live regular segments in the range, in id
// order, and must never include reserved segments.
@Test
public void shouldGetSegmentsWithinTimeRange() {
    // presence of reserved segment changes nothing (reserved segments are not returned from segments())
    segments.createReservedSegment(-1, "reserved");
    final long streamTime = updateStreamTimeAndCreateSegment(4);
    segments.getOrCreateSegmentIfLive(0, context, streamTime);
    segments.getOrCreateSegmentIfLive(2, context, streamTime);
    segments.getOrCreateSegmentIfLive(1, context, streamTime); // intentionally out of order for test
    segments.getOrCreateSegmentIfLive(3, context, streamTime);
    segments.getOrCreateSegmentIfLive(4, context, streamTime);
    final List<LogicalKeyValueSegment> segments = this.segments.segments(0, 2 * SEGMENT_INTERVAL, true);
    assertEquals(3, segments.size());
    assertEquals(0, segments.get(0).id());
    assertEquals(1, segments.get(1).id());
    assertEquals(2, segments.get(2).id());
}
// REST endpoint: returns the info value stored under {key} for user {userId}.
// 404s (FlowableObjectNotFoundException) when the user does not exist or has no
// value for the key.
@ApiOperation(value = "Get a user’s info", tags = { "Users" })
@ApiResponses(value = {
        @ApiResponse(code = 200, message = "Indicates the user was found and the user has info for the given key."),
        @ApiResponse(code = 404, message = "Indicates the requested user was not found or the user does ot have info for the given key. Status description contains additional information about the error.")
})
@GetMapping(value = "/identity/users/{userId}/info/{key}", produces = "application/json")
public UserInfoResponse getUserInfo(@ApiParam(name = "userId") @PathVariable("userId") String userId, @ApiParam(name = "key") @PathVariable("key") String key) {
    User user = getUserFromRequest(userId);
    String existingValue = identityService.getUserInfo(user.getId(), key);
    if (existingValue == null) {
        throw new FlowableObjectNotFoundException("User info with key '" + key + "' does not exists for user '" + user.getId() + "'.", null);
    }
    return restResponseFactory.createUserInfoResponse(key, existingValue, user.getId());
}
// End-to-end check of the user-info GET endpoint: the stored key/value and the
// resource URL must round-trip; the created user is always cleaned up.
@Test
public void testGetUserInfo() throws Exception {
    User savedUser = null;
    try {
        User newUser = identityService.newUser("testuser");
        newUser.setFirstName("Fred");
        newUser.setLastName("McDonald");
        newUser.setEmail("no-reply@flowable.org");
        identityService.saveUser(newUser);
        savedUser = newUser;
        identityService.setUserInfo(newUser.getId(), "key1", "Value 1");
        CloseableHttpResponse response = executeRequest(
            new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_USER_INFO, newUser.getId(), "key1")),
            HttpStatus.SC_OK);
        JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
        closeResponse(response);
        assertThatJson(responseNode)
            .isEqualTo("{"
                + "key: 'key1',"
                + "value: 'Value 1',"
                + "url: '" + SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_USER_INFO, newUser.getId(), "key1") + "'"
                + "}");
    } finally {
        // Delete user after test passes or fails
        if (savedUser != null) {
            identityService.deleteUser(savedUser.getId());
        }
    }
}
// Convenience overload: resolves the logger by the class's fully qualified name.
public final Logger getLogger(final Class<?> clazz) {
    return getLogger(clazz.getName());
}
// A null logger name must be rejected with IllegalArgumentException.
@Test
public void testNull() {
    try {
        lc.getLogger((String) null);
        fail("null should cause an exception");
    } catch (IllegalArgumentException e) {
        // expected
    }
}
/**
 * Hash code over host, port, and protocol, consistent with equals.
 * Uses the conventional 31-multiplier accumulation; null host/protocol contribute 0.
 */
@Override
public int hashCode() {
    int result = 31 + (host == null ? 0 : host.hashCode());
    result = 31 * result + port;
    result = 31 * result + (protocol == null ? 0 : protocol.hashCode());
    return result;
}
// Configs differing only in port must hash differently; equal field values must
// hash equally (hashCode covers host, port, and protocol).
@Test
public void testHashCode() {
    ServerConfig config1 = new ServerConfig();
    ServerConfig config2 = new ServerConfig();
    config1.setHost("127.0.0.1").setPort(1234).setProtocol("xxx");
    config2.setHost("127.0.0.1").setPort(1235).setProtocol("xxx");
    Assert.assertFalse(config1.hashCode() == config2.hashCode());
    config2.setPort(1234);
    Assert.assertTrue(config1.hashCode() == config2.hashCode());
}
// Converts the table to the target type; delegates with the transposed flag off.
@Override
public <T> T convert(DataTable dataTable, Type type) {
    return convert(dataTable, type, false);
}
// A table with a row-key column and named value columns must convert to a nested
// Map<String, Map<String, Double>> (airport -> {lat, lon}).
@Test
void convert_to_map_of_primitive_to_map_of_primitive_to_primitive() {
    DataTable table = parse("",
        "|      | lat       | lon         |",
        "| KMSY | 29.993333 | -90.258056  |",
        "| KSFO | 37.618889 | -122.375    |",
        "| KSEA | 47.448889 | -122.309444 |",
        "| KJFK | 40.639722 | -73.778889  |");
    Map<String, Map<String, Double>> expected = new HashMap<String, Map<String, Double>>() {
        {
            put("KMSY", new HashMap<String, Double>() {
                {
                    put("lat", 29.993333);
                    put("lon", -90.258056);
                }
            });
            put("KSFO", new HashMap<String, Double>() {
                {
                    put("lat", 37.618889);
                    put("lon", -122.375);
                }
            });
            put("KSEA", new HashMap<String, Double>() {
                {
                    put("lat", 47.448889);
                    put("lon", -122.309444);
                }
            });
            put("KJFK", new HashMap<String, Double>() {
                {
                    put("lat", 40.639722);
                    put("lon", -73.778889);
                }
            });
        }
    };
    assertEquals(expected, converter.convert(table, MAP_OF_STRING_TO_MAP_OF_STRING_DOUBLE));
}
// Deletes a comment after verifying it exists (findComment throws otherwise) and
// that the requesting member is its writer; order matters — validation must
// precede deletion.
public void deleteCommentById(final Long memberId, final Long commentId) {
    Comment comment = findComment(commentId);
    comment.validateWriterId(memberId);
    commentRepository.deleteById(commentId);
}
// Deleting a non-existent comment must fail with CommentNotFoundException.
@Test
void 댓글이_없다면_댓글을_삭제하지_못한다() {
    // when & then
    assertThatThrownBy(() -> commentService.deleteCommentById(1L, 1L))
        .isInstanceOf(CommentNotFoundException.class);
}
/**
 * Fetches windowed rows for a key from the materialized window store, filtered to
 * windows whose start AND end fall within the given bounds.
 *
 * Builds an IQv2 WindowKeyQuery over the derived start-time range (lower/upper
 * bounds computed from both bound ranges), optionally pinned to a Position, and
 * post-filters each returned window against the exact bounds since the store query
 * only constrains window start. Results carry the store's Position for consistency
 * tokens.
 *
 * @throws MaterializationException on any query failure (known materialization
 *         errors are rethrown as-is)
 */
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStartBounds,
    final Range<Instant> windowEndBounds,
    final Optional<Position> position
) {
    try {
        final Instant lower = calculateLowerBound(windowStartBounds, windowEndBounds);
        final Instant upper = calculateUpperBound(windowStartBounds, windowEndBounds);
        final WindowKeyQuery<GenericKey, ValueAndTimestamp<GenericRow>> query =
            WindowKeyQuery.withKeyAndWindowStartRange(key, lower, upper);
        StateQueryRequest<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> request =
            inStore(stateStore.getStateStoreName()).withQuery(query);
        if (position.isPresent()) {
            request = request.withPositionBound(PositionBound.at(position.get()));
        }
        final KafkaStreams streams = stateStore.getKafkaStreams();
        final StateQueryResult<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> result =
            streams.query(request);
        final QueryResult<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> queryResult =
            result.getPartitionResults().get(partition);
        if (queryResult.isFailure()) {
            throw failedQueryException(queryResult);
        }
        if (queryResult.getResult() == null) {
            // No data for this key/range: empty iterator, but still report position.
            return KsMaterializedQueryResult.rowIteratorWithPosition(
                Collections.emptyIterator(), queryResult.getPosition());
        }
        // Iterator must be closed; results are materialized into an immutable list first.
        try (WindowStoreIterator<ValueAndTimestamp<GenericRow>> it = queryResult.getResult()) {
            final Builder<WindowedRow> builder = ImmutableList.builder();
            while (it.hasNext()) {
                final KeyValue<Long, ValueAndTimestamp<GenericRow>> next = it.next();
                final Instant windowStart = Instant.ofEpochMilli(next.key);
                if (!windowStartBounds.contains(windowStart)) {
                    continue;
                }
                final Instant windowEnd = windowStart.plus(windowSize);
                if (!windowEndBounds.contains(windowEnd)) {
                    continue;
                }
                final TimeWindow window =
                    new TimeWindow(windowStart.toEpochMilli(), windowEnd.toEpochMilli());
                final WindowedRow row = WindowedRow.of(
                    stateStore.schema(),
                    new Windowed<>(key, window),
                    next.value.value(),
                    next.value.timestamp()
                );
                builder.add(row);
            }
            return KsMaterializedQueryResult.rowIteratorWithPosition(
                builder.build().iterator(), queryResult.getPosition());
        }
    } catch (final NotUpToBoundException | MaterializationException e) {
        throw e;
    } catch (final Exception e) {
        throw new MaterializationException("Failed to get value from materialized table", e);
    }
}
// Rows must come back in the same (time) order the store iterator produced them.
@Test
@SuppressWarnings("unchecked")
public void shouldMaintainResultOrder() {
    // Given: three sequential window entries from the mocked store iterator.
    when(fetchIterator.hasNext())
        .thenReturn(true)
        .thenReturn(true)
        .thenReturn(true)
        .thenReturn(false);
    final Instant start = WINDOW_START_BOUNDS.lowerEndpoint();
    final StateQueryResult<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> partitionResult = new StateQueryResult<>();
    final QueryResult<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> result = QueryResult.forResult(fetchIterator);
    result.setPosition(POSITION);
    partitionResult.addResult(PARTITION, result);
    when(kafkaStreams.query(any(StateQueryRequest.class))).thenReturn(partitionResult);
    when(fetchIterator.next())
        .thenReturn(new KeyValue<>(start.toEpochMilli(), VALUE_1))
        .thenReturn(new KeyValue<>(start.plusMillis(1).toEpochMilli(), VALUE_2))
        .thenReturn(new KeyValue<>(start.plusMillis(2).toEpochMilli(), VALUE_3))
        .thenThrow(new AssertionError());
    // When:
    final Iterator<WindowedRow> rowIterator =
        table.get(A_KEY, PARTITION, Range.all(), Range.all()).rowIterator;
    // Then: rows preserved in iterator order.
    assertThat(rowIterator.hasNext(), is(true));
    final List<WindowedRow> resultList = Lists.newArrayList(rowIterator);
    assertThat(resultList, contains(
        WindowedRow.of(
            SCHEMA,
            windowedKey(start),
            VALUE_1.value(),
            VALUE_1.timestamp()
        ),
        WindowedRow.of(
            SCHEMA,
            windowedKey(start.plusMillis(1)),
            VALUE_2.value(),
            VALUE_2.timestamp()
        ),
        WindowedRow.of(
            SCHEMA,
            windowedKey(start.plusMillis(2)),
            VALUE_3.value(),
            VALUE_3.timestamp()
        )
    ));
}
/**
 * Validates every declared field of the config object (including private ones)
 * against the given validation-annotation class, then validates the config object
 * itself at class level.
 *
 * @throws RuntimeException wrapping an IllegalAccessException if reflection fails
 */
public static void validateConfig(Object config, Class annotationClass) {
    for (Field field : config.getClass().getDeclaredFields()) {
        field.setAccessible(true);
        final Object fieldValue;
        try {
            fieldValue = field.get(config);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        }
        validateField(field, fieldValue, annotationClass);
    }
    validateClass(config, annotationClass);
}
// A customString value violating its validation annotation must fail with an
// IllegalArgumentException whose message names the offending field.
@Test
public void testCustomString() {
    TestConfig testConfig = createGoodConfig();
    testConfig.customString = "http://google.com";
    Exception e = expectThrows(IllegalArgumentException.class, () -> ConfigValidation.validateConfig(testConfig));
    assertTrue(e.getMessage().contains("customString"));
}
/**
 * Serializes an instruction list to a GPX 1.1 document string.
 *
 * Optionally emits waypoints (start/via/finish), a route (rte/rtept blocks, each
 * built from an instruction and its successor), and a track (trk/trkpt points
 * with optional elevation and per-point timestamps offset from startTimeMillis).
 * Coordinates are formatted with 1–6 fraction digits using ROOT locale so output
 * is locale-independent. The gh: namespace carries GraphHopper-specific extensions.
 */
public static String createGPX(InstructionList instructions, String trackName, long startTimeMillis,
        boolean includeElevation, boolean withRoute, boolean withTrack, boolean withWayPoints,
        String version, Translation tr) {
    DateFormat formatter = Helper.createFormatter();
    DecimalFormat decimalFormat = new DecimalFormat("#", DecimalFormatSymbols.getInstance(Locale.ROOT));
    decimalFormat.setMinimumFractionDigits(1);
    decimalFormat.setMaximumFractionDigits(6);
    decimalFormat.setMinimumIntegerDigits(1);
    String header = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\" ?>"
        + "<gpx xmlns=\"http://www.topografix.com/GPX/1/1\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
        + " creator=\"Graphhopper version " + version + "\" version=\"1.1\""
        // This xmlns:gh acts only as ID, no valid URL necessary.
        // Use a separate namespace for custom extensions to make basecamp happy.
        + " xmlns:gh=\"https://graphhopper.com/public/schema/gpx/1.1\">"
        + "\n<metadata>"
        + "<copyright author=\"OpenStreetMap contributors\"/>"
        + "<link href=\"http://graphhopper.com\">"
        + "<text>GraphHopper GPX</text>"
        + "</link>"
        + "<time>" + formatter.format(startTimeMillis) + "</time>"
        + "</metadata>";
    StringBuilder gpxOutput = new StringBuilder(header);
    if (!instructions.isEmpty()) {
        if (withWayPoints) {
            createWayPointBlock(gpxOutput, instructions.get(0), decimalFormat, tr); // Start
            for (Instruction currInstr : instructions) {
                if ((currInstr.getSign() == Instruction.REACHED_VIA) // Via
                        || (currInstr.getSign() == Instruction.FINISH)) // End
                {
                    createWayPointBlock(gpxOutput, currInstr, decimalFormat, tr);
                }
            }
        }
        if (withRoute) {
            gpxOutput.append("\n<rte>");
            Instruction nextInstr = null;
            // Each rtept pairs an instruction with its successor (lag by one).
            for (Instruction currInstr : instructions) {
                if (null != nextInstr)
                    createRteptBlock(gpxOutput, nextInstr, currInstr, decimalFormat, tr);
                nextInstr = currInstr;
            }
            createRteptBlock(gpxOutput, nextInstr, null, decimalFormat, tr);
            gpxOutput.append("\n</rte>");
        }
    }
    if (withTrack) {
        gpxOutput.append("\n<trk><name>").append(trackName).append("</name>");
        gpxOutput.append("<trkseg>");
        for (GPXEntry entry : createGPXList(instructions)) {
            gpxOutput.append("\n<trkpt lat=\"").append(decimalFormat.format(entry.getPoint().getLat()));
            gpxOutput.append("\" lon=\"").append(decimalFormat.format(entry.getPoint().getLon())).append("\">");
            if (includeElevation)
                gpxOutput.append("<ele>").append(Helper.round2(((GHPoint3D) entry.getPoint()).getEle())).append("</ele>");
            if (entry.getTime() != null)
                gpxOutput.append("<time>").append(formatter.format(startTimeMillis + entry.getTime())).append("</time>");
            gpxOutput.append("</trkpt>");
        }
        gpxOutput.append("\n</trkseg>");
        gpxOutput.append("\n</trk>");
    }
    // we could now use 'wpt' for via points
    gpxOutput.append("\n</gpx>");
    return gpxOutput.toString();
}
// A roundabout instruction's exit number must appear in the route output as a
// gh:exit_number extension, and the document must still be valid GPX.
@Test
public void testCreateGPXIncludesRoundaboutExitNumber() {
    InstructionList instructions = new InstructionList(trMap.getWithFallBack(Locale.US));
    PointList pl = new PointList();
    pl.add(52.555423473315, 13.43890086052345);
    pl.add(52.555550691982, 13.43946393816465);
    pl.add(52.555619423589, 13.43886994061328);
    RoundaboutInstruction instr = new RoundaboutInstruction(Instruction.USE_ROUNDABOUT, "streetname", pl)
        .setRadian(2.058006514284998d)
        .setExitNumber(3)
        .setExited();
    instructions.add(instr);
    instructions.add(new FinishInstruction(52.555619423589, 13.43886994061328, 0));
    String gpxStr = GpxConversions.createGPX(instructions, "test", 0, true, true, false, false, Constants.VERSION, trMap.getWithFallBack(Locale.US));
    assertTrue(gpxStr.contains("<gh:exit_number>3</gh:exit_number>"), gpxStr);
    verifyGPX(gpxStr);
}
/**
 * Computes the group assignment: empty groups get an empty assignment; otherwise
 * the homogeneous or heterogeneous strategy is chosen based on the group's
 * subscription type.
 *
 * @throws PartitionAssignorException if assignment fails
 */
@Override
public GroupAssignment assign(
    GroupSpec groupSpec,
    SubscribedTopicDescriber subscribedTopicDescriber
) throws PartitionAssignorException {
    if (groupSpec.memberIds().isEmpty()) {
        return new GroupAssignment(Collections.emptyMap());
    }
    return groupSpec.subscriptionType() == SubscriptionType.HOMOGENEOUS
        ? assignHomogeneousGroup(groupSpec, subscribedTopicDescriber)
        : assignHeterogeneousGroup(groupSpec, subscribedTopicDescriber);
}
// Adding a third member to a two-member, two-topic (3 partitions each) group must
// rebalance so each member holds one partition per topic, stealing partition 1 of
// each topic from member A and giving partition 2 of each topic to the new member.
@Test
public void testReassignmentWhenOneMemberAddedAfterInitialAssignmentWithTwoMembersTwoTopics() {
    Map<Uuid, TopicMetadata> topicMetadata = new HashMap<>();
    topicMetadata.put(topic1Uuid, new TopicMetadata(
        topic1Uuid,
        topic1Name,
        3,
        Collections.emptyMap()
    ));
    topicMetadata.put(topic2Uuid, new TopicMetadata(
        topic2Uuid,
        topic2Name,
        3,
        Collections.emptyMap()
    ));
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new TreeMap<>();
    members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        mkSet(topic1Uuid, topic2Uuid),
        new Assignment(mkAssignment(
            mkTopicAssignment(topic1Uuid, 0, 1),
            mkTopicAssignment(topic2Uuid, 0, 1)
        ))
    ));
    members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        mkSet(topic1Uuid, topic2Uuid),
        new Assignment(mkAssignment(
            mkTopicAssignment(topic1Uuid, 2),
            mkTopicAssignment(topic2Uuid, 2)
        ))
    ));
    // Add a new Member to trigger a re-assignment
    members.put(memberC, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        mkSet(topic1Uuid, topic2Uuid),
        Assignment.EMPTY
    ));
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        invertedTargetAssignment(members)
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(topicMetadata);
    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    Map<String, Map<Uuid, Set<Integer>>> expectedAssignment = new HashMap<>();
    expectedAssignment.put(memberA, mkAssignment(
        mkTopicAssignment(topic1Uuid, 0),
        mkTopicAssignment(topic2Uuid, 0)
    ));
    expectedAssignment.put(memberB, mkAssignment(
        mkTopicAssignment(topic1Uuid, 1),
        mkTopicAssignment(topic2Uuid, 1)
    ));
    expectedAssignment.put(memberC, mkAssignment(
        mkTopicAssignment(topic1Uuid, 2),
        mkTopicAssignment(topic2Uuid, 2)
    ));
    assertAssignment(expectedAssignment, computedAssignment);
}
// Submits the command to the backing executor only once started; commands received
// before start are silently dropped by design (verified by the unit tests).
// NOTE(review): submit() returns a Future whose exceptions are never inspected —
// confirm failures surface elsewhere or consider execute() instead.
@Override
public void execute(Runnable command) {
    if (started) {
        executorService.submit(command);
    }
}
// Before start(), execute() must drop the runnable without touching the backing executor.
@Test
void ifNotStartedJobsAreNotAccepted() {
    VirtualThreadJobRunrExecutor jobRunrExecutor = new VirtualThreadJobRunrExecutor(8, executorService);
    jobRunrExecutor.execute(() -> System.out.println("A Runnable"));
    verifyNoInteractions(executorService);
}
/**
 * Returns the set of sources that contain the given column.
 *
 * @param source optional source qualifier; when present only that source is checked.
 * @param target the column to look for.
 * @return the matching source names; empty if the column is found nowhere
 *         (or the qualified source is unknown).
 */
public Set<SourceName> sourcesWithField(
    final Optional<SourceName> source,
    final ColumnName target
) {
  if (source.isPresent()) {
    // Qualified lookup: only the named source can match.
    final SourceName sourceName = source.get();
    final LogicalSchema sourceSchema = sourceSchemas.get(sourceName);
    if (sourceSchema == null || !sourceSchema.findColumn(target).isPresent()) {
      return ImmutableSet.of();
    }
    return ImmutableSet.of(sourceName);
  }

  // Unqualified lookup: scan every known source for the column.
  return sourceSchemas.entrySet().stream()
      .filter(e -> e.getValue().findColumn(target).isPresent())
      .map(Entry::getKey)
      .collect(Collectors.toSet());
}
@Test
public void shouldFindUnqualifiedCommonField() {
  // When: looking up a column shared by both sources without a qualifier.
  final Set<SourceName> result =
      sourceSchemas.sourcesWithField(Optional.empty(), COMMON_VALUE_FIELD_NAME);

  // Then: both aliases are reported.
  assertThat(result, containsInAnyOrder(ALIAS_1, ALIAS_2));
}
/**
 * Sets the exposure event name.
 *
 * @param event the new event name
 */
public void setEvent(String event) { this.event = event; }
@Test
public void setEvent() {
    // Construct with one event name, then overwrite it through the setter.
    final SAExposureData data = new SAExposureData("ExposeEvent");
    data.setEvent("SetExposeEvent");
    Assert.assertEquals("SetExposeEvent", data.getEvent());
}
/**
 * Records one successful application kill, folding the call duration into
 * both the success counter/total and the kill-latency metric.
 *
 * @param duration time taken by the kill operation
 */
public void succeededAppsKilled(long duration) { totalSucceededAppsKilled.add(duration); killApplicationLatency.add(duration); }
@Test
public void testSucceededAppsKilled() {
    final long before = metrics.getNumSucceededAppsKilled();

    goodSubCluster.forceKillApplication(100);
    Assert.assertEquals(before + 1, metrics.getNumSucceededAppsKilled());
    Assert.assertEquals(100, metrics.getLatencySucceededAppsKilled(), 0);

    goodSubCluster.forceKillApplication(200);
    Assert.assertEquals(before + 2, metrics.getNumSucceededAppsKilled());
    // The latency metric reports the running average: (100 + 200) / 2 = 150.
    Assert.assertEquals(150, metrics.getLatencySucceededAppsKilled(), 0);
}
/**
 * Synchronous variant: blocks on the async counterpart and returns its result.
 *
 * @param keys        map keys whose entries should receive the expiration settings
 * @param ttl         time-to-live to apply
 * @param maxIdleTime max idle time to apply
 * @return number of entries updated, as reported by the async call
 */
public int expireEntriesIfNotSet(Set<K> keys, Duration ttl, Duration maxIdleTime) { return get(expireEntriesIfNotSetAsync(keys, ttl, maxIdleTime)); }
@Test
public void testExpireEntriesIfNotSet() {
    RMapCache<String, String> map = redisson.getMapCache("map");
    map.put("key1", "value");
    map.put("key2", "value");

    // TTL of 0 means "no TTL"; only the 20s max-idle should take effect.
    Set<String> keys = new HashSet<>(Arrays.asList("key1", "key2"));
    map.expireEntriesIfNotSet(keys, Duration.ofMillis(0), Duration.ofMillis(20000));

    assertThat(map.remainTimeToLive("key1")).isBetween(19800L, 20000L);
}
Plugin create(Options.Plugin plugin) {
    final String pluginString = plugin.pluginString();
    try {
        return instantiate(pluginString, plugin.pluginClass(), plugin.argument());
    } catch (URISyntaxException | IOException e) {
        // Surface construction failures as the framework's own exception type.
        throw new CucumberException(e);
    }
}
@Test
void instantiates_usage_plugin_with_file_arg() {
    // Point the usage formatter at a writable file inside the temp dir.
    final String pluginSpec = "usage:" + tmp.resolve("out.txt").toAbsolutePath();

    plugin = fc.create(parse(pluginSpec));

    assertThat(plugin.getClass(), is(equalTo(UsageFormatter.class)));
}
/**
 * Returns a new bounding box grown by the given number of meters on every side,
 * clamped to the Mercator latitude limits and [-180, 180] longitude.
 *
 * @param meters distance to extend by; 0 returns this box unchanged.
 * @return the extended bounding box.
 * @throws IllegalArgumentException if {@code meters} is negative.
 */
public BoundingBox extendMeters(int meters) {
    if (meters < 0) {
        throw new IllegalArgumentException("BoundingBox extend operation does not accept negative values");
    }
    if (meters == 0) {
        return this;
    }

    double latExpansion = LatLongUtils.latitudeDistance(meters);
    // Longitude degrees per meter grow with latitude, so size the expansion at the
    // latitude extreme to guarantee the extension is sufficient everywhere in the box.
    double lonExpansion = LatLongUtils.longitudeDistance(meters,
            Math.max(Math.abs(minLatitude), Math.abs(maxLatitude)));

    double minLat = Math.max(MercatorProjection.LATITUDE_MIN, this.minLatitude - latExpansion);
    double maxLat = Math.min(MercatorProjection.LATITUDE_MAX, this.maxLatitude + latExpansion);
    double minLon = Math.max(-180, this.minLongitude - lonExpansion);
    double maxLon = Math.min(180, this.maxLongitude + lonExpansion);

    return new BoundingBox(minLat, minLon, maxLat, maxLon);
}
@Test
public void extendMetersTest() {
    BoundingBox box = new BoundingBox(MIN_LATITUDE, MIN_LONGITUDE, MAX_LATITUDE, MAX_LONGITUDE);
    BoundingBox shiftedMin = new BoundingBox(MIN_LATITUDE - 1, MIN_LONGITUDE - 1, MAX_LATITUDE, MAX_LONGITUDE);
    BoundingBox shiftedMax = new BoundingBox(MIN_LATITUDE, MIN_LONGITUDE, MAX_LATITUDE + 1, MAX_LONGITUDE + 1);

    // Extending by zero meters must yield an equal bounding box.
    Assert.assertEquals(box, box.extendMeters(0));
    Assert.assertEquals(shiftedMin, shiftedMin.extendMeters(0));
    Assert.assertEquals(shiftedMax, shiftedMax.extendMeters(0));

    // A positive extension must still contain all four original corners.
    BoundingBox extended = box.extendMeters(20);
    Assert.assertTrue(extended.contains(new LatLong(MIN_LATITUDE, MAX_LONGITUDE)));
    Assert.assertTrue(extended.contains(new LatLong(MAX_LATITUDE, MAX_LONGITUDE)));
    Assert.assertTrue(extended.contains(new LatLong(MAX_LATITUDE, MIN_LONGITUDE)));
    Assert.assertTrue(extended.contains(new LatLong(MIN_LATITUDE, MIN_LONGITUDE)));
}
public static ParsedCommand parse( // CHECKSTYLE_RULES.ON: CyclomaticComplexity final String sql, final Map<String, String> variables) { validateSupportedStatementType(sql); final String substituted; try { substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables); } catch (ParseFailedException e) { throw new MigrationException(String.format( "Failed to parse the statement. Statement: %s. Reason: %s", sql, e.getMessage())); } final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted) .get(0).getStatement(); final boolean isStatement = StatementType.get(statementContext.statement().getClass()) == StatementType.STATEMENT; return new ParsedCommand(substituted, isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY) .buildStatement(statementContext))); }
@Test
public void shouldParseSetStatementsWithVariables() {
  // When: both the property name and value come from variables.
  List<CommandParser.ParsedCommand> commands = parse(
      "SET '${name}'='${value}';",
      ImmutableMap.of("name", "foo.property", "value", "bar"));

  // Then:
  assertThat(commands.size(), is(1));
  assertThat(commands.get(0).getStatement().isPresent(), is(true));
  assertThat(commands.get(0).getStatement().get(), instanceOf(SetProperty.class));
  assertThat(commands.get(0).getCommand(), is("SET 'foo.property'='bar';"));
  final SetProperty setProperty = (SetProperty) commands.get(0).getStatement().get();
  assertThat(setProperty.getPropertyName(), is("foo.property"));
  assertThat(setProperty.getPropertyValue(), is("bar"));
}
/**
 * Delegates instance lookup for the named service to the Polaris service discovery.
 *
 * @param service the service name to look up
 * @return the instances reported by the underlying discovery client
 */
@Override public List<ServiceInstance> getInstances(String service) { return polarisServiceDiscovery.getInstances(service); }
@Test
public void testGetInstances() {
    // Stub discovery to return a single instance for any service name.
    when(polarisServiceDiscovery.getInstances(anyString()))
            .thenReturn(singletonList(mock(PolarisServiceInstance.class)));

    assertThat(client.getInstances(SERVICE_PROVIDER)).isNotEmpty();
}