focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Publishes this server's current status to the storage provider, re-evaluates whether
 * this server is the master, and records the heartbeat of the announced status.
 */
private void announceBackgroundJobServer() {
    final BackgroundJobServerStatus status = backgroundJobServer.getServerStatus();
    storageProvider.announceBackgroundJobServer(status);
    determineIfCurrentBackgroundJobServerIsMaster();
    // Remember when we last signalled liveness so the keep-alive cycle can build on it.
    lastSignalAlive = status.getLastHeartbeat();
}
// Verifies zookeeping on a non-master server: a fake master is announced first, then this
// server starts as a slave; once the fake master stops heart-beating it is timed out,
// removed from storage, and this server promotes itself to master.
@Test
void otherServersDoZookeepingAndBecomeMasterIfMasterCrashes() {
    final BackgroundJobServerStatus master = anotherServer();
    storageProvider.announceBackgroundJobServer(master);
    backgroundJobServer.start();
    // Initially the pre-announced server holds mastership, so this one must not.
    await().atMost(TWO_SECONDS)
            .untilAsserted(() -> assertThat(backgroundJobServer.isMaster()).isFalse());
    // The crashed master eventually times out and is removed, leaving only this server.
    await()
            .atLeast(1, TimeUnit.SECONDS)
            .atMost(8, TimeUnit.SECONDS)
            .untilAsserted(() -> assertThat(storageProvider.getBackgroundJobServers()).hasSize(1));
    // This server then takes over as master.
    await().atMost(FIVE_SECONDS)
            .untilAsserted(() -> assertThat(backgroundJobServer.isMaster()).isTrue());
    verify(storageProvider, times(1)).removeTimedOutBackgroundJobServers(any());
}
/**
 * A database is complete only when it has at least one rule AND at least one storage unit.
 *
 * @return true when both rules and storage units are present
 */
public boolean isComplete() {
    final boolean hasRules = !ruleMetaData.getRules().isEmpty();
    // Short-circuits: storage units are only inspected when rules exist.
    return hasRules && !resourceMetaData.getStorageUnits().isEmpty();
}
// A database that has a data source but an empty rule set must not be considered complete.
@Test
void assertIsNotCompleteWithoutRule() {
    ResourceMetaData resourceMetaData = new ResourceMetaData(Collections.singletonMap("ds", new MockedDataSource()));
    RuleMetaData ruleMetaData = new RuleMetaData(Collections.emptyList());
    assertFalse(new ShardingSphereDatabase("foo_db", mock(DatabaseType.class), resourceMetaData, ruleMetaData, Collections.emptyMap()).isComplete());
}
/**
 * Handles the eth_uninstallFilter operation: sends the request for the configured/requested
 * filter id and, when no error occurred, places the 'uninstalled' flag on the message body.
 *
 * @param message the Camel message carrying the filter id header
 * @throws IOException if the RPC call fails at the transport level
 */
@InvokeOnHeader(Web3jConstants.ETH_UNINSTALL_FILTER)
void ethUninstallFilter(Message message) throws IOException {
    // Header value wins; the endpoint configuration supplies the fallback filter id.
    BigInteger filterId = message.getHeader(Web3jConstants.FILTER_ID, configuration::getFilterId, BigInteger.class);
    Request<?, EthUninstallFilter> request = web3j.ethUninstallFilter(filterId);
    setRequestId(message, request);
    EthUninstallFilter response = request.send();
    // checkForError populates the message with failure details and returns true on error.
    if (!checkForError(message, response)) {
        message.setBody(response.isUninstalled());
    }
}
// A successful eth_uninstallFilter response must surface the 'uninstalled' flag as the
// exchange body.
@Test
public void ethUninstallFilterTest() throws Exception {
    EthUninstallFilter response = Mockito.mock(EthUninstallFilter.class);
    Mockito.when(mockWeb3j.ethUninstallFilter(any())).thenReturn(request);
    Mockito.when(request.send()).thenReturn(response);
    Mockito.when(response.isUninstalled()).thenReturn(Boolean.TRUE);
    Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.ETH_UNINSTALL_FILTER);
    template.send(exchange);
    Boolean body = exchange.getIn().getBody(Boolean.class);
    assertTrue(body);
}
/**
 * Looks up a single port of a device.
 *
 * @param deviceId   device whose ports are queried
 * @param portNumber port to look up
 * @return the port, or null when the device or the port is unknown
 */
@Override
public Port getPort(DeviceId deviceId, PortNumber portNumber) {
    Map<PortNumber, Port> portMap = devicePorts.get(deviceId);
    if (portMap == null) {
        // Unknown device: report absence rather than failing.
        return null;
    }
    return portMap.get(portNumber);
}
// getPort must return the stored port (with its enabled flag) for known port numbers and
// null for a port that was never added.
@Test
public final void testGetPort() {
    putDevice(DID1, SW1);
    putDevice(DID2, SW1);
    List<PortDescription> pds = Arrays.asList(
            DefaultPortDescription.builder().withPortNumber(P1).isEnabled(true).build(),
            DefaultPortDescription.builder().withPortNumber(P2).isEnabled(false).build()
    );
    deviceStore.updatePorts(PID, DID1, pds);
    Port port1 = deviceStore.getPort(DID1, P1);
    assertEquals(P1, port1.number());
    assertTrue("Port is enabled", port1.isEnabled());
    Port port2 = deviceStore.getPort(DID1, P2);
    assertEquals(P2, port2.number());
    assertFalse("Port is disabled", port2.isEnabled());
    // P3 was never registered on DID1, so the lookup must yield null.
    Port port3 = deviceStore.getPort(DID1, P3);
    assertNull("P3 not expected", port3);
}
/**
 * Tests whether the given VFS connection details are usable, optionally validating the
 * connection's root path.
 * <p>
 * Order of checks: normalize null options, reject unnamed details, run the provider's base
 * connectivity test, then — unless root paths are unsupported or explicitly ignored —
 * resolve and validate the root path.
 *
 * @param manager the connection manager used to locate the provider
 * @param details the connection details under test; need not exist in the meta-store
 * @param options test options; a default instance is used when null
 * @return true when the connection (and, when applicable, its root path) is valid
 * @throws KettleException propagated from provider or file-object access
 */
public <T extends VFSConnectionDetails> boolean test(
  @NonNull ConnectionManager manager,
  @NonNull T details,
  @Nullable VFSConnectionTestOptions options ) throws KettleException {
  if ( options == null ) {
    options = new VFSConnectionTestOptions();
  }

  // The specified connection details may not exist saved in the meta-store,
  // but still needs to have a non-empty name in it, to be able to form a temporary PVFS URI.
  if ( StringUtils.isEmpty( details.getName() ) ) {
    return false;
  }

  VFSConnectionProvider<T> provider = getExistingProvider( manager, details );
  // Base connectivity must succeed before any root-path validation is attempted.
  if ( !provider.test( details ) ) {
    return false;
  }

  // Root-path validation is skipped when unsupported by the connection type or ignored by options.
  if ( !details.isRootPathSupported() || options.isRootPathIgnored() ) {
    return true;
  }

  String resolvedRootPath;
  try {
    resolvedRootPath = getResolvedRootPath( details );
  } catch ( KettleException e ) {
    // Invalid root path.
    return false;
  }

  if ( resolvedRootPath == null ) {
    // No root path configured: acceptable only when a root path is not required.
    return !details.isRootPathRequired();
  }

  // Ensure that root path exists and is a folder.
  return isFolder( getConnectionRootProviderFileObject( manager, provider, details ) );
}
// End-to-end happy path: test() succeeds when the connection's root path resolves to an
// existing folder (options force the root-path check).
@Test
public void testTestReturnsTrueWhenRootPathIsValid() throws KettleException {
    assertTrue( vfsConnectionManagerHelper.test( connectionManager, vfsConnectionDetails, getTestOptionsCheckRootPath() ) );
}
/**
 * Retrieves the BLOB value of the designated column from the merged result set.
 *
 * @param columnIndex 1-based column index
 * @return the column value as a Blob
 * @throws SQLException if the underlying result set access fails
 */
@Override
public Blob getBlob(final int columnIndex) throws SQLException {
    final Object value = mergeResultSet.getValue(columnIndex, Blob.class);
    return (Blob) value;
}
// getBlob(int) must return exactly the Blob instance supplied by the merged result set.
@Test
void assertGetBlobWithColumnIndex() throws SQLException {
    Blob blob = mock(Blob.class);
    when(mergeResultSet.getValue(1, Blob.class)).thenReturn(blob);
    assertThat(shardingSphereResultSet.getBlob(1), is(blob));
}
/**
 * Reports whether the given JWT has expired.
 *
 * @param token the JWT to inspect
 * @return true when the token's expiration is in the past (or parsing throws because it
 *         already expired), false otherwise
 */
public static boolean isExpiration(String token) {
    try {
        Date expiration = getTokenBody(token).getExpiration();
        return expiration.before(new Date());
    } catch (ExpiredJwtException e) {
        // The JWT library refuses to parse an already-expired token; treat that as expired.
        return true;
    }
}
// A freshly issued token (fixture field 'token') must not be reported as expired.
@Test
public void isExpiration() {
    boolean isExpiration = JwtTokenUtil.isExpiration(token);
    Assert.isTrue(!isExpiration);
}
/**
 * Starts the next processing span for a JMS message, continuing any extracted upstream
 * trace context.
 *
 * @param message the incoming JMS message; its trace-id properties are consumed
 * @return the next span (possibly noop, per the sampler)
 */
public Span nextSpan(Message message) {
  // Extraction also clears the propagation properties so they are not re-sent downstream.
  TraceContextOrSamplingFlags extracted = extractAndClearTraceIdProperties(processorExtractor, message, message);
  Span result = tracer.nextSpan(extracted); // Processor spans use the normal sampler.

  // When an upstream context was not present, lookup keys are unlikely added
  if (extracted.context() == null && !result.isNoop()) {
    // simplify code by re-using an existing MessagingRequest impl
    tagQueueOrTopic(new MessageConsumerRequest(message, destination(message)), result);
  }
  return result;
}
// nextSpan must only clear trace-propagation properties; unrelated message properties
// must survive untouched.
@Test
void nextSpan_should_not_clear_other_headers() throws JMSException {
    message.setIntProperty("foo", 1);
    jmsTracing.nextSpan(message);
    assertThat(message.getIntProperty("foo")).isEqualTo(1);
}
/**
 * Converts DTP music playlists into their Apple Music protocol representation.
 * <p>
 * Blank or absent fields are skipped so the corresponding proto 'has' flags remain unset.
 *
 * @param musicPlaylists playlists to convert; must not contain null entries
 * @return the converted playlists, in iteration order
 * @throws IllegalStateException if any playlist in the collection is null
 */
public static List<MusicProtocol.MusicPlaylist> convertToAppleMusicPlaylist(Collection<MusicPlaylist> musicPlaylists) {
    List<MusicProtocol.MusicPlaylist> convertedPlaylists = new ArrayList<>();
    for (MusicPlaylist playlist : musicPlaylists) {
        if (playlist == null) {
            throw new IllegalStateException("MusicPlaylist cannot be null");
        }
        convertedPlaylists.add(convertSinglePlaylist(playlist));
    }
    return convertedPlaylists;
}

/** Maps one non-null playlist onto the proto builder, copying only populated fields. */
private static MusicProtocol.MusicPlaylist convertSinglePlaylist(MusicPlaylist playlist) {
    MusicProtocol.MusicPlaylist.Builder playlistBuilder = MusicProtocol.MusicPlaylist.newBuilder();
    if (!StringUtils.isBlank(playlist.getId())) {
        playlistBuilder.setId(playlist.getId());
    }
    if (!StringUtils.isBlank(playlist.getTitle())) {
        playlistBuilder.setTitle(playlist.getTitle());
    }
    if (!StringUtils.isBlank(playlist.getDescription())) {
        playlistBuilder.setDescription(playlist.getDescription());
    }
    // Instants are transported as epoch milliseconds.
    if (playlist.getTimeCreated() != null) {
        playlistBuilder.setTimeCreated(playlist.getTimeCreated().toEpochMilli());
    }
    if (playlist.getTimeUpdated() != null) {
        playlistBuilder.setTimeUpdated(playlist.getTimeUpdated().toEpochMilli());
    }
    return playlistBuilder.build();
}
// Covers conversion to the Apple Music protocol: empty input, a null entry (must throw),
// a fully populated playlist, and a playlist where only the id is set.
// NOTE(review): assertEquals is invoked as (actual, expected) throughout; JUnit 5's
// convention is (expected, actual), so failure messages would read reversed.
@Test
public void testConvertToAppleMusicPlaylist() {
    String expectedId1 = "expectedId1";
    String expectedTitle1 = "Expected Title 1";
    String expectedDescription1 = "Expected Description";
    Instant expectedTimeCreated1 = Instant.now();
    Instant expectedTimeUpdated1 = expectedTimeCreated1.plusMillis(10000L);
    // No playlists
    List<MusicProtocol.MusicPlaylist> emptyCollectionMusicPlaylists = AppleMusicPlaylistConverter.convertToAppleMusicPlaylist(List.of());
    Assertions.assertNotNull(emptyCollectionMusicPlaylists);
    Assertions.assertTrue(emptyCollectionMusicPlaylists.isEmpty());
    // Null playlist
    List<MusicPlaylist> nullPlaylists = new ArrayList<>();
    nullPlaylists.add(null);
    Assertions.assertThrows(IllegalStateException.class, () -> AppleMusicPlaylistConverter.convertToAppleMusicPlaylist(nullPlaylists));
    // One playlist
    MusicPlaylist fullMusicPlaylist = new MusicPlaylist(expectedId1, expectedTitle1, expectedDescription1, expectedTimeCreated1, expectedTimeUpdated1);
    List<MusicPlaylist> musicPlaylists = new ArrayList<>();
    musicPlaylists.add(fullMusicPlaylist);
    List<MusicProtocol.MusicPlaylist> validPlaylistMusicPlaylists = AppleMusicPlaylistConverter.convertToAppleMusicPlaylist(musicPlaylists);
    Assertions.assertNotNull(validPlaylistMusicPlaylists);
    Assertions.assertFalse(validPlaylistMusicPlaylists.isEmpty());
    MusicProtocol.MusicPlaylist validMusicPlaylist = validPlaylistMusicPlaylists.get(0);
    Assertions.assertNotNull(validMusicPlaylist);
    Assertions.assertTrue(validMusicPlaylist.hasId());
    Assertions.assertEquals(validMusicPlaylist.getId(), expectedId1);
    Assertions.assertTrue(validMusicPlaylist.hasDescription());
    Assertions.assertEquals(validMusicPlaylist.getDescription(), expectedDescription1);
    Assertions.assertTrue(validMusicPlaylist.hasTimeCreated());
    Assertions.assertEquals(validMusicPlaylist.getTimeCreated(), expectedTimeCreated1.toEpochMilli());
    Assertions.assertTrue(validMusicPlaylist.hasTimeUpdated());
    Assertions.assertEquals(validMusicPlaylist.getTimeUpdated(), expectedTimeUpdated1.toEpochMilli());
    // Totally invalid MusicPlaylist
    MusicPlaylist invalidMusicPlaylist = new MusicPlaylist(expectedId1, null, null, null, null);
    List<MusicProtocol.MusicPlaylist> invalidMusicPlaylists = AppleMusicPlaylistConverter.convertToAppleMusicPlaylist(List.of(invalidMusicPlaylist));
    Assertions.assertNotNull(invalidMusicPlaylists);
    Assertions.assertFalse(invalidMusicPlaylists.isEmpty());
    MusicProtocol.MusicPlaylist invalidMusicPlaylistResult = invalidMusicPlaylists.get(0);
    Assertions.assertNotNull(invalidMusicPlaylistResult);
    Assertions.assertTrue(invalidMusicPlaylistResult.hasId());
    Assertions.assertEquals(invalidMusicPlaylistResult.getId(), expectedId1);
    Assertions.assertFalse(invalidMusicPlaylistResult.hasDescription());
    Assertions.assertFalse(invalidMusicPlaylistResult.hasTimeCreated());
    Assertions.assertFalse(invalidMusicPlaylistResult.hasTimeUpdated());
}
/**
 * Checks whether the given key is a member of the Redis set backing this repository.
 *
 * @param key the idempotency key to look up
 * @return true when the key is present in the set
 */
@Override
@ManagedOperation(description = "Does the store contain the given key")
public boolean contains(String key) {
    return setOperations.isMember(repositoryName, key);
}
// contains() must delegate the membership check to the Redis set operations.
// Fixed typo in the test method name (was: shoulCheckForMembers).
@Test
public void shouldCheckForMembers() {
    idempotentRepository.contains(KEY);
    verify(setOperations).isMember(REPOSITORY, KEY);
}
/** Returns this component's fixed identifier (the {@code ID} constant). */
@Override
@NonNull
public String getId() {
    return ID;
}
// Integration test (needs a real GitHub access token): stores username/password credentials
// for the user, then creates a git pipeline against a remote repository with no Jenkinsfile
// and verifies the created pipeline's name.
@Test
public void shouldCreateWithRemoteGitRepo() throws IOException, UnirestException {
    String accessToken = needsGithubAccessToken();
    User user = login();
    this.jwtToken = getJwtToken(j.jenkins, user.getId(), user.getId());
    // Create a USER-scoped username/password credential holding the access token.
    Map resp = createCredentials(user, MapsHelper.of("credentials",
        new MapsHelper.Builder<String,Object>()
            .put("password", accessToken)
            .put("stapler-class", "com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl")
            .put("scope", "USER")
            .put("domain","blueocean-git-domain")
            .put("description", "joe desc")
            .put("$class", "com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl")
            .put("username", "joe").build()
    ));
    String credentialId = (String) resp.get("id");
    Assert.assertNotNull(credentialId);
    // POST the pipeline-creation request using the freshly created credential.
    Map r = new RequestBuilder(baseUrl)
        .status(201)
        .jwtToken(getJwtToken(j.jenkins, user.getId(), user.getId()))
        .crumb( crumb )
        .post("/organizations/" + getOrgName() + "/pipelines/")
        .data(MapsHelper.of("name", "demo",
            "$class", "io.jenkins.blueocean.blueocean_git_pipeline.GitPipelineCreateRequest",
            "scmConfig", MapsHelper.of("uri", HTTPS_GITHUB_NO_JENKINSFILE,
                "credentialId", credentialId)
        )).build(Map.class);
    assertEquals("demo", r.get("name"));
}
/**
 * Reflectively creates a new config instance of the given type from a payload: builds the
 * type's generated builder from the payload, invokes the builder-taking constructor, then
 * post-initializes the instance with the config id.
 *
 * @param type the concrete ConfigInstance subclass to instantiate
 * @param configId the config id applied to the new instance
 * @param payload the payload transformed into the builder
 * @return the fully initialized instance
 * @throws IllegalArgumentException wrapping any reflective or initialization failure
 */
public static <T extends ConfigInstance> T getNewInstance(Class<T> type, String configId, ConfigPayload payload) {
    T instance;
    try {
        ConfigTransformer<?> transformer = new ConfigTransformer<>(type);
        ConfigInstance.Builder instanceBuilder = transformer.toConfigBuilder(payload);
        // Each generated config class declares a constructor taking its own builder type.
        Constructor<T> constructor = type.getConstructor(instanceBuilder.getClass());
        instance = constructor.newInstance(instanceBuilder);
        // Workaround for JDK7, where compilation fails due to fields being
        // private and not accessible from T. Reference it as a
        // ConfigInstance to work around it. See
        // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7022052 for
        // more information.
        ConfigInstance i = instance;
        i.postInitialize(configId);
        setConfigId(i, configId);
    } catch (InstantiationException | InvocationTargetException | NoSuchMethodException |
            NoSuchFieldException | IllegalAccessException e) {
        throw new IllegalArgumentException("Failed creating new instance of '" + type.getCanonicalName() +
                "' for config id '" + configId + "'", e);
    }
    return instance;
}
// getNewInstance must wrap reflective failures in IllegalArgumentException carrying the
// type name and config id in its message.
@Test
public void testGetNewInstanceErrorMessage() {
    ConfigPayloadBuilder payloadBuilder = new ConfigPayloadBuilder();
    try {
        ConfigInstanceUtil.getNewInstance(TestNodefsConfig.class, "id0", ConfigPayload.fromBuilder(payloadBuilder));
        // Replaces 'assert(false)', which is a silent no-op unless the JVM runs with -ea.
        throw new AssertionError("Expected IllegalArgumentException to be thrown");
    } catch (IllegalArgumentException e) {
        assertEquals("Failed creating new instance of 'com.yahoo.foo.TestNodefsConfig' for config id 'id0'", e.getMessage());
    }
}
/**
 * Seeds the new run's foreach rollup: iterations that will be re-run are aggregated from
 * the latest-run rollups and combined with the previous run's overall rollup.
 *
 * @param foreachOverview overview of the new run, receiving the initial rollup
 * @param prevForeachOverview overview of the previous run
 * @param foreachWorkflowId id of the inline foreach workflow
 */
@VisibleForTesting
void initializeForeachArtifactRollup(
    ForeachStepOverview foreachOverview,
    ForeachStepOverview prevForeachOverview,
    String foreachWorkflowId) {
  // Iterations the new run will execute again, derived from the previous run's details.
  Set<Long> rerunIterations = foreachOverview.getIterationsToRunFromDetails(prevForeachOverview);
  WorkflowRollupOverview rerunRollup =
      getAggregatedRollupFromIterations(foreachWorkflowId, rerunIterations);
  foreachOverview.initiateStepRollup(prevForeachOverview.getRollup(), rerunRollup);
}
// With 10 iterations to re-run, rollup aggregation must query the DAO in two batches of
// five iterations each.
@Test
public void testGetAggregatedRollupFromIterationsManyEven() {
    ArgumentCaptor<List<Long>> captor = ArgumentCaptor.forClass(List.class);
    Set<Long> iterations = LongStream.rangeClosed(1, 10).boxed().collect(Collectors.toSet());
    doReturn(Collections.singletonList(new WorkflowRollupOverview()))
        .when(workflowInstanceDao)
        .getBatchForeachLatestRunRollupForIterations(anyString(), any());
    ForeachStepOverview stepOverview = mock(ForeachStepOverview.class);
    ForeachStepOverview prevStepOverview = new ForeachStepOverview();
    doReturn(iterations).when(stepOverview).getIterationsToRunFromDetails(any());
    foreachStepRuntime.initializeForeachArtifactRollup(
        stepOverview, prevStepOverview, "myworkflowid");
    // Two DAO calls expected; capture each batch of iteration ids.
    Mockito.verify(workflowInstanceDao, times(2))
        .getBatchForeachLatestRunRollupForIterations(eq("myworkflowid"), captor.capture());
    List<List<Long>> values = captor.getAllValues();
    assertEquals(5, values.get(0).size());
    assertEquals(5, values.get(1).size());
}
/**
 * Drops table schemas and finished snapshot split info that no longer match the current
 * table filter, adjusting the total finished split size accordingly.
 *
 * @param binlogSplit the binlog split restored from state
 * @param currentTableFilter the table filter in effect after restart
 * @return a binlog split containing only entries for still-included tables
 */
public static MySqlBinlogSplit filterOutdatedSplitInfos(
        MySqlBinlogSplit binlogSplit, Tables.TableFilter currentTableFilter) {
    // Keep only schemas for tables that are still captured by the filter.
    Map<TableId, TableChange> filteredTableSchemas =
            binlogSplit.getTableSchemas().entrySet().stream()
                    .filter(entry -> currentTableFilter.isIncluded(entry.getKey()))
                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    // Tables whose finished snapshot split info must be discarded.
    Set<TableId> tablesToRemoveInFinishedSnapshotSplitInfos =
            binlogSplit.getFinishedSnapshotSplitInfos().stream()
                    .filter(i -> !currentTableFilter.isIncluded(i.getTableId()))
                    .map(split -> split.getTableId())
                    .collect(Collectors.toSet());
    if (tablesToRemoveInFinishedSnapshotSplitInfos.isEmpty()) {
        // Nothing to drop from split infos: reuse them with the filtered schemas only.
        return new MySqlBinlogSplit(
                binlogSplit.splitId,
                binlogSplit.getStartingOffset(),
                binlogSplit.getEndingOffset(),
                binlogSplit.getFinishedSnapshotSplitInfos(),
                filteredTableSchemas,
                binlogSplit.totalFinishedSplitSize,
                binlogSplit.isSuspended());
    }
    LOG.info(
            "Reader remove tables after restart: {}",
            tablesToRemoveInFinishedSnapshotSplitInfos);
    List<FinishedSnapshotSplitInfo> allFinishedSnapshotSplitInfos =
            binlogSplit.getFinishedSnapshotSplitInfos().stream()
                    .filter(
                            i ->
                                    !tablesToRemoveInFinishedSnapshotSplitInfos.contains(
                                            i.getTableId()))
                    .collect(Collectors.toList());
    // Shrink the total finished-split count by the number of dropped split infos.
    return new MySqlBinlogSplit(
            binlogSplit.splitId,
            binlogSplit.getStartingOffset(),
            binlogSplit.getEndingOffset(),
            allFinishedSnapshotSplitInfos,
            filteredTableSchemas,
            binlogSplit.getTotalFinishedSplitSize()
                    - (binlogSplit.getFinishedSnapshotSplitInfos().size()
                            - allFinishedSnapshotSplitInfos.size()),
            binlogSplit.isSuspended());
}
// filterOutdatedSplitInfos must keep only the table schemas matched by the current filter:
// one table for a narrow filter, both for a filter matching all tables.
@Test
public void filterOutdatedSplitInfos() {
    Map<TableId, TableChanges.TableChange> tableSchemas = new HashMap<>();
    // mock table1
    TableId tableId1 = new TableId("catalog1", null, "table1");
    TableChanges.TableChange tableChange1 =
            new TableChanges.TableChange(
                    TableChanges.TableChangeType.CREATE,
                    new MockTable(TableId.parse("catalog1.table1")));
    // mock table2
    TableId tableId2 = new TableId("catalog2", null, "table2");
    TableChanges.TableChange tableChange2 =
            new TableChanges.TableChange(
                    TableChanges.TableChangeType.CREATE,
                    new MockTable(TableId.parse("catalog2.table2")));
    tableSchemas.put(tableId1, tableChange1);
    tableSchemas.put(tableId2, tableChange2);
    MySqlBinlogSplit binlogSplit =
            new MySqlBinlogSplit(
                    "binlog-split",
                    BinlogOffset.ofLatest(),
                    null,
                    new ArrayList<>(),
                    tableSchemas,
                    0,
                    false);
    // case 1: only include table1
    Tables.TableFilter currentTableFilter = tableId -> tableId.table().equals("table1");
    MySqlBinlogSplit mySqlBinlogSplit =
            MySqlBinlogSplit.filterOutdatedSplitInfos(binlogSplit, currentTableFilter);
    Map<TableId, TableChanges.TableChange> filterTableSchemas = mySqlBinlogSplit.getTableSchemas();
    Assert.assertEquals(1, filterTableSchemas.size());
    Assert.assertEquals(tableChange1, filterTableSchemas.get(tableId1));
    // case 2: include all tables
    currentTableFilter = tableId -> tableId.table().startsWith("table");
    mySqlBinlogSplit = MySqlBinlogSplit.filterOutdatedSplitInfos(binlogSplit, currentTableFilter);
    filterTableSchemas = mySqlBinlogSplit.getTableSchemas();
    Assert.assertEquals(2, filterTableSchemas.size());
    Assert.assertEquals(tableChange1, filterTableSchemas.get(tableId1));
    Assert.assertEquals(tableChange2, filterTableSchemas.get(tableId2));
}
/**
 * Generates an inclusive integer series from start to end with an implicit step of +1 when
 * ascending and -1 when descending (equal endpoints use -1, matching the original behavior).
 *
 * @param start the beginning of the series
 * @param end the end of the series (inclusive)
 * @return the generated series
 */
@Udf
public List<Integer> generateSeriesInt(
    @UdfParameter(description = "The beginning of the series") final int start,
    @UdfParameter(description = "Marks the end of the series (inclusive)") final int end
) {
    // Compare directly rather than via 'end - start > 0': the subtraction can overflow int
    // for extreme inputs and pick the wrong direction.
    return generateSeriesInt(start, end, end > start ? 1 : -1);
}
// A negative step while start < end has the wrong sign and must be rejected with a
// KsqlFunctionException carrying a descriptive message.
@Test
public void shouldThrowIfStepWrongSignInt1() {
    // When:
    final Exception e = assertThrows(
        KsqlFunctionException.class,
        () -> rangeUdf.generateSeriesInt(0, 10, -1)
    );
    // Then:
    assertThat(e.getMessage(), containsString(
        "GENERATE_SERIES step has wrong sign"));
}
/**
 * Builds a group key from dataId and group, delegating to the three-argument form with an
 * empty datum id.
 *
 * @param dataId the data id
 * @param group the group name
 * @return the composed group key
 */
public static String getKey(String dataId, String group) {
    final String noDatumId = "";
    return doGetKey(dataId, group, noDatumId);
}
// Key parts are joined with '+', even when the parts themselves contain commas.
// NOTE(review): this exercises the three-argument GroupKey.getKey overload, not the
// two-argument one.
@Test
void testGetKeyByThreeParams() {
    // Act
    final String actual = GroupKey.getKey(",", ",", "3");
    // Assert result
    assertEquals(",+,+3", actual);
}
/**
 * Scales the image by the given ratio, preserving the aspect ratio.
 *
 * @param scale scale ratio; a negative value is corrected to its absolute value
 * @return this
 */
public Img scale(float scale) {
    if (scale < 0) {
        // Auto-correct a negative ratio to its absolute value.
        scale = -scale;
    }
    final Image srcImg = getValidSrcImg();

    // Special handling for PNG images: scale via an affine transform to preserve transparency.
    if (ImgUtil.IMAGE_TYPE_PNG.equals(this.targetImageType)) {
        // Correct the precision loss caused by the float-to-double conversion.
        final double scaleDouble = NumberUtil.toDouble(scale);
        this.targetImage = ImgUtil.transform(AffineTransform.getScaleInstance(scaleDouble, scaleDouble),
                ImgUtil.toBufferedImage(srcImg, this.targetImageType));
    } else {
        // Width of the scaled image.
        final int width = NumberUtil.mul((Number) srcImg.getWidth(null), scale).intValue();
        // Height of the scaled image.
        final int height = NumberUtil.mul((Number) srcImg.getHeight(null), scale).intValue();
        scale(width, height);
    }
    return this;
}
// Manual check (disabled): scales a local JPG by 0.8 and writes the result next to the
// source file; depends on a file on the developer's machine.
@Test
@Disabled
public void scaleTest() {
    final String downloadFile = "d:/test/1435859438434136064.JPG";
    final File file = FileUtil.file(downloadFile);
    final File fileScale = FileUtil.file(downloadFile + ".scale." + FileTypeUtil.getType(file));
    final Image img = ImgUtil.getImage(URLUtil.getURL(file));
    ImgUtil.scale(img, fileScale, 0.8f);
}
@Override public void report(final SortedMap<MetricName, Gauge> gauges, final SortedMap<MetricName, Counter> counters, final SortedMap<MetricName, Histogram> histograms, final SortedMap<MetricName, Meter> meters, final SortedMap<MetricName, Timer> timers) { final long now = System.currentTimeMillis(); if(logger.isDebugEnabled()) logger.debug("InfluxDbReporter report is called with counter size " + counters.size()); try { influxDb.flush(); for (Map.Entry<MetricName, Gauge> entry : gauges.entrySet()) { reportGauge(entry.getKey(), entry.getValue(), now); } for (Map.Entry<MetricName, Counter> entry : counters.entrySet()) { reportCounter(entry.getKey(), entry.getValue(), now); } for (Map.Entry<MetricName, Histogram> entry : histograms.entrySet()) { reportHistogram(entry.getKey(), entry.getValue(), now); } for (Map.Entry<MetricName, Meter> entry : meters.entrySet()) { reportMeter(entry.getKey(), entry.getValue(), now); } for (Map.Entry<MetricName, Timer> entry : timers.entrySet()) { reportTimer(entry.getKey(), entry.getValue(), now); } if (influxDb.hasSeriesData()) { influxDb.writeData(); } // reset counters for (Map.Entry<MetricName, Counter> entry : counters.entrySet()) { Counter counter = entry.getValue(); long count = counter.getCount(); counter.dec(count); } } catch (Exception e) { logger.error("Unable to report to InfluxDB. Discarding data.", e); } }
// Reports a double-valued gauge and captures the point sent to InfluxDB.
// NOTE(review): the content assertions are commented out, so this test currently only
// verifies that appendPoints() was invoked — consider re-enabling them if they compile.
@Test
public void reportsDoubleGaugeValues() throws Exception {
    reporter.report(map("gauge", gauge(1.1)), this.map(), this.map(), this.map(), this.map());
    final ArgumentCaptor<InfluxDbPoint> influxDbPointCaptor = ArgumentCaptor.forClass(InfluxDbPoint.class);
    Mockito.verify(influxDb, atLeastOnce()).appendPoints(influxDbPointCaptor.capture());
    InfluxDbPoint point = influxDbPointCaptor.getValue();
    /*
    assertThat(point.getMeasurement()).isEqualTo("gauge");
    assertThat(point.getFields()).isNotEmpty();
    assertThat(point.getFields()).hasSize(1);
    assertThat(point.getFields()).contains(entry("value", 1.1));
    */
}
/**
 * Derives vehicle access (forward/backward) for a way from its OSM tags: explicit
 * restrictions, barrier-edge node tags, ferries, and oneway/roundabout direction handling.
 *
 * @param edgeId edge being encoded
 * @param edgeIntAccess access to the edge's encoded values
 * @param way the OSM way with its tags
 * @param relationFlags unused here
 */
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
    String highwayValue = way.getTag("highway");
    // Optionally skip emergency-access service roads entirely.
    if (skipEmergency && "service".equals(highwayValue) && "emergency_access".equals(way.getTag("service")))
        return;
    // The first matching restriction key (e.g. motorcar, vehicle, access) wins.
    int firstIndex = way.getFirstIndex(restrictionKeys);
    String firstValue = firstIndex < 0 ? "" : way.getTag(restrictionKeys.get(firstIndex), "");
    // A plain restriction blocks access, unless it is only a temporal (conditional) one.
    if (restrictedValues.contains(firstValue) && !hasTemporalRestriction(way, firstIndex, restrictionKeys))
        return;
    if (way.hasTag("gh:barrier_edge") && way.hasTag("node_tags")) {
        List<Map<String, Object>> nodeTags = way.getTag("node_tags", null);
        Map<String, Object> firstNodeTags = nodeTags.get(0); // a barrier edge has the restriction in both nodes and the tags are the same -> get(0)
        firstValue = getFirstPriorityNodeTag(firstNodeTags, restrictionKeys);
        String barrierValue = firstNodeTags.containsKey("barrier") ? (String) firstNodeTags.get("barrier") : "";
        // Blocked by an explicit restriction, a blocking barrier, or a locked gate
        // (unless access is explicitly intended).
        if (restrictedValues.contains(firstValue) || barriers.contains(barrierValue) ||
                "yes".equals(firstNodeTags.get("locked")) && !INTENDED.contains(firstValue))
            return;
    }
    if (FerrySpeedCalculator.isFerry(way)) {
        boolean isCar = restrictionKeys.contains("motorcar");
        if (INTENDED.contains(firstValue)
                // implied default is allowed only if foot and bicycle is not specified:
                || isCar && firstValue.isEmpty() && !way.hasTag("foot") && !way.hasTag("bicycle")
                // if hgv is allowed then smaller trucks and cars are allowed too even if not specified
                || isCar && way.hasTag("hgv", "yes")) {
            accessEnc.setBool(false, edgeId, edgeIntAccess, true);
            accessEnc.setBool(true, edgeId, edgeIntAccess, true);
        }
    } else {
        boolean isRoundabout = roundaboutEnc.getBool(false, edgeId, edgeIntAccess);
        // Vehicle-specific keys (e.g. oneway:bus=no) can cancel the generic oneway.
        boolean ignoreOneway = "no".equals(way.getFirstValue(ignoreOnewayKeys));
        boolean isBwd = isBackwardOneway(way);
        if (!ignoreOneway && (isBwd || isRoundabout || isForwardOneway(way))) {
            // Oneway (or roundabout): enable only the indicated direction.
            accessEnc.setBool(isBwd, edgeId, edgeIntAccess, true);
        } else {
            // Bidirectional access.
            accessEnc.setBool(false, edgeId, edgeIntAccess, true);
            accessEnc.setBool(true, edgeId, edgeIntAccess, true);
        }
    }
}
// Bus access: motor_vehicle=no blocks buses unless bus=yes overrides it; oneway:bus and
// oneway:psv override the generic oneway direction for buses.
@Test
public void testBusYes() {
    EdgeIntAccess access = new ArrayEdgeIntAccess(1);
    ReaderWay way = new ReaderWay(0);
    way.setTag("motor_vehicle", "no");
    way.setTag("highway", "tertiary");
    int edgeId = 0;
    parser.handleWayTags(edgeId, access, way, null);
    assertFalse(busAccessEnc.getBool(false, edgeId, access));
    // bus=yes wins over motor_vehicle=no.
    access = new ArrayEdgeIntAccess(1);
    way.setTag("bus", "yes");
    parser.handleWayTags(edgeId, access, way, null);
    assertTrue(busAccessEnc.getBool(false, edgeId, access));
    // oneway:bus=no re-opens the reverse direction on a oneway road.
    access = new ArrayEdgeIntAccess(1);
    way = new ReaderWay(0);
    way.setTag("highway", "primary");
    way.setTag("oneway", "yes");
    way.setTag("oneway:bus", "no");
    parser.handleWayTags(edgeId, access, way, null);
    assertTrue(busAccessEnc.getBool(false, edgeId, access));
    assertTrue(busAccessEnc.getBool(true, edgeId, access));
    // oneway:bus takes precedence over oneway:psv.
    access = new ArrayEdgeIntAccess(1);
    way.setTag("oneway:psv", "no");
    way.setTag("oneway:bus", "yes");
    parser.handleWayTags(edgeId, access, way, null);
    assertTrue(busAccessEnc.getBool(false, edgeId, access));
    assertFalse(busAccessEnc.getBool(true, edgeId, access));
}
/**
 * Hands the batch of incoming records to the underlying sink writer.
 *
 * @param sinkRecords records to buffer/write
 */
void save(Collection<SinkRecord> sinkRecords) {
    this.sinkWriter.save(sinkRecords);
}
// Worker.save() buffers a record; a StartCommit control event then triggers emission of a
// DataWritten event followed by a DataComplete event carrying the expected offset.
// NOTE(review): the second assertion reads event.type() while the first reads
// event.payload().type() — confirm both accessors are intended.
@Test
public void testSave() {
    when(config.catalogName()).thenReturn("catalog");
    try (MockedStatic<KafkaUtils> mockKafkaUtils = mockStatic(KafkaUtils.class)) {
        ConsumerGroupMetadata consumerGroupMetadata = mock(ConsumerGroupMetadata.class);
        mockKafkaUtils
            .when(() -> KafkaUtils.consumerGroupMetadata(any()))
            .thenReturn(consumerGroupMetadata);
        SinkTaskContext context = mock(SinkTaskContext.class);
        TopicPartition topicPartition = new TopicPartition(SRC_TOPIC_NAME, 0);
        when(context.assignment()).thenReturn(ImmutableSet.of(topicPartition));
        IcebergWriterResult writeResult =
            new IcebergWriterResult(
                TableIdentifier.parse(TABLE_NAME),
                ImmutableList.of(EventTestUtil.createDataFile()),
                ImmutableList.of(),
                StructType.of());
        Map<TopicPartition, Offset> offsets =
            ImmutableMap.of(topicPartition, new Offset(1L, EventTestUtil.now()));
        SinkWriterResult sinkWriterResult = new SinkWriterResult(ImmutableList.of(writeResult), offsets);
        SinkWriter sinkWriter = mock(SinkWriter.class);
        when(sinkWriter.completeWrite()).thenReturn(sinkWriterResult);
        Worker worker = new Worker(config, clientFactory, sinkWriter, context);
        worker.start();
        // init consumer after subscribe()
        initConsumer();
        // save a record
        Map<String, Object> value = ImmutableMap.of();
        SinkRecord rec = new SinkRecord(SRC_TOPIC_NAME, 0, null, "key", null, value, 0L);
        worker.save(ImmutableList.of(rec));
        // Simulate the coordinator requesting a commit via the control topic.
        UUID commitId = UUID.randomUUID();
        Event commitRequest = new Event(config.connectGroupId(), new StartCommit(commitId));
        byte[] bytes = AvroUtil.encode(commitRequest);
        consumer.addRecord(new ConsumerRecord<>(CTL_TOPIC_NAME, 0, 1, "key", bytes));
        worker.process();
        assertThat(producer.history()).hasSize(2);
        Event event = AvroUtil.decode(producer.history().get(0).value());
        assertThat(event.payload().type()).isEqualTo(PayloadType.DATA_WRITTEN);
        DataWritten dataWritten = (DataWritten) event.payload();
        assertThat(dataWritten.commitId()).isEqualTo(commitId);
        event = AvroUtil.decode(producer.history().get(1).value());
        assertThat(event.type()).isEqualTo(PayloadType.DATA_COMPLETE);
        DataComplete dataComplete = (DataComplete) event.payload();
        assertThat(dataComplete.commitId()).isEqualTo(commitId);
        assertThat(dataComplete.assignments()).hasSize(1);
        assertThat(dataComplete.assignments().get(0).offset()).isEqualTo(1L);
    }
}
/**
 * Removes the given suffix from the end of the value, when present.
 *
 * @param value the string to strip; may be null
 * @param suffix the suffix to remove; may be null
 * @return the value without the suffix, or the value unchanged when either argument is
 *         null or the value does not end with the suffix
 */
public static String stripSuffix(final String value, final String suffix) {
    // Nothing to strip when either side is absent.
    if (value == null || suffix == null) {
        return value;
    }
    return value.endsWith(suffix)
            ? value.substring(0, value.length() - suffix.length())
            : value;
}
// Exercises stripSuffix across null, empty, non-matching, exact-match, trailing-match and
// longer-than-value suffix inputs.
@Test
public void shouldStripSuffixes() {
    assertThat(URISupport.stripSuffix(null, null)).isNull();
    assertThat(URISupport.stripSuffix("", null)).isEmpty();
    assertThat(URISupport.stripSuffix(null, "")).isNull();
    assertThat(URISupport.stripSuffix("", "")).isEmpty();
    assertThat(URISupport.stripSuffix("a", "b")).isEqualTo("a");
    assertThat(URISupport.stripSuffix("a", "a")).isEmpty();
    assertThat(URISupport.stripSuffix("ab", "b")).isEqualTo("a");
    // A suffix longer than the value cannot match and must leave the value untouched.
    assertThat(URISupport.stripSuffix("a", "ab")).isEqualTo("a");
}
/**
 * Computes a unicast route. With no logic tables the chosen data source alone forms the
 * route unit; a single sharded table routes to its first data node; multiple tables are
 * delegated to routeWithMultipleTables.
 *
 * @param shardingRule the sharding rule supplying data sources and data nodes
 * @return the computed route context
 */
@Override
public RouteContext route(final ShardingRule shardingRule) {
    RouteContext result = new RouteContext();
    String dataSourceName = getDataSourceName(shardingRule.getDataSourceNames());
    RouteMapper dataSourceMapper = new RouteMapper(dataSourceName, dataSourceName);
    if (logicTables.isEmpty()) {
        result.getRouteUnits().add(new RouteUnit(dataSourceMapper, Collections.emptyList()));
    } else if (1 == logicTables.size()) {
        String logicTableName = logicTables.iterator().next();
        if (!shardingRule.findShardingTable(logicTableName).isPresent()) {
            // Not a sharded table: route to the chosen data source without a table mapping.
            result.getRouteUnits().add(new RouteUnit(dataSourceMapper, Collections.emptyList()));
            return result;
        }
        DataNode dataNode = shardingRule.getDataNode(logicTableName);
        result.getRouteUnits().add(new RouteUnit(new RouteMapper(dataNode.getDataSourceName(), dataNode.getDataSourceName()),
                Collections.singletonList(new RouteMapper(logicTableName, dataNode.getTableName()))));
    } else {
        routeWithMultipleTables(result, shardingRule);
    }
    return result;
}
// Unicast routing with an empty logic-table set must still produce exactly one route unit.
@Test
void assertRoutingForNoTable() {
    RouteContext actual = new ShardingUnicastRoutingEngine(mock(SQLStatementContext.class), Collections.emptyList(), new ConnectionContext(Collections::emptySet)).route(shardingRule);
    assertThat(actual.getRouteUnits().size(), is(1));
}
/**
 * Appends the given parameters to a redirect URI as a query string, URL-encoding each value.
 * Uses '&' as the separator when the URI already carries a query, '?' otherwise.
 *
 * @param redirectUri the base redirect URI
 * @param params parameters to append, in iteration order
 * @return the redirect URI with the parameters appended
 */
static String buildRedirect(String redirectUri, Map<String, Object> params) {
    StringJoiner query = new StringJoiner("&");
    for (Map.Entry<String, Object> entry : params.entrySet()) {
        query.add(entry.getKey() + "=" + urlEncode(String.valueOf(entry.getValue())));
    }
    String separator = redirectUri.contains("?") ? "&" : "?";
    return redirectUri + separator + query;
}
// A redirect URI that already has a query string must receive extra params via '&'.
// Fixed assertEquals argument order: JUnit expects (expected, actual); the original passed
// the actual value first, which would produce a misleading failure message.
@Test
public void testBuildRedirectParam() {
    String url = OAuth2AuthorizeController.buildRedirect("http://hsweb.me/callback?a=b", Collections.singletonMap("code", "1234"));
    assertEquals("http://hsweb.me/callback?a=b&code=1234", url);
}
/**
 * Derives an annotated cluster state from current node info: computes each node's effective
 * state, takes down groups with too low availability, marks the whole cluster Down when
 * warranted, and finally infers the distribution bit count.
 *
 * @param params generation parameters, including the content cluster
 * @return the generated state with per-node and cluster-level reasons attached
 */
static AnnotatedClusterState generatedStateFrom(final Params params) {
    final ContentCluster cluster = params.cluster;
    final ClusterState workingState = ClusterState.emptyState();
    final Map<Node, NodeStateReason> nodeStateReasons = new HashMap<>();
    for (final NodeInfo nodeInfo : cluster.getNodeInfos()) {
        final NodeState nodeState = computeEffectiveNodeState(nodeInfo, params, nodeStateReasons);
        workingState.setNodeState(nodeInfo.getNode(), nodeState);
    }
    takeDownGroupsWithTooLowAvailability(workingState, nodeStateReasons, params);
    final Optional<ClusterStateReason> reasonToBeDown = clusterDownReason(workingState, params);
    if (reasonToBeDown.isPresent()) {
        workingState.setClusterState(State.DOWN);
    }
    // Distribution bits are inferred only after all node states have been settled.
    workingState.setDistributionBits(inferDistributionBitCount(cluster, workingState, params));
    return new AnnotatedClusterState(workingState, reasonToBeDown, nodeStateReasons);
}
// maxInitProgressTime(0) disables the auto "init to down" feature: a storage node stuck in
// INITIALIZING past the deadline must remain in state 'i' instead of being taken down.
@Test
void configured_zero_init_progress_time_disables_auto_init_to_down_feature() {
    final ClusterFixture fixture = ClusterFixture.forFlatCluster(3)
            .bringEntireClusterUp()
            .reportStorageNodeState(0, new NodeState(NodeType.STORAGE, State.INITIALIZING).setInitProgress(0.5f));
    final NodeInfo nodeInfo = fixture.cluster.getNodeInfo(new Node(NodeType.STORAGE, 0));
    // Last init progress was 1 second before 'now'; with the feature enabled this would
    // exceed the deadline and take the node down.
    nodeInfo.setInitProgressTime(10_000);
    final ClusterStateGenerator.Params params = fixture.generatorParams()
            .maxInitProgressTime(0)
            .currentTimeInMillis(11_000);
    final AnnotatedClusterState state = ClusterStateGenerator.generatedStateFrom(params);
    assertThat(state.toString(), equalTo("distributor:3 storage:3 .0.s:i .0.i:0.5"));
}
/**
 * Writes a MySQL length-encoded integer: a single byte for values below 0xfb, otherwise a
 * one-byte prefix (0xfc/0xfd/0xfe) followed by a 2-, 3- or 8-byte little-endian integer.
 * <p>
 * Replaces the previous {@code Math.pow(2D, 16D)} / {@code Math.pow(2D, 24D)} threshold
 * checks with exact integer constants: comparing a long against a computed double is both
 * needlessly imprecise in principle and slower than a constant compare.
 *
 * @param value the non-negative value to encode
 */
public void writeIntLenenc(final long value) {
    // 1-byte form: values below 0xfb are written directly.
    if (value < 0xfb) {
        byteBuf.writeByte((int) value);
        return;
    }
    // 3-byte form: 0xfc prefix + 2-byte little-endian integer.
    if (value < (1L << 16)) {
        byteBuf.writeByte(0xfc);
        byteBuf.writeShortLE((int) value);
        return;
    }
    // 4-byte form: 0xfd prefix + 3-byte little-endian integer.
    if (value < (1L << 24)) {
        byteBuf.writeByte(0xfd);
        byteBuf.writeMediumLE((int) value);
        return;
    }
    // 9-byte form: 0xfe prefix + 8-byte little-endian integer.
    byteBuf.writeByte(0xfe);
    byteBuf.writeLongLE(value);
}
// Values below 0xfb must be written as a single byte with no length prefix.
@Test
void assertWriteIntLenencWithOneByte() {
    new MySQLPacketPayload(byteBuf, StandardCharsets.UTF_8).writeIntLenenc(1L);
    verify(byteBuf).writeByte(1);
}
/**
 * Dispatches a fired timer to the DoFn via the invoker, packaging the timer context into
 * an argument provider.
 *
 * @throws NullPointerException if outputTimestamp is null
 */
@Override
public <KeyT> void onTimer(
    String timerId,
    String timerFamilyId,
    KeyT key,
    BoundedWindow window,
    Instant timestamp,
    Instant outputTimestamp,
    TimeDomain timeDomain) {
  Preconditions.checkNotNull(outputTimestamp, "outputTimestamp");
  invoker.invokeOnTimer(
      timerId,
      timerFamilyId,
      new OnTimerArgumentProvider<>(timerId, key, window, timestamp, outputTimestamp, timeDomain));
}
// An exception thrown from the user's @OnTimer method must surface to the caller
// wrapped in a UserCodeException with the original exception as its cause.
@Test
public void testOnTimerExceptionsWrappedAsUserCodeException() {
    ThrowingDoFn fn = new ThrowingDoFn();
    DoFnRunner<String, String> runner =
        new SimpleDoFnRunner<>(
            null,
            fn,
            NullSideInputReader.empty(),
            null,
            null,
            Collections.emptyList(),
            mockStepContext,
            null,
            Collections.emptyMap(),
            WindowingStrategy.of(new GlobalWindows()),
            DoFnSchemaInformation.create(),
            Collections.emptyMap());
    thrown.expect(UserCodeException.class);
    thrown.expectCause(is(fn.exceptionToThrow));
    runner.onTimer(
        TimerDeclaration.PREFIX + ThrowingDoFn.TIMER_ID,
        "",
        null,
        GlobalWindow.INSTANCE,
        new Instant(0),
        new Instant(0),
        TimeDomain.EVENT_TIME);
}
/**
 * Moves the fact mapping at {@code oldIndex} to {@code newIndex}, shifting the
 * mappings in between.
 *
 * @param oldIndex current position of the mapping to move
 * @param newIndex target position
 * @throws IndexOutOfBoundsException if either index is outside the list bounds
 */
public void moveFactMapping(int oldIndex, int newIndex) {
    // Validate both indices before mutating so the list is never left half-modified.
    if (oldIndex < 0 || oldIndex >= factMappings.size()) {
        throw new IndexOutOfBoundsException("Index " + oldIndex + " not found in the list");
    }
    if (newIndex < 0 || newIndex >= factMappings.size()) {
        throw new IndexOutOfBoundsException("Index " + newIndex + " out of range");
    }
    // List.remove(int) returns the removed element, so the move is a single remove/add pair.
    factMappings.add(newIndex, factMappings.remove(oldIndex));
}
// Moving the first mapping to index 1 swaps the first two mappings while the third
// keeps its position.
@Test
public void moveFactMappingTest() {
    ExpressionIdentifier expressionIdentifier2 = ExpressionIdentifier.create("Test expression 2", GIVEN);
    ExpressionIdentifier expressionIdentifier3 = ExpressionIdentifier.create("Test expression 3", GIVEN);
    FactMapping factMapping1 = modelDescriptor.addFactMapping(factIdentifier, expressionIdentifier);
    FactMapping factMapping2 = modelDescriptor.addFactMapping(factIdentifier, expressionIdentifier2);
    FactMapping factMapping3 = modelDescriptor.addFactMapping(factIdentifier, expressionIdentifier3);
    assertThat(modelDescriptor.getUnmodifiableFactMappings()).containsExactly(factMapping1, factMapping2, factMapping3);
    modelDescriptor.moveFactMapping(0, 1);
    assertThat(modelDescriptor.getUnmodifiableFactMappings()).containsExactly(factMapping2, factMapping1, factMapping3);
}
public Trans loadTransFromFilesystem( String initialDir, String filename, String jarFilename, Serializable base64Zip ) throws Exception { Trans trans = null; File zip; if ( base64Zip != null && ( zip = decodeBase64ToZipFile( base64Zip, true ) ) != null ) { // update filename to a meaningful, 'ETL-file-within-zip' syntax filename = "zip:file:" + File.separator + File.separator + zip.getAbsolutePath() + "!" + filename; } // Try to load the transformation from file if ( !Utils.isEmpty( filename ) ) { String filepath = filename; // If the filename starts with scheme like zip:, then isAbsolute() will return false even though the // the path following the zip is absolute. Check for isAbsolute only if the fileName does not start with scheme if ( !KettleVFS.startsWithScheme( filename ) && !FileUtil.isFullyQualified( filename ) ) { filepath = initialDir + filename; } logDebug( "Pan.Log.LoadingTransXML", "" + filepath ); TransMeta transMeta = new TransMeta( filepath ); trans = new Trans( transMeta ); } if ( !Utils.isEmpty( jarFilename ) ) { try { logDebug( "Pan.Log.LoadingTransJar", jarFilename ); InputStream inputStream = PanCommandExecutor.class.getResourceAsStream( jarFilename ); StringBuilder xml = new StringBuilder(); int c; while ( ( c = inputStream.read() ) != -1 ) { xml.append( (char) c ); } inputStream.close(); Document document = XMLHandler.loadXMLString( xml.toString() ); TransMeta transMeta = new TransMeta( XMLHandler.getSubNode( document, "transformation" ), null ); trans = new Trans( transMeta ); } catch ( Exception e ) { System.out.println( BaseMessages.getString( getPkgClazz(), "Pan.Error.ReadingJar", e.toString() ) ); System.out.println( Const.getStackTracker( e ) ); throw e; } } if ( trans != null ) { trans.setMetaStore( getMetaStore() ); } return trans; }
// A transformation packaged in a base64-encoded zip archive can be loaded by name
// from within that archive.
@Test
public void testFilesystemBase64Zip() throws Exception {
    String fileName = "test.ktr";
    File zipFile = new File( getClass().getResource( "testKtrArchive.zip" ).toURI() );
    String base64Zip = Base64.getEncoder().encodeToString( FileUtils.readFileToByteArray( zipFile ) );
    Trans trans = mockedPanCommandExecutor.loadTransFromFilesystem( "", fileName, "", base64Zip );
    assertNotNull( trans );
}
/**
 * Own inserts are never visible for any result-set type in this implementation;
 * the {@code type} argument is intentionally ignored.
 */
@Override
public boolean ownInsertsAreVisible(final int type) {
    return false;
}
// ownInsertsAreVisible always reports false, regardless of the result-set type.
@Test
void assertOwnInsertsAreVisible() {
    assertFalse(metaData.ownInsertsAreVisible(0));
}
/**
 * Selects the job worker co-located with the Alluxio worker holding the most blocks
 * of the file to persist; if no such job worker exists, picks one at random.
 *
 * @throws RuntimeException if no job worker is available
 */
@Override
public Set<Pair<WorkerInfo, SerializableVoid>> selectExecutors(PersistConfig config,
    List<WorkerInfo> jobWorkerInfoList, SelectExecutorsContext context) throws Exception {
  if (jobWorkerInfoList.isEmpty()) {
    throw new RuntimeException("No worker is available");
  }
  AlluxioURI uri = new AlluxioURI(config.getFilePath());
  List<BlockWorkerInfo> alluxioWorkerInfoList = context.getFsContext().getCachedWorkers()
      .stream()
      .map(w -> new BlockWorkerInfo(
          w.getIdentity(), w.getAddress(), w.getCapacityBytes(), w.getUsedBytes()))
      .collect(Collectors.toList());
  BlockWorkerInfo workerWithMostBlocks = JobUtils.getWorkerWithMostBlocks(alluxioWorkerInfoList,
      context.getFileSystem().getStatus(uri).getFileBlockInfos());
  // Map the best Alluxio worker to a job worker by matching on host name.
  Set<Pair<WorkerInfo, SerializableVoid>> result = Sets.newHashSet();
  boolean found = false;
  if (workerWithMostBlocks != null) {
    for (WorkerInfo workerInfo : jobWorkerInfoList) {
      if (workerInfo.getAddress().getHost()
          .equals(workerWithMostBlocks.getNetAddress().getHost())) {
        result.add(new Pair<>(workerInfo, null));
        found = true;
        break;
      }
    }
  }
  if (!found) {
    // NOTE(review): a new Random is allocated on every call; ThreadLocalRandom.current()
    // would avoid that — confirm no seeding requirement before changing.
    result.add(new Pair<>(
        jobWorkerInfoList.get(new Random().nextInt(jobWorkerInfoList.size())), null));
  }
  return result;
}
// worker1's host holds two of the three file blocks, so it must be the selected
// executor even though worker2 is listed first.
@Test
public void selectExecutorsTest() throws Exception {
    AlluxioURI uri = new AlluxioURI("/test");
    PersistConfig config = new PersistConfig(uri.getPath(), -1, true, "");
    WorkerNetAddress workerNetAddress1 = new WorkerNetAddress().setDataPort(10).setHost("host1");
    WorkerIdentity workerIdentity1 = WorkerIdentityTestUtils.randomLegacyId();
    WorkerNetAddress workerNetAddress2 = new WorkerNetAddress().setDataPort(100).setHost("host2");
    WorkerIdentity workerIdentity2 = WorkerIdentityTestUtils.randomLegacyId();
    WorkerInfo blockWorkerInfo1 = new WorkerInfo().setIdentity(workerIdentity1).setAddress(workerNetAddress1)
        .setCapacityBytes(1).setUsedBytes(1);
    WorkerInfo blockWorkerInfo2 = new WorkerInfo().setIdentity(workerIdentity2).setAddress(workerNetAddress2)
        .setCapacityBytes(1).setUsedBytes(1);
    WorkerInfo workerInfo1 = new WorkerInfo()
        .setIdentity(workerIdentity1)
        .setAddress(workerNetAddress1);
    WorkerInfo workerInfo2 = new WorkerInfo()
        .setIdentity(workerIdentity2)
        .setAddress(workerNetAddress2);
    FileBlockInfo fileBlockInfo1 = mockFileBlockInfo(1, workerNetAddress2);
    FileBlockInfo fileBlockInfo2 = mockFileBlockInfo(2, workerNetAddress1);
    FileBlockInfo fileBlockInfo3 = mockFileBlockInfo(3, workerNetAddress1);
    FileInfo testFileInfo = new FileInfo();
    testFileInfo.setFileBlockInfos(
        Lists.newArrayList(fileBlockInfo1, fileBlockInfo2, fileBlockInfo3));
    Mockito.when(mMockFileSystemContext.getCachedWorkers()).thenReturn(
        new WorkerClusterView(Arrays.asList(blockWorkerInfo1, blockWorkerInfo2)));
    Mockito.when(mMockFileSystem.getStatus(uri)).thenReturn(new URIStatus(testFileInfo));
    Set<Pair<WorkerInfo, SerializableVoid>> result = new PersistDefinition()
        .selectExecutors(config, Lists.newArrayList(workerInfo2, workerInfo1),
            new SelectExecutorsContext(1, mJobServerContext));
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(workerInfo1, result.iterator().next().getFirst());
}
/**
 * Fetches windowed rows for a single key from the timestamped window store, filtering
 * fetched windows against both the window-start and window-end bounds, and returns
 * them as a materialized query result.
 *
 * @throws MaterializationException if the underlying store access fails
 */
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStartBounds,
    final Range<Instant> windowEndBounds,
    final Optional<Position> position
) {
  try {
    final ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>> store = stateStore
        .store(QueryableStoreTypes.timestampedWindowStore(), partition);
    // Fetch bounds are derived from both start and end bound ranges.
    final Instant lower = calculateLowerBound(windowStartBounds, windowEndBounds);
    final Instant upper = calculateUpperBound(windowStartBounds, windowEndBounds);
    // try-with-resources ensures the store iterator is always closed.
    try (WindowStoreIterator<ValueAndTimestamp<GenericRow>> it
        = cacheBypassFetcher.fetch(store, key, lower, upper)) {
      final Builder<WindowedRow> builder = ImmutableList.builder();
      while (it.hasNext()) {
        final KeyValue<Long, ValueAndTimestamp<GenericRow>> next = it.next();
        final Instant windowStart = Instant.ofEpochMilli(next.key);
        // The fetch range is a superset; re-check each window against the exact bounds.
        if (!windowStartBounds.contains(windowStart)) {
          continue;
        }
        final Instant windowEnd = windowStart.plus(windowSize);
        if (!windowEndBounds.contains(windowEnd)) {
          continue;
        }
        final TimeWindow window =
            new TimeWindow(windowStart.toEpochMilli(), windowEnd.toEpochMilli());
        final WindowedRow row = WindowedRow.of(
            stateStore.schema(),
            new Windowed<>(key, window),
            next.value.value(),
            next.value.timestamp()
        );
        builder.add(row);
      }
      return KsMaterializedQueryResult.rowIterator(builder.build().iterator());
    }
  } catch (final Exception e) {
    throw new MaterializationException("Failed to get value from materialized table", e);
  }
}
// The window-store iterator must be closed after a get(), even on the happy path.
@Test
public void shouldCloseIterator() {
    // When:
    table.get(A_KEY, PARTITION, WINDOW_START_BOUNDS, WINDOW_END_BOUNDS);
    // Then:
    verify(fetchIterator).close();
}
/**
 * Asserts that the given string is neither null nor empty, delegating to the
 * message-taking overload with a default description of the failing value.
 *
 * @param string string to check
 */
public static void notNullOrEmpty(String string) {
    final String defaultMessage = String.format("string [%s] is null or empty", string);
    notNullOrEmpty(string, defaultMessage);
}
// A whitespace-only string (single tab) is rejected by notNullOrEmpty.
@Test
public void testNotNull1NotEmpty6() {
    assertThrows(IllegalArgumentException.class, () -> Precondition.notNullOrEmpty("\t"));
}
/**
 * Maps a binlog event type code to its human-readable name, mirroring MySQL's own
 * event naming. Unknown codes yield "Unknown type:&lt;code&gt;".
 *
 * @param type binlog event type constant
 * @return display name for the event type
 */
public static String getTypeName(final int type) {
    switch (type) {
    case START_EVENT_V3: return "Start_v3";
    case STOP_EVENT: return "Stop";
    case QUERY_EVENT: return "Query";
    case ROTATE_EVENT: return "Rotate";
    case INTVAR_EVENT: return "Intvar";
    case LOAD_EVENT: return "Load";
    case NEW_LOAD_EVENT: return "New_load";
    case SLAVE_EVENT: return "Slave";
    case CREATE_FILE_EVENT: return "Create_file";
    case APPEND_BLOCK_EVENT: return "Append_block";
    case DELETE_FILE_EVENT: return "Delete_file";
    case EXEC_LOAD_EVENT: return "Exec_load";
    case RAND_EVENT: return "RAND";
    case XID_EVENT: return "Xid";
    case USER_VAR_EVENT: return "User var";
    case FORMAT_DESCRIPTION_EVENT: return "Format_desc";
    case TABLE_MAP_EVENT: return "Table_map";
    case PRE_GA_WRITE_ROWS_EVENT: return "Write_rows_event_old";
    case PRE_GA_UPDATE_ROWS_EVENT: return "Update_rows_event_old";
    case PRE_GA_DELETE_ROWS_EVENT: return "Delete_rows_event_old";
    case WRITE_ROWS_EVENT_V1: return "Write_rows_v1";
    case UPDATE_ROWS_EVENT_V1: return "Update_rows_v1";
    case DELETE_ROWS_EVENT_V1: return "Delete_rows_v1";
    case BEGIN_LOAD_QUERY_EVENT: return "Begin_load_query";
    case EXECUTE_LOAD_QUERY_EVENT: return "Execute_load_query";
    case INCIDENT_EVENT: return "Incident";
    // Both heartbeat variants share the same display name.
    case HEARTBEAT_LOG_EVENT:
    case HEARTBEAT_LOG_EVENT_V2: return "Heartbeat";
    case IGNORABLE_LOG_EVENT: return "Ignorable";
    case ROWS_QUERY_LOG_EVENT: return "Rows_query";
    case WRITE_ROWS_EVENT: return "Write_rows";
    case UPDATE_ROWS_EVENT: return "Update_rows";
    case DELETE_ROWS_EVENT: return "Delete_rows";
    case GTID_LOG_EVENT: return "Gtid";
    case ANONYMOUS_GTID_LOG_EVENT: return "Anonymous_Gtid";
    case PREVIOUS_GTIDS_LOG_EVENT: return "Previous_gtids";
    case PARTIAL_UPDATE_ROWS_EVENT: return "Update_rows_partial";
    case TRANSACTION_CONTEXT_EVENT : return "Transaction_context";
    case VIEW_CHANGE_EVENT : return "view_change";
    case XA_PREPARE_LOG_EVENT : return "Xa_prepare";
    case TRANSACTION_PAYLOAD_EVENT : return "transaction_payload";
    default: return "Unknown type:" + type;
    }
}
// Type code 30 maps to the "Write_rows" event name.
@Test
public void getTypeNameInputPositiveOutputNotNull16() {
    // Arrange
    final int type = 30;
    // Act
    final String actual = LogEvent.getTypeName(type);
    // Assert result
    Assert.assertEquals("Write_rows", actual);
}
/**
 * OAuth2 token endpoint: issues an access token for the authorization-code, password,
 * client-credentials, or refresh-token grant. The client is authenticated via HTTP
 * Basic; the implicit grant is rejected here.
 */
@PostMapping("/token")
@PermitAll
@Operation(summary = "获得访问令牌", description = "适合 code 授权码模式,或者 implicit 简化模式;在 sso.vue 单点登录界面被【获取】调用")
@Parameters({
        @Parameter(name = "grant_type", required = true, description = "授权类型", example = "code"),
        @Parameter(name = "code", description = "授权范围", example = "userinfo.read"),
        @Parameter(name = "redirect_uri", description = "重定向 URI", example = "https://www.iocoder.cn"),
        @Parameter(name = "state", description = "状态", example = "1"),
        @Parameter(name = "username", example = "tudou"),
        @Parameter(name = "password", example = "cai"),
        // multiple scopes are separated by spaces
        @Parameter(name = "scope", example = "user_info"),
        @Parameter(name = "refresh_token", example = "123424233"),
})
public CommonResult<OAuth2OpenAccessTokenRespVO> postAccessToken(HttpServletRequest request,
        @RequestParam("grant_type") String grantType,
        @RequestParam(value = "code", required = false) String code, // authorization-code mode
        @RequestParam(value = "redirect_uri", required = false) String redirectUri, // authorization-code mode
        @RequestParam(value = "state", required = false) String state, // authorization-code mode
        @RequestParam(value = "username", required = false) String username, // password mode
        @RequestParam(value = "password", required = false) String password, // password mode
        @RequestParam(value = "scope", required = false) String scope, // password mode
        @RequestParam(value = "refresh_token", required = false) String refreshToken) { // refresh mode
    List<String> scopes = OAuth2Utils.buildScopes(scope);
    // 1.1 validate the grant type
    OAuth2GrantTypeEnum grantTypeEnum = OAuth2GrantTypeEnum.getByGrantType(grantType);
    if (grantTypeEnum == null) {
        throw exception0(BAD_REQUEST.getCode(), StrUtil.format("未知授权类型({})", grantType));
    }
    if (grantTypeEnum == OAuth2GrantTypeEnum.IMPLICIT) {
        throw exception0(BAD_REQUEST.getCode(), "Token 接口不支持 implicit 授权模式");
    }
    // 1.2 validate the client
    String[] clientIdAndSecret = obtainBasicAuthorization(request);
    OAuth2ClientDO client = oauth2ClientService.validOAuthClientFromCache(clientIdAndSecret[0], clientIdAndSecret[1],
            grantType, scopes, redirectUri);
    // 2. obtain the access token according to the grant type
    OAuth2AccessTokenDO accessTokenDO;
    switch (grantTypeEnum) {
        case AUTHORIZATION_CODE:
            accessTokenDO = oauth2GrantService.grantAuthorizationCodeForAccessToken(client.getClientId(), code, redirectUri, state);
            break;
        case PASSWORD:
            accessTokenDO = oauth2GrantService.grantPassword(username, password, client.getClientId(), scopes);
            break;
        case CLIENT_CREDENTIALS:
            accessTokenDO = oauth2GrantService.grantClientCredentials(client.getClientId(), scopes);
            break;
        case REFRESH_TOKEN:
            accessTokenDO = oauth2GrantService.grantRefreshToken(refreshToken, client.getClientId());
            break;
        default:
            throw new IllegalArgumentException("未知授权类型:" + grantType);
    }
    Assert.notNull(accessTokenDO, "访问令牌不能为空"); // defensive check
    return success(OAuth2OpenConvert.INSTANCE.convert(accessTokenDO));
}
// The refresh-token grant delegates to grantRefreshToken and returns the converted
// access token with a plausible expires-in value.
@Test
public void testPostAccessToken_refreshToken() {
    // prepare parameters
    String granType = OAuth2GrantTypeEnum.REFRESH_TOKEN.getGrantType();
    String refreshToken = randomString();
    String password = randomString();
    HttpServletRequest request = mockRequest("test_client_id", "test_client_secret");
    // mock method (client)
    OAuth2ClientDO client = randomPojo(OAuth2ClientDO.class).setClientId("test_client_id");
    when(oauth2ClientService.validOAuthClientFromCache(eq("test_client_id"), eq("test_client_secret"),
            eq(granType), eq(Lists.newArrayList()), isNull())).thenReturn(client);
    // mock method (access token)
    OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class)
            .setExpiresTime(LocalDateTimeUtil.offset(LocalDateTime.now(), 30000L, ChronoUnit.MILLIS));
    when(oauth2GrantService.grantRefreshToken(eq(refreshToken), eq("test_client_id"))).thenReturn(accessTokenDO);
    // invoke
    CommonResult<OAuth2OpenAccessTokenRespVO> result = oauth2OpenController.postAccessToken(request, granType,
            null, null, null, null, password, null, refreshToken);
    // assert
    assertEquals(0, result.getCode());
    assertPojoEquals(accessTokenDO, result.getData());
    assertTrue(ObjectUtils.equalsAny(result.getData().getExpiresIn(), 29L, 30L)); // a few milliseconds may elapse during execution
}
/**
 * Decides whether this task's analyzer should run against the dependency:
 * non-file-type analyzers always run; file-type analyzers run only when they
 * accept the dependency's actual file.
 *
 * @return true if the analyzer should process the dependency
 */
protected boolean shouldAnalyze() {
    if (!(analyzer instanceof FileTypeAnalyzer)) {
        return true;
    }
    return ((FileTypeAnalyzer) analyzer).accept(dependency.getActualFile());
}
// A non-file-type analyzer (HintAnalyzer) is always eligible to analyze.
@Test
public void shouldAnalyzeReturnsTrueForNonFileTypeAnalyzers() {
    AnalysisTask instance = new AnalysisTask(new HintAnalyzer(), null, null, null);
    boolean shouldAnalyze = instance.shouldAnalyze();
    assertTrue(shouldAnalyze);
}
/**
 * Deep-clones the given value via Jackson serialization, delegating to the shared
 * ObjectMapper wrapper instance.
 *
 * @param value value to clone
 * @return a deep copy of {@code value}
 */
public static <T> T clone(T value) {
    return ObjectMapperWrapper.INSTANCE.clone(value);
}
// Cloning both a nested entity and a raw list of POJOs must round-trip to equal objects.
@Test
public void cloneDeserializeStepErrorTest() {
    MyEntity entity = new MyEntity();
    entity.setValue("some value");
    entity.setPojos(Arrays.asList(
            createMyPojo("first value", MyType.A, "1.1", createOtherPojo("USD")),
            createMyPojo("second value", MyType.B, "1.2", createOtherPojo("BRL"))
    ));
    MyEntity clone = JacksonUtil.clone(entity);
    assertEquals(clone, entity);
    List<MyPojo> clonePojos = JacksonUtil.clone(entity.getPojos());
    assertEquals(clonePojos, entity.getPojos());
}
/**
 * Copies {@code src} into a newly allocated buffer of capacity {@code end - start}.
 * NOTE(review): {@code start} only sizes the destination; it is not used as a read
 * offset — copying begins wherever the delegate overload reads from. Confirm this is
 * the intended semantics against the two-argument {@code copy} overload.
 */
public static ByteBuffer copy(ByteBuffer src, int start, int end) {
    return copy(src, ByteBuffer.allocate(end - start));
}
// Copying into a 5-byte destination truncates the 6-byte source to its first 5 bytes.
@Test
public void copyTest() {
    byte[] bytes = "AAABBB".getBytes();
    ByteBuffer buffer = ByteBuffer.wrap(bytes);
    ByteBuffer buffer2 = BufferUtil.copy(buffer, ByteBuffer.allocate(5));
    assertEquals("AAABB", StrUtil.utf8Str(buffer2));
}
/**
 * Fetches all instances of the service across all clusters by delegating to the
 * cluster-filtering overload with an empty (mutable) cluster list.
 */
@Override
public List<Instance> getAllInstances(String serviceName) throws NacosException {
    return getAllInstances(serviceName, new ArrayList<>());
}
// The cluster list is joined with commas and forwarded to the naming proxy along
// with the default group and the non-subscribe flag.
@Test
void testGetAllInstances7() throws NacosException {
    //given
    String serviceName = "service1";
    List<String> clusterList = Arrays.asList("cluster1", "cluster2");
    //when
    client.getAllInstances(serviceName, clusterList, false);
    //then
    verify(proxy, times(1)).queryInstancesOfService(serviceName, Constants.DEFAULT_GROUP, "cluster1,cluster2", false);
}
/**
 * Appends a boolean to the array; a null Boolean is stored as the JSON null
 * sentinel rather than a Java null reference.
 *
 * @param bool value to append, possibly null
 */
public void add(Boolean bool) {
    if (bool == null) {
        elements.add(JsonNull.INSTANCE);
    } else {
        elements.add(new JsonPrimitive(bool));
    }
}
// Booleans serialize in insertion order, with a null Boolean rendered as JSON null.
@Test
public void testBooleanPrimitiveAddition() {
    JsonArray jsonArray = new JsonArray();
    jsonArray.add(true);
    jsonArray.add(true);
    jsonArray.add(false);
    jsonArray.add(false);
    jsonArray.add((Boolean) null);
    jsonArray.add(true);
    assertThat(jsonArray.toString()).isEqualTo("[true,true,false,false,null,true]");
}
/**
 * Splits paragraph text into interpreter name, local properties, and script text.
 * Text matching the %interpreter pattern has its optional "(key=value,...)" local
 * properties parsed; otherwise the whole text is the script with no interpreter.
 */
public static ParseResult parse(String text) {
    Map<String, String> localProperties = new HashMap<>();
    String intpText = "";
    String scriptText = null;
    Matcher matcher = REPL_PATTERN.matcher(text);
    if (matcher.find()) {
        String headingSpace = matcher.group(1);
        intpText = matcher.group(2);
        // Script starts right after "%<intp>"; a '(' introduces local properties.
        int startPos = headingSpace.length() + intpText.length() + 1;
        if (startPos < text.length() && text.charAt(startPos) == '(') {
            startPos = parseLocalProperties(text, startPos, localProperties);
        }
        scriptText = text.substring(startPos);
    } else {
        intpText = "";
        scriptText = text;
    }
    return new ParseResult(intpText, removeLeadingWhiteSpaces(scriptText), localProperties);
}
// An unterminated quote inside the local-properties block must abort parsing.
@Test
void testParagraphTextUnfinishedQuote() {
    assertThrows(RuntimeException.class, () ->
            ParagraphTextParser.parse("%spark.pyspark(pool=\"2314234) sc.version"),
            "Problems by parsing paragraph. Not finished interpreter configuration");
}
/**
 * Returns the breadth-first order of the parent of the Merkle-tree node with the
 * given order. Children 2k+1 and 2k+2 both map back to parent k.
 *
 * @param nodeOrder breadth-first order of a non-root node
 * @return breadth-first order of its parent
 */
public static int getParentOrder(int nodeOrder) {
    final int parentOrder = (nodeOrder - 1) >> 1;
    return parentOrder;
}
// Sibling pairs (1,2), (3,4), (5,6) map to parents 0, 1, 2 respectively.
@Test
public void testGetParentOrder() {
    assertEquals(0, MerkleTreeUtil.getParentOrder(1));
    assertEquals(0, MerkleTreeUtil.getParentOrder(2));
    assertEquals(1, MerkleTreeUtil.getParentOrder(3));
    assertEquals(1, MerkleTreeUtil.getParentOrder(4));
    assertEquals(2, MerkleTreeUtil.getParentOrder(5));
    assertEquals(2, MerkleTreeUtil.getParentOrder(6));
}
/**
 * Returns a future that completes when at least one coordinator sidecar is active.
 * Completes immediately if a sidecar is already present or the feature is disabled;
 * otherwise the future fails with NO_CPP_SIDECARS after the configured max wait.
 */
public synchronized ListenableFuture<?> waitForMinimumCoordinatorSidecars() {
    if (currentCoordinatorSidecarCount == 1 || !isCoordinatorSidecarEnabled) {
        return immediateFuture(null);
    }
    SettableFuture<?> future = SettableFuture.create();
    coordinatorSidecarSizeFutures.add(future);

    // if future does not finish in wait period, complete with an exception
    ScheduledFuture<?> timeoutTask = executor.schedule(
            () -> {
                synchronized (this) {
                    future.setException(new PrestoException(
                            NO_CPP_SIDECARS,
                            format("Insufficient active coordinator sidecar nodes. Waited %s for at least 1 coordinator sidecars, but only 0 coordinator sidecars are active", coordinatorSidecarMaxWait)));
                }
            },
            coordinatorSidecarMaxWait.toMillis(),
            MILLISECONDS);

    // remove future if finished (e.g., canceled, timed out); also cancels the timeout task
    future.addListener(() -> {
        timeoutTask.cancel(true);
        removeCoordinatorSidecarFuture(future);
    }, executor);
    return future;
}
// With no sidecar announced, the wait future must fail (timeout flag set, latch
// released) only after the configured wait period elapses.
@Test(timeOut = 10_000)
public void testTimeoutWaitingForCoordinatorSidecars()
        throws InterruptedException {
    waitForMinimumCoordinatorSidecars();
    assertFalse(coordinatorSidecarsTimeout.get());
    assertEquals(minCoordinatorSidecarsLatch.getCount(), 1);
    Thread.sleep(SECONDS.toMillis(5));
    assertTrue(coordinatorSidecarsTimeout.get());
    assertEquals(minCoordinatorSidecarsLatch.getCount(), 0);
}
/**
 * Converts a day/hour/minute/second/millisecond breakdown into total milliseconds
 * using overflow-checked arithmetic.
 *
 * @throws IllegalArgumentException if the total overflows a long
 */
public static long toMillis(long day, long hour, long minute, long second, long millis) {
    try {
        // Accumulate in the same term order as before so overflow behavior is unchanged.
        long total = millis;
        total = Math.addExact(total, Math.multiplyExact(day, MILLIS_IN_DAY));
        total = Math.addExact(total, Math.multiplyExact(hour, MILLIS_IN_HOUR));
        total = Math.addExact(total, Math.multiplyExact(minute, MILLIS_IN_MINUTE));
        total = Math.addExact(total, Math.multiplyExact(second, MILLIS_IN_SECOND));
        return total;
    } catch (ArithmeticException e) {
        throw new IllegalArgumentException(e);
    }
}
// Formatting round-trips representative durations, including negatives and the
// extreme Long.MIN_VALUE/MAX_VALUE boundaries.
@Test
public void testFormat() {
    assertMillis(0, "0 00:00:00.000");
    assertMillis(1, "0 00:00:00.001");
    assertMillis(-1, "-0 00:00:00.001");
    assertMillis(toMillis(12, 13, 45, 56, 789), "12 13:45:56.789");
    assertMillis(toMillis(-12, -13, -45, -56, -789), "-12 13:45:56.789");
    assertMillis(Long.MAX_VALUE, "106751991167 07:12:55.807");
    assertMillis(Long.MIN_VALUE + 1, "-106751991167 07:12:55.807");
    assertMillis(Long.MIN_VALUE, "-106751991167 07:12:55.808");
}
/**
 * Asynchronously deletes a topic on the given nameserver. The returned future
 * completes normally on SUCCESS and exceptionally with an MQClientException on any
 * other response code.
 */
@Override
public CompletableFuture<Void> deleteTopicInNameserver(String address, DeleteTopicFromNamesrvRequestHeader requestHeader, long timeoutMillis) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.DELETE_TOPIC_IN_NAMESRV, requestHeader);
    remotingClient.invoke(address, request, timeoutMillis).thenAccept(response -> {
        if (response.getCode() == ResponseCode.SUCCESS) {
            future.complete(null);
        } else {
            log.warn("deleteTopicInNameserver getResponseCommand failed, {} {}, header={}", response.getCode(), response.getRemark(), requestHeader);
            future.completeExceptionally(new MQClientException(response.getCode(), response.getRemark()));
        }
    });
    return future;
}
// A SUCCESS response completes the future normally with a null result.
@Test
public void assertDeleteTopicInNameserverWithSuccess() throws Exception {
    setResponseSuccess(null);
    DeleteTopicFromNamesrvRequestHeader requestHeader = mock(DeleteTopicFromNamesrvRequestHeader.class);
    CompletableFuture<Void> actual = mqClientAdminImpl.deleteTopicInNameserver(defaultBrokerAddr, requestHeader, defaultTimeout);
    assertNull(actual.get());
}
/**
 * Static factory for an image-annotation transform over raw image bytes.
 *
 * @param contextSideInput          side input mapping image bytes to an ImageContext
 * @param features                  annotation features to request
 * @param batchSize                 number of images per API request
 * @param desiredRequestParallelism target parallelism for outgoing requests
 */
public static AnnotateImagesFromBytes annotateImagesFromBytes(
    PCollectionView<Map<ByteString, ImageContext>> contextSideInput,
    List<Feature> features,
    long batchSize,
    int desiredRequestParallelism) {
  return new AnnotateImagesFromBytes(
      contextSideInput, features, batchSize, desiredRequestParallelism);
}
// Mapping raw bytes to a request preserves the byte content and the requested features.
@Test
public void shouldConvertByteStringToRequest() {
    CloudVision.AnnotateImagesFromBytes annotateImagesFromBytes =
        CloudVision.annotateImagesFromBytes(null, features, 1, 1);
    AnnotateImageRequest request = annotateImagesFromBytes.mapToRequest(TEST_BYTES, null);
    assertEquals(1, request.getFeaturesCount());
    assertEquals(TEST_BYTES, request.getImage().getContent());
}
/**
 * Forwards a scheduler-configuration mutation to the RM of the explicitly named
 * sub-cluster. Both the mutation info and its subClusterId are mandatory; failures
 * are counted in router metrics and audit-logged before being rethrown.
 */
@Override
public Response updateSchedulerConfiguration(SchedConfUpdateInfo mutationInfo,
    HttpServletRequest hsr) throws AuthorizationException, InterruptedException {
  // Make Sure mutationInfo is not null.
  if (mutationInfo == null) {
    routerMetrics.incrUpdateSchedulerConfigurationFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_SCHEDULER_CONFIGURATION,
        UNKNOWN, TARGET_WEB_SERVICE, "Parameter error, the schedConfUpdateInfo is empty or null.");
    throw new IllegalArgumentException(
        "Parameter error, the schedConfUpdateInfo is empty or null.");
  }

  // In federated mode, we may have a mix of multiple schedulers.
  // In order to ensure accurate update scheduler configuration,
  // we need users to explicitly set subClusterId.
  String pSubClusterId = mutationInfo.getSubClusterId();
  if (StringUtils.isBlank(pSubClusterId)) {
    routerMetrics.incrUpdateSchedulerConfigurationFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_SCHEDULER_CONFIGURATION,
        UNKNOWN, TARGET_WEB_SERVICE, "Parameter error, the subClusterId is empty or null.");
    throw new IllegalArgumentException("Parameter error, " +
        "the subClusterId is empty or null.");
  }

  // Get the subClusterInfo , then update the scheduler configuration.
  try {
    long startTime = clock.getTime();
    SubClusterInfo subClusterInfo = getActiveSubCluster(pSubClusterId);
    DefaultRequestInterceptorREST interceptor = getOrCreateInterceptorForSubCluster(
        subClusterInfo.getSubClusterId(), subClusterInfo.getRMWebServiceAddress());
    Response response = interceptor.updateSchedulerConfiguration(mutationInfo, hsr);
    if (response != null) {
      long endTime = clock.getTime();
      routerMetrics.succeededUpdateSchedulerConfigurationRetrieved(endTime - startTime);
      RouterAuditLogger.logSuccess(getUser().getShortUserName(),
          UPDATE_SCHEDULER_CONFIGURATION, TARGET_WEB_SERVICE);
      // Re-wrap so the router returns its own Response instance.
      return Response.status(response.getStatus()).entity(response.getEntity()).build();
    }
  } catch (NotFoundException e) {
    routerMetrics.incrUpdateSchedulerConfigurationFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_SCHEDULER_CONFIGURATION,
        UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage());
    RouterServerUtil.logAndThrowRunTimeException(e,
        "Get subCluster error. subClusterId = %s", pSubClusterId);
  } catch (Exception e) {
    routerMetrics.incrUpdateSchedulerConfigurationFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_SCHEDULER_CONFIGURATION,
        UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage());
    RouterServerUtil.logAndThrowRunTimeException(e,
        "UpdateSchedulerConfiguration error. subClusterId = %s", pSubClusterId);
  }

  // Reached only when the interceptor returned a null response.
  routerMetrics.incrUpdateSchedulerConfigurationFailedRetrieved();
  RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_SCHEDULER_CONFIGURATION,
      UNKNOWN, TARGET_WEB_SERVICE, "UpdateSchedulerConfiguration Failed.");
  throw new RuntimeException("UpdateSchedulerConfiguration error. subClusterId = " + pSubClusterId);
}
// Both a missing subClusterId and a null mutation info must be rejected with the
// corresponding IllegalArgumentException message.
@Test
public void testUpdateSchedulerConfigurationErrorMsg() throws Exception {
    SchedConfUpdateInfo mutationInfo = new SchedConfUpdateInfo();
    LambdaTestUtils.intercept(IllegalArgumentException.class,
        "Parameter error, the subClusterId is empty or null.",
        () -> interceptor.updateSchedulerConfiguration(mutationInfo, null));
    LambdaTestUtils.intercept(IllegalArgumentException.class,
        "Parameter error, the schedConfUpdateInfo is empty or null.",
        () -> interceptor.updateSchedulerConfiguration(null, null));
}
/**
 * Sweeps over the given offset ranges and returns a sorted map from disjoint,
 * non-overlapping sub-ranges to the number of input ranges covering each sub-range.
 * Empty input ranges are ignored; duplicate ranges are counted, not deduplicated.
 */
@VisibleForTesting
static SortedMap<OffsetRange, Integer> computeOverlappingRanges(Iterable<OffsetRange> ranges) {
  ImmutableSortedMap.Builder<OffsetRange, Integer> rval =
      ImmutableSortedMap.orderedBy(OffsetRangeComparator.INSTANCE);
  List<OffsetRange> sortedRanges = Lists.newArrayList(ranges);
  if (sortedRanges.isEmpty()) {
    return rval.build();
  }
  Collections.sort(sortedRanges, OffsetRangeComparator.INSTANCE);

  // Stores ranges in smallest 'from' and then smallest 'to' order
  // e.g. [2, 7), [3, 4), [3, 5), [3, 5), [3, 6), [4, 0)
  PriorityQueue<OffsetRange> rangesWithSameFrom =
      new PriorityQueue<>(OffsetRangeComparator.INSTANCE);
  Iterator<OffsetRange> iterator = sortedRanges.iterator();

  // Stored in reverse sorted order so that when we iterate and re-add them back to
  // overlappingRanges they are stored in sorted order from smallest to largest range.to
  List<OffsetRange> rangesToProcess = new ArrayList<>();
  while (iterator.hasNext()) {
    OffsetRange current = iterator.next();
    // Skip empty ranges
    if (current.getFrom() == current.getTo()) {
      continue;
    }

    // If the current range has a different 'from' then a prior range then we must produce
    // ranges in [rangesWithSameFrom.from, current.from)
    while (!rangesWithSameFrom.isEmpty()
        && rangesWithSameFrom.peek().getFrom() != current.getFrom()) {
      rangesToProcess.addAll(rangesWithSameFrom);
      Collections.sort(rangesToProcess, OffsetRangeComparator.INSTANCE);
      rangesWithSameFrom.clear();

      int i = 0;
      long lastTo = rangesToProcess.get(i).getFrom();
      // Output all the ranges that are strictly less then current.from
      // e.g. current.to := 7 for [3, 4), [3, 5), [3, 5), [3, 6) will produce
      // [3, 4) := 4
      // [4, 5) := 3
      // [5, 6) := 1
      for (; i < rangesToProcess.size(); ++i) {
        if (rangesToProcess.get(i).getTo() > current.getFrom()) {
          break;
        }
        // Output only the first of any subsequent duplicate ranges
        if (i == 0 || rangesToProcess.get(i - 1).getTo() != rangesToProcess.get(i).getTo()) {
          rval.put(
              new OffsetRange(lastTo, rangesToProcess.get(i).getTo()),
              rangesToProcess.size() - i);
          lastTo = rangesToProcess.get(i).getTo();
        }
      }

      // We exited the loop with 'to' > current.from, we must add the range [lastTo,
      // current.from) if it is non-empty
      if (lastTo < current.getFrom() && i != rangesToProcess.size()) {
        rval.put(new OffsetRange(lastTo, current.getFrom()), rangesToProcess.size() - i);
      }

      // The remaining ranges have a 'to' that is greater then 'current.from' and will overlap
      // with current so add them back to rangesWithSameFrom with the updated 'from'
      for (; i < rangesToProcess.size(); ++i) {
        rangesWithSameFrom.add(
            new OffsetRange(current.getFrom(), rangesToProcess.get(i).getTo()));
      }

      rangesToProcess.clear();
    }
    rangesWithSameFrom.add(current);
  }

  // Process the last chunk of overlapping ranges
  while (!rangesWithSameFrom.isEmpty()) {
    // This range always represents the range with with the smallest 'to'
    OffsetRange current = rangesWithSameFrom.remove();

    rangesToProcess.addAll(rangesWithSameFrom);
    Collections.sort(rangesToProcess, OffsetRangeComparator.INSTANCE);
    rangesWithSameFrom.clear();

    rval.put(current, rangesToProcess.size() + 1 /* include current */);

    // Shorten all the remaining ranges such that they start with current.to
    for (OffsetRange rangeWithDifferentFrom : rangesToProcess) {
      // Skip any duplicates of current
      if (rangeWithDifferentFrom.getTo() > current.getTo()) {
        rangesWithSameFrom.add(new OffsetRange(current.getTo(), rangeWithDifferentFrom.getTo()));
      }
    }
    rangesToProcess.clear();
  }
  return rval.build();
}
// Three chained ranges produce alternating coverage counts of 1 and 2 over the
// resulting disjoint sub-ranges.
@Test
public void testRangesWithAtMostOneOverlap() {
    Iterable<OffsetRange> ranges = Arrays.asList(range(0, 6), range(4, 10), range(8, 12));
    Map<OffsetRange, Integer> nonOverlappingRangesToNumElementsPerPosition =
        computeOverlappingRanges(ranges);
    assertEquals(
        ImmutableMap.builder()
            .put(range(0, 4), 1)
            .put(range(4, 6), 2)
            .put(range(6, 8), 1)
            .put(range(8, 10), 2)
            .put(range(10, 12), 1)
            .build(),
        nonOverlappingRangesToNumElementsPerPosition);
    assertNonEmptyRangesAndPositions(ranges, nonOverlappingRangesToNumElementsPerPosition);
}
/**
 * Extracts the plugin name from an icon src attribute, which is a space-separated
 * token list containing an optional "plugin-&lt;name&gt;" token. Returns the empty
 * string when the src is null or carries no plugin token.
 */
@Restricted(NoExternalUse.class)
public static String extractPluginNameFromIconSrc(String iconSrc) {
    // No src, or no plugin marker anywhere -> nothing to extract.
    if (iconSrc == null || !iconSrc.contains("plugin-")) {
        return "";
    }
    for (String token : iconSrc.split(" ")) {
        if (token.startsWith("plugin-")) {
            return token.replaceFirst("plugin-", "");
        }
    }
    return "";
}
// An empty src yields an empty plugin name rather than an error.
@Test
public void extractPluginNameFromIconSrcHandlesEmptyString() {
    String result = Functions.extractPluginNameFromIconSrc("");
    assertThat(result, is(emptyString()));
}
/**
 * Issues DELETE /posts/{id}; a 204 No Content response yields a bodiless entity,
 * any other status is converted into an error Mono.
 */
Mono<ResponseEntity<Void>> delete(UUID id) {
    return client.delete()
            .uri(uriBuilder -> uriBuilder.path("/posts/{id}").build(id))
            .exchangeToMono(response -> {
                if (response.statusCode().equals(HttpStatus.NO_CONTENT)) {
                    return response.toBodilessEntity();
                }
                return response.createError();
            });
}
// A stubbed 204 response completes the reactive delete with a 204 entity and the
// expected DELETE request is actually issued.
@SneakyThrows
@Test
public void testDeletePostById() {
    var id = UUID.randomUUID();
    stubFor(delete("/posts/" + id)
            .willReturn(
                    aResponse()
                            .withStatus(204)
            )
    );
    postClient.delete(id)
            .as(StepVerifier::create)
            .consumeNextWith(
                    entity -> assertThat(entity.getStatusCode().value()).isEqualTo(204)
            )
            .verifyComplete();
    verify(deleteRequestedFor(urlEqualTo("/posts/" + id)));
}
public static <E> List<E> ensureImmutable(List<E> list) { if (list.isEmpty()) return Collections.emptyList(); // Faster to make a copy than check the type to see if it is already a singleton list if (list.size() == 1) return Collections.singletonList(list.get(0)); if (isImmutable(list)) return list; return Collections.unmodifiableList(new ArrayList<E>(list)); }
// Verifies a one-element mutable list is converted to Collections.singletonList.
@Test void ensureImmutable_convertsToSingletonList() { List<Object> list = new ArrayList<>(); list.add("foo"); assertThat(Lists.ensureImmutable(list).getClass().getSimpleName()) .isEqualTo("SingletonList"); }
// Converts the given object to a Map via Jackson and returns the value under
// valueName, or Optional.empty() when the key is absent (or mapped to null).
public Optional<Object> retrieveSingleValue(final Object jsonObject, final String valueName) { final Map<String, Object> map = objectMapper.convertValue(jsonObject, new TypeReference<>() {}); return Optional.ofNullable(map.get(valueName)); }
// Verifies an unknown field name yields Optional.empty() rather than an exception.
@Test void testRetrievesEmptyOptionalOnWrongValueName() { Optional<Object> value = toTest.retrieveSingleValue(new TestJson(42, "ho!"), "carramba!"); assertTrue(value.isEmpty()); }
// Thin delegation to FileNameUtil.getName: returns the last path element
// (per the test, trailing separators are ignored, e.g. "d:\\a\\ddd\\" -> "ddd").
public static String getName(File file) { return FileNameUtil.getName(file); }
// Verifies getName returns the final path element for both a trailing-separator
// directory path and a plain file path.
@Test public void getNameTest() { String path = "d:\\aaa\\bbb\\cc\\ddd\\"; String name = FileUtil.getName(path); assertEquals("ddd", name); path = "d:\\aaa\\bbb\\cc\\ddd.jpg"; name = FileUtil.getName(path); assertEquals("ddd.jpg", name); }
@Nonnull
@Override
public Result addChunk(ByteBuf buffer) {
    // Copy the readable region of the buffer into a byte array.
    //
    // Fix: ByteBuf#readBytes(byte[], int, int) takes a *destination* offset as
    // its second argument. The previous code passed buffer.readerIndex() there,
    // which would overflow `readable` (IndexOutOfBoundsException) whenever the
    // reader index was non-zero. readBytes(byte[]) reads exactly dst.length
    // bytes starting at the current reader index, which is what is intended.
    final byte[] readable = new byte[buffer.readableBytes()];
    buffer.readBytes(readable);
    final GELFMessage msg = new GELFMessage(readable);
    final ByteBuf aggregatedBuffer;
    switch (msg.getGELFType()) {
        case CHUNKED:
            try {
                chunkCounter.inc();
                // checkForCompletion returns null until all chunks of the
                // message have arrived; until then report a valid-but-empty result.
                aggregatedBuffer = checkForCompletion(msg);
                if (aggregatedBuffer == null) {
                    return VALID_EMPTY_RESULT;
                }
            } catch (IllegalArgumentException | IllegalStateException | IndexOutOfBoundsException e) {
                log.debug("Invalid gelf message chunk, dropping message.", e);
                return INVALID_RESULT;
            }
            break;
        case ZLIB:
        case GZIP:
        case UNCOMPRESSED:
            // Non-chunked messages are complete as received.
            aggregatedBuffer = Unpooled.wrappedBuffer(readable);
            break;
        case UNSUPPORTED:
            return INVALID_RESULT;
        default:
            return INVALID_RESULT;
    }
    return new Result(aggregatedBuffer, true);
}
// Verifies chunk aggregation keyed by message id: completing message 1 (5 chunks)
// does not complete message 2 (4 of 5 chunks sent); metric counters are checked.
@Test public void differentIdsDoNotInterfere() { final ByteBuf[] msg1 = createChunkedMessage(4096 + 1, 1024, generateMessageId(1));// 5 chunks; final ByteBuf[] msg2 = createChunkedMessage(4096 + 1, 1024, generateMessageId(2));// 5 chunks; CodecAggregator.Result result1 = null; CodecAggregator.Result result2 = null; for (int i = 0; i < msg1.length; i++) { result1 = aggregator.addChunk(msg1[i]); if (i > 0) { result2 = aggregator.addChunk(msg2[i]); } } assertNotNull(result1); assertNotNull(result2); assertNotNull("message 1 should be complete", result1.getMessage()); assertNull("message 2 should not be complete", result2.getMessage()); // only one is complete, we sent 9 chunks assertEquals(1, counterValueNamed(metricRegistry, COMPLETE_MESSAGES)); assertEquals(9, counterValueNamed(metricRegistry, CHUNK_COUNTER)); assertEquals(1, counterValueNamed(metricRegistry, WAITING_MESSAGES)); assertEquals(0, counterValueNamed(metricRegistry, EXPIRED_CHUNKS)); assertEquals(0, counterValueNamed(metricRegistry, EXPIRED_MESSAGES)); assertEquals(0, counterValueNamed(metricRegistry, DUPLICATE_CHUNKS)); }
// Returns the 99th percentile of the histogram via linear interpolation.
public double p99() { return getLinearInterpolation(0.99); }
// Verifies p99 interpolation on two linear-bucket histograms, comparing values
// formatted to three decimals to sidestep floating-point noise.
@Test public void testP99() { HistogramData histogramData1 = HistogramData.linear(0, 0.2, 50); histogramData1.record(0, 1, 2, 3, 4, 5, 6, 7, 8, 9); assertThat(String.format("%.3f", histogramData1.p99()), equalTo("9.180")); HistogramData histogramData2 = HistogramData.linear(0, 0.02, 50); histogramData2.record(0, 0, 0); assertThat(String.format("%.3f", histogramData2.p99()), equalTo("0.020")); }
@Override
public String convert(ILoggingEvent event) {
    // Renders the event's key/value pairs as "k=v" tokens separated by single
    // spaces, quoting each value per the configured quote specification
    // (a null quote char means no quoting).
    final List<KeyValuePair> pairs = event.getKeyValuePairs();
    if (pairs == null || pairs.isEmpty()) {
        return CoreConstants.EMPTY_STRING;
    }
    final StringBuilder buf = new StringBuilder();
    boolean first = true;
    for (KeyValuePair pair : pairs) {
        if (!first) {
            buf.append(' ');
        }
        first = false;
        buf.append(String.valueOf(pair.key)).append('=');
        final Character quote = valueQuoteSpec.asChar();
        if (quote != null) {
            buf.append(quote.charValue());
        }
        buf.append(String.valueOf(pair.value));
        if (quote != null) {
            buf.append(quote.charValue());
        }
    }
    return buf.toString();
}
// Smoke test: two key/value pairs render as space-separated, double-quoted values.
@Test public void smoke() { event.addKeyValuePair(new KeyValuePair("a", "b")); event.addKeyValuePair(new KeyValuePair("k", "v")); String result = converter.convert(event); assertEquals("a=\"b\" k=\"v\"", result); }
@Override
public long getBlockIdByIndex(int blockIndex) throws BlockInfoException {
    // Returns the id of the block at blockIndex; rejects out-of-range indices
    // with a BlockInfoException carrying the file's block count.
    final int blockCount = mBlocks.size();
    if (blockIndex >= 0 && blockIndex < blockCount) {
        return mBlocks.get(blockIndex);
    }
    throw new BlockInfoException(
        "blockIndex " + blockIndex + " is out of range. File blocks: " + blockCount);
}
// Verifies in-range indices return the corresponding block ids, and that -1 and
// NUM_BLOCKS both raise BlockInfoException with the exact expected message.
@Test public void getBlockIdByIndex() throws Exception { MutableInodeFile inodeFile = createInodeFile(1); List<Long> blockIds = new ArrayList<>(); final int NUM_BLOCKS = 3; for (int i = 0; i < NUM_BLOCKS; i++) { blockIds.add(inodeFile.getNewBlockId()); } for (int i = 0; i < NUM_BLOCKS; i++) { assertEquals(blockIds.get(i), (Long) inodeFile.getBlockIdByIndex(i)); } try { inodeFile.getBlockIdByIndex(-1); Assert.fail(); } catch (BlockInfoException e) { assertEquals(String.format("blockIndex -1 is out of range. File blocks: %d", NUM_BLOCKS), e.getMessage()); } try { inodeFile.getBlockIdByIndex(NUM_BLOCKS); Assert.fail(); } catch (BlockInfoException e) { assertEquals(String.format("blockIndex %d is out of range. File blocks: %d", NUM_BLOCKS, NUM_BLOCKS), e.getMessage()); } }
// Delegates to the merged result set, reading the column value as a raw Object.
@Override public Object getObject(final int columnIndex) throws SQLException { return mergeResultSet.getValue(columnIndex, Object.class); }
// Verifies the label-based getObject resolves to column index 1 of the merge result set.
@Test void assertGetObjectWithColumnLabel() throws SQLException { when(mergeResultSet.getValue(1, Object.class)).thenReturn("object_value"); assertThat(shardingSphereResultSet.getObject("label"), is("object_value")); }
// Wraps a ConnectionFactory with tracing; factories that also implement
// XAConnectionFactory are routed through the XA wrapper (which itself implements
// both interfaces, hence the cast back).
public ConnectionFactory connectionFactory(ConnectionFactory connectionFactory) { // It is common to implement both interfaces if (connectionFactory instanceof XAConnectionFactory) { return (ConnectionFactory) xaConnectionFactory((XAConnectionFactory) connectionFactory); } return TracingConnectionFactory.create(connectionFactory, this); }
// Verifies wrapping is idempotent: re-wrapping an already-traced factory returns it unchanged.
@Test void connectionFactory_doesntDoubleWrap() { ConnectionFactory wrapped = jmsTracing.connectionFactory(mock(ConnectionFactory.class)); assertThat(jmsTracing.connectionFactory(wrapped)) .isSameAs(wrapped); }
// For entries carrying an action: register a default accelerator when one is
// declared on the entry, otherwise fall back to any user-defined accelerator,
// and record the action/entry association either way.
public void visit(Entry entry) { final AFreeplaneAction action = new EntryAccessor().getAction(entry); if (action != null) { final EntryAccessor entryAccessor = new EntryAccessor(); String accelerator = entryAccessor.getAccelerator(entry); if(accelerator != null) { map.setDefaultAccelerator(action, accelerator); } else map.setUserDefinedAccelerator(action); entries.registerEntry(action, entry); } }
// Verifies that an entry with no declared accelerator never registers a default one.
@Test public void givenEntryWithoutAccelerator_doesNotSetOwnDefaultAccelerator() { Entry actionEntry = new Entry(); final AFreeplaneAction action = mock(AFreeplaneAction.class); new EntryAccessor().setAction(actionEntry, action); IAcceleratorMap map = mock(IAcceleratorMap.class); final AcceleratorBuilder acceleratorBuilder = new AcceleratorBuilder(map, mock(EntriesForAction.class)); acceleratorBuilder.visit(actionEntry); Mockito.verify(map, never()).setDefaultAccelerator(Mockito.<AFreeplaneAction> any(), anyString()); }
// Reads a 3-byte unsigned value at the reader index and advances it by 3;
// checkReadableBytes0 enforces both readability and liveness (released buffers throw).
@Override public int readUnsignedMedium() { checkReadableBytes0(3); int v = _getUnsignedMedium(readerIndex); readerIndex += 3; return v; }
// Verifies reading from a released buffer raises IllegalReferenceCountException.
@Test public void testReadUnsignedMediumAfterRelease() { assertThrows(IllegalReferenceCountException.class, new Executable() { @Override public void execute() { releasedBuffer().readUnsignedMedium(); } }); }
// Translates a glob pattern into an anchored regular expression, or returns null
// when the glob contains no wildcards (a pure literal needs no regex matching).
// Supports '?' (any char), '*' (any run), '\' escapes, and one level of
// '{a,b}' alternation groups; regex metacharacters in literal text are escaped.
// Throws RuntimeException on nested or unterminated groups. Character ranges
// ('[...]') are not yet handled (see TODO in the body).
static String toRegularExpression(String glob) { StringBuilder output = new StringBuilder("^"); boolean literal = true; boolean processingGroup = false; for (int i = 0; i < glob.length(); ) { char c = glob.charAt(i++); switch (c) { case '?': literal = false; output.append("."); break; case '*': literal = false; output.append(".*"); break; case '\\': if (i == glob.length()) { output.append(c); } else { char next = glob.charAt(i); i++; if (isGlobSpecialCharacter(next) || isRegularExpressionSpecialCharacter(next)) { output.append('\\'); } output.append(next); } break; case '{': if (processingGroup) { throw new RuntimeException("Can't nest glob groups."); } literal = false; output.append("(?:(?:"); processingGroup = true; break; case ',': if (processingGroup) { literal = false; output.append(")|(?:"); } else { output.append(c); } break; case '}': if (processingGroup) { literal = false; output.append("))"); processingGroup = false; } else { output.append(c); } break; // TODO: handle character ranges default: if (isRegularExpressionSpecialCharacter(c)) { output.append('\\'); } output.append(c); } } if (processingGroup) { throw new RuntimeException("Unterminated glob group."); } if (literal) { return null; } output.append('$'); return output.toString(); }
// Verifies literal globs map to null and wildcard/group globs map to the expected anchored regexes.
@Test public void testToRegularExpression() { assertNull(GlobComponent.toRegularExpression("blah")); assertNull(GlobComponent.toRegularExpression("")); assertNull(GlobComponent.toRegularExpression("does not need a regex, actually")); assertEquals("^\\$blah.*$", GlobComponent.toRegularExpression("$blah*")); assertEquals("^.*$", GlobComponent.toRegularExpression("*")); assertEquals("^foo(?:(?:bar)|(?:baz))$", GlobComponent.toRegularExpression("foo{bar,baz}")); }
// ACL permission check: admins pass unconditionally; admin-only request codes
// reject non-admins; otherwise every requested resource permission must be
// covered either by an explicit owned permission or by the owner's default
// topic/group permission (retry topics count as groups). Throws AclException on
// any missing permission.
public void check(AccessResource checkedAccess, AccessResource ownedAccess) { PlainAccessResource checkedPlainAccess = (PlainAccessResource) checkedAccess; PlainAccessResource ownedPlainAccess = (PlainAccessResource) ownedAccess; if (ownedPlainAccess.isAdmin()) { // admin user don't need verification return; } if (Permission.needAdminPerm(checkedPlainAccess.getRequestCode())) { throw new AclException(String.format("Need admin permission for request code=%d, but accessKey=%s is not", checkedPlainAccess.getRequestCode(), ownedPlainAccess.getAccessKey())); } Map<String, Byte> needCheckedPermMap = checkedPlainAccess.getResourcePermMap(); Map<String, Byte> ownedPermMap = ownedPlainAccess.getResourcePermMap(); if (needCheckedPermMap == null) { // If the needCheckedPermMap is null,then return return; } for (Map.Entry<String, Byte> needCheckedEntry : needCheckedPermMap.entrySet()) { String resource = needCheckedEntry.getKey(); Byte neededPerm = needCheckedEntry.getValue(); boolean isGroup = PlainAccessResource.isRetryTopic(resource); if (ownedPermMap == null || !ownedPermMap.containsKey(resource)) { // Check the default perm byte ownedPerm = isGroup ? ownedPlainAccess.getDefaultGroupPerm() : ownedPlainAccess.getDefaultTopicPerm(); if (!Permission.checkPermission(neededPerm, ownedPerm)) { throw new AclException(String.format("No default permission for %s", PlainAccessResource.printStr(resource, isGroup))); } continue; } if (!Permission.checkPermission(neededPerm, ownedPermMap.get(resource))) { throw new AclException(String.format("No permission for %s", PlainAccessResource.printStr(resource, isGroup))); } } }
// Verifies a resource not explicitly granted falls back to the owner's default
// topic permission and passes without throwing.
@Test public void testCheck_withDefaultPermissions_shouldPass() { PlainAccessResource checkedAccess = new PlainAccessResource(); checkedAccess.setRequestCode(Permission.SUB); checkedAccess.addResourceAndPerm("topic1", Permission.PUB); PlainAccessResource ownedAccess = new PlainAccessResource(); ownedAccess.setAccessKey("nonAdminUser"); ownedAccess.setAdmin(false); ownedAccess.setDefaultTopicPerm(Permission.PUB); try { permissionChecker.check(checkedAccess, ownedAccess); } catch (AclException e) { Assert.fail("Should not throw any exception for default permissions"); } }
// Parses a date string with the configured pattern/locale, defaulting the year to
// the current one. When the pattern itself carries a time zone the formatter's
// zone is left unset; otherwise the configured zone is applied. Returns null for
// null/empty input.
@Override @Nullable public Object convert(@Nullable String value) { if (isNullOrEmpty(value)) { return null; } LOG.debug("Trying to parse date <{}> with pattern <{}>, locale <{}>, and timezone <{}>.", value, dateFormat, locale, timeZone); final DateTimeFormatter formatter; if (containsTimeZone) { formatter = DateTimeFormat .forPattern(dateFormat) .withDefaultYear(YearMonth.now(timeZone).getYear()) .withLocale(locale) ; } else { formatter = DateTimeFormat .forPattern(dateFormat) .withDefaultYear(YearMonth.now(timeZone).getYear()) .withLocale(locale) .withZone(timeZone); } return DateTime.parse(value, formatter); }
// Verifies a German-locale month name ("März") parses with the de-DE converter.
@Test public void convertUsesCustomLocale() throws Exception { final Converter c = new DateConverter(config("dd/MMM/YYYY HH:mm:ss Z", null, "de-DE")); final DateTime dateTime = (DateTime) c.convert("11/März/2017 15:10:48 +0200"); assertThat(dateTime).isEqualTo("2017-03-11T13:10:48.000Z"); }
public static int checkGreaterThanOrEqual(int n, int expected, String name) {
    // Validates that n >= expected; returns n so the call can be used inline.
    // The message names the offending parameter for easier diagnosis.
    if (n >= expected) {
        return n;
    }
    throw new IllegalArgumentException(name + ": " + n + " (expected: >= " + expected + ')');
}
// Verifies 0 < 1 triggers IllegalArgumentException (JUnit 4 expected-exception style).
@Test(expected = IllegalArgumentException.class) public void checkGreaterThanOrEqualMustFailIfArgumentIsLessThanExpected() { RangeUtil.checkGreaterThanOrEqual(0, 1, "var"); }
@Override protected void handlePut(final String listenTo, final ClusterProperties discoveryProperties) { if (discoveryProperties != null) { ActivePropertiesResult pickedPropertiesResult = pickActiveProperties(discoveryProperties); ClusterInfoItem newClusterInfoItem = new ClusterInfoItem( _simpleLoadBalancerState, pickedPropertiesResult.clusterProperties, PartitionAccessorFactory.getPartitionAccessor( pickedPropertiesResult.clusterProperties.getClusterName(), _partitionAccessorRegistry, pickedPropertiesResult.clusterProperties.getPartitionProperties()), pickedPropertiesResult.distribution, getFailoutProperties(discoveryProperties)); if (_simpleLoadBalancerState.getClusterInfo().put(listenTo, newClusterInfoItem) == null) { info(_log, "getting new ClusterInfoItem for cluster ", listenTo, ": ", newClusterInfoItem); } _simpleLoadBalancerState.notifyListenersOnClusterInfoUpdates(newClusterInfoItem); // notify the cluster listeners only when discoveryProperties is not null, because we don't // want to count initialization (just because listenToCluster is called) _simpleLoadBalancerState.notifyClusterListenersOnAdd(listenTo); } else { _log.warn("Received a null cluster properties for {}", listenTo); // still insert the ClusterInfoItem when discoveryProperties is null, but don't create accessor _simpleLoadBalancerState.getClusterInfo().put(listenTo, new ClusterInfoItem(_simpleLoadBalancerState, null, null, null)); } }
// Data-provider test: a put carrying failout properties must surface them on the
// stored ClusterInfoItem.
@Test(dataProvider = "getConfigsWithFailoutProperties") public void testWithFailoutConfigs(ClusterProperties stableConfigs, FailoutProperties clusterFailoutProperties) { ClusterLoadBalancerSubscriberFixture fixture = new ClusterLoadBalancerSubscriberFixture(); fixture.getMockSubscriber(false).handlePut(CLUSTER_NAME, new ClusterStoreProperties( stableConfigs, null, null, clusterFailoutProperties)); LoadBalancerStateItem<FailoutProperties> failoutPropertiesItem = fixture._clusterInfo.get(CLUSTER_NAME).getFailoutPropertiesItem(); Assert.assertNotNull(failoutPropertiesItem); Assert.assertEquals(failoutPropertiesItem.getProperty(), clusterFailoutProperties); }
@Override
public String update(List<String> columns, List<String> where) {
    // Builds "UPDATE <table> SET c1 = ?,c2 = fn() [WHERE ...]".
    // A column written as "name@function" renders the named function on the
    // right-hand side instead of a '?' bind placeholder.
    final StringBuilder sql = new StringBuilder("UPDATE ")
            .append(getTableName())
            .append(" SET ");
    for (int i = 0; i < columns.size(); i++) {
        if (i > 0) {
            sql.append(",");
        }
        final String[] parts = columns.get(i).split("@");
        sql.append(parts[0]).append(" = ");
        if (parts.length == 2) {
            sql.append(getFunction(parts[1]));
        } else {
            sql.append("?");
        }
    }
    if (CollectionUtils.isEmpty(where)) {
        return sql.toString();
    }
    sql.append(" ");
    appendWhereClause(where, sql);
    return sql.toString();
}
// Verifies the generated UPDATE statement shape, including the no-space comma separator.
@Test void testUpdate() { String sql = abstractMapper.update(Arrays.asList("id", "name"), Arrays.asList("id")); assertEquals("UPDATE tenant_info SET id = ?,name = ? WHERE id = ?", sql); }
// Async-consumer poll: acquires the single-threaded access guard, records poll
// metrics, and loops until records arrive or the timeout expires. Each iteration
// signals the background thread (PollEvent), triggers any pending wakeup *before*
// fetching (a wakeup between fetch and return would drop already-consumed
// records), updates assignment metadata, and returns interceptor-processed
// records when the fetch is non-empty. Throws IllegalStateException when no
// subscription/assignment exists.
@Override public ConsumerRecords<K, V> poll(final Duration timeout) { Timer timer = time.timer(timeout); acquireAndEnsureOpen(); try { kafkaConsumerMetrics.recordPollStart(timer.currentTimeMs()); if (subscriptions.hasNoSubscriptionOrUserAssignment()) { throw new IllegalStateException("Consumer is not subscribed to any topics or assigned any partitions"); } do { // Make sure to let the background thread know that we are still polling. applicationEventHandler.add(new PollEvent(timer.currentTimeMs())); // We must not allow wake-ups between polling for fetches and returning the records. // If the polled fetches are not empty the consumed position has already been updated in the polling // of the fetches. A wakeup between returned fetches and returning records would lead to never // returning the records in the fetches. Thus, we trigger a possible wake-up before we poll fetches. wakeupTrigger.maybeTriggerWakeup(); updateAssignmentMetadataIfNeeded(timer); final Fetch<K, V> fetch = pollForFetches(timer); if (!fetch.isEmpty()) { if (fetch.records().isEmpty()) { log.trace("Returning empty records from `poll()` " + "since the consumer's position has advanced for at least one topic partition"); } return interceptors.onConsume(new ConsumerRecords<>(fetch.records())); } // We will wait for retryBackoffMs } while (timer.notExpired()); return ConsumerRecords.empty(); } finally { kafkaConsumerMetrics.recordPollEnd(timer.currentTimeMs()); release(); } }
// Verifies the deprecated poll(long) overload is rejected under the "consumer" group protocol.
@Test @SuppressWarnings("deprecation") public void testPollLongThrowsException() { consumer = newConsumer(); Exception e = assertThrows(UnsupportedOperationException.class, () -> consumer.poll(0L)); assertEquals("Consumer.poll(long) is not supported when \"group.protocol\" is \"consumer\". " + "This method is deprecated and will be removed in the next major release.", e.getMessage()); }
// Registers all three Inter font families (regular, light, semibold) with the toolkit.
public static void install() { installBasic(); installLight(); installSemiBold(); }
// Verifies every installed Inter family resolves for the plain/italic/bold style combinations.
@Test void testFont() { FlatInterFont.install(); testFont( FlatInterFont.FAMILY, Font.PLAIN, 13 ); testFont( FlatInterFont.FAMILY, Font.ITALIC, 13 ); testFont( FlatInterFont.FAMILY, Font.BOLD, 13 ); testFont( FlatInterFont.FAMILY, Font.BOLD | Font.ITALIC, 13 ); testFont( FlatInterFont.FAMILY_LIGHT, Font.PLAIN, 13 ); testFont( FlatInterFont.FAMILY_LIGHT, Font.ITALIC, 13 ); testFont( FlatInterFont.FAMILY_SEMIBOLD, Font.PLAIN, 13 ); testFont( FlatInterFont.FAMILY_SEMIBOLD, Font.ITALIC, 13 ); }
// Signs the raw transaction with the credentials, hashes it, and returns the
// hash as a 0x-prefixed hex string.
public static String generateTransactionHashHexEncoded( RawTransaction rawTransaction, Credentials credentials) { return Numeric.toHexString(generateTransactionHash(rawTransaction, credentials)); }
// Verifies the EIP-155 (chain-id aware) overload produces the known hash for a fixed
// contract-creation transaction and key. Note: this exercises a different overload
// (with a chainId byte) than the two-argument focal method above.
@Test public void testGenerateEip155TransactionHash() { assertEquals( generateTransactionHashHexEncoded( TransactionEncoderTest.createContractTransaction(), (byte) 1, SampleKeys.CREDENTIALS), ("0x568c7f6920c1cee8332e245c473657b9c53044eb96ed7532f5550f1139861e9e")); }
// Test-only factory exposing the full MemoryMonitor constructor (GC stats source,
// polling period, thrash shutdown threshold, heap-dump settings).
@VisibleForTesting static MemoryMonitor forTest( GCStatsProvider gcStatsProvider, long sleepTimeMillis, int shutDownAfterNumGCThrashing, boolean canDumpHeap, double gcThrashingPercentagePerPeriod, @Nullable String uploadFilePath, File localDumpFolder) { return new MemoryMonitor( gcStatsProvider, sleepTimeMillis, shutDownAfterNumGCThrashing, canDumpHeap, gcThrashingPercentagePerPeriod, uploadFilePath, localDumpFolder); }
// Verifies a monitor configured with a 100% thrash threshold (effectively disabled)
// exits promptly, while the normally-configured monitor thread keeps running.
@Test public void disableMemoryMonitor() throws Exception { MemoryMonitor disabledMonitor = MemoryMonitor.forTest(provider, 10, 0, true, 100.0, null, localDumpFolder); Thread disabledMonitorThread = new Thread(disabledMonitor); disabledMonitorThread.start(); // Monitor thread should stop quickly after starting. Wait 10 seconds, and check that monitor // thread is not alive. disabledMonitorThread.join(10000); assertFalse(disabledMonitorThread.isAlive()); // Enabled monitor thread should still be running. assertTrue(thread.isAlive()); }
// Convenience wrapper: creates a new full (standard scrypt parameters) wallet file
// in the destination directory and returns its file name.
public static String generateNewWalletFile(String password, File destinationDirectory) throws CipherException, InvalidAlgorithmParameterException, NoSuchAlgorithmException, NoSuchProviderException, IOException { return generateFullNewWalletFile(password, destinationDirectory); }
// Verifies a freshly generated wallet file is well-formed (delegated to the shared helper).
@Test public void testGenerateNewWalletFile() throws Exception { String fileName = WalletUtils.generateNewWalletFile(PASSWORD, tempDir); testGeneratedNewWalletFile(fileName); }
static Map<String, String> parseConfig(byte[] byteConfig) {
    // ZooKeeper dynamic configuration is ASCII "key=value" lines; keep only the
    // "server.*" entries (drops e.g. the trailing "version" key).
    String config = new String(byteConfig, StandardCharsets.US_ASCII);
    Map<String, String> configMap = Util.parseMap(config);
    // Fix: the previous initial capacity of `configMap.size() - 1` threw
    // IllegalArgumentException (negative capacity) when the parsed map was
    // empty. Size defensively instead of assuming exactly one non-server key.
    Map<String, String> serverMap = new HashMap<>(Math.max(configMap.size(), 1));
    for (Map.Entry<String, String> entry : configMap.entrySet()) {
        if (entry.getKey().startsWith("server.")) {
            serverMap.put(entry.getKey(), entry.getValue());
        }
    }
    return serverMap;
}
// Verifies the "version" line is filtered out and all three server.* entries survive.
@Test public void testParseConfig() { String config = "server.1=my-cluster-zookeeper-0.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181\n" + "server.2=my-cluster-zookeeper-1.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181\n" + "server.3=my-cluster-zookeeper-2.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181\n" + "version=100000000b"; Map<String, String> expected = new HashMap<>(3); expected.put("server.1", "my-cluster-zookeeper-0.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181"); expected.put("server.2", "my-cluster-zookeeper-1.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181"); expected.put("server.3", "my-cluster-zookeeper-2.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181"); assertThat(ZookeeperScaler.parseConfig(config.getBytes(StandardCharsets.US_ASCII)), is(expected)); }
// KSQL CONCAT_WS(separator, v1, v2, ...): joins the non-null values with the
// separator; a null separator yields null; fewer than two arguments (or a null
// varargs array) raises KsqlFunctionException. Null values are skipped, not
// rendered as "null".
@Udf public String concatWS( @UdfParameter(description = "Separator string and values to join") final String... inputs) { if (inputs == null || inputs.length < 2) { throw new KsqlFunctionException("Function Concat_WS expects at least two input arguments."); } final String separator = inputs[0]; if (separator == null) { return null; } return Arrays.stream(inputs, 1, inputs.length) .filter(Objects::nonNull) .collect(Collectors.joining(separator)); }
// NOTE(review): this test exercises the ByteBuffer overload of concatWS (separator
// bytes interleaved between value bytes), not the String varargs focal method above.
@Test public void shouldConcatBytes() { assertThat(udf.concatWS(ByteBuffer.wrap(new byte[] {1}), ByteBuffer.wrap(new byte[] {2}), ByteBuffer.wrap(new byte[] {3})), is(ByteBuffer.wrap(new byte[] {2, 1, 3}))); }
// OAuth2 PKCE token exchange: POSTs a form-encoded authorization_code grant
// (redirect_uri, client_id, code, code_verifier) to the token endpoint, requires
// an HTTP 200, and deserializes the JSON body into a TokenResponse. Any other
// status is surfaced via HttpExceptions.httpFailBadStatus.
public TokenResponse exchangePkceCode( URI tokenEndpoint, String code, String redirectUri, String clientId, String codeVerifier) { var body = UrlFormBodyBuilder.create() .param("grant_type", "authorization_code") .param("redirect_uri", redirectUri) .param("client_id", clientId) .param("code", code) .param("code_verifier", codeVerifier) .build(); var headers = List.of( new Header(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON), new Header(HttpHeaders.CONTENT_TYPE, UrlFormBodyBuilder.MEDIA_TYPE)); var req = new Request(tokenEndpoint, "POST", headers, body); var res = httpClient.call(req); if (res.status() != 200) { throw HttpExceptions.httpFailBadStatus(req.method(), tokenEndpoint, res.status()); } return JsonCodec.readValue(res.body(), TokenResponse.class); }
// WireMock-backed test: a 200 JSON token response is deserialized into
// tokenType/expiresIn/idToken as expected.
@Test void exchangePkceCode(WireMockRuntimeInfo wm) { var body = """ { "access_token" : null, "token_type" : "Bearer", "expires_in" : 3600, "id_token" : "eyJraWQiOiIxZTlnZGs3IiwiYWxnIjoiUl..." } """ .getBytes(StandardCharsets.UTF_8); var path = "/auth/token"; stubFor(post(path).willReturn(ok().withBody(body))); var base = URI.create(wm.getHttpBaseUrl()); var code = "s3cret"; var codeVerifier = "k3k3k"; var clientId = "myclient"; var redirectUri = "http://localhost:8080/callback"; var res = client.exchangePkceCode(base.resolve(path), code, redirectUri, clientId, codeVerifier); assertEquals("Bearer", res.tokenType()); assertEquals(3600, res.expiresIn()); assertThat(res.idToken(), not(emptyOrNullString())); }
// Positional read: seeks the in-memory cursor to pos, then delegates to the
// sequential read(byte[], int, int). Synchronized so the seek+read pair is atomic.
// NOTE(review): the (int) cast silently truncates positions beyond Integer.MAX_VALUE;
// presumably the backing byte source is always int-addressable — confirm.
@Override public synchronized int read(long pos, byte[] b, int start, int len) throws IOException { this.pos = (int) pos; return read(b, start, len); }
// Property-style test: many random positional reads over a random byte array must
// match the corresponding slice of the source data (fixed seed for determinism).
@Test void randomReads() throws Exception { Random random = new Random(19820210); int length = random.nextInt(SIZE) + 1; byte[] data = new byte[length]; random.nextBytes(data); Input in = new InputBytes(data); for (int i = 0; i < COUNT; i++) { int p = random.nextInt(length); int l = Math.min(random.nextInt(SIZE / 10), length - p); byte[] buffer = new byte[l]; in.read(p, buffer, 0, l); assertArrayEquals(Arrays.copyOfRange(data, p, p + l), buffer); } in.close(); }
// Back-pressure REST handler: refreshes metrics, resolves the job/vertex metric
// store and the representative attempt mapping, and returns the computed
// back-pressure info — or the DEPRECATED placeholder when no metrics exist for
// the vertex.
@Override protected CompletableFuture<JobVertexBackPressureInfo> handleRequest( @Nonnull HandlerRequest<EmptyRequestBody> request, @Nonnull RestfulGateway gateway) throws RestHandlerException { metricFetcher.update(); final JobID jobId = request.getPathParameter(JobIDPathParameter.class); final JobVertexID jobVertexId = request.getPathParameter(JobVertexIdPathParameter.class); TaskMetricStore taskMetricStore = metricFetcher .getMetricStore() .getTaskMetricStore(jobId.toString(), jobVertexId.toString()); Map<String, Map<Integer, Integer>> jobRepresentativeExecutions = metricFetcher.getMetricStore().getRepresentativeAttempts().get(jobId.toString()); Map<Integer, Integer> representativeAttempts = jobRepresentativeExecutions != null ? jobRepresentativeExecutions.get(jobVertexId.toString()) : null; return CompletableFuture.completedFuture( taskMetricStore != null ? createJobVertexBackPressureInfo(taskMetricStore, representativeAttempts) : JobVertexBackPressureInfo.deprecated()); }
// Verifies a job/vertex with no metric store yields the DEPRECATED back-pressure status.
@Test void testAbsentBackPressure() throws Exception { final Map<String, String> pathParameters = new HashMap<>(); pathParameters.put( JobIDPathParameter.KEY, TEST_JOB_ID_BACK_PRESSURE_STATS_ABSENT.toString()); pathParameters.put(JobVertexIdPathParameter.KEY, new JobVertexID().toString()); final HandlerRequest<EmptyRequestBody> request = HandlerRequest.resolveParametersAndCreate( EmptyRequestBody.getInstance(), new JobVertexMessageParameters(), pathParameters, Collections.emptyMap(), Collections.emptyList()); final CompletableFuture<JobVertexBackPressureInfo> jobVertexBackPressureInfoCompletableFuture = jobVertexBackPressureHandler.handleRequest(request, restfulGateway); final JobVertexBackPressureInfo jobVertexBackPressureInfo = jobVertexBackPressureInfoCompletableFuture.get(); assertThat(jobVertexBackPressureInfo.getStatus()) .isEqualTo(VertexBackPressureStatus.DEPRECATED); }
// SAML ACS resolution precedence: (1) explicit AssertionConsumerServiceURL from
// the AuthnRequest; (2) lookup in the connection metadata — a single endpoint is
// used directly, a valid AssertionConsumerServiceIndex selects by index, and no
// index falls back to the endpoint flagged isDefault. Throws SamlValidationException
// when metadata has no ACS endpoints, the index is out of bounds, or no default exists.
public void resolveAssertionConsumerService(AuthenticationRequest authenticationRequest) throws SamlValidationException { // set URL if set in authnRequest final String authnAcsURL = authenticationRequest.getAuthnRequest().getAssertionConsumerServiceURL(); if (authnAcsURL != null) { authenticationRequest.setAssertionConsumerURL(authnAcsURL); return; } // search url from metadata endpoints final Integer authnAcsIdx = authenticationRequest.getAuthnRequest().getAssertionConsumerServiceIndex(); List<Endpoint> endpoints = authenticationRequest.getConnectionEntity().getRoleDescriptors().get(0).getEndpoints(AssertionConsumerService.DEFAULT_ELEMENT_NAME); if (endpoints.isEmpty()) { throw new SamlValidationException("Authentication: Assertion Consumer Service not found in metadata"); } if (authnAcsIdx != null && endpoints.size() <= authnAcsIdx) { throw new SamlValidationException("Authentication: Assertion Consumer Index is out of bounds"); } // TODO: check if this statement is correct if (endpoints.size() == 1) { authenticationRequest.setAssertionConsumerURL(endpoints.get(0).getLocation()); return; } if(authnAcsIdx == null) { AssertionConsumerService defaultAcs = endpoints.stream() .filter(e -> e instanceof AssertionConsumerService) .map(acs -> (AssertionConsumerService) acs) .filter(IndexedEndpoint::isDefault) .findAny() .orElse(null); if (defaultAcs == null) { throw new SamlValidationException("Authentication: There is no default AssertionConsumerService"); } authenticationRequest.setAssertionConsumerURL(defaultAcs.getLocation()); return; } authenticationRequest.setAssertionConsumerURL(endpoints.get(authnAcsIdx).getLocation()); }
// Verifies index 1 selects the second ACS endpoint from multi-ACS metadata.
// NOTE(review): selection is by list position, not by the endpoint's declared
// index attribute — confirm that matches the metadata ordering assumption.
@Test void resolveAcsUrlWithIndex1InMultiAcsMetadata() throws SamlValidationException { AuthnRequest authnRequest = OpenSAMLUtils.buildSAMLObject(AuthnRequest.class); authnRequest.setAssertionConsumerServiceIndex(1); AuthenticationRequest authenticationRequest = new AuthenticationRequest(); authenticationRequest.setAuthnRequest(authnRequest); authenticationRequest.setConnectionEntity(MetadataParser.readMetadata(stubsMultiAcsMetadataFile, CONNECTION_ENTITY_ID)); assertionConsumerServiceUrlService.resolveAssertionConsumerService(authenticationRequest); assertEquals("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS", authenticationRequest.getAssertionConsumerURL()); }
// Removes and returns the head frame; Deque#poll returns null when the message is empty.
@Override public ZFrame pop() { return frames.poll(); }
// Verifies popping from an empty message yields null rather than throwing.
@Test public void testPop() { ZMsg msg = new ZMsg(); assertThat(msg.popString(), nullValue()); }
// Lazily builds the LDAP context factories: multi-server configuration when
// LDAP_SERVERS_PROPERTY lists server keys, single-server otherwise. The result
// is cached after the first call.
// NOTE(review): the lazy init is not synchronized — presumably only called from
// a single thread during startup; confirm before relying on it concurrently.
public Map<String, LdapContextFactory> getContextFactories() { if (contextFactories == null) { contextFactories = new LinkedHashMap<>(); String[] serverKeys = config.getStringArray(LDAP_SERVERS_PROPERTY); if (serverKeys.length > 0) { initMultiLdapConfiguration(serverKeys); } else { initSimpleLdapConfiguration(); } } return contextFactories; }
// Verifies a single-LDAP configuration yields exactly one context factory.
@Test public void testContextFactoriesWithSingleLdap() { LdapSettingsManager settingsManager = new LdapSettingsManager( generateSingleLdapSettingsWithUserAndGroupMapping().asConfig()); assertThat(settingsManager.getContextFactories()).hasSize(1); }
// Adds b to the matrix element at (i, j) in place and returns the updated value;
// index(i, j) maps the 2-D coordinates onto the backing array A.
public float add(int i, int j, float b) { return A[index(i, j)] += b; }
// NOTE(review): this test exercises the matrix-level add(alpha, B) overload
// (A += 1.0f * B), not the element-wise add(i, j, b) focal method above.
@Test public void testAdd() { System.out.println("add"); float[][] A = { { 0.7220180f, 0.07121225f, 0.6881997f}, {-0.2648886f, -0.89044952f, 0.3700456f}, {-0.6391588f, 0.44947578f, 0.6240573f} }; float[][] B = { {0.6881997f, -0.07121225f, 0.7220180f}, {0.3700456f, 0.89044952f, -0.2648886f}, {0.6240573f, -0.44947578f, -0.6391588f} }; float[][] C = { { 1.4102177f, 0f, 1.4102177f}, { 0.1051570f, 0f, 0.1051570f}, {-0.0151015f, 0f, -0.0151015f} }; Matrix a = Matrix.of(A); Matrix b = Matrix.of(B); a.add(1.0f, b); assertTrue(MathEx.equals(C, a.toArray(), 1E-6f)); }
public void run() {
    // Mark the game as RUNNING before the loop thread starts, so observers that
    // check the status immediately after run() see the new state.
    status = GameStatus.RUNNING;
    new Thread(this::processGameLoop).start();
}
// Verifies run() transitions the loop status to RUNNING.
@Test void testRun() { gameLoop.run(); Assertions.assertEquals(GameStatus.RUNNING, gameLoop.status); }
// Forces the context's resource configuration to the locale described by
// localeString (resolved via LocaleTools.getLocaleForLocaleString), using the
// API-appropriate setter: Configuration#setLocale on JB-MR1+ and the deprecated
// public field on older API levels.
public static void applyLocaleToContext(@NonNull Context context, @Nullable String localeString) { final Locale forceLocale = LocaleTools.getLocaleForLocaleString(localeString); final Configuration configuration = context.getResources().getConfiguration(); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { configuration.setLocale(forceLocale); } else { //noinspection deprecation configuration.locale = forceLocale; } context.getResources().updateConfiguration(configuration, null); }
// API 24 Robolectric test: applying "ru" updates both locale and LocaleList,
// applying "" restores the default, and an invalid locale string leaves the
// previously effective language in place (API 24 rejects invalid values).
@SuppressLint("UseSdkSuppress") @RequiresApi(api = Build.VERSION_CODES.N) @Test @Config(sdk = Build.VERSION_CODES.N) public void testSetAndResetValueAPI24() { Assert.assertEquals( "English (United States)", mContext.getResources().getConfiguration().locale.getDisplayName()); Assert.assertEquals(1, mContext.getResources().getConfiguration().getLocales().size()); Assert.assertEquals( Locale.getDefault().getDisplayName(), mContext.getResources().getConfiguration().getLocales().get(0).getDisplayName()); LocaleTools.applyLocaleToContext(mContext, "ru"); Assert.assertEquals("ru", mContext.getResources().getConfiguration().locale.getLanguage()); Assert.assertEquals( "Russian", mContext.getResources().getConfiguration().locale.getDisplayName()); Assert.assertEquals(1, mContext.getResources().getConfiguration().getLocales().size()); Assert.assertEquals( "Russian", mContext.getResources().getConfiguration().getLocales().get(0).getDisplayName()); LocaleTools.applyLocaleToContext(mContext, ""); Assert.assertEquals( Locale.getDefault().getLanguage(), mContext.getResources().getConfiguration().locale.getLanguage()); Assert.assertEquals(1, mContext.getResources().getConfiguration().getLocales().size()); Assert.assertEquals( Locale.getDefault().getDisplayName(), mContext.getResources().getConfiguration().getLocales().get(0).getDisplayName()); LocaleTools.applyLocaleToContext(mContext, "NONE_EXISTING"); // in this API level, Android is more strict, we can not set invalid values. Assert.assertEquals("en", mContext.getResources().getConfiguration().locale.getLanguage()); }
/**
 * Describes client quotas matching the given filter.
 *
 * <p>Sends a {@code DescribeClientQuotasRequest} to the least-loaded node and
 * completes the returned result's future from the broker response.
 *
 * @param filter  which quota entities to match
 * @param options call options (timeout)
 * @return result wrapping a future of entity -&gt; (quota key -&gt; value)
 */
@Override
public DescribeClientQuotasResult describeClientQuotas(ClientQuotaFilter filter, DescribeClientQuotasOptions options) {
  KafkaFutureImpl<Map<ClientQuotaEntity, Map<String, Double>>> future = new KafkaFutureImpl<>();
  final long now = time.milliseconds();
  runnable.call(new Call("describeClientQuotas", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) {

    @Override
    DescribeClientQuotasRequest.Builder createRequest(int timeoutMs) {
      return new DescribeClientQuotasRequest.Builder(filter);
    }

    @Override
    void handleResponse(AbstractResponse abstractResponse) {
      DescribeClientQuotasResponse response = (DescribeClientQuotasResponse) abstractResponse;
      // The response itself knows how to complete the future (including
      // translating error codes into exceptional completion).
      response.complete(future);
    }

    @Override
    void handleFailure(Throwable throwable) {
      future.completeExceptionally(throwable);
    }
  }, now);
  return new DescribeClientQuotasResult(future);
}
@Test
public void testDescribeClientQuotas() throws Exception {
  try (AdminClientUnitTestEnv env = mockClientEnv()) {
    env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());

    final String value = "value";

    // Prepare a canned broker response containing two quota entities.
    Map<ClientQuotaEntity, Map<String, Double>> responseData = new HashMap<>();
    ClientQuotaEntity entity1 = newClientQuotaEntity(ClientQuotaEntity.USER, "user-1", ClientQuotaEntity.CLIENT_ID, value);
    ClientQuotaEntity entity2 = newClientQuotaEntity(ClientQuotaEntity.USER, "user-2", ClientQuotaEntity.CLIENT_ID, value);
    responseData.put(entity1, Collections.singletonMap("consumer_byte_rate", 10000.0));
    responseData.put(entity2, Collections.singletonMap("producer_byte_rate", 20000.0));
    env.kafkaClient().prepareResponse(DescribeClientQuotasResponse.fromQuotaEntities(responseData, 0));

    ClientQuotaFilter filter = ClientQuotaFilter.contains(singletonList(ClientQuotaFilterComponent.ofEntity(ClientQuotaEntity.USER, value)));

    DescribeClientQuotasResult result = env.adminClient().describeClientQuotas(filter);
    Map<ClientQuotaEntity, Map<String, Double>> resultData = result.entities().get();
    // Fixed: assertEquals takes (expected, actual) — the original had the
    // arguments swapped, which produces misleading failure messages.
    assertEquals(2, resultData.size());
    assertTrue(resultData.containsKey(entity1));
    Map<String, Double> config1 = resultData.get(entity1);
    assertEquals(1, config1.size());
    assertEquals(10000.0, config1.get("consumer_byte_rate"), 1e-6);
    assertTrue(resultData.containsKey(entity2));
    Map<String, Double> config2 = resultData.get(entity2);
    assertEquals(1, config2.size());
    assertEquals(20000.0, config2.get("producer_byte_rate"), 1e-6);
  }
}
/**
 * Parses a plugin descriptor XML, deriving location details from the given
 * bundle-or-plugin jar file.
 *
 * @param pluginXml             stream over the descriptor XML
 * @param bundleOrPluginJarFile jar whose path / extraction location are used
 * @return the parsed bundle descriptor
 */
public static GoPluginBundleDescriptor parseXML(InputStream pluginXml, BundleOrPluginFileDetails bundleOrPluginJarFile) throws IOException, JAXBException, XMLStreamException, SAXException {
  final String jarAbsolutePath = bundleOrPluginJarFile.file().getAbsolutePath();
  final File extractionLocation = bundleOrPluginJarFile.extractionLocation();
  final boolean bundledPlugin = bundleOrPluginJarFile.isBundledPlugin();
  return parseXML(pluginXml, jarAbsolutePath, extractionLocation, bundledPlugin);
}
@Test
void shouldValidatePluginVersion() throws IOException {
  // A descriptor declaring an unsupported schema version ("10") must be
  // rejected by XSD validation with a clear cause message.
  try (InputStream pluginXml = IOUtils.toInputStream("<go-plugin version=\"10\"></go-plugin>", StandardCharsets.UTF_8)) {
    JAXBException e = assertThrows(JAXBException.class, () -> GoPluginDescriptorParser.parseXML(pluginXml, "/tmp/", new File("/tmp/"), true));
    // The validation detail is wrapped as the JAXBException's cause.
    assertTrue(e.getCause().getMessage().contains("Value '10' of attribute 'version' of element 'go-plugin' is not valid"),
        format("Message not correct: [%s]", e.getCause().getMessage()));
  }
}
/**
 * Anonymously fetches a versioned profile for the requested account.
 *
 * <p>Rejects non-ACI identifiers, then validates the caller's unidentified
 * access key against the target account before resolving the profile.
 */
@Override
public Mono<GetVersionedProfileResponse> getVersionedProfile(final GetVersionedProfileAnonymousRequest request) {
  final ServiceIdentifier serviceIdentifier =
      ServiceIdentifierUtil.fromGrpcServiceIdentifier(request.getRequest().getAccountIdentifier());

  // Versioned profiles are only addressable by ACI.
  if (IdentityType.ACI != serviceIdentifier.identityType()) {
    throw Status.INVALID_ARGUMENT.withDescription("Expected ACI service identifier").asRuntimeException();
  }

  final byte[] unidentifiedAccessKey = request.getUnidentifiedAccessKey().toByteArray();
  return getTargetAccountAndValidateUnidentifiedAccess(serviceIdentifier, unidentifiedAccessKey)
      .flatMap(account -> ProfileGrpcHelper.getVersionedProfile(account, profilesManager, request.getRequest().getVersion()));
}
@Test
void getVersionedProfileVersionNotFound() {
  // Grant the caller valid unidentified access to the target account...
  final byte[] unidentifiedAccessKey = TestRandomUtil.nextBytes(UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH);
  when(account.getUnidentifiedAccessKey()).thenReturn(Optional.of(unidentifiedAccessKey));
  when(account.isUnrestrictedUnidentifiedAccess()).thenReturn(false);
  when(accountsManager.getByServiceIdentifierAsync(any())).thenReturn(CompletableFuture.completedFuture(Optional.of(account)));
  // ...but make the profile store return no profile for the requested version.
  when(profilesManager.getAsync(any(), any())).thenReturn(CompletableFuture.completedFuture(Optional.empty()));

  final GetVersionedProfileAnonymousRequest request = GetVersionedProfileAnonymousRequest.newBuilder()
      .setUnidentifiedAccessKey(ByteString.copyFrom(unidentifiedAccessKey))
      .setRequest(GetVersionedProfileRequest.newBuilder()
          .setAccountIdentifier(ServiceIdentifier.newBuilder()
              .setIdentityType(IdentityType.IDENTITY_TYPE_ACI)
              .setUuid(ByteString.copyFrom(UUIDUtil.toBytes(UUID.randomUUID())))
              .build())
          .setVersion("someVersion")
          .build())
      .build();

  // Missing profile version must surface as gRPC NOT_FOUND.
  assertStatusException(Status.NOT_FOUND, () -> unauthenticatedServiceStub().getVersionedProfile(request));
}
public static XPathExpression buildXPathMatcherFromRules(String rules) throws XPathExpressionException { XPath xpath = XPathFactory.newInstance().newXPath(); WritableNamespaceContext nsContext = new WritableNamespaceContext(); // Parse SoapUI rules for getting namespaces and expression to evaluate. // declare namespace ser='http://www.example.com/test/service'; // //ser:sayHello/name String xpathExpression = null; String lines[] = rules.split("\\r?\\n"); for (String line : lines) { line = line.trim(); if (line.startsWith("declare namespace ")) { String prefix = line.substring(18, line.indexOf("=")); String namespace = line.substring(line.indexOf("=") + 2, line.lastIndexOf("'")); nsContext.addNamespaceURI(prefix, namespace); } else { xpathExpression = line; } } // Set namespace context and compile expression. xpath.setNamespaceContext(nsContext); return xpath.compile(xpathExpression); }
@Test
void testBuildXPathMatcherFromRulesFunction() {
  // Rules using an XPath function (concat) over two namespaced elements.
  String rules = "declare namespace ser='http://www.example.com/hello';\n"
      + "concat(//ser:sayHello/title/text(),' ',//ser:sayHello/name/text())";
  // Note the document binds the same namespace under a different prefix
  // ("hel"); matching must be by URI, not prefix.
  String soap = "<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\" xmlns:hel=\"http://www.example.com/hello\">\n"
      + "   <soapenv:Header/>\n"
      + "   <soapenv:Body>\n"
      + "      <hel:sayHello>\n"
      + "         <title>Ms.</title>\n"
      + "         <name>Karla</name>\n"
      + "      </hel:sayHello>\n"
      + "   </soapenv:Body>\n"
      + "</soapenv:Envelope>";

  XPathExpression expression = null;
  try {
    expression = SoapUIXPathBuilder.buildXPathMatcherFromRules(rules);
  } catch (Throwable t) {
    fail("No exception should be thrown while parsing rules");
  }

  String result = null;
  try {
    result = expression.evaluate(new InputSource(new StringReader(soap)));
  } catch (Throwable t) {
    fail("No exception should be thrown while evaluating xpath");
  }
  assertEquals("Ms. Karla", result);
}
/**
 * Tells whether at least one operation on this bean carries a handler annotation.
 *
 * @return true if any handler-annotated operation was discovered
 */
public boolean hasAnyMethodHandlerAnnotation() {
    return !this.operationsWithHandlerAnnotation.isEmpty();
}
@Test
public void testHandlerInFunctionalInterfaceWithMethodReference() {
    // A method reference bound to a handler-annotated functional interface
    // should be detected by BeanInfo.
    MyClass target = new MyClass();
    MyHandlerInterface handler = (MyHandlerInterface) target::myOtherMethod;
    BeanInfo beanInfo = new BeanInfo(context, handler.getClass());
    assertTrue(beanInfo.hasAnyMethodHandlerAnnotation());
}
/**
 * Appends the given raw bytes to the underlying buffer.
 *
 * @param value bytes to write; written verbatim, no length prefix
 */
public void writeBytes(final byte[] value) {
    this.byteBuf.writeBytes(value);
}
@Test
void assertWriteBytes() {
    // Fixed: use an explicit charset (matching the payload's) instead of the
    // platform default, and reuse one array instance for both the call and the
    // verification.
    byte[] value = "value".getBytes(StandardCharsets.UTF_8);
    new MySQLPacketPayload(byteBuf, StandardCharsets.UTF_8).writeBytes(value);
    verify(byteBuf).writeBytes(value);
}
/**
 * Flushes all staged unit-of-work actions to the database.
 *
 * <p>No-op when nothing is staged. Actions are applied in a fixed order:
 * inserts, then modifications, then deletions.
 */
@Override
public void commit() {
    if (null == context || context.isEmpty()) {
        return;
    }
    LOGGER.info("Commit started");
    final String insertKey = UnitActions.INSERT.getActionValue();
    final String modifyKey = UnitActions.MODIFY.getActionValue();
    final String deleteKey = UnitActions.DELETE.getActionValue();
    if (context.containsKey(insertKey)) {
        commitInsert();
    }
    if (context.containsKey(modifyKey)) {
        commitModify();
    }
    if (context.containsKey(deleteKey)) {
        commitDelete();
    }
    LOGGER.info("Commit finished.");
}
@Test
void shouldNotWriteToDbIfNothingToCommit() {
    // A dealer created with an empty context has nothing staged, so committing
    // must never touch the database.
    var emptyRepository = new ArmsDealer(new HashMap<>(), weaponDatabase);
    emptyRepository.commit();
    verifyNoMoreInteractions(weaponDatabase);
}
/**
 * Returns the symlink target of this file, or {@code null} when it is not a
 * link (or link resolution is not supported).
 *
 * <p>This base implementation always returns {@code null}; implementations
 * that can resolve links are expected to override it.
 *
 * @return the link target path, or {@code null}
 * @throws IOException declared for overriding implementations that perform I/O
 */
public @CheckForNull String readLink() throws IOException {
    return null;
}
@Issue("JENKINS-26810")
@Test
public void readLink() throws Exception {
    // Symlinks cannot be created the same way on Windows; skip there.
    assumeFalse(Functions.isWindows());
    File root = tmp.getRoot();
    FilePath rootF = new FilePath(root);
    rootF.child("plain").write("", null);
    rootF.child("link").symlinkTo("physical", TaskListener.NULL);
    // Exercise both VirtualFile implementations over the same fixture.
    for (VirtualFile vf : new VirtualFile[] {VirtualFile.forFile(root), VirtualFile.forFilePath(rootF)}) {
        // Directories and regular files are not links.
        assertNull(vf.readLink());
        assertNull(vf.child("plain").readLink());
        VirtualFile link = vf.child("link");
        assertEquals("physical", link.readLink());
        // A dangling symlink is neither a file nor a directory.
        assertFalse(link.isFile());
        assertFalse(link.isDirectory());
        // not checking .exists() for now
    }
}
@Override public DataSerializableFactory createFactory() { return new Factory(); }
@Test(expected = IllegalArgumentException.class)
public void testInvalidType() {
    // An unknown type id (999) must be rejected by the factory.
    new MetricsDataSerializerHook().createFactory().create(999);
}
/**
 * Sets the number of asynchronous backups for this cache configuration.
 *
 * @param asyncBackupCount the desired async backup count
 * @return this config, for fluent chaining
 * @throws IllegalArgumentException if the value is invalid in combination
 *         with the synchronous {@code backupCount}
 */
public CacheConfig<K, V> setAsyncBackupCount(int asyncBackupCount) {
    // checkAsyncBackupCount validates against the sync backup count and
    // returns the accepted value.
    this.asyncBackupCount = checkAsyncBackupCount(backupCount, asyncBackupCount);
    return this;
}
@Test(expected = IllegalArgumentException.class)
public void setAsyncBackupCount_whenTooLarge() {
    CacheConfig config = new CacheConfig();
    // A value far above the supported maximum must be rejected.
    config.setAsyncBackupCount(200); //max allowed is 6..
}
/**
 * Serializes a {@link FileScanTask} to its JSON representation.
 *
 * @param fileScanTask the task to serialize; must not be null
 * @return the JSON string
 * @throws IllegalArgumentException if the task is null
 */
public static String toJson(FileScanTask fileScanTask) {
    Preconditions.checkArgument(fileScanTask != null, "Invalid scan task: null");
    // Second argument presumably toggles pretty-printing (false = compact) —
    // TODO confirm against JsonUtil.generate's contract.
    return JsonUtil.generate(generator -> toJson(fileScanTask, generator), false);
}
@Test
public void unsupportedTask() {
    // A task type the parser does not know (here, a Mockito proxy class)
    // must be rejected with a descriptive message naming that class.
    FileScanTask unsupported = Mockito.mock(FileScanTask.class);
    assertThatThrownBy(() -> ScanTaskParser.toJson(unsupported))
        .isInstanceOf(UnsupportedOperationException.class)
        .hasMessageContaining(
            "Unsupported task type: org.apache.iceberg.FileScanTask$MockitoMock$");
}
/**
 * Returns the globally configured default context path.
 *
 * @return the current default context path
 */
public static String getDefaultContextPath() {
    return defaultContextPath;
}
@Test
void testGetDefaultContextPath() {
    // Initial value matches the expected constant.
    String defaultVal = ParamUtil.getDefaultContextPath();
    assertEquals(defaultContextPath, defaultVal);
    // Setter must be reflected by the getter.
    // NOTE(review): this mutates static state without restoring it afterwards —
    // may leak into other tests; confirm a reset happens elsewhere.
    String expect = "test";
    ParamUtil.setDefaultContextPath(expect);
    assertEquals(expect, ParamUtil.getDefaultContextPath());
}
/**
 * Checks whether the given pattern contains a glob wildcard character:
 * {@code ?} (single character) or {@code *} (any sequence).
 *
 * @param regex the pattern to inspect; must not be null
 * @return true if at least one wildcard character is present
 */
public static boolean containsWildcard(String regex) {
    return regex.indexOf('?') >= 0 || regex.indexOf('*') >= 0;
}
@Test
void testContainsWildcard() {
    // No wildcard present.
    assertFalse(RegexParser.containsWildcard("test"));
    assertFalse(RegexParser.containsWildcard(""));
    // Bare wildcards.
    assertTrue(RegexParser.containsWildcard("?"));
    assertTrue(RegexParser.containsWildcard("*"));
    // Added coverage: wildcards embedded in surrounding text.
    assertTrue(RegexParser.containsWildcard("te?t"));
    assertTrue(RegexParser.containsWildcard("test*"));
}
/**
 * Renders every tracked field of this record (address, timestamp, lease,
 * assignment status and network configuration values) via Guava's
 * {@code MoreObjects} helper.
 */
@Override
public String toString() {
    return MoreObjects.toStringHelper(getClass())
        .add("ip", ipAddress)
        .add("timestamp", timestamp)
        .add("lease", leasePeriod)
        .add("assignmentStatus", assignmentStatus)
        .add("subnetMask", subnetMask)
        .add("broadcast", broadcast)
        .add("dhcpServer", dhcpServer)
        .add("routerAddress", routerAddress)
        .add("domainServer", domainServer)
        .toString();
}
// NOTE(review): this compares stats1.toString() with itself, so it can only
// fail if toString() throws or is non-deterministic. Consider asserting
// against the expected rendered content (e.g. containsString on field names)
// for a meaningful check.
@Test
public void testToString() {
    assertThat(stats1.toString(), is(stats1.toString()));
}
/**
 * Normalizes a Camel endpoint URI so that equivalent URIs (e.g. differing
 * only in query-parameter order) render identically.
 *
 * <p>Tries the fast Camel parser first; URIs it cannot handle fall through
 * to the slower, more permissive legacy normalizer.
 *
 * @param uri the endpoint URI to normalize
 * @return the normalized URI (may be the input itself when already normal)
 * @throws URISyntaxException if the URI cannot be parsed
 */
public static String normalizeUri(String uri) throws URISyntaxException {
    // try to parse using the simpler and faster Camel URI parser
    String[] parts = CamelURIParser.fastParseUri(uri);
    if (parts != null) {
        // we optimized specially if an empty array is returned
        // (identity comparison against the shared sentinel array is intentional)
        if (parts == URI_ALREADY_NORMALIZED) {
            return uri;
        }
        // use the faster and more simple normalizer
        return doFastNormalizeUri(parts);
    } else {
        // use the legacy normalizer as the uri is complex and may have unsafe URL characters
        return doComplexNormalizeUri(uri);
    }
}
@Test
public void testNormalizeEndpointUri() throws Exception {
    // Query parameters are sorted during normalization, so order must not matter.
    String out1 = URISupport.normalizeUri("smtp://localhost?username=davsclaus&password=secret");
    String out2 = URISupport.normalizeUri("smtp://localhost?password=secret&username=davsclaus");
    assertEquals(out1, out2);

    // The scheme-only form (no "//") normalizes to the same URI.
    out1 = URISupport.normalizeUri("smtp://localhost?username=davsclaus&password=secret");
    out2 = URISupport.normalizeUri("smtp:localhost?password=secret&username=davsclaus");
    assertEquals(out1, out2);

    out1 = URISupport.normalizeUri("smtp:localhost?password=secret&username=davsclaus");
    out2 = URISupport.normalizeUri("smtp://localhost?username=davsclaus&password=secret");
    assertEquals(out1, out2);

    out1 = URISupport.normalizeUri("seda:foo?concurrentConsumer=2");
    out2 = URISupport.normalizeUri("seda:foo?concurrentConsumer=2");
    assertEquals(out1, out2);

    out1 = URISupport.normalizeUri("seda:foo?concurrentConsumer=2");
    out2 = URISupport.normalizeUri("seda:foo");
    // Fixed: assertNotSame only checks reference identity, which is trivially
    // true for two distinct String objects; assertNotEquals checks content.
    assertNotEquals(out1, out2);

    out1 = URISupport.normalizeUri("foo:?test=1");
    out2 = URISupport.normalizeUri("foo://?test=1");
    assertEquals("foo://?test=1", out2);
    assertEquals(out1, out2);
}
/**
 * Merges a batch of data records, collapsing operations that target the same
 * record key. Records whose operation type is not INSERT/UPDATE/DELETE are
 * silently ignored.
 *
 * @param dataRecords records to merge, in arrival order
 * @return the merged records (iteration order unspecified)
 */
public List<DataRecord> merge(final List<DataRecord> dataRecords) {
    Map<DataRecord.Key, DataRecord> mergedRecords = new HashMap<>();
    for (DataRecord each : dataRecords) {
        PipelineSQLOperationType operationType = each.getType();
        if (PipelineSQLOperationType.INSERT == operationType) {
            mergeInsert(each, mergedRecords);
        } else if (PipelineSQLOperationType.UPDATE == operationType) {
            mergeUpdate(each, mergedRecords);
        } else if (PipelineSQLOperationType.DELETE == operationType) {
            mergeDelete(each, mergedRecords);
        }
    }
    return new ArrayList<>(mergedRecords.values());
}
@Test
void assertDeleteBeforeDelete() {
    // Two DELETEs for the same key is an invalid ordering and must be rejected.
    DataRecord firstDelete = mockDeleteDataRecord(1, 1, 1);
    DataRecord secondDelete = mockDeleteDataRecord(1, 1, 1);
    assertThrows(PipelineUnexpectedDataRecordOrderException.class,
        () -> groupEngine.merge(Arrays.asList(firstDelete, secondDelete)));
}
/**
 * Returns the cache registered under the given name, creating it via the
 * configured {@code cacheFactoryStrategy} when it does not exist yet.
 *
 * <p>Synchronized so concurrent callers cannot create the same cache twice.
 *
 * @param name the cache name
 * @return the existing or newly created cache
 */
@SuppressWarnings("unchecked")
public static synchronized <T extends Cache> T createCache(String name) {
    T cache = (T) caches.get(name);
    if (cache != null) {
        return cache;
    }
    cache = (T) cacheFactoryStrategy.createCache(name);

    log.info("Created cache [" + cacheFactoryStrategy.getClass().getName() + "] for " + name);

    // wrapCache presumably registers the wrapped cache in `caches` so the next
    // lookup hits the fast path — TODO confirm against wrapCache's implementation.
    return wrapCache(cache, name);
}
@Test public void testCacheRecreation() throws Exception { // Setup test fixture. final String name = "unittest-cache-recreation"; // Execute system under test. final Cache resultA = CacheFactory.createCache(name); final Cache resultB = CacheFactory.createCache(name); // Verify results. assertNotNull(resultB); assertSame(resultA, resultB); }
/**
 * Creates a new component {@link Builder} for the given component type.
 *
 * @param type the type of component to build
 * @return a fresh builder
 */
public static Builder builder(Type type) {
    return new Builder(type);
}
@Test
public void build_without_key_throws_NPE_if_component_arg_is_Null() {
    // Building a FILE component with a uuid but no key must fail fast with NPE.
    assertThatThrownBy(() -> builder(FILE).setUuid("ABCD").build())
        .isInstanceOf(NullPointerException.class);
}