focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Udf(description = "Returns the INT base raised to the INT exponent.") public Double power( @UdfParameter( value = "base", description = "the base of the power." ) final Integer base, @UdfParameter( value = "exponent", description = "the exponent of the power." ) final Integer exponent ) {
  // Widen each INT operand to Double — propagating SQL NULL as Java null —
  // and defer to the Double-based overload for the actual computation.
  final Double widenedBase = (base == null) ? null : base.doubleValue();
  final Double widenedExponent = (exponent == null) ? null : exponent.doubleValue();
  return power(widenedBase, widenedExponent);
}
// Verifies SQL NULL propagation: a null base or null exponent must yield null
// for every numeric overload (Integer, Long, Double).
@Test public void shouldHandleNull() { assertThat(udf.power(null, 13), is(nullValue())); assertThat(udf.power(null, 13L), is(nullValue())); assertThat(udf.power(null, 13.0), is(nullValue())); assertThat(udf.power(13, null), is(nullValue())); assertThat(udf.power(13L, null), is(nullValue())); assertThat(udf.power(13.0, null), is(nullValue())); }
/**
 * Returns whether the given attempt is the one currently designated to commit.
 * Reads {@code commitAttempt} under the read lock; when no attempt has been
 * designated yet, returns false without logging (matching prior behavior).
 */
@Override
public boolean canCommit(TaskAttemptId taskAttemptID) {
  readLock.lock();
  try {
    if (commitAttempt == null) {
      return false;
    }
    final boolean result = taskAttemptID.equals(commitAttempt);
    LOG.info("Result of canCommit for " + taskAttemptID + ":" + result);
    return result;
  } finally {
    readLock.unlock();
  }
}
// Drives a reduce attempt to COMMIT_PENDING, kills it mid-commit, and asserts
// that the killed attempt is no longer allowed to commit. The state updates
// are order-sensitive: the commit must be registered before the kill.
@Test public void testKillDuringTaskAttemptCommit() { mockTask = createMockTask(TaskType.REDUCE); TaskId taskId = getNewTaskID(); scheduleTaskAttempt(taskId); launchTaskAttempt(getLastAttempt().getAttemptId()); updateLastAttemptState(TaskAttemptState.COMMIT_PENDING); commitTaskAttempt(getLastAttempt().getAttemptId()); TaskAttemptId commitAttempt = getLastAttempt().getAttemptId(); updateLastAttemptState(TaskAttemptState.KILLED); killRunningTaskAttempt(commitAttempt); assertFalse(mockTask.canCommit(commitAttempt)); }
/**
 * Creates a new {@link PipelineOptions} proxy implementing the given interface.
 *
 * @param klass the options sub-interface to instantiate
 * @return a fresh options instance backed by a new {@code Builder}
 */
public static <T extends PipelineOptions> T as(Class<T> klass) {
  final Builder builder = new Builder();
  return builder.as(klass);
}
// Asserts that a property interface missing both a getter and a setter is
// rejected, and that the error message names each missing accessor and its type.
@Test public void testMissingGettersAndSettersThrows() { expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage( "missing property methods on [org.apache.beam.sdk.options." + "PipelineOptionsFactoryTest$MissingGettersAndSetters]"); expectedException.expectMessage("getter for property [object] of type [java.lang.Object]"); expectedException.expectMessage("setter for property [otherObject] of type [java.lang.Object]"); PipelineOptionsFactory.as(MissingGettersAndSetters.class); }
/**
 * Reports this task's current status to the parent over the umbilical protocol,
 * retrying transient IOExceptions up to MAX_RETRIES times.
 *
 * <p>If the parent reports the task as not found: in uber mode the loop simply
 * breaks (the task runs inside the AM process), otherwise the parent is assumed
 * dead and the JVM terminates with exit code 66. On success the transmitted
 * status is cleared before returning. InterruptedException re-sets the interrupt
 * flag and retries.
 */
public void statusUpdate(TaskUmbilicalProtocol umbilical) throws IOException { int retries = MAX_RETRIES; while (true) { try { if (!umbilical.statusUpdate(getTaskID(), taskStatus).getTaskFound()) { if (uberized) { LOG.warn("Task no longer available: " + taskId); break; } else { LOG.warn("Parent died. Exiting " + taskId); ExitUtil.terminate(66); } } taskStatus.clearStatus(); return; } catch (InterruptedException ie) { Thread.currentThread().interrupt(); // interrupt ourself } catch (IOException ie) { LOG.warn("Failure sending status update: " + StringUtils.stringifyException(ie)); if (--retries == 0) { throw ie; } } } }
@Test
public void testStatusUpdateDoesNotExitInUberMode() throws Exception {
  // Arrange the task in uber mode; the update must complete without
  // terminating the JVM even when the task is reported as not found.
  final boolean uberized = true;
  setupTest(uberized);
  task.statusUpdate(umbilical);
}
/**
 * Decodes one archive control-protocol request from {@code buffer} at
 * {@code offset} and appends a human-readable description to {@code builder}.
 *
 * <p>Every branch follows the same shape: wrap the event-specific SBE decoder
 * just past the message header, then delegate to the matching append helper.
 * Unknown event codes append ": unknown command" instead of throwing.
 *
 * @param eventCode which control request the buffer holds
 * @param buffer    log buffer containing the encoded event
 * @param offset    start of the event (log header first)
 * @param builder   destination for the dissected text
 */
@SuppressWarnings("MethodLength") static void dissectControlRequest( final ArchiveEventCode eventCode, final MutableDirectBuffer buffer, final int offset, final StringBuilder builder) { int encodedLength = dissectLogHeader(CONTEXT, eventCode, buffer, offset, builder); HEADER_DECODER.wrap(buffer, offset + encodedLength); encodedLength += MessageHeaderDecoder.ENCODED_LENGTH; switch (eventCode) { case CMD_IN_CONNECT: CONNECT_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendConnect(builder); break; case CMD_IN_CLOSE_SESSION: CLOSE_SESSION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendCloseSession(builder); break; case CMD_IN_START_RECORDING: START_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartRecording(builder); break; case CMD_IN_STOP_RECORDING: STOP_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopRecording(builder); break; case CMD_IN_REPLAY: REPLAY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendReplay(builder); break; case CMD_IN_STOP_REPLAY: STOP_REPLAY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopReplay(builder); break; case CMD_IN_LIST_RECORDINGS: LIST_RECORDINGS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendListRecordings(builder); break; case CMD_IN_LIST_RECORDINGS_FOR_URI: LIST_RECORDINGS_FOR_URI_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendListRecordingsForUri(builder); break; case CMD_IN_LIST_RECORDING: LIST_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, 
HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendListRecording(builder); break; case CMD_IN_EXTEND_RECORDING: EXTEND_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendExtendRecording(builder); break; case CMD_IN_RECORDING_POSITION: RECORDING_POSITION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendRecordingPosition(builder); break; case CMD_IN_TRUNCATE_RECORDING: TRUNCATE_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendTruncateRecording(builder); break; case CMD_IN_STOP_RECORDING_SUBSCRIPTION: STOP_RECORDING_SUBSCRIPTION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopRecordingSubscription(builder); break; case CMD_IN_STOP_POSITION: STOP_POSITION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopPosition(builder); break; case CMD_IN_FIND_LAST_MATCHING_RECORD: FIND_LAST_MATCHING_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendFindLastMatchingRecord(builder); break; case CMD_IN_LIST_RECORDING_SUBSCRIPTIONS: LIST_RECORDING_SUBSCRIPTIONS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendListRecordingSubscriptions(builder); break; case CMD_IN_START_BOUNDED_REPLAY: BOUNDED_REPLAY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartBoundedReplay(builder); break; case CMD_IN_STOP_ALL_REPLAYS: STOP_ALL_REPLAYS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopAllReplays(builder); break; case CMD_IN_REPLICATE: 
REPLICATE_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendReplicate(builder); break; case CMD_IN_STOP_REPLICATION: STOP_REPLICATION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopReplication(builder); break; case CMD_IN_START_POSITION: START_POSITION_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartPosition(builder); break; case CMD_IN_DETACH_SEGMENTS: DETACH_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendDetachSegments(builder); break; case CMD_IN_DELETE_DETACHED_SEGMENTS: DELETE_DETACHED_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendDeleteDetachedSegments(builder); break; case CMD_IN_PURGE_SEGMENTS: PURGE_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendPurgeSegments(builder); break; case CMD_IN_ATTACH_SEGMENTS: ATTACH_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendAttachSegments(builder); break; case CMD_IN_MIGRATE_SEGMENTS: MIGRATE_SEGMENTS_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendMigrateSegments(builder); break; case CMD_IN_AUTH_CONNECT: AUTH_CONNECT_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendAuthConnect(builder); break; case CMD_IN_KEEP_ALIVE: KEEP_ALIVE_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendKeepAlive(builder); break; case CMD_IN_TAGGED_REPLICATE: TAGGED_REPLICATE_REQUEST_DECODER.wrap( buffer, offset + 
encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendTaggedReplicate(builder); break; case CMD_IN_START_RECORDING2: START_RECORDING_REQUEST2_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStartRecording2(builder); break; case CMD_IN_EXTEND_RECORDING2: EXTEND_RECORDING_REQUEST2_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendExtendRecording2(builder); break; case CMD_IN_STOP_RECORDING_BY_IDENTITY: STOP_RECORDING_BY_IDENTITY_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendStopRecordingByIdentity(builder); break; case CMD_IN_PURGE_RECORDING: PURGE_RECORDING_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendPurgeRecording(builder); break; case CMD_IN_REPLICATE2: REPLICATE_REQUEST2_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendReplicate2(builder); break; case CMD_IN_REQUEST_REPLAY_TOKEN: REPLAY_TOKEN_REQUEST_DECODER.wrap( buffer, offset + encodedLength, HEADER_DECODER.blockLength(), HEADER_DECODER.version()); appendReplayToken(builder); break; default: builder.append(": unknown command"); } }
// Encodes a StopAllReplays request into the log buffer, dissects it, and
// asserts the exact rendered text (timestamp, context, event name, lengths,
// and all three request fields).
@Test void controlRequestStopAllReplays() { internalEncodeLogHeader(buffer, 0, 90, 90, () -> 10_325_000_000L); final StopAllReplaysRequestEncoder requestEncoder = new StopAllReplaysRequestEncoder(); requestEncoder.wrapAndApplyHeader(buffer, LOG_HEADER_LENGTH, headerEncoder) .controlSessionId(10) .correlationId(20) .recordingId(30); dissectControlRequest(CMD_IN_STOP_ALL_REPLAYS, buffer, 0, builder); assertEquals("[10.325000000] " + CONTEXT + ": " + CMD_IN_STOP_ALL_REPLAYS.name() + " [90/90]:" + " controlSessionId=10" + " correlationId=20" + " recordingId=30", builder.toString()); }
/**
 * Looks up the value stored under the composite (key1, key2) key by
 * delegating to the base implementation's two-key lookup.
 */
@Override
public long get(long key1, long key2) {
  final long value = super.get0(key1, key2);
  return value;
}
@Test
public void testGotoNew() {
  // Dispose and re-create the structure, then confirm a lookup resolves to
  // the address of a freshly inserted slot.
  hsa.dispose();
  hsa.gotoNew();
  final SlotAssignmentResult slot = insert(1, 2);
  final long valueAddressFromGet = hsa.get(1, 2);
  assertEquals(slot.address(), valueAddressFromGet);
}
/**
 * Evicts consumer channels whose last heartbeat is older than
 * {@code channelExpiredTimeout}: notifies the unregister listener, closes the
 * channel, and removes it via the iterator (safe concurrent-map removal).
 * A group whose channel table ends up empty is removed from the consumer table,
 * then expired group metadata is cleaned up.
 */
public void scanNotActiveChannel() { Iterator<Entry<String, ConsumerGroupInfo>> it = this.consumerTable.entrySet().iterator(); while (it.hasNext()) { Entry<String, ConsumerGroupInfo> next = it.next(); String group = next.getKey(); ConsumerGroupInfo consumerGroupInfo = next.getValue(); ConcurrentMap<Channel, ClientChannelInfo> channelInfoTable = consumerGroupInfo.getChannelInfoTable(); Iterator<Entry<Channel, ClientChannelInfo>> itChannel = channelInfoTable.entrySet().iterator(); while (itChannel.hasNext()) { Entry<Channel, ClientChannelInfo> nextChannel = itChannel.next(); ClientChannelInfo clientChannelInfo = nextChannel.getValue(); long diff = System.currentTimeMillis() - clientChannelInfo.getLastUpdateTimestamp(); if (diff > channelExpiredTimeout) { LOGGER.warn( "SCAN: remove expired channel from ConsumerManager consumerTable. channel={}, consumerGroup={}", RemotingHelper.parseChannelRemoteAddr(clientChannelInfo.getChannel()), group); callConsumerIdsChangeListener(ConsumerGroupEvent.CLIENT_UNREGISTER, group, clientChannelInfo, consumerGroupInfo.getSubscribeTopics()); RemotingHelper.closeChannel(clientChannelInfo.getChannel()); itChannel.remove(); } } if (channelInfoTable.isEmpty()) { LOGGER.warn( "SCAN: remove expired channel from ConsumerManager consumerTable, all clear, consumerGroup={}", group); it.remove(); } } removeExpireConsumerGroupInfo(); }
@Test
public void scanNotActiveChannelTest() {
  // Age the channel to twice the expiry window so the scan must evict it,
  // leaving the consumer table empty.
  final long expiredTimestamp =
      System.currentTimeMillis() - brokerConfig.getChannelExpiredTimeout() * 2;
  clientChannelInfo.setLastUpdateTimestamp(expiredTimestamp);
  consumerManager.scanNotActiveChannel();
  Assertions.assertThat(consumerManager.getConsumerTable().size()).isEqualTo(0);
}
/**
 * Converts stored account badges into localized {@link Badge} DTOs.
 *
 * <p>Includes a stored badge only when it is visible (or the caller is viewing
 * their own profile), unexpired, and configured in {@code knownBadges}. Badges
 * in {@code badgeIdsEnabledForAll} are appended for everyone with a synthetic
 * one-day expiration and visible=true. Names/descriptions come from the
 * locale-negotiated resource bundle via "&lt;id&gt;_name" / "&lt;id&gt;_description" keys.
 */
@Override public List<Badge> convert( final List<Locale> acceptableLanguages, final List<AccountBadge> accountBadges, final boolean isSelf) { if (accountBadges.isEmpty() && badgeIdsEnabledForAll.isEmpty()) { return List.of(); } final Instant now = clock.instant(); final ResourceBundle resourceBundle = headerControlledResourceBundleLookup.getResourceBundle(BASE_NAME, acceptableLanguages); List<Badge> badges = accountBadges.stream() .filter(accountBadge -> (isSelf || accountBadge.isVisible()) && now.isBefore(accountBadge.getExpiration()) && knownBadges.containsKey(accountBadge.getId())) .map(accountBadge -> { BadgeConfiguration configuration = knownBadges.get(accountBadge.getId()); return newBadge( isSelf, accountBadge.getId(), configuration.getCategory(), resourceBundle.getString(accountBadge.getId() + "_name"), resourceBundle.getString(accountBadge.getId() + "_description"), configuration.getSprites(), configuration.getSvg(), configuration.getSvgs(), accountBadge.getExpiration(), accountBadge.isVisible()); }) .collect(Collectors.toCollection(ArrayList::new)); badges.addAll(badgeIdsEnabledForAll.stream().filter(knownBadges::containsKey).map(id -> { BadgeConfiguration configuration = knownBadges.get(id); return newBadge( isSelf, id, configuration.getCategory(), resourceBundle.getString(id + "_name"), resourceBundle.getString(id + "_description"), configuration.getSprites(), configuration.getSvg(), configuration.getSvgs(), now.plus(Duration.ofDays(1)), true); }).collect(Collectors.toList())); return badges; }
@Test
void testConvertEmptyList() {
  // With no stored badges and isSelf == false, the converter must return an
  // empty — but never null — list.
  BadgesConfiguration badgesConfiguration = createBadges(1);
  HeaderControlledResourceBundleLookup bundleLookup =
      new HeaderControlledResourceBundleLookup(resourceBundleFactory);
  ConfiguredProfileBadgeConverter badgeConverter =
      new ConfiguredProfileBadgeConverter(clock, badgesConfiguration, bundleLookup);
  assertThat(badgeConverter.convert(List.of(Locale.getDefault()), List.of(), false)).isNotNull().isEmpty();
}
/**
 * Records one enumeration's split count in the circular history buffer.
 *
 * <p>Uses {@link Math#floorMod} instead of {@code %}: once {@code count}
 * overflows past {@code Integer.MAX_VALUE} it goes negative, and a plain
 * {@code %} would then produce a negative index and throw
 * {@link ArrayIndexOutOfBoundsException}. floorMod is identical to {@code %}
 * for non-negative counts, so normal behavior is unchanged.
 */
synchronized void add(int splitCount) {
  int pos = Math.floorMod(count, history.length);
  history[pos] = splitCount;
  count += 1;
}
// Adding one more entry than the capacity (4 adds into a 3-slot buffer) must
// wrap around and overwrite the oldest value, leaving {2, 3, 4}.
@Test public void testOneMoreThanFullHistory() { EnumerationHistory history = new EnumerationHistory(3); history.add(1); history.add(2); history.add(3); history.add(4); int[] expectedHistorySnapshot = {2, 3, 4}; testHistory(history, expectedHistorySnapshot); }
/**
 * Convenience overload that builds the BigQuery snippet pipeline with empty
 * project/dataset/table placeholders — suitable for construction-only checks
 * where the pipeline is never run against a real table.
 */
public static void modelBigQueryIO(Pipeline p) { modelBigQueryIO(p, "", "", ""); }
// Construction-only smoke test: by default (runLocally == false) it just
// verifies the BigQuery pipeline can be built; flipping runLocally and filling
// in project/dataset/table runs it for real.
@Test public void testModelBigQueryIO() { // We cannot test BigQueryIO functionality in unit tests, therefore we limit ourselves // to making sure the pipeline containing BigQuery sources and sinks can be built. // // To run locally, set `runLocally` to `true`. You will have to set `project`, `dataset` and // `table` to the BigQuery table the test will write into. boolean runLocally = false; if (runLocally) { String project = "my-project"; String dataset = "samples"; // this must already exist String table = "modelBigQueryIO"; // this will be created if needed BigQueryOptions options = PipelineOptionsFactory.create().as(BigQueryOptions.class); options.setProject(project); options.setTempLocation("gs://" + project + "/samples/temp/"); Pipeline p = Pipeline.create(options); Snippets.modelBigQueryIO(p, project, dataset, table); p.run(); } else { Pipeline p = Pipeline.create(); Snippets.modelBigQueryIO(p); } }
/**
 * Builds the "System" section of the system-info protobuf: server identity and
 * version, edition, lines of code, container/authentication/identity-provider
 * details, key directories, and processor count. Empty identity-provider lists
 * are omitted via {@code addIfNotEmpty}; path attributes fall back to null
 * when not configured.
 */
@Override public ProtobufSystemInfo.Section toProtobuf() { ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder(); protobuf.setName("System"); setAttribute(protobuf, "Server ID", server.getId()); setAttribute(protobuf, "Version", getVersion()); setAttribute(protobuf, "Edition", sonarRuntime.getEdition().getLabel()); setAttribute(protobuf, NCLOC.getName(), statisticsSupport.getLinesOfCode()); setAttribute(protobuf, "Container", containerSupport.isRunningInContainer()); setAttribute(protobuf, "External Users and Groups Provisioning", commonSystemInformation.getManagedInstanceProviderName()); setAttribute(protobuf, "External User Authentication", commonSystemInformation.getExternalUserAuthentication()); addIfNotEmpty(protobuf, "Accepted external identity providers", commonSystemInformation.getEnabledIdentityProviders()); addIfNotEmpty(protobuf, "External identity providers whose users are allowed to sign themselves up", commonSystemInformation.getAllowsToSignUpEnabledIdentityProviders()); setAttribute(protobuf, "High Availability", false); setAttribute(protobuf, "Official Distribution", officialDistribution.check()); setAttribute(protobuf, "Force authentication", commonSystemInformation.getForceAuthentication()); setAttribute(protobuf, "Home Dir", config.get(PATH_HOME.getKey()).orElse(null)); setAttribute(protobuf, "Data Dir", config.get(PATH_DATA.getKey()).orElse(null)); setAttribute(protobuf, "Temp Dir", config.get(PATH_TEMP.getKey()).orElse(null)); setAttribute(protobuf, "Processors", Runtime.getRuntime().availableProcessors()); return protobuf.build(); }
@Test
public void toProtobuf_whenAllowsToSignUpEnabledIdentityProviders_shouldWriteThem() {
  // Given one provider that allows self sign-up, the section must carry it
  // under the dedicated attribute.
  final List<String> signUpProviders = List.of("GitHub");
  when(commonSystemInformation.getAllowsToSignUpEnabledIdentityProviders()).thenReturn(signUpProviders);
  ProtobufSystemInfo.Section protobuf = underTest.toProtobuf();
  assertThatAttributeIs(protobuf,
      "External identity providers whose users are allowed to sign themselves up", "GitHub");
}
/**
 * Requests one work item from the Dataflow service.
 *
 * <p>Advertises the supported work-item types and this worker's capabilities
 * (worker id, remote/custom source support, and the worker pool when set).
 * Returns {@code Optional.empty()} when no work is available or the returned
 * item lacks an id. On success, records the stage name and work id in the
 * logging MDC and notes the stage start time.
 */
@Override public Optional<WorkItem> getWorkItem() throws IOException { List<String> workItemTypes = ImmutableList.of( WORK_ITEM_TYPE_MAP_TASK, WORK_ITEM_TYPE_SEQ_MAP_TASK, WORK_ITEM_TYPE_REMOTE_SOURCE_TASK); // All remote sources require the "remote_source" capability. Dataflow's // custom sources are further tagged with the format "custom_source". List<String> capabilities = new ArrayList<String>( Arrays.asList( options.getWorkerId(), CAPABILITY_REMOTE_SOURCE, PropertyNames.CUSTOM_SOURCE_FORMAT)); if (options.getWorkerPool() != null) { capabilities.add(options.getWorkerPool()); } Optional<WorkItem> workItem = getWorkItemInternal(workItemTypes, capabilities); if (!workItem.isPresent()) { // Normal case, this means that the response contained no work, i.e. no work is available // at this time. return Optional.empty(); } if (workItem.get().getId() == null) { logger.debug("Discarding invalid work item {}", workItem.get()); return Optional.empty(); } WorkItem work = workItem.get(); final String stage; if (work.getMapTask() != null) { stage = work.getMapTask().getStageName(); logger.info("Starting MapTask stage {}", stage); } else if (work.getSeqMapTask() != null) { stage = work.getSeqMapTask().getStageName(); logger.info("Starting SeqMapTask stage {}", stage); } else if (work.getSourceOperationTask() != null) { stage = work.getSourceOperationTask().getStageName(); logger.info("Starting SourceOperationTask stage {}", stage); } else { stage = null; } DataflowWorkerLoggingMDC.setStageName(stage); stageStartTime.set(DateTime.now()); DataflowWorkerLoggingMDC.setWorkId(Long.toString(work.getId())); return workItem; }
// A service response carrying two work items violates the client's
// one-item-per-lease contract and must surface as an IOException with the
// documented message.
@Test public void testCloudServiceCallMultipleWorkItems() throws Exception { expectedException.expect(IOException.class); expectedException.expectMessage( "This version of the SDK expects no more than one work item from the service"); WorkItem workItem1 = createWorkItem(PROJECT_ID, JOB_ID); WorkItem workItem2 = createWorkItem(PROJECT_ID, JOB_ID); MockLowLevelHttpResponse response = generateMockResponse(workItem1, workItem2); MockLowLevelHttpRequest request = new MockLowLevelHttpRequest().setResponse(response); MockHttpTransport transport = new MockHttpTransport.Builder().setLowLevelHttpRequest(request).build(); DataflowWorkerHarnessOptions pipelineOptions = createPipelineOptionsWithTransport(transport); WorkUnitClient client = new DataflowWorkUnitClient(pipelineOptions, LOG); client.getWorkItem(); }
/**
 * Detects partition groups that have no CONSUMING segment in the ideal state.
 *
 * <p>First indexes, per partition group, the latest CONSUMING and latest
 * completed (ONLINE) LLC segment. Then, when stream offsets are available, a
 * partition without a consuming segment counts as missing only if it is new
 * (no completed segment either) or its completed segment's end offset lags the
 * largest stream offset. Without stream offsets (e.g. the stream connection
 * failed), any partition with a completed-but-no-consuming segment counts as
 * missing. Non-LLC segment names are skipped.
 */
@VisibleForTesting MissingSegmentInfo findMissingSegments(Map<String, Map<String, String>> idealStateMap, Instant now) { // create the maps Map<Integer, LLCSegmentName> partitionGroupIdToLatestConsumingSegmentMap = new HashMap<>(); Map<Integer, LLCSegmentName> partitionGroupIdToLatestCompletedSegmentMap = new HashMap<>(); idealStateMap.forEach((segmentName, instanceToStatusMap) -> { LLCSegmentName llcSegmentName = LLCSegmentName.of(segmentName); if (llcSegmentName != null) { // Skip the uploaded realtime segments that don't conform to llc naming if (instanceToStatusMap.containsValue(SegmentStateModel.CONSUMING)) { updateMap(partitionGroupIdToLatestConsumingSegmentMap, llcSegmentName); } else if (instanceToStatusMap.containsValue(SegmentStateModel.ONLINE)) { updateMap(partitionGroupIdToLatestCompletedSegmentMap, llcSegmentName); } } }); MissingSegmentInfo missingSegmentInfo = new MissingSegmentInfo(); if (!_partitionGroupIdToLargestStreamOffsetMap.isEmpty()) { _partitionGroupIdToLargestStreamOffsetMap.forEach((partitionGroupId, largestStreamOffset) -> { if (!partitionGroupIdToLatestConsumingSegmentMap.containsKey(partitionGroupId)) { LLCSegmentName latestCompletedSegment = partitionGroupIdToLatestCompletedSegmentMap.get(partitionGroupId); if (latestCompletedSegment == null) { // There's no consuming or completed segment for this partition group. Possibilities: // 1) it's a new partition group that has not yet been detected // 2) the first consuming segment has been deleted from ideal state manually missingSegmentInfo._newPartitionGroupCount++; missingSegmentInfo._totalCount++; } else { // Completed segment is available, but there's no consuming segment. // Note that there is no problem in case the partition group has reached its end of life. 
SegmentZKMetadata segmentZKMetadata = _segmentMetadataFetcher .fetchSegmentZkMetadata(_realtimeTableName, latestCompletedSegment.getSegmentName()); StreamPartitionMsgOffset completedSegmentEndOffset = _streamPartitionMsgOffsetFactory.create(segmentZKMetadata.getEndOffset()); if (completedSegmentEndOffset.compareTo(largestStreamOffset) < 0) { // there are unconsumed messages available on the stream missingSegmentInfo._totalCount++; updateMaxDurationInfo(missingSegmentInfo, partitionGroupId, segmentZKMetadata.getCreationTime(), now); } } } }); } else { partitionGroupIdToLatestCompletedSegmentMap.forEach((partitionGroupId, latestCompletedSegment) -> { if (!partitionGroupIdToLatestConsumingSegmentMap.containsKey(partitionGroupId)) { missingSegmentInfo._totalCount++; long segmentCompletionTimeMillis = _segmentMetadataFetcher .fetchSegmentCompletionTime(_realtimeTableName, latestCompletedSegment.getSegmentName()); updateMaxDurationInfo(missingSegmentInfo, partitionGroupId, segmentCompletionTimeMillis, now); } }); } return missingSegmentInfo; }
// Every partition (0–3) has a CONSUMING segment in ideal state, so even with
// no stream-offset map (stream connection unavailable) the finder must report
// zero missing segments, zero new partition groups, and zero max duration.
@Test public void noMissingConsumingSegmentsScenario1() { // scenario 1: no missing segments, but connecting to stream throws exception // only ideal state info is used Map<String, Map<String, String>> idealStateMap = new HashMap<>(); // partition 0 idealStateMap.put("tableA__0__0__20220601T0900Z", ImmutableMap.of("ServerX", "ONLINE", "ServerY", "ONLINE")); idealStateMap.put("tableA__0__1__20220601T1200Z", ImmutableMap.of("ServerX", "ONLINE", "ServerY", "ONLINE")); idealStateMap.put("tableA__0__2__20220601T1500Z", ImmutableMap.of("ServerX", "CONSUMING", "ServerY", "CONSUMING")); // partition 1 idealStateMap.put("tableA__1__0__20220601T0900Z", ImmutableMap.of("ServerX", "ONLINE", "ServerY", "ONLINE")); idealStateMap.put("tableA__1__1__20220601T1200Z", ImmutableMap.of("ServerX", "ONLINE", "ServerY", "ONLINE")); idealStateMap.put("tableA__1__2__20220601T1500Z", ImmutableMap.of("ServerX", "CONSUMING", "ServerY", "CONSUMING")); // partition 2 idealStateMap.put("tableA__2__0__20220601T0900Z", ImmutableMap.of("ServerX", "ONLINE", "ServerY", "ONLINE")); idealStateMap.put("tableA__2__1__20220601T1200Z", ImmutableMap.of("ServerX", "ONLINE", "ServerY", "ONLINE")); idealStateMap.put("tableA__2__2__20220601T1500Z", ImmutableMap.of("ServerX", "CONSUMING", "ServerY", "CONSUMING")); // partition 3 idealStateMap.put("tableA__3__0__20220601T0900Z", ImmutableMap.of("ServerX", "ONLINE", "ServerY", "ONLINE")); idealStateMap.put("tableA__3__1__20220601T1200Z", ImmutableMap.of("ServerX", "ONLINE", "ServerY", "ONLINE")); idealStateMap.put("tableA__3__2__20220601T1500Z", ImmutableMap.of("ServerX", "CONSUMING", "ServerY", "CONSUMING")); Instant now = Instant.parse("2022-06-01T18:00:00.00Z"); MissingConsumingSegmentFinder finder = new MissingConsumingSegmentFinder("tableA", null, new HashMap<>(), null); MissingConsumingSegmentFinder.MissingSegmentInfo info = finder.findMissingSegments(idealStateMap, now); assertEquals(info._totalCount, 0); assertEquals(info._newPartitionGroupCount, 0); 
assertEquals(info._maxDurationInMinutes, 0); }
/**
 * Validates a collection name against length, character-set, first-character
 * and reserved-prefix rules, throwing {@link IllegalArgumentException} with a
 * descriptive message on the first violation.
 */
static void checkValidCollectionName(String databaseName, String collectionName) {
  if (collectionName.length() < MIN_COLLECTION_NAME_LENGTH) {
    throw new IllegalArgumentException("Collection name cannot be empty.");
  }
  // The length cap applies to the fully qualified "<database>.<collection>" name.
  final String fullCollectionName = databaseName + "." + collectionName;
  if (fullCollectionName.length() > MAX_COLLECTION_NAME_LENGTH) {
    throw new IllegalArgumentException(
        "Collection name "
            + fullCollectionName
            + " cannot be longer than "
            + MAX_COLLECTION_NAME_LENGTH
            + " characters, including the database name and dot.");
  }
  if (ILLEGAL_COLLECTION_CHARS.matcher(collectionName).find()) {
    throw new IllegalArgumentException(
        "Collection name "
            + collectionName
            + " is not a valid name. Only letters, numbers, hyphens, underscores and exclamation points are allowed.");
  }
  final char firstChar = collectionName.charAt(0);
  if (firstChar != '_' && !Character.isLetter(firstChar)) {
    throw new IllegalArgumentException(
        "Collection name " + collectionName + " must start with a letter or an underscore.");
  }
  final String illegalKeyword = "system.";
  if (collectionName.startsWith(illegalKeyword)) {
    throw new IllegalArgumentException(
        "Collection name "
            + collectionName
            + " cannot start with the prefix \""
            + illegalKeyword
            + "\".");
  }
}
@Test
public void testCheckValidCollectionNameThrowsErrorWhenNameDoesNotBeginWithLetterOrUnderscore() {
  // A leading digit violates the "letter or underscore" first-character rule.
  final String invalidName = "1test-collection";
  assertThrows(
      IllegalArgumentException.class,
      () -> checkValidCollectionName("test-database", invalidName));
}
/**
 * Writes a length-prefixed binary payload: a fixed-width byte-count prefix
 * followed by the payload, which the wrapped TCP client encodes and sends.
 *
 * <p>NOTE(review): the prefix is {@code s.length()/2}, which assumes {@code s}
 * is a hex-encoded string (two characters per byte) — confirm against
 * intToByteArray's contract and the paired read side.
 */
@Override public void write(OutputStream os, String s) throws IOException{ os.write(intToByteArray(s.length()/2,lengthPrefixLen)); if(log.isDebugEnabled()) { log.debug("Wrote: " + s.length()/2 + " bytes"); } this.tcpClient.write(os, s); }
// The (OutputStream, InputStream) overload is expected to be unsupported for
// this implementation; calling it (args are null and never dereferenced) must
// raise UnsupportedOperationException.
@Test public void testError() throws Exception { ByteArrayOutputStream os = null; ByteArrayInputStream is = null; LengthPrefixedBinaryTCPClientImpl lp = new LengthPrefixedBinaryTCPClientImpl(); try { lp.write(os, is); fail("Expected java.lang.UnsupportedOperationException"); } catch (java.lang.UnsupportedOperationException expected) { } }
@SqlNullable
@Description("Returns the geometry element at the specified index (indices started with 1)")
@ScalarFunction("ST_GeometryN")
@SqlType(GEOMETRY_TYPE_NAME)
public static Slice stGeometryN(@SqlType(GEOMETRY_TYPE_NAME) Slice input, @SqlType(INTEGER) long index)
{
    Geometry geometry = deserialize(input);
    // Empty geometries have no elements at any index.
    if (geometry.isEmpty()) {
        return null;
    }
    GeometryType type = GeometryType.getForJtsGeometryType(geometry.getGeometryType());
    if (!type.isMultitype()) {
        // A singleton geometry is its own (and only) element; anything else is out of range.
        return (index == 1) ? input : null;
    }
    GeometryCollection geometryCollection = (GeometryCollection) geometry;
    boolean indexInRange = index >= 1 && index <= geometryCollection.getNumGeometries();
    if (!indexInRange) {
        return null;
    }
    // SQL indices are 1-based; JTS element access is 0-based.
    return serialize(geometryCollection.getGeometryN((int) index - 1));
}
// Exhaustive ST_GeometryN coverage: empty geometries always yield null;
// singleton geometries return themselves only at index 1; multi-geometries
// return the 1-based element, with null for 0, negative, and past-the-end
// indices; geometry collections behave like multi-geometries.
@Test public void testSTGeometryN() { assertSTGeometryN("POINT EMPTY", 1, null); assertSTGeometryN("LINESTRING EMPTY", 1, null); assertSTGeometryN("POLYGON EMPTY", 1, null); assertSTGeometryN("MULTIPOINT EMPTY", 1, null); assertSTGeometryN("MULTILINESTRING EMPTY", 1, null); assertSTGeometryN("MULTIPOLYGON EMPTY", 1, null); assertSTGeometryN("POINT EMPTY", 0, null); assertSTGeometryN("LINESTRING EMPTY", 0, null); assertSTGeometryN("POLYGON EMPTY", 0, null); assertSTGeometryN("MULTIPOINT EMPTY", 0, null); assertSTGeometryN("MULTILINESTRING EMPTY", 0, null); assertSTGeometryN("MULTIPOLYGON EMPTY", 0, null); assertSTGeometryN("POINT (1 2)", 1, "POINT (1 2)"); assertSTGeometryN("POINT (1 2)", -1, null); assertSTGeometryN("POINT (1 2)", 2, null); assertSTGeometryN("LINESTRING(77.29 29.07, 77.42 29.26, 77.27 29.31, 77.29 29.07)", 1, "LINESTRING (77.29 29.07, 77.42 29.26, 77.27 29.31, 77.29 29.07)"); assertSTGeometryN("LINESTRING(77.29 29.07, 77.42 29.26, 77.27 29.31, 77.29 29.07)", 2, null); assertSTGeometryN("LINESTRING(77.29 29.07, 77.42 29.26, 77.27 29.31, 77.29 29.07)", -1, null); assertSTGeometryN("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))", 1, "POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))"); assertSTGeometryN("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))", 2, null); assertSTGeometryN("POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))", -1, null); assertSTGeometryN("MULTIPOINT (1 2, 2 4, 3 6, 4 8)", 1, "POINT (1 2)"); assertSTGeometryN("MULTIPOINT (1 2, 2 4, 3 6, 4 8)", 2, "POINT (2 4)"); assertSTGeometryN("MULTIPOINT (1 2, 2 4, 3 6, 4 8)", 0, null); assertSTGeometryN("MULTIPOINT (1 2, 2 4, 3 6, 4 8)", 5, null); assertSTGeometryN("MULTIPOINT (1 2, 2 4, 3 6, 4 8)", -1, null); assertSTGeometryN("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", 1, "LINESTRING (1 1, 5 1)"); assertSTGeometryN("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", 2, "LINESTRING (2 4, 4 4)"); assertSTGeometryN("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", 0, null); assertSTGeometryN("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", 3, null); 
assertSTGeometryN("MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))", -1, null); assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", 1, "POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))"); assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", 2, "POLYGON ((2 4, 2 6, 6 6, 6 4, 2 4))"); assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", 0, null); assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", 3, null); assertSTGeometryN("MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((2 4, 2 6, 6 6, 6 4, 2 4)))", -1, null); assertSTGeometryN("GEOMETRYCOLLECTION(POINT(2 3), LINESTRING (2 3, 3 4))", 1, "POINT (2 3)"); assertSTGeometryN("GEOMETRYCOLLECTION(POINT(2 3), LINESTRING (2 3, 3 4))", 2, "LINESTRING (2 3, 3 4)"); assertSTGeometryN("GEOMETRYCOLLECTION(POINT(2 3), LINESTRING (2 3, 3 4))", 3, null); assertSTGeometryN("GEOMETRYCOLLECTION(POINT(2 3), LINESTRING (2 3, 3 4))", 0, null); assertSTGeometryN("GEOMETRYCOLLECTION(POINT(2 3), LINESTRING (2 3, 3 4))", -1, null); }
/**
 * Constant-folds {@code bitand(SMALLINT, SMALLINT)}: bitwise AND of the two
 * operands, truncated back to a SMALLINT constant.
 */
@ConstantFunction(name = "bitand", argTypes = {SMALLINT, SMALLINT}, returnType = SMALLINT)
public static ConstantOperator bitandSmallInt(ConstantOperator first, ConstantOperator second) {
    final short lhs = first.getSmallint();
    final short rhs = second.getSmallint();
    return ConstantOperator.createSmallInt((short) (lhs & rhs));
}
@Test
public void bitandSmallInt() {
  // AND-ing a value with itself is the identity: 10 & 10 == 10.
  final ConstantOperator result = ScalarOperatorFunctions.bitandSmallInt(O_SI_10, O_SI_10);
  assertEquals(10, result.getSmallint());
}
/**
 * Executes the JDOQL query from the command line: SELECT statements go through
 * {@code executeJDOQLSelect}, UPDATE statements through
 * {@code executeJDOQLUpdate}; anything else is rejected.
 *
 * <p>Fix: the original recomputed {@code query.toLowerCase().trim()} for each
 * branch; it is now normalized once. The original (untrimmed, case-preserving)
 * query string is still what gets executed.
 *
 * @throws IllegalArgumentException for statements that are neither select nor update
 * @throws RuntimeException wrapping any failure from the underlying execution
 */
@Override void execute() {
  String query = getCl().getJDOQLQuery();
  // Normalize once for dispatch; execution uses the original query text.
  String statement = query.toLowerCase().trim();
  if (statement.startsWith("select")) {
    try {
      executeJDOQLSelect(query);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  } else if (statement.startsWith("update")) {
    try {
      executeJDOQLUpdate(query);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  } else {
    throw new IllegalArgumentException("HiveMetaTool:Unsupported statement type, only select and update supported");
  }
}
@Test public void testIllegalQuery() throws Exception { exception.expect(IllegalArgumentException.class); exception.expectMessage("HiveMetaTool:Unsupported statement type, only select and update supported"); String illegalQuery = "abcde"; MetaToolTaskExecuteJDOQLQuery t = new MetaToolTaskExecuteJDOQLQuery(); t.setCommandLine(new HiveMetaToolCommandLine(new String[] {"-executeJDOQL", illegalQuery})); t.execute(); }
public static Env transformEnv(String envName) { final String envWellFormName = getWellFormName(envName); if (Env.exists(envWellFormName)) { return Env.valueOf(envWellFormName); } // cannot be found or blank name return Env.UNKNOWN; }
@Test public void testTransformEnvSpecialCase() { // Prod/Pro assertEquals(Env.PRO, Env.transformEnv("prod")); assertEquals(Env.PRO, Env.transformEnv("PROD")); //FAT/FWS assertEquals(Env.FAT, Env.transformEnv("FWS")); assertEquals(Env.FAT, Env.transformEnv("fws")); }
@Override public void accept(K key, V value) { subject.onNext(Map.entry(key, value)); }
@Test public void singleKey() { var writerCalled = new AtomicBoolean(); // Given this cache... var writer = new WriteBehindCacheWriter.Builder<Integer, ZonedDateTime>() .coalesce(BinaryOperator.maxBy(ZonedDateTime::compareTo)) .writeAction(entries -> writerCalled.set(true)) .bufferTime(Duration.ofSeconds(1)) .build(); Cache<Integer, ZonedDateTime> cache = Caffeine.newBuilder().build(); // When this cache update happens... cache.asMap().computeIfAbsent(1, key -> { var value = ZonedDateTime.now(); writer.accept(key, value); return value; }); // Then the write behind action is called await().untilTrue(writerCalled); }
public static final RowMetaInterface getResultRowMeta() { RowMetaInterface rowMeta = new RowMeta(); rowMeta.addValueMeta( new ValueMetaString( BaseMessages.getString( PKG, "SearchResult.TransOrJob" ) ) ); rowMeta.addValueMeta( new ValueMetaString( BaseMessages.getString( PKG, "SearchResult.StepDatabaseNotice" ) ) ); rowMeta.addValueMeta( new ValueMetaString( BaseMessages.getString( PKG, "SearchResult.String" ) ) ); rowMeta.addValueMeta( new ValueMetaString( BaseMessages.getString( PKG, "SearchResult.FieldName" ) ) ); return rowMeta; }
@Test public void testgetResultRowMeta() { RowMetaInterface rm = StringSearchResult.getResultRowMeta(); assertNotNull( rm ); assertEquals( 4, rm.getValueMetaList().size() ); assertEquals( ValueMetaInterface.TYPE_STRING, rm.getValueMeta( 0 ).getType() ); assertEquals( BaseMessages.getString( PKG, "SearchResult.TransOrJob" ), rm.getValueMeta( 0 ).getName() ); assertEquals( ValueMetaInterface.TYPE_STRING, rm.getValueMeta( 1 ).getType() ); assertEquals( BaseMessages.getString( PKG, "SearchResult.StepDatabaseNotice" ), rm.getValueMeta( 1 ).getName() ); assertEquals( ValueMetaInterface.TYPE_STRING, rm.getValueMeta( 2 ).getType() ); assertEquals( BaseMessages.getString( PKG, "SearchResult.String" ), rm.getValueMeta( 2 ).getName() ); assertEquals( ValueMetaInterface.TYPE_STRING, rm.getValueMeta( 3 ).getType() ); assertEquals( BaseMessages.getString( PKG, "SearchResult.FieldName" ), rm.getValueMeta( 3 ).getName() ); }
public static Map<String, Set<String>> getIntersectedPartitions(Map<String, Range<PartitionKey>> srcRangeMap, Map<String, Range<PartitionKey>> dstRangeMap) { if (dstRangeMap.isEmpty()) { return srcRangeMap.keySet().stream().collect(Collectors.toMap(Function.identity(), Sets::newHashSet)); } // TODO: Callers may use `List<PartitionRange>` directly. List<PRangeCellPlus> srcRanges = toPRangeCellPlus(srcRangeMap, true); List<PRangeCellPlus> dstRanges = toPRangeCellPlus(dstRangeMap, true); return getIntersectedPartitions(srcRanges, dstRanges); }
@Test public void test_getIntersectedPartitions() throws AnalysisException { List<PRangeCellPlus> srcs = Arrays.asList( buildPartitionRange("p20230801", "00000101", "20230801"), buildPartitionRange("p20230802", "20230801", "20230802"), buildPartitionRange("p20230803", "20230802", "20230803") ); List<PRangeCellPlus> dsts = Arrays.asList( buildPartitionRange("p000101_202308", "0001-01-01", "2023-08-01"), buildPartitionRange("p202308_202309", "2023-08-01", "2023-09-01") ); Map<String, Set<String>> res = SyncPartitionUtils.getIntersectedPartitions(srcs, dsts); Assert.assertEquals( ImmutableMap.of( "p20230801", ImmutableSet.of("p000101_202308"), "p20230802", ImmutableSet.of("p202308_202309"), "p20230803", ImmutableSet.of("p202308_202309") ), res); }
@Bean @ConditionalOnMissingBean(NacosDataChangedInit.class) public DataChangedInit nacosDataChangedInit(final ConfigService configService) { return new NacosDataChangedInit(configService); }
@Test public void testNacosDataInit() { NacosSyncConfiguration nacosListener = new NacosSyncConfiguration(); NacosConfigService configService = mock(NacosConfigService.class); assertNotNull(nacosListener.nacosDataChangedInit(configService)); }
public PaginationContext createPaginationContext(final TopProjectionSegment topProjectionSegment, final Collection<ExpressionSegment> expressions, final List<Object> params) { Collection<AndPredicate> andPredicates = expressions.stream().flatMap(each -> ExpressionExtractUtils.getAndPredicates(each).stream()).collect(Collectors.toList()); Optional<ExpressionSegment> rowNumberPredicate = expressions.isEmpty() ? Optional.empty() : getRowNumberPredicate(andPredicates, topProjectionSegment.getAlias()); Optional<PaginationValueSegment> offset = rowNumberPredicate.isPresent() ? createOffsetWithRowNumber(rowNumberPredicate.get()) : Optional.empty(); PaginationValueSegment rowCount = topProjectionSegment.getTop(); return new PaginationContext(offset.orElse(null), rowCount, params); }
@Test void assertCreatePaginationContextWhenRowNumberPredicateNotPresent() { TopProjectionSegment topProjectionSegment = new TopProjectionSegment(0, 10, null, "rowNumberAlias"); PaginationContext paginationContext = topPaginationContextEngine.createPaginationContext(topProjectionSegment, Collections.emptyList(), Collections.emptyList()); assertFalse(paginationContext.getOffsetSegment().isPresent()); assertFalse(paginationContext.getRowCountSegment().isPresent()); }
@Override public boolean exceedsRateLimitPerFrequency(Task task, TaskDef taskDef) { int rateLimit = taskDef == null ? task.getRateLimitPerFrequency() : taskDef.getRateLimitPerFrequency(); if (rateLimit <= 0) { return false; } int bucketSize = taskDef == null ? task.getRateLimitFrequencyInSeconds() : taskDef.getRateLimitFrequencyInSeconds(); String taskName = task.getTaskDefName(); try { return withRetryableQuery( GET_RUNNING_TASK_COUNT_BY_NAME_STATEMENT, statement -> { statement.setString(1, taskName); statement.setLong(2, System.currentTimeMillis() - 1000 * bucketSize); }, result -> { if (result.next()) { int cnt = result.getInt(COUNT_COLUMN); if (cnt > rateLimit) { LOG.info( "Got {} running instance for the task name {} in the past {} second exceeding a limit {}", cnt, taskName, bucketSize, rateLimit); return true; } else { LOG.debug( "Got {} running instance for the task name {} in the past {} second within a limit {}", cnt, taskName, bucketSize, rateLimit); } } return false; }); } catch (Exception e) { LOG.warn("Failed checking rate limit for task {} due to {}", taskName, e.getMessage()); return true; } }
@Test public void testExceedsRateLimitWithinLimit() { TaskDef taskDef = createTaskDef(10, 0); Task task = createRunningTestTask(TEST_TASK_ID_1); executionDAO.updateTask(task); task = createRunningTestTask(TEST_TASK_ID_2); executionDAO.updateTask(task); assertFalse(dao.exceedsRateLimitPerFrequency(task, taskDef)); }
public static <S> S load(Class<S> service, ClassLoader loader) throws EnhancedServiceNotFoundException { return InnerEnhancedServiceLoader.getServiceLoader(service).load(loader, true); }
@Test public void testLoadByClassAndActivateNameAndArgsTypeAndArgs() { Hello2 load = EnhancedServiceLoader .load(Hello2.class, "JapaneseHello", new Class[] {String.class}, new Object[] {"msg"}); assertThat(load).isInstanceOf(Hello2.class); }
public void applyClientConfiguration(String account, DataLakeFileSystemClientBuilder builder) { String sasToken = adlsSasTokens.get(account); if (sasToken != null && !sasToken.isEmpty()) { builder.sasToken(sasToken); } else if (namedKeyCreds != null) { builder.credential( new StorageSharedKeyCredential(namedKeyCreds.getKey(), namedKeyCreds.getValue())); } else { builder.credential(new DefaultAzureCredentialBuilder().build()); } // apply connection string last so its parameters take precedence, e.g. SAS token String connectionString = adlsConnectionStrings.get(account); if (connectionString != null && !connectionString.isEmpty()) { builder.endpoint(connectionString); } else { builder.endpoint("https://" + account); } }
@Test public void testNoConnectionString() { AzureProperties props = new AzureProperties(); DataLakeFileSystemClientBuilder clientBuilder = mock(DataLakeFileSystemClientBuilder.class); props.applyClientConfiguration("account", clientBuilder); verify(clientBuilder).endpoint("https://account"); }
@ScalarOperator(BETWEEN) @SqlType(StandardTypes.BOOLEAN) public static boolean between(@SqlType("unknown") boolean value, @SqlType("unknown") boolean min, @SqlType("unknown") boolean max) { throw new AssertionError("value of unknown type should all be NULL"); }
@Test public void testBetween() { assertFunction("NULL BETWEEN NULL AND NULL", BOOLEAN, null); }
@Override public void execute(SensorContext context) { for (InputFile file : context.fileSystem().inputFiles(context.fileSystem().predicates().hasLanguages(Xoo.KEY))) { processFileSymbol(file, context); } }
@Test public void testExecution() throws IOException { File symbol = new File(baseDir, "src/foo.xoo.symbol"); FileUtils.write(symbol, "1:1:1:4,1:7:1:10\n1:11:1:13,1:14:1:33\n\n#comment"); InputFile inputFile = new TestInputFileBuilder("foo", "src/foo.xoo") .initMetadata("xoo file with some source code and length over 33") .setLanguage(Xoo.KEY) .setModuleBaseDir(baseDir.toPath()) .build(); context.fileSystem().add(inputFile); sensor.execute(context); assertThat(context.referencesForSymbolAt("foo:src/foo.xoo", 1, 2)) .containsOnly(new DefaultTextRange(new DefaultTextPointer(1, 7), new DefaultTextPointer(1, 10))); assertThat(context.referencesForSymbolAt("foo:src/foo.xoo", 1, 12)) .containsOnly(new DefaultTextRange(new DefaultTextPointer(1, 14), new DefaultTextPointer(1, 33))); }
List<ParsedTerm> identifyUnknownFields(final Set<String> availableFields, final List<ParsedTerm> terms) { final Map<String, List<ParsedTerm>> groupedByField = terms.stream() .filter(t -> !t.isDefaultField()) .filter(term -> !SEARCHABLE_ES_FIELDS.contains(term.getRealFieldName())) .filter(term -> !RESERVED_SETTABLE_FIELDS.contains(term.getRealFieldName())) .filter(term -> !availableFields.contains(term.getRealFieldName())) .distinct() .collect(Collectors.groupingBy(ParsedTerm::getRealFieldName)); return unknownFieldsListLimiter.filterElementsContainingUsefulInformation(groupedByField); }
@Test void testIdentifiesUnknownField() { final ParsedTerm unknownField = ParsedTerm.create("strange_field", "!!!"); final List<ParsedTerm> unknownFields = toTest.identifyUnknownFields( Set.of("some_normal_field"), List.of( ParsedTerm.create("some_normal_field", "Haba, haba, haba!"), unknownField ) ); assertThat(unknownFields) .hasSize(1) .contains(unknownField); }
@Override public Artifact uploadArtifact(String artifactName, String localPath) throws IOException { return uploadArtifact(artifactName, Paths.get(localPath)); }
@Test public void testUploadArtifact() throws IOException { when(client.create(any(BlobInfo.class), any(byte[].class))).thenReturn(blob); GcsArtifact actual = (GcsArtifact) gcsClient.uploadArtifact(ARTIFACT_NAME, LOCAL_PATH); verify(client).create(blobInfoCaptor.capture(), contentsCaptor.capture()); BlobInfo actualInfo = blobInfoCaptor.getValue(); assertThat(actual.blob).isSameInstanceAs(blob); assertThat(actualInfo.getBucket()).isEqualTo(BUCKET); assertThat(actualInfo.getName()) .isEqualTo(String.format("%s/%s/%s", TEST_CLASS, gcsClient.runId(), ARTIFACT_NAME)); assertThat(contentsCaptor.getValue()).isEqualTo(TEST_ARTIFACT_CONTENTS); }
public static void rename( List<ResourceId> srcResourceIds, List<ResourceId> destResourceIds, MoveOptions... moveOptions) throws IOException { validateSrcDestLists(srcResourceIds, destResourceIds); if (srcResourceIds.isEmpty()) { return; } renameInternal( getFileSystemInternal(srcResourceIds.iterator().next().getScheme()), srcResourceIds, destResourceIds, moveOptions); }
@Test public void testRenameThrowsNoSuchFileException() throws Exception { Path existingPath = temporaryFolder.newFile().toPath(); Path nonExistentPath = existingPath.resolveSibling("non-existent"); Path destPath1 = existingPath.resolveSibling("dest1"); Path destPath2 = nonExistentPath.resolveSibling("dest2"); createFileWithContent(existingPath, "content1"); thrown.expect(NoSuchFileException.class); FileSystems.rename( toResourceIds(ImmutableList.of(existingPath, nonExistentPath), false /* isDirectory */), toResourceIds(ImmutableList.of(destPath1, destPath2), false /* isDirectory */)); }
public boolean isAllowed() { if (lock.tryLock()) { try { if (lastAllowed.plus(perDuration).isBefore(now())) { lastAllowed = now(); return true; } return false; } finally { lock.unlock(); } } else { return false; } }
@Test void testRateLimit8PerSecond() { final RateLimiter rateLimit = rateLimit().atRequests(8).per(SECOND); List<Boolean> allowed = new ArrayList<>(); await() .pollInterval(ofMillis(20)) .atMost(ofMillis(1050)) .untilAsserted(() -> { allowed.add(rateLimit.isAllowed()); assertThat(allowed.stream().filter(x -> x)).hasSize(8); }); }
@Override public void suspend(Throwable cause) { suspend(cause, null); }
@Test void testSuspendCanBeCalledWhenExecutionGraphHasReachedGloballyTerminalState() throws Exception { try (MockStateWithExecutionGraphContext context = new MockStateWithExecutionGraphContext()) { final StateTrackingMockExecutionGraph testingExecutionGraph = new StateTrackingMockExecutionGraph(); testingExecutionGraph.transitionToRunning(); final TestingStateWithExecutionGraph stateWithExecutionGraph = createStateWithExecutionGraph(context, testingExecutionGraph); context.setExpectFinished( archivedExecutionGraph -> assertThat(archivedExecutionGraph.getState()) .isEqualTo(JobStatus.FAILED)); // transition to FAILED testingExecutionGraph.failJob( new FlinkException("Transition job to FAILED state"), System.currentTimeMillis()); testingExecutionGraph.completeTerminationFuture(JobStatus.FAILED); assertThat(testingExecutionGraph.getState()).isEqualTo(JobStatus.FAILED); // As long as we don't execute StateWithExecutionGraph#onGloballyTerminalState // immediately when reaching a globally terminal state or if don't immediately leave // this state when reaching a globally terminal state, this test is still valid because // the suspend call can happen asynchronously. assertThatFuture(stateWithExecutionGraph.getGloballyTerminalStateFuture()).isNotDone(); stateWithExecutionGraph.suspend(new FlinkException("Test exception")); } }
@VisibleForTesting static DBCollection prepareCollection(final MongoConnection mongoConnection) { final DB db = mongoConnection.getDatabase(); DBCollection coll = db.getCollection(COLLECTION_NAME); coll.createIndex(DBSort .asc("timestamp") .asc("producer") .asc("consumers")); coll.setWriteConcern(WriteConcern.JOURNALED); return coll; }
@Test public void prepareCollectionCreatesCollectionIfItDoesNotExist() throws Exception { @SuppressWarnings("deprecation") final DB database = mongoConnection.getDatabase(); database.getCollection(ClusterEventPeriodical.COLLECTION_NAME).drop(); assertThat(database.collectionExists(ClusterEventPeriodical.COLLECTION_NAME)).isFalse(); DBCollection collection = ClusterEventPeriodical.prepareCollection(mongoConnection); assertThat(collection.getName()).isEqualTo(ClusterEventPeriodical.COLLECTION_NAME); assertThat(collection.getIndexInfo()).hasSize(2); assertThat(collection.getWriteConcern()).isEqualTo(WriteConcern.JOURNALED); }
@Override public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, ServletException { HttpServletRequest request = (HttpServletRequest)req; HttpServletResponse response = (HttpServletResponse)res; // Do not allow framing; OF-997 response.setHeader("X-Frame-Options", JiveGlobals.getProperty("adminConsole.frame-options", "SAMEORIGIN")); // Reset the defaultLoginPage variable String loginPage = defaultLoginPage; if (loginPage == null) { loginPage = request.getContextPath() + (AuthFactory.isOneTimeAccessTokenEnabled() ? "/loginToken.jsp" : "/login.jsp" ); } // Get the page we're on: String url = request.getRequestURI().substring(1); if (url.startsWith("plugins/")) { url = url.substring("plugins/".length()); } // See if it's contained in the exclude list. If so, skip filter execution boolean doExclude = false; for (String exclude : excludes) { if (testURLPassesExclude(url, exclude)) { doExclude = true; break; } } if (!doExclude || IP_ACCESS_IGNORE_EXCLUDES.getValue()) { if (!passesBlocklist(req) || !passesAllowList(req)) { response.sendError(HttpServletResponse.SC_FORBIDDEN); return; } } if (!doExclude) { WebManager manager = new WebManager(); manager.init(request, response, request.getSession(), context); boolean haveOneTimeToken = manager.getAuthToken() instanceof AuthToken.OneTimeAuthToken; User loggedUser = manager.getUser(); boolean loggedAdmin = loggedUser == null ? false : adminManager.isUserAdmin(loggedUser.getUsername(), true); if (!haveOneTimeToken && !loggedAdmin && !authUserFromRequest(request)) { response.sendRedirect(getRedirectURL(request, loginPage, null)); return; } } chain.doFilter(req, res); }
@Test public void willNotRedirectARequestFromAnAdminUser() throws Exception { AuthCheckFilter.SERVLET_REQUEST_AUTHENTICATOR.setValue(AdminUserServletAuthenticatorClass.class); final AuthCheckFilter filter = new AuthCheckFilter(adminManager, loginLimitManager); filter.doFilter(request, response, filterChain); verify(response, never()).sendRedirect(anyString()); verify(loginLimitManager).recordSuccessfulAttempt(adminUser, remoteAddr); final ArgumentCaptor<AuthToken> argumentCaptor = ArgumentCaptor.forClass(AuthToken.class); verify(httpSession).setAttribute(eq("jive.admin.authToken"), argumentCaptor.capture()); final AuthToken authToken = argumentCaptor.getValue(); assertThat(authToken.getUsername(), is(adminUser)); }
public static void cleanupInternalTopicSchemas( final String applicationId, final SchemaRegistryClient schemaRegistryClient) { getInternalSubjectNames(applicationId, schemaRegistryClient) .forEach(subject -> tryDeleteInternalSubject( applicationId, schemaRegistryClient, subject)); }
@Test public void shouldDeleteChangeLogTopicSchema() throws Exception { // Given: when(schemaRegistryClient.getAllSubjects()).thenReturn(ImmutableList.of( APP_ID + "SOME-changelog-key", APP_ID + "SOME-changelog-value" )); // When: SchemaRegistryUtil.cleanupInternalTopicSchemas(APP_ID, schemaRegistryClient); // Then not exception: verify(schemaRegistryClient).deleteSubject(APP_ID + "SOME-changelog-key"); verify(schemaRegistryClient).deleteSubject(APP_ID + "SOME-changelog-value"); }
public static KeyValueIterator<Windowed<GenericKey>, GenericRow> fetch( final ReadOnlySessionStore<GenericKey, GenericRow> store, final GenericKey key ) { Objects.requireNonNull(key, "key can't be null"); final List<ReadOnlySessionStore<GenericKey, GenericRow>> stores = getStores(store); final Function<ReadOnlySessionStore<GenericKey, GenericRow>, KeyValueIterator<Windowed<GenericKey>, GenericRow>> fetchFunc = sessionStore -> fetchUncached(sessionStore, key); return findFirstNonEmptyIterator(stores, fetchFunc); }
@Test public void shouldThrowException_InvalidStateStoreException() throws IllegalAccessException { when(provider.stores(any(), any())).thenReturn(ImmutableList.of(meteredSessionStore)); SERDES_FIELD.set(meteredSessionStore, serdes); when(serdes.rawKey(any())).thenReturn(BYTES); when(meteredSessionStore.wrapped()).thenReturn(sessionStore); when(sessionStore.fetch(any())).thenThrow( new InvalidStateStoreException("Invalid")); final Exception e = assertThrows( InvalidStateStoreException.class, () -> SessionStoreCacheBypass.fetch(store, SOME_KEY) ); assertThat(e.getMessage(), containsString("State store is not " + "available anymore and may have been migrated to another instance")); }
@Override @SuppressWarnings("rawtypes") public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) { final long timestamp = TimeUnit.MILLISECONDS.toSeconds(clock.getTime()); for (Map.Entry<String, Gauge> entry : gauges.entrySet()) { reportGauge(timestamp, entry.getKey(), entry.getValue()); } for (Map.Entry<String, Counter> entry : counters.entrySet()) { reportCounter(timestamp, entry.getKey(), entry.getValue()); } for (Map.Entry<String, Histogram> entry : histograms.entrySet()) { reportHistogram(timestamp, entry.getKey(), entry.getValue()); } for (Map.Entry<String, Meter> entry : meters.entrySet()) { reportMeter(timestamp, entry.getKey(), entry.getValue()); } for (Map.Entry<String, Timer> entry : timers.entrySet()) { reportTimer(timestamp, entry.getKey(), entry.getValue()); } }
@Test public void reportsHistogramValues() throws Exception { final Histogram histogram = mock(Histogram.class); when(histogram.getCount()).thenReturn(1L); final Snapshot snapshot = mock(Snapshot.class); when(snapshot.getMax()).thenReturn(2L); when(snapshot.getMean()).thenReturn(3.0); when(snapshot.getMin()).thenReturn(4L); when(snapshot.getStdDev()).thenReturn(5.0); when(snapshot.getMedian()).thenReturn(6.0); when(snapshot.get75thPercentile()).thenReturn(7.0); when(snapshot.get95thPercentile()).thenReturn(8.0); when(snapshot.get98thPercentile()).thenReturn(9.0); when(snapshot.get99thPercentile()).thenReturn(10.0); when(snapshot.get999thPercentile()).thenReturn(11.0); when(histogram.getSnapshot()).thenReturn(snapshot); reporter.report(map(), map(), map("test.histogram", histogram), map(), map()); assertThat(fileContents("test.histogram.csv")) .isEqualTo(csv( "t,count,max,mean,min,stddev,p50,p75,p95,p98,p99,p999", "19910191,1,2,3.000000,4,5.000000,6.000000,7.000000,8.000000,9.000000,10.000000,11.000000" )); }
@Override public abstract int compare(@NonNull String id1, @NonNull String id2);
@Test public void testCompareCaseSensitiveEmail() { IdStrategy idStrategy = new IdStrategy.CaseSensitiveEmailAddress(); assertEquals(0, idStrategy.compare("john.smith@acme.org", "john.smith@acme.org")); assertEquals(0, idStrategy.compare("John.Smith@acme.org", "John.Smith@acme.org")); assertEquals(0, idStrategy.compare("John.Smith@ACME.org", "John.Smith@acme.org")); assertEquals(0, idStrategy.compare("John.Smith@acme.ORG", "John.Smith@acme.org")); assertEquals(0, idStrategy.compare("john.smith", "john.smith")); assertEquals(0, idStrategy.compare("John.Smith", "John.Smith")); assertEquals(0, idStrategy.compare("john@smith@acme.org", "john@smith@acme.org")); assertEquals(0, idStrategy.compare("John@Smith@acme.org", "John@Smith@acme.org")); assertTrue(idStrategy.compare("John.Smith@acme.org", "john.smith@acme.org") < 0); assertTrue(idStrategy.compare("john.smith@acme.org", "John.Smith@acme.org") > 0); }
@SafeVarargs public static <T> Traverser<T> traverseItems(T... items) { return traverseArray(items); }
@Test public void peek() { List<Integer> list = new ArrayList<>(); Traverser<Integer> t = traverseItems(1, 2, 3).peek(list::add); assertEquals(Integer.valueOf(1), t.next()); assertEquals(Integer.valueOf(2), t.next()); assertEquals(Integer.valueOf(3), t.next()); assertNull(t.next()); assertEquals(Arrays.asList(1, 2, 3), list); }
public ChannelFuture connect() { validate(); SocketAddress remoteAddress = this.remoteAddress; if (remoteAddress == null) { throw new IllegalStateException("remoteAddress not set"); } return doResolveAndConnect(remoteAddress, config.localAddress()); }
@Test public void testChannelFactoryFailureNotifiesPromise() throws Exception { final RuntimeException exception = new RuntimeException("newChannel crash"); final Bootstrap bootstrap = new Bootstrap() .handler(dummyHandler) .group(groupA) .channelFactory(new ChannelFactory<Channel>() { @Override public Channel newChannel() { throw exception; } }); ChannelFuture connectFuture = bootstrap.connect(LocalAddress.ANY); // Should fail with the RuntimeException. assertThat(connectFuture.await(10000), is(true)); assertThat(connectFuture.cause(), sameInstance((Throwable) exception)); assertThat(connectFuture.channel(), is(not(nullValue()))); }
@Override public Collection<String> listAllNamespace() { return ServiceManager.getInstance().getAllNamespaces(); }
@Test void testListAllNamespace() { assertEquals(1, serviceOperatorV2.listAllNamespace().size()); }
@Override public boolean apply(Collection<Member> members) { if (members.size() < minimumClusterSize) { return false; } int count = 0; long now = currentTimeMillis(); for (Member member : members) { if (!isAlivePerIcmp(member)) { continue; } if (member.localMember()) { count++; continue; } // apply and onHeartbeat are never executed concurrently Long latestTimestamp = latestHeartbeatPerMember.get(member); if (latestTimestamp == null) { continue; } if ((now - latestTimestamp) < heartbeatToleranceMillis) { count++; } } return count >= minimumClusterSize; }
@Test public void testRecentlyActiveSplitBrainProtectionFunction_splitBrainProtectionPresent_whenAsManyAsSplitBrainProtectionRecentlyActive() { splitBrainProtectionFunction = new RecentlyActiveSplitBrainProtectionFunction(splitBrainProtectionSize, 10000); // heartbeat each second for all members for 5 seconds heartbeat(5, 1000); assertTrue(splitBrainProtectionFunction.apply(subsetOfMembers(splitBrainProtectionSize))); }
@Override public byte[] serialize() { byte[] payloadData = null; if (this.payload != null) { this.payload.setParent(this); payloadData = this.payload.serialize(); } int headerLength = FIXED_HEADER_LENGTH + routingData.length; int payloadLength = 0; if (payloadData != null) { payloadLength = payloadData.length; } final byte[] data = new byte[headerLength + payloadLength]; final ByteBuffer bb = ByteBuffer.wrap(data); bb.put(this.nextHeader); bb.put(this.headerExtLength); bb.put(this.routingType); bb.put(this.segmentsLeft); bb.put(this.routingData, 0, routingData.length); if (payloadData != null) { bb.put(payloadData); } if (this.parent != null && this.parent instanceof IExtensionHeader) { ((IExtensionHeader) this.parent).setNextHeader(IPv6.PROTOCOL_ROUTING); } return data; }
@Test public void testSerialize() { Routing routing = new Routing(); routing.setNextHeader((byte) 0x11); routing.setHeaderExtLength((byte) 0x02); routing.setRoutingType((byte) 0x00); routing.setSegmntsLeft((byte) 0x03); routing.setRoutingData(routingData); routing.setPayload(udp); assertArrayEquals(routing.serialize(), bytePacket); }
@Override public double score(int[] truth, int[] prediction) { return of(truth, prediction); }
@Test public void testMeasure() { System.out.println("sensitivity"); int[] truth = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; int[] prediction = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; Sensitivity instance = new Sensitivity(); double expResult = 0.8333; double result = instance.score(truth, prediction); assertEquals(expResult, result, 1E-4); }
@Override public Map<String, String> getAllVariables() { return internalGetAllVariables(0, Collections.emptySet()); }
@Test void testGetAllVariablesWithOutExclusions() { MetricRegistry registry = NoOpMetricRegistry.INSTANCE; AbstractMetricGroup<?> group = new ProcessMetricGroup(registry, "host"); assertThat(group.getAllVariables()).containsKey(ScopeFormat.SCOPE_HOST); }
@Override public void startLocalizer(LocalizerStartContext ctx) throws IOException, InterruptedException { Path nmPrivateContainerTokensPath = ctx.getNmPrivateContainerTokens(); InetSocketAddress nmAddr = ctx.getNmAddr(); String user = ctx.getUser(); String appId = ctx.getAppId(); String locId = ctx.getLocId(); LocalDirsHandlerService dirsHandler = ctx.getDirsHandler(); List<String> localDirs = dirsHandler.getLocalDirs(); List<String> logDirs = dirsHandler.getLogDirs(); createUserLocalDirs(localDirs, user); createUserCacheDirs(localDirs, user); createAppDirs(localDirs, user, appId); createAppLogDirs(appId, logDirs, user); // randomly choose the local directory Path appStorageDir = getWorkingDir(localDirs, user, appId); String tokenFn = String.format(TOKEN_FILE_NAME_FMT, locId); Path tokenDst = new Path(appStorageDir, tokenFn); copyFile(nmPrivateContainerTokensPath, tokenDst, user); LOG.info("Copying from {} to {}", nmPrivateContainerTokensPath, tokenDst); FileContext localizerFc = FileContext.getFileContext(lfs.getDefaultFileSystem(), getConf()); localizerFc.setUMask(lfs.getUMask()); localizerFc.setWorkingDirectory(appStorageDir); LOG.info("Localizer CWD set to {} = {}", appStorageDir, localizerFc.getWorkingDirectory()); ContainerLocalizer localizer = createContainerLocalizer(user, appId, locId, tokenFn, localDirs, localizerFc); // TODO: DO it over RPC for maintaining similarity? localizer.runLocalization(nmAddr); }
@Test(timeout = 30000) public void testStartLocalizer() throws IOException, InterruptedException, YarnException { final Path firstDir = new Path(BASE_TMP_PATH, "localDir1"); List<String> localDirs = new ArrayList<String>(); final Path secondDir = new Path(BASE_TMP_PATH, "localDir2"); List<String> logDirs = new ArrayList<String>(); final Path logDir = new Path(BASE_TMP_PATH, "logDir"); final Path tokenDir = new Path(BASE_TMP_PATH, "tokenDir"); FsPermission perms = new FsPermission((short)0770); Configuration conf = new Configuration(); final FileContext mockLfs = spy(FileContext.getLocalFSFileContext(conf)); final FileContext.Util mockUtil = spy(mockLfs.util()); doAnswer(new Answer() { @Override public Object answer(InvocationOnMock invocationOnMock) throws Throwable { return mockUtil; } }).when(mockLfs).util(); doAnswer(new Answer() { @Override public Object answer(InvocationOnMock invocationOnMock) throws Throwable { Path dest = (Path) invocationOnMock.getArguments()[1]; if (dest.toString().contains(firstDir.toString())) { // throw an Exception when copy token to the first local dir // to simulate no space on the first drive throw new IOException("No space on this drive " + dest.toString()); } else { // copy token to the second local dir DataOutputStream tokenOut = null; try { Credentials credentials = new Credentials(); tokenOut = mockLfs.create(dest, EnumSet.of(CREATE, OVERWRITE)); credentials.writeTokenStorageToStream(tokenOut); } finally { if (tokenOut != null) { tokenOut.close(); } } } return null; }}).when(mockUtil).copy(any(Path.class), any(Path.class), anyBoolean(), anyBoolean()); doAnswer(new Answer() { @Override public Object answer(InvocationOnMock invocationOnMock) throws Throwable { Path p = (Path) invocationOnMock.getArguments()[0]; // let second local directory return more free space than // first local directory if (p.toString().contains(firstDir.toString())) { return new FsStatus(2000, 2000, 0); } else { return new FsStatus(1000, 0, 1000); } } 
}).when(mockLfs).getFsStatus(any(Path.class)); DefaultContainerExecutor mockExec = spy(new DefaultContainerExecutor(mockLfs) { @Override public ContainerLocalizer createContainerLocalizer(String user, String appId, String locId, String tokenFileName, List<String> localDirs, FileContext localizerFc) throws IOException { // Spy on the localizer and make it return valid heart-beat // responses even though there is no real NodeManager. ContainerLocalizer localizer = super.createContainerLocalizer(user, appId, locId, tokenFileName, localDirs, localizerFc); ContainerLocalizer spyLocalizer = spy(localizer); LocalizationProtocol nmProxy = mock(LocalizationProtocol.class); try { when(nmProxy.heartbeat(isA(LocalizerStatus.class))).thenReturn( new MockLocalizerHeartbeatResponse(LocalizerAction.DIE, new ArrayList<ResourceLocalizationSpec>())); } catch (YarnException e) { throw new IOException(e); } when(spyLocalizer.getProxy(any())) .thenReturn(nmProxy); return spyLocalizer; } }); mockExec.setConf(conf); localDirs.add(mockLfs.makeQualified(firstDir).toString()); localDirs.add(mockLfs.makeQualified(secondDir).toString()); logDirs.add(mockLfs.makeQualified(logDir).toString()); conf.setStrings(YarnConfiguration.NM_LOCAL_DIRS, localDirs.toArray(new String[localDirs.size()])); conf.set(YarnConfiguration.NM_LOG_DIRS, logDir.toString()); mockLfs.mkdir(tokenDir, perms, true); Path nmPrivateCTokensPath = new Path(tokenDir, "test.tokens"); String appSubmitter = "nobody"; String appId = "APP_ID"; String locId = "LOC_ID"; LocalDirsHandlerService dirsHandler = mock(LocalDirsHandlerService.class); when(dirsHandler.getLocalDirs()).thenReturn(localDirs); when(dirsHandler.getLogDirs()).thenReturn(logDirs); try { mockExec.startLocalizer(new LocalizerStartContext.Builder() .setNmPrivateContainerTokens(nmPrivateCTokensPath) .setNmAddr(null) .setUser(appSubmitter) .setAppId(appId) .setLocId(locId) .setDirsHandler(dirsHandler) .build()); } catch (IOException e) { Assert.fail("StartLocalizer failed 
to copy token file: " + StringUtils.stringifyException(e)); } finally { mockExec.deleteAsUser(new DeletionAsUserContext.Builder() .setUser(appSubmitter) .setSubDir(firstDir) .build()); mockExec.deleteAsUser(new DeletionAsUserContext.Builder() .setUser(appSubmitter) .setSubDir(secondDir) .build()); mockExec.deleteAsUser(new DeletionAsUserContext.Builder() .setUser(appSubmitter) .setSubDir(logDir) .build()); deleteTmpFiles(); } // Verify that the calls happen the expected number of times verify(mockUtil, times(1)).copy(any(Path.class), any(Path.class), anyBoolean(), anyBoolean()); verify(mockLfs, times(2)).getFsStatus(any(Path.class)); }
@Override
public <T> T convert(DataTable dataTable, Type type) {
    // Delegate to the three-argument overload; this entry point never
    // transposes the table.
    final boolean transposed = false;
    return convert(dataTable, type, transposed);
}
@Test
void convert_to_object__more_then_one_item__throws_exception() {
    // Converting a table to a single object must fail when the table
    // contains more than one cell.
    DataTable table = parse("", "| ♘ |", "| ♝ |");
    registry.defineDataTableType(new DataTableType(Piece.class, PIECE_TABLE_CELL_TRANSFORMER));

    CucumberDataTableException exception = assertThrows(
        CucumberDataTableException.class,
        () -> converter.convert(table, Piece.class));
    // NOTE(review): "more then" mirrors the production error text verbatim
    // (including the typo); keep in sync with the converter's message.
    assertThat(exception.getMessage(), is(format("" +
        "Can't convert DataTable to %s. " +
        "The table contained more then one item: [♘, ♝]", typeName(Piece.class))));
}
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStart,
    final Range<Instant> windowEnd,
    final Optional<Position> position
) {
  // Query the session store backing the given partition and return every
  // session window for the key whose bounds intersect the supplied ranges.
  // Note: the 'position' argument is not consumed by this implementation.
  try {
    final ReadOnlySessionStore<GenericKey, GenericRow> store = stateStore
        .store(QueryableStoreTypes.sessionStore(), partition);
    return KsMaterializedQueryResult.rowIterator(
        findSession(store, key, windowStart, windowEnd).iterator());
  } catch (final Exception e) {
    // Any store failure is surfaced uniformly as a materialization error.
    throw new MaterializationException("Failed to get value from materialized table", e);
  }
}
@Test
public void shouldReturnAllSessionsForRangeAll() {
  // Given: two sessions stored for the key.
  givenSingleSession(Instant.now().minusSeconds(1000), Instant.now().plusSeconds(1000));
  givenSingleSession(Instant.now().minusSeconds(1000), Instant.now().plusSeconds(1000));

  // When: querying with unbounded window-start and window-end ranges.
  final Iterator<WindowedRow> rowIterator =
      table.get(A_KEY, PARTITION, Range.all(), Range.all()).rowIterator;

  // Then: both sessions come back.
  assertThat(rowIterator.hasNext(), is(true));
  final List<WindowedRow> resultList = Lists.newArrayList(rowIterator);
  assertThat(resultList, hasSize(2));
}
static String headerLine(CSVFormat csvFormat) {
    // Render the format's configured header columns as one record, joined by
    // the format's delimiter character.
    final String delimiter = String.valueOf(csvFormat.getDelimiter());
    return String.join(delimiter, csvFormat.getHeader());
}
@Test
public void givenQuoteModeNone_isNoop() {
  // With QuoteMode.NONE (plus an escape char), rows parse to their raw
  // fields and no quoting behavior interferes.
  CSVFormat csvFormat = csvFormat().withEscape('$').withQuoteMode(QuoteMode.NONE);
  PCollection<String> input =
      pipeline.apply(Create.of(headerLine(csvFormat), "a,1,1.1", "b,2,2.2", "c,3,3.3"));
  CsvIOStringToCsvRecord underTest = new CsvIOStringToCsvRecord(csvFormat);
  CsvIOParseResult<List<String>> result = input.apply(underTest);
  // Every data row splits into its fields; no parse errors are produced.
  PAssert.that(result.getOutput())
      .containsInAnyOrder(
          Arrays.asList(
              Arrays.asList("a", "1", "1.1"),
              Arrays.asList("b", "2", "2.2"),
              Arrays.asList("c", "3", "3.3")));
  PAssert.that(result.getErrors()).empty();
  pipeline.run();
}
static void format(final JavaInput javaInput, JavaOutput javaOutput, JavaFormatterOptions options)
    throws FormatterException {
  // Parse the input with javac, then walk the AST emitting formatting ops
  // and finally compute line breaks into the output.
  Context context = new Context();
  // Collect diagnostics instead of printing them; parse errors are checked below.
  DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();
  context.put(DiagnosticListener.class, diagnostics);
  // Keep string concatenations unfolded so source positions survive, and
  // allow preview-language syntax to parse.
  Options.instance(context).put("allowStringFolding", "false");
  Options.instance(context).put("--enable-preview", "true");
  JCCompilationUnit unit;
  JavacFileManager fileManager = new JavacFileManager(context, true, UTF_8);
  try {
    // Empty platform class path: we only parse, never resolve symbols.
    fileManager.setLocation(StandardLocation.PLATFORM_CLASS_PATH, ImmutableList.of());
  } catch (IOException e) {
    // impossible
    throw new IOError(e);
  }
  // Present the in-memory input text to javac as a source file.
  SimpleJavaFileObject source =
      new SimpleJavaFileObject(URI.create("source"), JavaFileObject.Kind.SOURCE) {
        @Override
        public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
          return javaInput.getText();
        }
      };
  Log.instance(context).useSource(source);
  ParserFactory parserFactory = ParserFactory.instance(context);
  JavacParser parser =
      parserFactory.newParser(
          javaInput.getText(),
          /* keepDocComments= */ true,
          /* keepEndPos= */ true,
          /* keepLineMap= */ true);
  unit = parser.parseCompilationUnit();
  unit.sourcefile = source;
  javaInput.setCompilationUnit(unit);
  // Refuse to format input that did not parse cleanly.
  Iterable<Diagnostic<? extends JavaFileObject>> errorDiagnostics =
      Iterables.filter(diagnostics.getDiagnostics(), Formatter::errorDiagnostic);
  if (!Iterables.isEmpty(errorDiagnostics)) {
    throw FormatterException.fromJavacDiagnostics(errorDiagnostics);
  }
  OpsBuilder builder = new OpsBuilder(javaInput, javaOutput);
  // Output the compilation unit.
  // Choose the AST visitor matching the running JDK feature level; the
  // versioned visitors handle syntax introduced in those releases.
  JavaInputAstVisitor visitor;
  if (Runtime.version().feature() >= 21) {
    visitor =
        createVisitor(
            "com.google.googlejavaformat.java.java21.Java21InputAstVisitor", builder, options);
  } else if (Runtime.version().feature() >= 17) {
    visitor =
        createVisitor(
            "com.google.googlejavaformat.java.java17.Java17InputAstVisitor", builder, options);
  } else {
    visitor = new JavaInputAstVisitor(builder, options.indentationMultiplier());
  }
  visitor.scan(unit, null);
  builder.sync(javaInput.getText().length());
  builder.drain();
  // Compute breaks against the column limit and write the formatted result.
  Doc doc = new DocBuilder().withOps(builder.build()).build();
  doc.computeBreaks(javaOutput.getCommentsHelper(), MAX_LINE_LENGTH, new Doc.State(+0, 0));
  doc.write(javaOutput);
  javaOutput.flush();
}
@Test
public void testFormatLengthUpToEOF() throws Exception {
  // A --length region that extends across trailing blank lines up to EOF
  // must be accepted and format the whole file.
  String input = "class Foo{\n" + "void f\n" + "() {\n" + "}\n" + "}\n\n\n\n\n\n";
  String expectedOutput = "class Foo {\n" + " void f() {}\n" + "}\n";
  Path tmpdir = testFolder.newFolder().toPath();
  Path path = tmpdir.resolve("Foo.java");
  Files.writeString(path, input);
  StringWriter out = new StringWriter();
  StringWriter err = new StringWriter();
  Main main = new Main(new PrintWriter(out, true), new PrintWriter(err, true), System.in);
  String[] args = {"--offset", "0", "--length", String.valueOf(input.length()), path.toString()};
  // Exit code 0 means the range was accepted and formatting succeeded.
  assertThat(main.format(args)).isEqualTo(0);
  assertThat(out.toString()).isEqualTo(expectedOutput);
}
@Override
protected String selectorHandler(final MetaDataRegisterDTO metaDataDTO) {
    // This register service carries no selector handler payload, so the
    // handler is always the empty string.
    final String emptyHandler = "";
    return emptyHandler;
}
@Test
public void testSelectorHandler() {
    // The Motan register service always produces an empty selector handler.
    MetaDataRegisterDTO metaDataRegisterDTO = MetaDataRegisterDTO.builder().build();
    assertEquals(StringUtils.EMPTY, shenyuClientRegisterMotanService.selectorHandler(metaDataRegisterDTO));
}
@Override
public Path copy(final Path source, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException {
    // Server-side copy using the GCS objects.rewrite API: fetch the source
    // object's metadata, then loop rewrite calls until the service reports done.
    try {
        final Storage.Objects.Get request = session.getClient().objects().get(containerService.getContainer(source).getName(), containerService.getKey(source));
        // Requester-pays buckets require billing the authenticated user's project.
        // NOTE(review): getContainer(getContainer(source)) looks redundant
        // (container of a container); verify intent against the rewrite check below.
        if(containerService.getContainer(containerService.getContainer(source)).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) {
            request.setUserProject(session.getHost().getCredentials().getUsername());
        }
        // Copy a specific object generation when a version id is present.
        if(StringUtils.isNotBlank(source.attributes().getVersionId())) {
            request.setGeneration(Long.parseLong(source.attributes().getVersionId()));
        }
        final StorageObject storageObject = request.execute();
        if(null != status.getModified()) {
            storageObject.setCustomTime(new DateTime(status.getModified()));
        }
        final Storage.Objects.Rewrite rewrite = session.getClient().objects().rewrite(containerService.getContainer(source).getName(), containerService.getKey(source),
            containerService.getContainer(target).getName(), containerService.getKey(target), storageObject);
        if(containerService.getContainer(source).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) {
            rewrite.setUserProject(session.getHost().getCredentials().getUsername());
        }
        RewriteResponse response;
        do {
            response = rewrite.execute();
            // Include this field (from the previous rewrite response) on each rewrite request after the first one,
            // until the rewrite response 'done' flag is true.
            rewrite.setRewriteToken(response.getRewriteToken());
        }
        while(!response.getDone());
        listener.sent(status.getLength());
        return target.withAttributes(new GoogleStorageAttributesFinderFeature(session).toAttributes(response.getResource()));
    }
    catch(IOException e) {
        throw new GoogleStorageExceptionMappingService().map("Cannot copy {0}", e, source);
    }
}
@Test
public void testCopyFileZeroLength() throws Exception {
    // Copying a zero-byte object must succeed, keep the source intact and
    // carry the source's user metadata over to the copy.
    final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path test = new Path(container, new AsciiRandomStringService().random(), EnumSet.of(Path.Type.file));
    test.attributes().setSize(0L);
    new GoogleStorageTouchFeature(session).touch(test, new TransferStatus().withMime("application/cyberduck").withMetadata(Collections.singletonMap("cyberduck", "set")));
    final Path copy = new Path(container, new AsciiRandomStringService().random(), EnumSet.of(Path.Type.file));
    final GoogleStorageCopyFeature feature = new GoogleStorageCopyFeature(session);
    assertTrue(feature.isSupported(test, copy));
    feature.copy(test, copy, new TransferStatus(), new DisabledConnectionCallback(), new DisabledStreamListener());
    // Copy is non-destructive: the source still exists afterwards.
    assertTrue(new GoogleStorageFindFeature(session).find(test));
    new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
    assertTrue(new GoogleStorageFindFeature(session).find(copy));
    // Metadata set at touch time must survive the server-side copy.
    assertEquals("set", new GoogleStorageMetadataFeature(session).getMetadata(copy).get("cyberduck"));
    new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(copy), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
public Iterator<Optional<Page>> process(SqlFunctionProperties properties, DriverYieldSignal yieldSignal, LocalMemoryContext memoryContext, Page page) {
    // Build the work processor for this page and expose it as a yielding
    // iterator so the driver can cooperatively suspend long projections.
    return createWorkProcessor(properties, yieldSignal, memoryContext, page)
            .yieldingIterator();
}
@Test
public void testExpressionProfiler() {
    MetadataManager metadata = createTestMetadataManager();
    // Compile the projection "col0 + 10" over BIGINTs.
    CallExpression add10Expression = call(
        ADD.name(),
        metadata.getFunctionAndTypeManager().resolveOperator(ADD, fromTypes(BIGINT, BIGINT)),
        BIGINT,
        field(0, BIGINT),
        constant(10L, BIGINT));
    TestingTicker testingTicker = new TestingTicker();
    PageFunctionCompiler functionCompiler = new PageFunctionCompiler(metadata, 0);
    Supplier<PageProjection> projectionSupplier = functionCompiler.compileProjection(SESSION.getSqlFunctionProperties(), add10Expression, Optional.empty());
    PageProjection projection = projectionSupplier.get();
    Page page = new Page(createLongSequenceBlock(1, 11));
    ExpressionProfiler profiler = new ExpressionProfiler(testingTicker, SPLIT_RUN_QUANTA);
    for (int i = 0; i < 100; i++) {
        profiler.start();
        Work<List<Block>> work = projection.project(SESSION.getSqlFunctionProperties(), new DriverYieldSignal(), page, SelectedPositions.positionsRange(0, page.getPositionCount()));
        if (i < 10) {
            // increment the ticker with a large value to mark the expression as expensive
            testingTicker.increment(10, SECONDS);
            profiler.stop(page.getPositionCount());
            assertTrue(profiler.isExpressionExpensive());
        } else {
            // No elapsed time: the profiler must flip back to "cheap".
            testingTicker.increment(0, NANOSECONDS);
            profiler.stop(page.getPositionCount());
            assertFalse(profiler.isExpressionExpensive());
        }
        work.process();
    }
}
public static UUID fastUUID() {
    // Delegates to randomUUID(false) — the non-secure variant; presumably
    // backed by a faster, non-cryptographic random source (see randomUUID).
    final boolean isSecure = false;
    return randomUUID(isSecure);
}
@Test
public void fastUUIDTest(){
    // 100 concurrent generations must produce 100 distinct UUID strings.
    Set<String> set = new ConcurrentHashSet<>(100);
    ThreadUtil.concurrencyTest(100, ()-> set.add(UUID.fastUUID().toString()));
    assertEquals(100, set.size());
}
@Override
public void writeBytes(Slice source) {
    // Write the entire slice by delegating to the ranged overload.
    final int length = source.length();
    writeBytes(source, 0, length);
}
@Test
public void testWriteBytesEmptySlice() {
    // Writing the zero-length slice must be a no-op on the compressed output.
    OrcOutputBuffer orcOutputBuffer = createOrcOutputBuffer(new DataSize(256, KILOBYTE));
    orcOutputBuffer.writeBytes(EMPTY_SLICE); // EMPTY_SLICE has null byte buffer
    assertCompressedContent(orcOutputBuffer, new byte[0], ImmutableList.of());

    // The ranged overload with (offset 0, length 0) must behave identically.
    orcOutputBuffer = createOrcOutputBuffer(new DataSize(256, KILOBYTE));
    orcOutputBuffer.writeBytes(EMPTY_SLICE, 0, 0);
    assertCompressedContent(orcOutputBuffer, new byte[0], ImmutableList.of());
}
public Enumeration<String> getAttributeNames() {
    // Expose the keys of the parent request's context map as a legacy
    // servlet-style Enumeration.
    return Collections.enumeration(parent.context().keySet());
}
@Test
void testGetAttributeNames() {
    URI uri = URI.create("http://localhost:8080/test");
    HttpRequest httpReq = newRequest(uri, HttpRequest.Method.GET, HttpRequest.Version.HTTP_1_1);
    DiscFilterRequest request = new DiscFilterRequest(httpReq);
    request.setAttribute("some_attr_1", "some_value1");
    request.setAttribute("some_attr_2", "some_value2");

    // Both attribute keys set above must be enumerated (order not asserted).
    Enumeration<String> e = request.getAttributeNames();
    List<String> attrList = Collections.list(e);
    assertEquals(2, attrList.size());
    assertTrue(attrList.contains("some_attr_1"));
    assertTrue(attrList.contains("some_attr_2"));
}
@Override
public <R> HoodieData<HoodieRecord<R>> tagLocation(
    HoodieData<HoodieRecord<R>> records, HoodieEngineContext context,
    HoodieTable hoodieTable) {
  // Tag each partition of records with index locations via locationTagFunction;
  // the 'true' flag preserves the RDD's existing partitioning.
  return HoodieJavaRDD.of(HoodieJavaRDD.getJavaRDD(records)
      .mapPartitionsWithIndex(locationTagFunction(hoodieTable.getMetaClient()), true));
}
@Test
public void testTagLocationAndDuplicateUpdate() throws Exception {
  final String newCommitTime = "001";
  final int numRecords = 10;
  List<HoodieRecord> records = dataGen.generateInserts(newCommitTime, numRecords);
  JavaRDD<HoodieRecord> writeRecords = jsc().parallelize(records, 1);

  // Load to memory
  HoodieWriteConfig config = getConfig();
  SparkHoodieHBaseIndex index = new SparkHoodieHBaseIndex(config);
  try (SparkRDDWriteClient writeClient = getHoodieWriteClient(config)) {
    writeClient.startCommitWithTime(newCommitTime);
    metaClient = HoodieTableMetaClient.reload(metaClient);
    HoodieTable hoodieTable = HoodieSparkTable.create(config, context, metaClient);
    JavaRDD<WriteStatus> writeStatues = writeClient.upsert(writeRecords, newCommitTime);
    tagLocation(index, writeRecords, hoodieTable);

    // Duplicate upsert and ensure correctness is maintained
    // We are trying to approximately imitate the case when the RDD is recomputed. For RDD creating, driver code is not
    // recomputed. This includes the state transitions. We need to delete the inflight instance so that subsequent
    // upsert will not run into conflicts.
    metaClient.getStorage().deleteDirectory(
        new StoragePath(metaClient.getMetaPath(), "001.inflight"));
    writeClient.upsert(writeRecords, newCommitTime);
    assertNoWriteErrors(writeStatues.collect());

    // Now commit this & update location of records inserted and validate no errors
    writeClient.commit(newCommitTime, writeStatues);
    // Now tagLocation for these records, hbaseIndex should tag them correctly
    metaClient = HoodieTableMetaClient.reload(metaClient);
    hoodieTable = HoodieSparkTable.create(config, context, metaClient);
    List<HoodieRecord> taggedRecords = tagLocation(index, writeRecords, hoodieTable).collect();
    // Every record must be tagged, keys must be distinct, and each location
    // must point at the commit written above.
    assertEquals(numRecords, taggedRecords.stream().filter(HoodieRecord::isCurrentLocationKnown).count());
    assertEquals(numRecords, taggedRecords.stream().map(record -> record.getKey().getRecordKey()).distinct().count());
    assertEquals(numRecords, taggedRecords.stream().filter(record -> (record.getCurrentLocation() != null && record.getCurrentLocation().getInstantTime().equals(newCommitTime))).distinct().count());
  }
}
RequestQueue<WriteRequest> getWriteRequestQueue(FileIOChannel.ID channelID) {
    // Each channel is pinned to one writer thread via its thread number.
    final int writerIndex = channelID.getThreadNum();
    return writers[writerIndex].requestQueue;
}
@Test
void testExceptionPropagationWriter() throws Exception {
    // An exception thrown from write() must be forwarded to requestDone().
    // use atomic boolean as a boolean reference
    final AtomicBoolean handlerCalled = new AtomicBoolean();
    final AtomicBoolean exceptionForwarded = new AtomicBoolean();

    WriteRequest req = new WriteRequest() {
        @Override
        public void requestDone(IOException ioex) {
            if (ioex instanceof TestIOException) {
                exceptionForwarded.set(true);
            }
            synchronized (handlerCalled) {
                handlerCalled.set(true);
                handlerCalled.notifyAll();
            }
        }

        @Override
        public void write() throws IOException {
            throw new TestIOException();
        }
    };

    // test the read queue
    RequestQueue<WriteRequest> rq = ioManager.getWriteRequestQueue(ioManager.createChannel());
    rq.add(req);

    // wait until the asynchronous request has been handled
    synchronized (handlerCalled) {
        while (!handlerCalled.get()) {
            handlerCalled.wait();
        }
    }

    assertThat(exceptionForwarded).isTrue();
}
static JavaType constructType(Type type) {
    // Delegate to the real construction logic, wrapping any failure in a
    // data-table specific exception that names the offending type.
    try {
        return constructTypeInner(type);
    } catch (Exception e) {
        throw new InvalidDataTableTypeException(type, e);
    }
}
@Test
void should_provide_canonical_representation_of_map_object_object() {
    // The constructed JavaType must render the same canonical name as the
    // reflective Type it was built from.
    JavaType javaType = TypeFactory.constructType(MAP_OF_OBJECT_OBJECT);
    assertThat(javaType.getTypeName(), is(MAP_OF_OBJECT_OBJECT.getTypeName()));
}
public OpenAPI read(Class<?> cls) {
    // Entry point: scan the class with the resolved application path, null/false
    // defaults for the optional parent-context arguments, and fresh accumulator
    // collections for tags, parameters and visited resource classes.
    final LinkedHashSet<String> parentTags = new LinkedHashSet<>();
    final ArrayList<Parameter> parentParameters = new ArrayList<>();
    final HashSet<Class<?>> scannedResources = new HashSet<>();
    return read(cls, resolveApplicationPath(), null, false, null, null,
        parentTags, parentParameters, scannedResources);
}
@Test(description = "Responses with filter")
public void testParameterWithFilter() {
    // Register a reusable 'id' parameter under components.
    Components components = new Components();
    components.addParameters("id", new Parameter()
            .description("Id Description")
            .schema(new IntegerSchema())
            .in(ParameterIn.QUERY.toString())
            .example(1)
            .required(true));
    OpenAPI oas = new OpenAPI()
            .info(new Info().description("info"))
            .components(components);
    Reader reader = new Reader(oas);
    OpenAPI openAPI = reader.read(SimpleParameterResource.class);
    // The filter rewrites the operation's inline parameter into a $ref to the
    // shared components parameter.
    OpenAPISpecFilter filterImpl = new RefParameterFilter();
    SpecFilter f = new SpecFilter();
    openAPI = f.filter(openAPI, filterImpl, null, null, null);
    String yaml = "openapi: 3.0.1\n" +
            "info:\n" +
            " description: info\n" +
            "paths:\n" +
            " /:\n" +
            " get:\n" +
            " summary: Simple get operation\n" +
            " description: Defines a simple get operation with a payload complex input object\n" +
            " operationId: sendPayload\n" +
            " parameters:\n" +
            " - $ref: '#/components/parameters/id'\n" +
            " responses:\n" +
            " default:\n" +
            " description: default response\n" +
            " content:\n" +
            " '*/*': {}\n" +
            " deprecated: true\n" +
            "components:\n" +
            " parameters: \n" +
            " id:\n" +
            " in: query\n" +
            " description: Id Description\n" +
            " required: true\n" +
            " schema:\n" +
            " type: integer\n" +
            " format: int32\n" +
            " example: 1\n";
    SerializationMatchers.assertEqualsToYaml(openAPI, yaml);
}
public static String camelize(String s) {
    // Lower-case the input, split it on '_', and capitalize each word:
    // e.g. "JOB_SETUP" -> "JobSetup".
    final String[] words = split(StringUtils.toLowerCase(s), ESCAPE_CHAR, '_');
    final StringBuilder camelized = new StringBuilder();
    for (String word : words) {
        camelized.append(org.apache.commons.lang3.StringUtils.capitalize(word));
    }
    return camelized.toString();
}
@Test (timeout = 30000) public void testCamelize() { // common use cases assertEquals("Map", StringUtils.camelize("MAP")); assertEquals("JobSetup", StringUtils.camelize("JOB_SETUP")); assertEquals("SomeStuff", StringUtils.camelize("some_stuff")); // sanity checks for ascii alphabet against unexpected locale issues. assertEquals("Aa", StringUtils.camelize("aA")); assertEquals("Bb", StringUtils.camelize("bB")); assertEquals("Cc", StringUtils.camelize("cC")); assertEquals("Dd", StringUtils.camelize("dD")); assertEquals("Ee", StringUtils.camelize("eE")); assertEquals("Ff", StringUtils.camelize("fF")); assertEquals("Gg", StringUtils.camelize("gG")); assertEquals("Hh", StringUtils.camelize("hH")); assertEquals("Ii", StringUtils.camelize("iI")); assertEquals("Jj", StringUtils.camelize("jJ")); assertEquals("Kk", StringUtils.camelize("kK")); assertEquals("Ll", StringUtils.camelize("lL")); assertEquals("Mm", StringUtils.camelize("mM")); assertEquals("Nn", StringUtils.camelize("nN")); assertEquals("Oo", StringUtils.camelize("oO")); assertEquals("Pp", StringUtils.camelize("pP")); assertEquals("Qq", StringUtils.camelize("qQ")); assertEquals("Rr", StringUtils.camelize("rR")); assertEquals("Ss", StringUtils.camelize("sS")); assertEquals("Tt", StringUtils.camelize("tT")); assertEquals("Uu", StringUtils.camelize("uU")); assertEquals("Vv", StringUtils.camelize("vV")); assertEquals("Ww", StringUtils.camelize("wW")); assertEquals("Xx", StringUtils.camelize("xX")); assertEquals("Yy", StringUtils.camelize("yY")); assertEquals("Zz", StringUtils.camelize("zZ")); }
@Override
public long computePullFromWhereWithException(MessageQueue mq) throws MQClientException {
    // Resolve the initial consume offset for a queue according to the
    // consumer's ConsumeFromWhere policy. Returns a non-negative offset, or
    // throws when no usable offset can be determined.
    long result = -1;
    final ConsumeFromWhere consumeFromWhere = this.defaultMQPushConsumerImpl.getDefaultMQPushConsumer().getConsumeFromWhere();
    final OffsetStore offsetStore = this.defaultMQPushConsumerImpl.getOffsetStore();
    switch (consumeFromWhere) {
        // The deprecated MIN/MAX/boot-first variants fall through to the
        // LAST_OFFSET behavior.
        case CONSUME_FROM_LAST_OFFSET_AND_FROM_MIN_WHEN_BOOT_FIRST:
        case CONSUME_FROM_MIN_OFFSET:
        case CONSUME_FROM_MAX_OFFSET:
        case CONSUME_FROM_LAST_OFFSET: {
            long lastOffset = offsetStore.readOffset(mq, ReadOffsetType.READ_FROM_STORE);
            if (lastOffset >= 0) {
                result = lastOffset;
            }
            // First start,no offset
            else if (-1 == lastOffset) {
                if (mq.getTopic().startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) {
                    // Retry topics begin at offset 0.
                    result = 0L;
                } else {
                    try {
                        // Normal topics begin at the broker's current max offset.
                        result = this.mQClientFactory.getMQAdminImpl().maxOffset(mq);
                    } catch (MQClientException e) {
                        log.warn("Compute consume offset from last offset exception, mq={}, exception={}", mq, e);
                        throw e;
                    }
                }
            } else {
                throw new MQClientException(ResponseCode.QUERY_NOT_FOUND, "Failed to query consume offset from " + "offset store");
            }
            break;
        }
        case CONSUME_FROM_FIRST_OFFSET: {
            long lastOffset = offsetStore.readOffset(mq, ReadOffsetType.READ_FROM_STORE);
            if (lastOffset >= 0) {
                result = lastOffset;
            } else if (-1 == lastOffset) {
                //the offset will be fixed by the OFFSET_ILLEGAL process
                result = 0L;
            } else {
                throw new MQClientException(ResponseCode.QUERY_NOT_FOUND, "Failed to query offset from offset " + "store");
            }
            break;
        }
        case CONSUME_FROM_TIMESTAMP: {
            long lastOffset = offsetStore.readOffset(mq, ReadOffsetType.READ_FROM_STORE);
            if (lastOffset >= 0) {
                result = lastOffset;
            } else if (-1 == lastOffset) {
                if (mq.getTopic().startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX)) {
                    try {
                        // Retry topics fall back to the max offset rather than
                        // a timestamp search.
                        result = this.mQClientFactory.getMQAdminImpl().maxOffset(mq);
                    } catch (MQClientException e) {
                        log.warn("Compute consume offset from last offset exception, mq={}, exception={}", mq, e);
                        throw e;
                    }
                } else {
                    try {
                        // Translate the configured consume timestamp into an offset.
                        long timestamp = UtilAll.parseDate(this.defaultMQPushConsumerImpl.getDefaultMQPushConsumer().getConsumeTimestamp(),
                            UtilAll.YYYYMMDDHHMMSS).getTime();
                        result = this.mQClientFactory.getMQAdminImpl().searchOffset(mq, timestamp);
                    } catch (MQClientException e) {
                        log.warn("Compute consume offset from last offset exception, mq={}, exception={}", mq, e);
                        throw e;
                    }
                }
            } else {
                throw new MQClientException(ResponseCode.QUERY_NOT_FOUND, "Failed to query offset from offset " + "store");
            }
            break;
        }
        default:
            break;
    }
    // Unhandled policy or branch that failed to assign: treat as a system error.
    if (result < 0) {
        throw new MQClientException(ResponseCode.SYSTEM_ERROR, "Found unexpected result " + result);
    }
    return result;
}
@Test
public void testComputePullFromWhereWithException_eq_minus1_last() throws MQClientException {
    // No persisted offset (-1) under CONSUME_FROM_LAST_OFFSET: normal topics
    // resolve to the broker max offset, retry topics to 0.
    when(offsetStore.readOffset(any(MessageQueue.class), any(ReadOffsetType.class))).thenReturn(-1L);
    consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_LAST_OFFSET);
    when(admin.maxOffset(any(MessageQueue.class))).thenReturn(12345L);
    assertEquals(12345L, rebalanceImpl.computePullFromWhereWithException(mq));
    assertEquals(0L, rebalanceImpl.computePullFromWhereWithException(retryMq));
}
public static boolean isIPv6IPv4MappedAddress(final String input) {
    // An IPv4-mapped IPv6 address is the "::ffff:" prefix (case-insensitive)
    // followed by a dotted-quad IPv4 address, e.g. "::FFFF:192.168.1.2".
    if (input.length() <= SEVEN) {
        return false;
    }
    final String prefix = input.substring(ZERO, SEVEN);
    if (!prefix.equalsIgnoreCase(DOUBLE_COLON_FFFF)) {
        return false;
    }
    final String lowerPart = input.substring(SEVEN);
    return isIPv4Address(lowerPart);
}
@Test
void isIPv6IPv4MappedAddress() {
    // Missing the leading "::" invalidates the prefix; a correct
    // "::FFFF:<ipv4>" (any letter case) is accepted.
    assertFalse(InetAddressValidator.isIPv6IPv4MappedAddress(":ffff:1.1.1.1"));
    assertTrue(InetAddressValidator.isIPv6IPv4MappedAddress("::FFFF:192.168.1.2"));
}
@Override
public synchronized UdfFactory getUdfFactory(final FunctionName functionName) {
    // Lookup is case-insensitive: factories are keyed by the upper-cased name.
    final String key = functionName.text().toUpperCase();
    final UdfFactory factory = udfs.get(key);
    if (factory != null) {
        return factory;
    }
    throw new KsqlException(
        "Can't find any functions with the name '" + functionName.text() + "'");
}
@Test
public void shouldThrowExceptionIfNoFunctionsWithNameExist() {
    // When: looking up a function name that was never registered.
    final Exception e = assertThrows(
        KsqlException.class,
        () -> functionRegistry.getUdfFactory(of("foo_bar"))
    );

    // Then: the error message names the missing function.
    assertThat(e.getMessage(), containsString(
        "'foo_bar'"));
}
@Override
public void removeSink(McastRoute route, ConnectPoint connectPoint) {
    // Validate inputs up front, then record the sink removal in the store.
    checkNotNull(route, "Route cannot be null");
    checkNotNull(connectPoint, "Sink cannot be null");
    final McastStore.Type operation = McastStore.Type.REMOVE;
    store.storeSink(route, connectPoint, operation);
}
@Test
public void testRemoveSink() {
    // Build a route with one source and two sinks, then remove one sink.
    manager.addSource(r1, cp1);
    manager.addSink(r1, cp1);
    manager.addSink(r1, cp2);
    manager.removeSink(r1, cp2);
    // Events must arrive in the exact order of the mutations above.
    validateEvents(McastEvent.Type.SOURCE_ADDED, McastEvent.Type.SINK_ADDED,
                   McastEvent.Type.SINK_ADDED, McastEvent.Type.SINK_REMOVED);
    // Only the remaining sink is reported for the route.
    assertEquals("Route is not equal", Sets.newHashSet(cp1), manager.fetchSinks(r1));
}
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);

    // A CSC Measurement needs at least the flags byte.
    if (data.size() < 1) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Decode the new data
    int offset = 0;
    final int flags = data.getByte(offset);
    offset += 1;

    // Flag bit 0: wheel revolution data present (6 bytes);
    // flag bit 1: crank revolution data present (4 bytes).
    final boolean wheelRevPresent = (flags & 0x01) != 0;
    final boolean crankRevPreset = (flags & 0x02) != 0;

    // Re-validate the packet size against the fields the flags promise.
    if (data.size() < 1 + (wheelRevPresent ? 6 : 0) + (crankRevPreset ? 4 : 0)) {
        onInvalidDataReceived(device, data);
        return;
    }

    if (wheelRevPresent) {
        // Mask to treat the 32-bit counter as unsigned.
        final long wheelRevolutions = data.getIntValue(Data.FORMAT_UINT32_LE, offset) & 0xFFFFFFFFL;
        offset += 4;

        final int lastWheelEventTime = data.getIntValue(Data.FORMAT_UINT16_LE, offset); // 1/1024 s
        offset += 2;

        // Remember the first reported value as the baseline (negative means unset).
        if (mInitialWheelRevolutions < 0)
            mInitialWheelRevolutions = wheelRevolutions;

        // Notify listener about the new measurement
        onWheelMeasurementReceived(device, wheelRevolutions, lastWheelEventTime);
    }

    if (crankRevPreset) {
        final int crankRevolutions = data.getIntValue(Data.FORMAT_UINT16_LE, offset);
        offset += 2;

        final int lastCrankEventTime = data.getIntValue(Data.FORMAT_UINT16_LE, offset);
        // offset += 2;

        // Notify listener about the new measurement
        onCrankMeasurementReceived(device, crankRevolutions, lastCrankEventTime);
    }
}
@Test
public void onDistanceChanged() {
    // Feed two wheel-revolution packets one second apart and verify the
    // derived total distance, interval distance and speed callbacks.
    final DataReceivedCallback callback = new CyclingSpeedAndCadenceMeasurementDataCallback() {
        @Override
        public void onInvalidDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
            // Must never fire for well-formed packets.
            assertEquals("Correct CSC data reported as invalid", 1, 2);
        }

        @Override
        public void onDistanceChanged(@NonNull final BluetoothDevice device, final float totalDistance, final float distance, final float speed) {
            assertEquals("Total distance", 2 * 23.4f, totalDistance, 0.01);
            assertEquals("Distance", 23.4, distance, 0.01);
            assertEquals("Speed", 23.4f, speed, 0.01);
        }

        @Override
        public void onCrankDataChanged(@NonNull final BluetoothDevice device, final float crankCadence, final float gearRatio) {
            // Crank data flag is not set, so this must never fire.
            assertEquals("Crank data not available and reported", 1, 2);
        }
    };

    final MutableData data = new MutableData(new byte[7]);
    // Flags
    assertTrue(data.setByte(0x01, 0));
    // Wheel revolutions
    assertTrue(data.setValue(10, Data.FORMAT_UINT32_LE, 1));
    assertTrue(data.setValue(0, Data.FORMAT_UINT16_LE, 5));
    callback.onDataReceived(null, data);
    // Update wheel revolutions
    assertTrue(data.setValue(20, Data.FORMAT_UINT32_LE, 1));
    assertTrue(data.setValue(1024, Data.FORMAT_UINT16_LE, 5)); // 1 second
    callback.onDataReceived(null, data);
}
@Override
public Endpoint<Http2LocalFlowController> local() {
    // Accessor for this peer's (local) connection endpoint.
    return localEndpoint;
}
@SuppressWarnings("NumericOverflow")
@Test
public void localStreamInvalidStreamIdShouldThrow() {
    // MAX_VALUE + 2 deliberately overflows to an invalid (negative) stream id,
    // which the local endpoint must reject.
    assertThrows(Http2Exception.class, new Executable() {
        @Override
        public void execute() throws Throwable {
            client.local().createStream(MAX_VALUE + 2, false);
        }
    });
}
public void update(final Account account) throws ContestedOptimisticLockException {
    // Synchronous wrapper: run the async update and surface its failure
    // (e.g. a contested optimistic lock) as an unwrapped exception.
    joinAndUnwrapUpdateFuture(updateAsync(account));
}
@Test
void testUpdate() {
    // Exercises update(): happy path, unknown account rejection, version
    // bumping and optimistic-lock contention on stale versions.
    Device device = generateDevice(DEVICE_ID_1);
    Account account = generateAccount("+14151112222", UUID.randomUUID(), UUID.randomUUID(), List.of(device));

    createAccount(account);

    assertPhoneNumberConstraintExists("+14151112222", account.getUuid());
    assertPhoneNumberIdentifierConstraintExists(account.getPhoneNumberIdentifier(), account.getUuid());

    device.setName("foobar".getBytes(StandardCharsets.UTF_8));

    accounts.update(account);

    assertPhoneNumberConstraintExists("+14151112222", account.getUuid());
    assertPhoneNumberIdentifierConstraintExists(account.getPhoneNumberIdentifier(), account.getUuid());

    Optional<Account> retrieved = accounts.getByE164("+14151112222");

    assertThat(retrieved.isPresent()).isTrue();
    verifyStoredState("+14151112222", account.getUuid(), account.getPhoneNumberIdentifier(), null, retrieved.get(), account);

    retrieved = accounts.getByAccountIdentifier(account.getUuid());

    assertThat(retrieved.isPresent()).isTrue();
    verifyStoredState("+14151112222", account.getUuid(), account.getPhoneNumberIdentifier(), null, account, true);

    // Updating an account that was never created must fail the conditional write.
    device = generateDevice(DEVICE_ID_1);
    Account unknownAccount = generateAccount("+14151113333", UUID.randomUUID(), UUID.randomUUID(), List.of(device));

    assertThatThrownBy(() -> accounts.update(unknownAccount)).isInstanceOfAny(ConditionalCheckFailedException.class);

    accounts.update(account);

    // Each successful update bumps the optimistic-locking version.
    assertThat(account.getVersion()).isEqualTo(2);

    verifyStoredState("+14151112222", account.getUuid(), account.getPhoneNumberIdentifier(), null, account, true);

    // A stale version must be rejected with a contested-lock error.
    account.setVersion(1);

    assertThatThrownBy(() -> accounts.update(account)).isInstanceOfAny(ContestedOptimisticLockException.class);

    account.setVersion(2);

    accounts.update(account);

    verifyStoredState("+14151112222", account.getUuid(), account.getPhoneNumberIdentifier(), null, account, true);
}
@Override
@MethodNotAvailable
public Object executeOnKey(K key, com.hazelcast.map.EntryProcessor entryProcessor) {
    // This adapter does not support entry processors; fail fast on any call.
    throw new MethodNotAvailableException();
}
@Test(expected = MethodNotAvailableException.class)
public void testExecuteOnKey() {
    // The adapter advertises executeOnKey as unavailable; any call must throw.
    adapter.executeOnKey(23, new IMapReplaceEntryProcessor("value", "newValue"));
}
@SuppressWarnings("java:S108")
public static void closeQuietly(AutoCloseable closeable) {
    // Best-effort close: tolerate a null resource and swallow any failure
    // raised by close(), since callers have nothing useful to do with it.
    if (closeable != null) {
        try {
            closeable.close();
        } catch (Exception ignore) {
            // deliberately ignored: a "quiet" close must never throw
        }
    }
}
@Test
public void test_closeQuietly_whenException() throws Exception {
    // close() failures must be swallowed, while close() itself is still
    // invoked exactly once.
    Closeable closeable = mock(Closeable.class);
    doThrow(new IOException("expected")).when(closeable).close();

    closeQuietly(closeable);

    verify(closeable).close();
    verifyNoMoreInteractions(closeable);
}
@Override
public List<StreamStateHandle> duplicate(List<StreamStateHandle> stateHandles) throws IOException {
    // Phase 1: translate every source handle into a copy request targeting a
    // fresh destination path. Only physical file handles can be duplicated.
    final List<CopyRequest> requests = new ArrayList<>();
    for (StreamStateHandle handle : stateHandles) {
        if (!(handle instanceof FileStateHandle)) {
            throw new IllegalArgumentException("We can duplicate only FileStateHandles.");
        }
        final Path srcPath = ((FileStateHandle) handle).getFilePath();
        requests.add(CopyRequest.of(srcPath, getNewDstPath(srcPath.getName())));
    }

    // Phase 2: perform all copies in a single batch.
    fs.copyFiles(requests, new CloseableRegistry());

    // Phase 3: rebuild handles pointing at the destinations, preserving the
    // relative/absolute flavour of each original handle.
    final List<StreamStateHandle> duplicated = new ArrayList<>(stateHandles.size());
    for (int idx = 0; idx < stateHandles.size(); idx++) {
        final StreamStateHandle originalHandle = stateHandles.get(idx);
        final Path dst = requests.get(idx).getDestination();
        if (originalHandle instanceof RelativeFileStateHandle) {
            duplicated.add(
                new RelativeFileStateHandle(dst, dst.getName(), originalHandle.getStateSize()));
        } else {
            duplicated.add(new FileStateHandle(dst, originalHandle.getStateSize()));
        }
    }
    return duplicated;
}
@Test
void testDuplicating() throws IOException {
    final TestDuplicatingFileSystem fs = new TestDuplicatingFileSystem();
    final FsCheckpointStateToolset stateToolset =
        new FsCheckpointStateToolset(new Path("test-path"), fs);
    // Duplicating absolute and relative handles must retarget them under the
    // toolset's base path while preserving each handle's flavour.
    final List<StreamStateHandle> duplicated =
        stateToolset.duplicate(
            Arrays.asList(
                new FileStateHandle(new Path("old-test-path", "test-file1"), 0),
                new FileStateHandle(new Path("old-test-path", "test-file2"), 0),
                new RelativeFileStateHandle(
                    new Path("old-test-path", "test-file3"), "test-file3", 0)));

    assertThat(duplicated)
        .containsExactly(
            new FileStateHandle(new Path("test-path", "test-file1"), 0),
            new FileStateHandle(new Path("test-path", "test-file2"), 0),
            new RelativeFileStateHandle(
                new Path("test-path", "test-file3"), "test-file3", 0));
}
@Override
public Hedge hedge(final String name) {
    // Convenience overload: use the registry's default configuration and no tags.
    return hedge(name, getDefaultConfig(), emptyMap());
}
@Test
public void hedgeNewWithNullName() {
    // A null hedge name must be rejected with an NPE carrying the standard message.
    exception.expect(NullPointerException.class);
    exception.expectMessage(NAME_MUST_NOT_BE_NULL);
    HedgeRegistry registry = HedgeRegistry.builder().withDefaultConfig(config).build();
    registry.hedge(null);
}
@Override
public void validateDictDataList(String dictType, Collection<String> values) {
    // Nothing to validate for an empty/null value collection.
    if (CollUtil.isEmpty(values)) {
        return;
    }
    // Load all matching dict-data rows in one query and index them by value.
    Map<String, DictDataDO> dictDataMap = CollectionUtils.convertMap(
            dictDataMapper.selectByDictTypeAndValues(dictType, values), DictDataDO::getValue);
    // Validate: every value must exist and be enabled.
    values.forEach(value -> {
        DictDataDO dictData = dictDataMap.get(value);
        if (dictData == null) {
            throw exception(DICT_DATA_NOT_EXISTS);
        }
        if (!CommonStatusEnum.ENABLE.getStatus().equals(dictData.getStatus())) {
            throw exception(DICT_DATA_NOT_ENABLE, dictData.getLabel());
        }
    });
}
// A dict data row that exists but is disabled must fail validation with DICT_DATA_NOT_ENABLE.
@Test
public void testValidateDictDataList_notEnable() {
    // mock data
    DictDataDO dictDataDO = randomDictDataDO().setStatus(CommonStatusEnum.DISABLE.getStatus());
    dictDataMapper.insert(dictDataDO);
    // prepare parameters
    String dictType = dictDataDO.getDictType();
    List<String> values = singletonList(dictDataDO.getValue());
    // invoke and assert the expected service exception
    assertServiceException(() -> dictDataService.validateDictDataList(dictType, values),
            DICT_DATA_NOT_ENABLE, dictDataDO.getLabel());
}
// Loads a single post by primary key; returns null when no such row exists
// (MyBatis-Plus selectById semantics).
@Override
public PostDO getPost(Long id) {
    return postMapper.selectById(id);
}
// Inserting a row and reading it back by id must return an equal object.
@Test
public void testGetPost() {
    // mock data
    PostDO dbPostDO = randomPostDO();
    postMapper.insert(dbPostDO);
    // prepare parameters
    Long id = dbPostDO.getId();
    // invoke
    PostDO post = postService.getPost(id);
    // assert
    assertNotNull(post);
    assertPojoEquals(dbPostDO, post);
}
/**
 * Parses the given schema string into a {@code Type}.
 *
 * @param schema the textual schema to parse
 * @return the parsed type
 * @throws KsqlStatementException if the schema cannot be parsed
 */
public Type parse(final String schema) {
    try {
        // Build the ANTLR parse context and immediately convert it to a Type.
        return getType(parseTypeContext(schema));
    } catch (final ParsingException e) {
        // Wrap parser errors with both a short (logged) and detailed (unlogged) message.
        throw new KsqlStatementException(
                "Failed to parse schema",
                "Failed to parse: " + schema,
                schema,
                KsqlStatementException.Problem.STATEMENT,
                e
        );
    }
}
// A malformed STRUCT declaration must produce a KsqlStatementException whose logged
// message is generic and whose unlogged message contains the offending statement.
@Test
public void shouldThrowMeaningfulErrorOnBadStructDeclaration() {
    // Given:
    final String schemaString = "STRUCT<foo VARCHAR,>";

    // When:
    final KsqlStatementException e = assertThrows(
            KsqlStatementException.class,
            () -> parser.parse(schemaString)
    );

    // Then:
    System.out.println(e.getMessage());
    assertThat(e.getUnloggedMessage(), is(
            "Failed to parse: STRUCT<foo VARCHAR,>\nStatement: STRUCT<foo VARCHAR,>"
    ));
    assertThat(e.getMessage(), is(
            "Failed to parse schema"
    ));
    assertThat(e.getCause().getMessage(), is(
            "line 1:20: Syntax error at line 1:20"
    ));
}
/**
 * Creates a {@link PathMatcher} for the given {@code syntax:pattern} string,
 * using this file system's separators and the normalizations that match its
 * equality semantics.
 */
public PathMatcher createPathMatcher(String syntaxAndPattern) {
    // Primary separator plus any alternates recognized by this file-system type.
    final String separators = type.getSeparator() + type.getOtherSeparators();
    // Canonical-form equality uses the canonical normalizations; otherwise display ones.
    if (equalityUsesCanonicalForm) {
        return PathMatchers.getPathMatcher(syntaxAndPattern, separators, canonicalNormalizations);
    }
    return PathMatchers.getPathMatcher(syntaxAndPattern, separators, displayNormalizations);
}
// Both "regex:" and "glob:" syntaxes should resolve to a RegexPathMatcher
// (the assertions show glob patterns are backed by the regex matcher too).
@Test
public void testPathMatcher() {
    assertThat(service.createPathMatcher("regex:foo"))
            .isInstanceOf(PathMatchers.RegexPathMatcher.class);
    assertThat(service.createPathMatcher("glob:foo"))
            .isInstanceOf(PathMatchers.RegexPathMatcher.class);
}
/**
 * Builds {@code CreateSourceProperties} from the supplied literal map.
 *
 * @param literals property name to literal value map
 * @return the parsed properties
 * @throws KsqlException if a property value is invalid; the underlying config error
 *         message is re-worded to say "property" instead of "configuration"
 */
public static CreateSourceProperties from(final Map<String, Literal> literals) {
    try {
        return new CreateSourceProperties(literals, DurationParser::parse, false);
    } catch (final ConfigException e) {
        throw new KsqlException(e.getMessage().replace("configuration", "property"), e);
    }
}
// An unrecognized WINDOW_TYPE value must be rejected with a message listing the
// allowed window types.
@Test
public void shouldThrowOnConstructionOnUnknownWindowType() {
    // Given:
    final Map<String, Literal> props = ImmutableMap.<String, Literal>builder()
            .putAll(MINIMUM_VALID_PROPS)
            .put(CreateConfigs.WINDOW_TYPE_PROPERTY, new StringLiteral("Unknown"))
            .build();

    // When:
    final Exception e = assertThrows(
            KsqlException.class,
            () -> CreateSourceProperties.from(props)
    );

    // Then:
    assertThat(e.getMessage(), containsString("Invalid value Unknown for property WINDOW_TYPE: "
            + "String must be one of: SESSION, HOPPING, TUMBLING, null"));
}
/**
 * Returns one result row (table name, data source name) per matching single-table
 * data node, sorted by table name. When the statement carries a LIKE pattern the
 * nodes are filtered by it; otherwise plain statement-based filtering applies.
 */
@Override
public Collection<LocalDataQueryResultRow> getRows(final ShowSingleTableStatement sqlStatement, final ContextManager contextManager) {
    // All single-table data nodes known to the rule.
    DataNodeRuleAttribute attribute = rule.getAttributes().getAttribute(DataNodeRuleAttribute.class);
    Collection<DataNode> resultDataNodes = getPattern(sqlStatement)
            .map(optional -> getDataNodesWithLikePattern(attribute.getAllDataNodes(), optional))
            .orElseGet(() -> getDataNodes(attribute.getAllDataNodes(), sqlStatement));
    // Sort by table name, then project each node into a (tableName, dataSourceName) row.
    return resultDataNodes.stream()
            .sorted(Comparator.comparing(DataNode::getTableName))
            .map(each -> new LocalDataQueryResultRow(each.getTableName(), each.getDataSourceName()))
            .collect(Collectors.toList());
}
// A "%item" LIKE literal should match exactly the t_order_item table on ds_2.
@Test
void assertGetSingleTableWithLikeLiteral() throws SQLException {
    engine = setUp(new ShowSingleTableStatement(null, "%item", null));
    engine.executeQuery();
    Collection<LocalDataQueryResultRow> actual = engine.getRows();
    assertThat(actual.size(), is(1));
    Iterator<LocalDataQueryResultRow> iterator = actual.iterator();
    LocalDataQueryResultRow row = iterator.next();
    assertThat(row.getCell(1), is("t_order_item"));
    assertThat(row.getCell(2), is("ds_2"));
}
// evict() is not supported by this adapter; it always throws
// MethodNotAvailableException (see the @MethodNotAvailable marker).
@Override
@MethodNotAvailable
public boolean evict(K key) {
    throw new MethodNotAvailableException();
}
// Calling the unsupported evict() must raise MethodNotAvailableException.
@Test(expected = MethodNotAvailableException.class)
public void testEvict() {
    adapter.evict(23);
}
/**
 * Converts a nanosecond-scaled {@link BigDecimal} offset into a {@code Timestamp}.
 * The value is split into whole seconds and a sub-second nano remainder; the
 * seconds part is shifted by {@code MIN_SECONDS}.
 */
public static Timestamp toTimestamp(BigDecimal bigDecimal) {
    // 10^9: one second expressed in nanoseconds.
    final BigDecimal oneSecondInNanos = BigDecimal.ONE.scaleByPowerOfTen(9);
    final BigDecimal nanos = bigDecimal.remainder(oneSecondInNanos);
    final BigDecimal seconds = bigDecimal.subtract(nanos).scaleByPowerOfTen(-9).add(MIN_SECONDS);
    return Timestamp.ofTimeSecondsAndNanos(seconds.longValue(), nanos.intValue());
}
// The largest representable nano offset must round-trip to Timestamp.MAX_VALUE.
@Test
public void testToTimestampConvertNanosToTimestampMax() {
    assertEquals(
            Timestamp.MAX_VALUE,
            TimestampUtils.toTimestamp(new BigDecimal("315537897599999999999")));
}
/**
 * Adds a classpath element under the given name, creating the element list on
 * first use. Fails if this config has already been locked.
 *
 * @param name logical name grouping the classpath elements
 * @param path classpath element to append
 * @return this config, for chaining
 */
@Nonnull
@Beta
public JobConfig addCustomClasspath(@Nonnull String name, @Nonnull String path) {
    throwIfLocked();
    // Create the per-name list lazily, then append.
    customClassPaths.computeIfAbsent(name, k -> new ArrayList<>()).add(path);
    return this;
}
// Two additions under the same name must accumulate into one ordered list.
@Test
public void addCustomClasspath() {
    JobConfig jobConfig = new JobConfig();
    jobConfig.addCustomClasspath("test", "url1");
    jobConfig.addCustomClasspath("test", "url2");

    assertThat(jobConfig.getCustomClassPaths()).containsValue(
            newArrayList("url1", "url2")
    );
}
/**
 * Dumps all JVM system properties into a protobuf section, sorted
 * alphabetically by key. Null-valued properties are skipped.
 */
@Override
public ProtobufSystemInfo.Section toProtobuf() {
    ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder();
    protobuf.setName(name);
    // A TreeMap snapshot yields a stable, alphabetically ordered iteration.
    for (Map.Entry<Object, Object> systemProp : new TreeMap<>(System.getProperties()).entrySet()) {
        Object value = systemProp.getValue();
        if (value != null) {
            setAttribute(protobuf, Objects.toString(systemProp.getKey()), Objects.toString(value));
        }
    }
    return protobuf.build();
}
// The attribute keys must already be sorted (comparing against a freshly sorted copy).
@Test
public void system_properties_are_returned_in_alphabetical_order() {
    ProtobufSystemInfo.Section section = underTest.toProtobuf();

    List<String> keys = section.getAttributesList()
            .stream()
            .map(ProtobufSystemInfo.Attribute::getKey)
            .toList();
    assertThat(keys).contains("java.vm.vendor", "os.name");

    List<String> sortedKeys = new ArrayList<>(keys);
    Collections.sort(sortedKeys);
    assertThat(sortedKeys).isEqualTo(keys);
}
// Builds a number Formatter for the given format template string.
// NOTE(review): the template syntax (e.g. "999th", "RN" in tests) appears to follow
// SQL to_char-style patterns — confirm against NumberFormat's documentation.
public static Formatter forNumbers(@Nonnull String format) {
    return new NumberFormat(format);
}
// Format features (FM, ordinal suffixes, sign, V-scaling, RN) must compose independently.
@Test
public void testFeatureOrthogonality() {
    Formatter f = forNumbers("FM999th +.99");
    check(421.35, f, "421st +.35");

    f = forNumbers("FM999.99th");
    check(421.35, f, "421.35st");

    f = forNumbers("FM999V99 -> RN");
    check(3.14, f, "314 -> CCCXIV");
}
/**
 * Reports the Elasticsearch cluster state as a protobuf section. If fetching the
 * full attributes fails, the section degrades to a single "State" attribute
 * carrying the most relevant error message.
 */
@Override
public ProtobufSystemInfo.Section toProtobuf() {
    ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder();
    protobuf.setName("Search State");
    try {
        setAttribute(protobuf, "State", getStateAsEnum().name());
        completeNodeAttributes(protobuf);
    } catch (Exception es) {
        LoggerFactory.getLogger(EsStateSection.class).warn("Failed to retrieve ES attributes. There will be only a single \"state\" attribute.", es);
        // Prefer the ES cause's message when the cause is an ElasticsearchException;
        // otherwise fall back to the wrapping exception's own message.
        final String stateMessage;
        if (es.getCause() instanceof ElasticsearchException) {
            stateMessage = es.getCause().getMessage();
        } else {
            stateMessage = es.getMessage();
        }
        setAttribute(protobuf, "State", stateMessage);
    }
    return protobuf.build();
}
// When the failure's cause is not an ElasticsearchException, the wrapper's own
// message (not the cause's) must be reported as the State attribute.
@Test
public void attributes_displays_exception_message_when_cause_is_not_ElasticSearchException_when_client_fails() {
    EsClient esClientMock = mock(EsClient.class);
    EsStateSection underTest = new EsStateSection(esClientMock);
    when(esClientMock.clusterHealth(any())).thenThrow(
            new RuntimeException("RuntimeException with cause not ES",
                    new IllegalArgumentException("some cause message")));

    ProtobufSystemInfo.Section section = underTest.toProtobuf();

    assertThatAttributeIs(section, "State", "RuntimeException with cause not ES");
}
/**
 * Coerces the given value toward the required DMN type. When either the type or
 * the value is null, the value is returned untouched.
 */
public static Object coerceValue(DMNType requiredType, Object valueToCoerce) {
    // Coercion only makes sense when both a target type and a value are present.
    if (requiredType == null || valueToCoerce == null) {
        return valueToCoerce;
    }
    return actualCoerceValue(requiredType, valueToCoerce);
}
// Coercing a LocalDate to a "date and time" DMN type must yield a ZonedDateTime
// at midnight UTC on the same date.
@Test
void coerceValueDateToDateTimeConverted() {
    Object value = LocalDate.now();
    DMNType requiredType = new SimpleTypeImpl("http://www.omg.org/spec/DMN/20180521/FEEL/",
            "date and time",
            null,
            false,
            null,
            null,
            null,
            BuiltInType.DATE_TIME);
    Object retrieved = CoerceUtil.coerceValue(requiredType, value);
    assertNotNull(retrieved);
    assertTrue(retrieved instanceof ZonedDateTime);
    ZonedDateTime zdtRetrieved = (ZonedDateTime) retrieved;
    assertEquals(value, zdtRetrieved.toLocalDate());
    assertEquals(ZoneOffset.UTC, zdtRetrieved.getOffset());
    assertEquals(0, zdtRetrieved.getHour());
    assertEquals(0, zdtRetrieved.getMinute());
    assertEquals(0, zdtRetrieved.getSecond());
}
// Exposes ShenyuConfig as a Spring bean, bound from configuration properties
// under the "shenyu" prefix.
@Bean
@ConfigurationProperties(prefix = "shenyu")
public ShenyuConfig shenyuConfig() {
    return new ShenyuConfig();
}
// The application context must contain a non-null bean named "shenyuConfig".
@Test
public void testShenyuConfig() {
    applicationContextRunner.run(context -> {
                ShenyuConfig config = context.getBean("shenyuConfig", ShenyuConfig.class);
                assertNotNull(config);
            }
    );
}
/**
 * Builds the timestamped key-value store: adapts plain byte stores to the
 * timestamped format, then layers logging, caching, and metering wrappers.
 */
@Override
public TimestampedKeyValueStore<K, V> build() {
    KeyValueStore<Bytes, byte[]> store = storeSupplier.get();
    if (!(store instanceof TimestampedBytesStore)) {
        // Persistent stores get a byte-format adapter; in-memory stores get a marker wrapper.
        store = store.persistent()
                ? new KeyValueToTimestampedKeyValueByteStoreAdapter(store)
                : new InMemoryTimestampedKeyValueStoreMarker(store);
    }
    return new MeteredTimestampedKeyValueStore<>(
            maybeWrapCaching(maybeWrapLogging(store)),
            storeSupplier.metricsScope(),
            time,
            keySerde,
            valueSerde);
}
// By default the built store must be metered on the outside and change-logging
// immediately underneath.
@Test
public void shouldHaveChangeLoggingStoreByDefault() {
    setUp();
    final TimestampedKeyValueStore<String, String> store = builder.build();
    assertThat(store, instanceOf(MeteredTimestampedKeyValueStore.class));
    final StateStore next = ((WrappedStateStore) store).wrapped();
    assertThat(next, instanceOf(ChangeLoggingTimestampedKeyValueBytesStore.class));
}
// Applies the script's GSUB substitution features to the glyph sequence in the
// canonical feature order, returning an unmodifiable transformed list.
@Override
public List<Integer> applyTransforms(List<Integer> originalGlyphIds) {
    // Pre-feature reordering: adjust reph position, then reposition glyphs.
    List<Integer> intermediateGlyphsFromGsub = adjustRephPosition(originalGlyphIds);
    intermediateGlyphsFromGsub = repositionGlyphs(intermediateGlyphsFromGsub);
    for (String feature : FEATURES_IN_ORDER) {
        if (!gsubData.isFeatureSupported(feature)) {
            // Special case: a missing rkrf feature can be synthesized from vatu.
            if (feature.equals(RKRF_FEATURE) && gsubData.isFeatureSupported(VATU_FEATURE)) {
                // Create your own rkrf feature from vatu feature
                intermediateGlyphsFromGsub = applyRKRFFeature(
                        gsubData.getFeature(VATU_FEATURE),
                        intermediateGlyphsFromGsub);
            }
            LOG.debug("the feature {} was not found", feature);
            continue;
        }

        LOG.debug("applying the feature {}", feature);

        ScriptFeature scriptFeature = gsubData.getFeature(feature);
        intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature, intermediateGlyphsFromGsub);
    }
    return Collections.unmodifiableList(intermediateGlyphsFromGsub);
}
// Currently disabled: checks the below-base substitution (blws) glyph output
// for a Devanagari sample string.
@Disabled
@Test
void testApplyTransforms_blws() {
    // given
    List<Integer> glyphsAfterGsub = Arrays.asList(660, 663, 336, 584, 336, 583);
    // when
    List<Integer> result = gsubWorkerForDevanagari.applyTransforms(getGlyphIds("दृहृट्रूट्रु"));
    // then
    assertEquals(glyphsAfterGsub, result);
}
/**
 * Selects the production-exception handler class: fail-fast when
 * {@code failOnError} is true, log-and-continue otherwise.
 */
public static Class<?> getHandler(final boolean failOnError) {
    if (failOnError) {
        return LogAndFailProductionExceptionHandler.class;
    }
    return LogAndContinueProductionExceptionHandler.class;
}
// failOnError=true must map to the log-and-fail handler class.
@Test
public void shouldReturnLogAndFailHandler() {
    assertThat(
            ProductionExceptionHandlerUtil.getHandler(true),
            equalTo(LogAndFailProductionExceptionHandler.class));
}
// Maps raw discovery-strategy properties onto their definitions: resolves keys that
// were supplied without hyphens, applies each definition's type conversion and
// optional validation, and rejects both missing mandatory properties and unknown
// leftover properties. NOTE: intentionally mutates the input map while renaming keys.
static Map<String, Comparable> prepareProperties(Map<String, Comparable> properties,
                                                 Collection<PropertyDefinition> propertyDefinitions) {
    Map<String, Comparable> mappedProperties = createHashMap(propertyDefinitions.size());
    for (PropertyDefinition propertyDefinition : propertyDefinitions) {
        String propertyKey = propertyDefinition.key();
        // Accept a key given without hyphens and rename it in-place to the canonical key.
        if (properties.containsKey(propertyKey.replace("-", ""))) {
            properties.put(propertyKey, properties.remove(propertyKey.replace("-", "")));
        }
        if (!properties.containsKey(propertyKey)) {
            // Mandatory property absent -> configuration error.
            if (!propertyDefinition.optional()) {
                throw new InvalidConfigurationException(
                        String.format("Missing property '%s' on discovery strategy", propertyKey));
            }
            continue;
        }
        Comparable value = properties.get(propertyKey);
        // Convert the raw value to the definition's expected type.
        TypeConverter typeConverter = propertyDefinition.typeConverter();
        Comparable mappedValue = typeConverter.convert(value);
        // Validate only when the definition provides a validator.
        ValueValidator validator = propertyDefinition.validator();
        if (validator != null) {
            validator.validate(mappedValue);
        }
        mappedProperties.put(propertyKey, mappedValue);
    }
    // Anything left in 'properties' that was never mapped is unknown -> reject.
    verifyNoUnknownProperties(mappedProperties, properties);
    return mappedProperties;
}
// A null raw value must still be passed through the type converter, whose
// result is what ends up in the prepared map.
@Test
public void nullProperty() {
    // given
    Map<String, Comparable> properties = new HashMap<>(singletonMap(PROPERTY_KEY_1, null));

    TypeConverter typeConverter = new TypeConverter() {
        @Override
        public Comparable convert(Comparable value) {
            return value == null ? "hazel" : "cast";
        }
    };
    Collection<PropertyDefinition> propertyDefinitions = singletonList(
            new SimplePropertyDefinition(PROPERTY_KEY_1, true, typeConverter));

    // when
    Map<String, Comparable> result = prepareProperties(properties, propertyDefinitions);

    // then
    assertEquals("hazel", result.get(PROPERTY_KEY_1));
}
// Reads the ACL for a bucket (container) or object. Failures are mapped to
// backend exceptions, with two deliberate fallbacks that return an empty ACL
// instead of failing.
@Override
public Acl getPermission(final Path file) throws BackgroundException {
    try {
        final Acl acl = new Acl();
        if (containerService.isContainer(file)) {
            // Bucket-level ACL: list the bucket's access controls.
            final BucketAccessControls controls = session.getClient().bucketAccessControls().list(
                    containerService.getContainer(file).getName()).execute();
            for (BucketAccessControl control : controls.getItems()) {
                final String entity = control.getEntity();
                acl.addAll(this.toUser(entity, control.getEmail()), new Acl.Role(control.getRole()));
            }
        } else {
            // Object-level ACL: list the access controls for the object key.
            final ObjectAccessControls controls = session.getClient().objectAccessControls().list(containerService.getContainer(file).getName(), containerService.getKey(file)).execute();
            for (ObjectAccessControl control : controls.getItems()) {
                final String entity = control.getEntity();
                acl.addAll(this.toUser(entity, control.getEmail()), this.toRole(control));
            }
        }
        return acl;
    } catch (IOException e) {
        final BackgroundException failure = new GoogleStorageExceptionMappingService().map("Failure to read attributes of {0}", e, file);
        if (file.isDirectory()) {
            if (failure instanceof NotfoundException) {
                // No placeholder file may exist but we just have a common prefix
                return Acl.EMPTY;
            }
        }
        if (failure instanceof InteroperabilityException) {
            // The specified method is not allowed against this resource. The case for delete markers in versioned buckets.
            return Acl.EMPTY;
        }
        throw failure;
    }
}
// Integration test: a freshly created nested object must have a readable
// (non-null) ACL; the object is deleted afterwards.
@Test
public void testReadWithDelimiter() throws Exception {
    final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory));
    final Path test = new GoogleStorageTouchFeature(session).touch(new Path(new Path(container,
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)),
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    final GoogleStorageAccessControlListFeature f = new GoogleStorageAccessControlListFeature(session);
    assertNotNull(f.getPermission(test));
    new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
public static String[] split(String splittee, String splitChar, boolean truncate) { //NOSONAR if (splittee == null || splitChar == null) { return new String[0]; } final String EMPTY_ELEMENT = ""; int spot; final int splitLength = splitChar.length(); final String adjacentSplit = splitChar + splitChar; final int adjacentSplitLength = adjacentSplit.length(); if (truncate) { while ((spot = splittee.indexOf(adjacentSplit)) != -1) { splittee = splittee.substring(0, spot + splitLength) + splittee.substring(spot + adjacentSplitLength, splittee.length()); } if (splittee.startsWith(splitChar)) { splittee = splittee.substring(splitLength); } if (splittee.endsWith(splitChar)) { // Remove trailing splitter splittee = splittee.substring(0, splittee.length() - splitLength); } } List<String> returns = new ArrayList<>(); final int length = splittee.length(); // This is the new length int start = 0; spot = 0; while (start < length && (spot = splittee.indexOf(splitChar, start)) > -1) { if (spot > 0) { returns.add(splittee.substring(start, spot)); } else { returns.add(EMPTY_ELEMENT); } start = spot + splitLength; } if (start < length) { returns.add(splittee.substring(start)); } else if (spot == length - splitLength) {// Found splitChar at end of line returns.add(EMPTY_ELEMENT); } return returns.toArray(new String[returns.size()]); }
// With truncate=true, runs of delimiters collapse so only non-empty parts remain.
@Test
public void testSplitStringStringTrueTruncate() throws Exception {
    assertThat(JOrphanUtils.split("a;,b;,;,;,d;,e;,;,f", ";,", true),
            CoreMatchers.equalTo(new String[]{"a", "b", "d", "e", "f"}));
}
@VisibleForTesting void validateCaptcha(AuthLoginReqVO reqVO) { // 如果验证码关闭,则不进行校验 if (!captchaEnable) { return; } // 校验验证码 ValidationUtils.validate(validator, reqVO, AuthLoginReqVO.CodeEnableGroup.class); CaptchaVO captchaVO = new CaptchaVO(); captchaVO.setCaptchaVerification(reqVO.getCaptchaVerification()); ResponseModel response = captchaService.verification(captchaVO); // 验证不通过 if (!response.isSuccess()) { // 创建登录失败日志(验证码不正确) createLoginLog(null, reqVO.getUsername(), LoginLogTypeEnum.LOGIN_USERNAME, LoginResultEnum.CAPTCHA_CODE_ERROR); throw exception(AUTH_LOGIN_CAPTCHA_CODE_ERROR, response.getRepMsg()); } }
// With the captcha feature disabled, validation must be a no-op for any request.
@Test
public void testValidateCaptcha_successWithDisable() {
    // prepare parameters
    AuthLoginReqVO reqVO = randomPojo(AuthLoginReqVO.class);

    // mock: captcha feature disabled
    ReflectUtil.setFieldValue(authService, "captchaEnable", false);

    // invoke; no assertion needed (must simply not throw)
    authService.validateCaptcha(reqVO);
}
// Returns the child runners composing this suite.
@Override
protected List<ParentRunner<?>> getChildren() {
    return children;
}
// The description tree must be suite -> feature -> pickle with the expected display names.
@Test
void cucumber_returns_description_tree_with_features_and_pickles() throws InitializationError {
    Description description = new Cucumber(ValidEmpty.class).getDescription();
    assertThat(description.getDisplayName(), is("io.cucumber.junit.CucumberTest$ValidEmpty"));
    Description feature = description.getChildren().get(1);
    assertThat(feature.getDisplayName(), is("Feature A"));
    Description pickle = feature.getChildren().get(0);
    assertThat(pickle.getDisplayName(), is("A good start(Feature A)"));
}
// Reports whether the shared CPE ecosystem cache currently holds no entries.
// Synchronized to stay consistent with the other static accessors of the cache.
public static synchronized boolean isEmpty() {
    return CpeEcosystemCache.cache.isEmpty();
}
// isEmpty() must track the backing map: true for an empty map, false after an insert.
@Test
public void testIsEmpty() {
    Map<Pair<String, String>, String> map = new HashMap<>();
    CpeEcosystemCache.setCache(map);
    boolean expResult = true;
    boolean result = CpeEcosystemCache.isEmpty();
    assertEquals(expResult, result);

    map.put(new Pair<>("apache", "zookeeper"), "MULTPILE");
    expResult = false;
    result = CpeEcosystemCache.isEmpty();
    assertEquals(expResult, result);
}
// Returns the cached user-agent string.
@Override
public String get() {
    return ua;
}
// The provided user agent must start with the application identifier prefix.
@Test
public void testGet() {
    assertTrue(new PreferencesUseragentProvider().get().startsWith("Cyberduck/"));
}
public static long ipv4ToLong(String strIP) { final Matcher matcher = PatternPool.IPV4.matcher(strIP); if (matcher.matches()) { return matchAddress(matcher); } // Validator.validateIpv4(strIP, "Invalid IPv4 address!"); // final long[] ip = Convert.convert(long[].class, StrUtil.split(strIP, CharUtil.DOT)); // return (ip[0] << 24) + (ip[1] << 16) + (ip[2] << 8) + ip[3]; throw new IllegalArgumentException("Invalid IPv4 address!"); }
// The defaulting overload must return the fallback for invalid input and the
// numeric value (here the all-ones address) for valid input.
@Test
public void ipv4ToLongWithDefaultTest() {
    // invalid address string: expect the supplied default
    final String strIP = "不正确的 IP 地址";
    final long defaultValue = 0L;
    final long ipOfLong = Ipv4Util.ipv4ToLong(strIP, defaultValue);
    assertEquals(ipOfLong, defaultValue);

    final String strIP2 = "255.255.255.255";
    final long defaultValue2 = 0L;
    final long ipOfLong2 = Ipv4Util.ipv4ToLong(strIP2, defaultValue2);
    assertEquals(ipOfLong2, 4294967295L);
}
// Returns this plugin's registered name, as defined by PluginEnum.GLOBAL.
@Override
public String named() {
    return PluginEnum.GLOBAL.getName();
}
// named() must agree with the PluginEnum.GLOBAL name constant.
@Test
public void testNamed() {
    assertEquals(PluginEnum.GLOBAL.getName(), globalPlugin.named());
}