focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override
public ByteBuf writeByte(int value) {
    // Reserve space for a single byte, store it, then advance the writer index.
    ensureWritable0(1);
    final int index = writerIndex;
    _setByte(index, value);
    writerIndex = index + 1;
    return this;
}
@Test
public void testWriteByteAfterRelease() {
    // Writing into a released buffer must fail with IllegalReferenceCountException.
    final Executable write = new Executable() {
        @Override
        public void execute() {
            releasedBuffer().writeByte(1);
        }
    };
    assertThrows(IllegalReferenceCountException.class, write);
}
@Override
public CaseInsensitiveString getName() {
    // Fall back to the upstream pipeline's name when no explicit material name is configured.
    final CaseInsensitiveString configuredName = super.getName();
    if (configuredName == null) {
        return pipelineName;
    }
    return configuredName;
}
@Test
void shouldUsePipelineNameAsMaterialNameIfItIsNotSet() throws Exception {
    // A dependency material without an explicit name reports the upstream pipeline's name.
    final DependencyMaterial material =
            new DependencyMaterial(new CaseInsensitiveString("pipeline1"), new CaseInsensitiveString("stage1"));
    assertThat(material.getName()).isEqualTo(new CaseInsensitiveString("pipeline1"));
}
// Single-recipient send endpoint ("/v1/messages/{destination}").
// The sender is one of: the destination account itself (sync message), an
// identified authenticated sender, or unidentified (sealed-sender access key,
// group-send endorsement token, or no credential at all for stories). The
// derived sender/auth type drives the access checks and the metric tags below.
// NOTE(review): the @Parameter description on the "destination" path param
// ("If true, deliver the message only to recipients that are online…") looks
// copy-pasted from the "online" flag — confirm against upstream before relying
// on the generated OpenAPI docs. Left byte-identical here.
@Timed
@Path("/{destination}")
@PUT
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@ManagedAsync
@Operation( summary = "Send a message", description = """ Deliver a message to a single recipient. May be authenticated or unauthenticated; if unauthenticated, an unidentifed-access key or group-send endorsement token must be provided, unless the message is a story. """)
@ApiResponse(responseCode="200", description="Message was successfully sent", useReturnTypeSchema=true)
@ApiResponse( responseCode="401", description="The message is not a story and the authorization, unauthorized access key, or group send endorsement token is missing or incorrect")
@ApiResponse( responseCode="404", description="The message is not a story and some the recipient service ID does not correspond to a registered Signal user")
@ApiResponse( responseCode = "409", description = "Incorrect set of devices supplied for recipient", content = @Content(schema = @Schema(implementation = AccountMismatchedDevices[].class)))
@ApiResponse( responseCode = "410", description = "Mismatched registration ids supplied for some recipient devices", content = @Content(schema = @Schema(implementation = AccountStaleDevices[].class)))
public Response sendMessage(@ReadOnly @Auth Optional<AuthenticatedDevice> source,
    @Parameter(description="The recipient's unidentified access key") @HeaderParam(HeaderUtils.UNIDENTIFIED_ACCESS_KEY) Optional<Anonymous> accessKey,
    @Parameter(description="A group send endorsement token covering the recipient. 
Must not be combined with `Unidentified-Access-Key` or set on a story message.") @HeaderParam(HeaderUtils.GROUP_SEND_TOKEN) @Nullable GroupSendTokenHeader groupSendToken,
    @HeaderParam(HttpHeaders.USER_AGENT) String userAgent,
    @Parameter(description="If true, deliver the message only to recipients that are online when it is sent") @PathParam("destination") ServiceIdentifier destinationIdentifier,
    @Parameter(description="If true, the message is a story; access tokens are not checked and sending to nonexistent recipients is permitted") @QueryParam("story") boolean isStory,
    @Parameter(description="The encrypted message payloads for each recipient device") @NotNull @Valid IncomingMessageList messages,
    @Context ContainerRequestContext context) throws RateLimitExceededException {
  final Sample sample = Timer.start();
  try {
    // Fully-anonymous non-story sends are rejected: some credential is required.
    if (source.isEmpty() && accessKey.isEmpty() && groupSendToken == null && !isStory) {
      throw new WebApplicationException(Response.Status.UNAUTHORIZED);
    }
    // A group-send token is mutually exclusive with any other credential and with stories.
    if (groupSendToken != null) {
      if (!source.isEmpty() || !accessKey.isEmpty()) {
        throw new BadRequestException("Group send endorsement tokens should not be combined with other authentication");
      } else if (isStory) {
        throw new BadRequestException("Group send endorsement tokens should not be sent for story messages");
      }
    }
    // Classify the sender for metrics and the spam-token decision below.
    final String senderType;
    if (source.isPresent()) {
      if (source.get().getAccount().isIdentifiedBy(destinationIdentifier)) {
        senderType = SENDER_TYPE_SELF;
      } else {
        senderType = SENDER_TYPE_IDENTIFIED;
      }
    } else {
      senderType = SENDER_TYPE_UNIDENTIFIED;
    }
    boolean isSyncMessage = source.isPresent() && source.get().getAccount().isIdentifiedBy(destinationIdentifier);
    // Sync messages addressed to one's own PNI are forbidden.
    if (isSyncMessage && destinationIdentifier.identityType() == IdentityType.PNI) {
      throw new WebApplicationException(Status.FORBIDDEN);
    }
    Optional<Account> destination;
    if (!isSyncMessage) {
      destination = accountsManager.getByServiceIdentifier(destinationIdentifier);
    } else {
      destination = source.map(AuthenticatedDevice::getAccount);
    }
    // Spam check runs before any rate limiting; a verdict short-circuits the send.
    final Optional<Response> spamCheck = spamChecker.checkForSpam(
        context, source.map(AuthenticatedDevice::getAccount), destination);
    if (spamCheck.isPresent()) {
      return spamCheck.get();
    }
    // Only identified (non-self) senders get a report-spam token attached.
    final Optional<byte[]> spamReportToken = switch (senderType) {
      case SENDER_TYPE_IDENTIFIED -> reportSpamTokenProvider.makeReportSpamToken(context, source.get(), destination);
      default -> Optional.empty();
    };
    // Validate each payload and accumulate the total size for the inbound byte limit.
    int totalContentLength = 0;
    for (final IncomingMessage message : messages.messages()) {
      int contentLength = 0;
      if (StringUtils.isNotEmpty(message.content())) {
        contentLength += message.content().length();
      }
      validateContentLength(contentLength, false, userAgent);
      validateEnvelopeType(message.type(), userAgent);
      totalContentLength += contentLength;
    }
    try {
      rateLimiters.getInboundMessageBytes().validate(destinationIdentifier.uuid(), totalContentLength);
    } catch (final RateLimitExceededException e) {
      // The inbound byte limit is only enforced when enabled in dynamic configuration.
      if (dynamicConfigurationManager.getConfiguration().getInboundMessageByteLimitConfiguration().enforceInboundLimit()) {
        messageByteLimitEstimator.add(destinationIdentifier.uuid().toString());
        throw e;
      }
    }
    try {
      if (isStory) {
        // Stories will be checked by the client; we bypass access checks here for stories.
      } else if (groupSendToken != null) {
        checkGroupSendToken(List.of(destinationIdentifier.toLibsignal()), groupSendToken);
        if (destination.isEmpty()) {
          throw new NotFoundException();
        }
      } else {
        OptionalAccess.verify(source.map(AuthenticatedDevice::getAccount), accessKey, destination, destinationIdentifier);
      }
      boolean needsSync = !isSyncMessage && source.isPresent() && source.get().getAccount().getDevices().size() > 1;
      // We return 200 when stories are sent to a non-existent account. Since story sends bypass OptionalAccess.verify
      // we leak information about whether a destination UUID exists if we return any other code (e.g. 404) from
      // these requests.
      if (isStory && destination.isEmpty()) {
        return Response.ok(new SendMessageResponse(needsSync)).build();
      }
      // if destination is empty we would either throw an exception in OptionalAccess.verify when isStory is false
      // or else return a 200 response when isStory is true.
      assert destination.isPresent();
      if (source.isPresent() && !isSyncMessage) {
        checkMessageRateLimit(source.get(), destination.get(), userAgent);
      }
      if (isStory) {
        rateLimiters.getStoriesLimiter().validate(destination.get().getUuid());
      }
      // A sync message excludes the authenticated device from the completeness check.
      final Set<Byte> excludedDeviceIds;
      if (isSyncMessage) {
        excludedDeviceIds = Set.of(source.get().getAuthenticatedDevice().getId());
      } else {
        excludedDeviceIds = Collections.emptySet();
      }
      DestinationDeviceValidator.validateCompleteDeviceList(destination.get(), messages.messages().stream().map(IncomingMessage::destinationDeviceId).collect(Collectors.toSet()), excludedDeviceIds);
      DestinationDeviceValidator.validateRegistrationIds(destination.get(), messages.messages(), IncomingMessage::destinationDeviceId, IncomingMessage::destinationRegistrationId, destination.get().getPhoneNumberIdentifier().equals(destinationIdentifier.uuid()));
      final String authType;
      if (SENDER_TYPE_IDENTIFIED.equals(senderType)) {
        authType = AUTH_TYPE_IDENTIFIED;
      } else if (isStory) {
        authType = AUTH_TYPE_STORY;
      } else if (groupSendToken != null) {
        authType = AUTH_TYPE_GROUP_SEND_TOKEN;
      } else {
        authType = AUTH_TYPE_ACCESS_KEY;
      }
      final List<Tag> tags = List.of(UserAgentTagUtil.getPlatformTag(userAgent), Tag.of(ENDPOINT_TYPE_TAG_NAME, ENDPOINT_TYPE_SINGLE), Tag.of(EPHEMERAL_TAG_NAME, String.valueOf(messages.online())), Tag.of(SENDER_TYPE_TAG_NAME, senderType), Tag.of(AUTH_TYPE_TAG_NAME, authType), Tag.of(IDENTITY_TYPE_TAG_NAME, destinationIdentifier.identityType().name()));
      // Deliver one envelope per resolved destination device; unknown device IDs
      // were already rejected by validateCompleteDeviceList above.
      for (IncomingMessage incomingMessage : messages.messages()) {
        Optional<Device> destinationDevice = destination.get().getDevice(incomingMessage.destinationDeviceId());
        if (destinationDevice.isPresent()) {
          Metrics.counter(SENT_MESSAGE_COUNTER_NAME, tags).increment();
          sendIndividualMessage( source, destination.get(), destinationDevice.get(), destinationIdentifier, messages.timestamp(), messages.online(), isStory, messages.urgent(), incomingMessage, userAgent, spamReportToken);
        }
      }
      return Response.ok(new SendMessageResponse(needsSync)).build();
    } catch (MismatchedDevicesException e) {
      // 409: caller supplied an incorrect device set for the recipient.
      throw new WebApplicationException(Response.status(409)
          .type(MediaType.APPLICATION_JSON_TYPE)
          .entity(new MismatchedDevices(e.getMissingDevices(), e.getExtraDevices()))
          .build());
    } catch (StaleDevicesException e) {
      // 410: registration IDs no longer match for some devices.
      throw new WebApplicationException(Response.status(410)
          .type(MediaType.APPLICATION_JSON)
          .entity(new StaleDevices(e.getStaleDevices()))
          .build());
    }
  } finally {
    sample.stop(SEND_MESSAGE_LATENCY_TIMER);
  }
}
@Test
void testMultiDevice() throws Exception {
    // A message sent to a multi-device destination fans out to every device,
    // and every delivered envelope keeps its urgent flag.
    final IncomingMessageList messageList = SystemMapper.jsonMapper().readValue(
            jsonFixture("fixtures/current_message_multi_device.json"), IncomingMessageList.class);
    try (final Response response = resources.getJerseyTest()
            .target(String.format("/v1/messages/%s", MULTI_DEVICE_UUID))
            .request()
            .header("Authorization", AuthHelper.getAuthHeader(AuthHelper.VALID_UUID, AuthHelper.VALID_PASSWORD))
            .put(Entity.entity(messageList, MediaType.APPLICATION_JSON_TYPE))) {
        assertThat("Good Response Code", response.getStatus(), is(equalTo(200)));
        final ArgumentCaptor<Envelope> envelopeCaptor = ArgumentCaptor.forClass(Envelope.class);
        verify(messageSender, times(3))
                .sendMessage(any(Account.class), any(Device.class), envelopeCaptor.capture(), eq(false));
        envelopeCaptor.getAllValues().forEach(envelope -> assertTrue(envelope.getUrgent()));
    }
}
public static Instant parseTime(String time) { ZonedDateTime zdt = ZonedDateTime.parse( // dateString.replace("-", "/") + " " + timeString + " Z", time + " Z", DATE_FORMATTER ); return Instant.from(zdt); }
@Test
public void testParsingTime() {
    // Both the epoch and a second representative timestamp must round-trip through the parser.
    final Instant parsedEpoch = parseTime(EPOCH_AS_STRING);
    assertEquals(EPOCH, parsedEpoch);
    final Instant parsedExample = parseTime(SECOND_EXAMPLE_STRING);
    assertEquals(SECOND_EXAMPLE_INSTANT, parsedExample);
}
// Starts the ZooKeeper connection and, once it is ready, the stores.
// The CAS on _startupCallback rejects concurrent/duplicate start calls; the
// callback itself is completed later by the connection-event path (via
// tryStartStore), or failed immediately here if startup cannot even begin.
public void start(Callback<None> callback) {
    _managerStarted = true;
    if (!_startupCallback.compareAndSet(null, callback)) {
        throw new IllegalStateException("Already starting");
    }
    try {
        _zkConnection.start();
        //Trying to start store here. If the connection is not ready, will return immediately.
        //The connection event will trigger the actual store startup
        tryStartStore();
        LOG.info("Started ZooKeeper connection to {}", _zkConnectString);
    } catch (Exception e) {
        // Release the callback slot so a later start() may retry, then report the failure.
        _startupCallback.set(null);
        callback.onError(e);
    }
}
// Integration test: with dark warm-up enabled, the announcer publishes to the
// warmup cluster while warm-up runs; after a ZooKeeper session expiration the
// mark-up (including a fresh warm-up pass) must complete on the NEW session,
// picking up the weight changed while the old session was dying, and the
// warmup cluster must end up empty.
@Test (invocationCount = 1, timeOut = 15000)
public void testWarmupDuringSessionExpiration() throws Exception {
    ScheduledExecutorService warmupExecutorService = Executors.newSingleThreadScheduledExecutor();
    boolean isDarkWarmupEnabled = true;
    String warmupClusterName = "warmup" + _cluster;
    int warmupDuration = 5; //run warm up for 5 sec
    final double newWeight = 1.5d;
    ZooKeeperAnnouncer announcer = getZooKeeperWarmupAnnouncer(_cluster, _uri, WEIGHT, isDarkWarmupEnabled, warmupClusterName, warmupDuration, warmupExecutorService);
    ZKPersistentConnection zkPersistentConnection = getZkPersistentConnection();
    ZooKeeperConnectionManager manager = createManagerForWarmupTests(false, zkPersistentConnection, warmupDuration, announcer);
    ZooKeeperEphemeralStore<UriProperties> store = createAndStartUriStore();
    FutureCallback<None> managerStartCallback = new FutureCallback<>();
    manager.start(managerStartCallback);
    // Ensure warm-up has begin before expiring the session
    try {
        managerStartCallback.get(1, TimeUnit.SECONDS);
    } catch (TimeoutException e) {
        // We are expecting TimeoutException here because the warmup is set to run for 5 seconds,
        // but we are getting the result from the callback after 1 sec, so the warm up should not have completed
    }
    // While warming up, the announcement lives in the warmup cluster with the original weight.
    UriProperties properties = store.get(warmupClusterName);
    assertNotNull(properties);
    assertEquals(properties.getPartitionDataMap(URI.create(_uri)).get(DefaultPartitionAccessor.DEFAULT_PARTITION_ID).getWeight(), WEIGHT);
    assertEquals(properties.Uris().size(), 1);
    // the new WEIGHT will be picked up only if the connection is re-established
    announcer.setWeight(newWeight);
    // expiring the session
    long oldSessionId = zkPersistentConnection.getZooKeeper().getSessionId();
    ZKTestUtil.expireSession("localhost:" + PORT, zkPersistentConnection.getZooKeeper(), 10, TimeUnit.SECONDS);
    // making sure that a new session has been established.
    ZKTestUtil.waitForNewSessionEstablished(oldSessionId, zkPersistentConnection, 10, TimeUnit.SECONDS);
    // Validate the after new session creation, mark up has completed
    // Warm up will run again in this case as part of mark up for the new session
    AssertionMethods.assertWithTimeout((warmupDuration+1)*1000, () -> {
        UriProperties newProperties = store.get(_cluster);
        assertNotNull(newProperties);
        if (newProperties.getPartitionDataMap(URI.create(_uri)) == null) {
            Assert.fail("Supposed to have the uri present in ZK");
        }
        assertEquals(newProperties.getPartitionDataMap(URI.create(_uri)).get(DefaultPartitionAccessor.DEFAULT_PARTITION_ID).getWeight(), newWeight);
        assertEquals(newProperties.Uris().size(), 1);
        newProperties = store.get(warmupClusterName);
        assertNotNull(newProperties);
        assertEquals(newProperties.Uris().size(), 0);
    });
    shutdownManager(manager);
}
public boolean getUnwrapPrimitives() {
    // Wrapping is the default; only an explicit (case-insensitive) "unwrap" enables unwrapping.
    final String configured = properties.getOrDefault(UNWRAP_PRIMITIVES, WRAP);
    return UNWRAP.equalsIgnoreCase(configured);
}
@Test public void shouldGetExplicitUnwrapPrimitives() { // Given: final ProtobufProperties properties = new ProtobufProperties(ImmutableMap.of( ProtobufProperties.UNWRAP_PRIMITIVES, ProtobufProperties.UNWRAP )); // When/Then: assertThat(properties.getUnwrapPrimitives(), is(true)); }
@Override
public Object decorate(RequestedField field, Object value, SearchUser searchUser) {
    // Resolve every referenced entity ID to its title, then mirror the input's
    // shape: collection input -> list of titles, scalar input -> single title.
    final List<String> ids = parseIDs(value);
    final List<EntityIdentifier> identifiers = ids.stream()
            .map(id -> new EntityIdentifier(id, FIELD_ENTITY_MAPPER.get(field.name())))
            .collect(Collectors.toList());
    final EntitiesTitleResponse response =
            entityTitleService.getTitles(new EntityTitleRequest(identifiers), searchUser);
    return extractTitles(ids, response.entities()).stream()
            .collect(Collectors.collectingAndThen(Collectors.toList(),
                    titles -> value instanceof Collection<?> ? titles : unwrapIfSingleResult(titles)));
}
// When the title service resolves no titles (the entity is in the
// not-permitted set), the decorator must fall back to echoing the raw ID.
@Test
void testDecorateNotPermitted() {
    final EntitiesTitleResponse response = new EntitiesTitleResponse(Collections.emptySet(), Collections.singleton("123"));
    final FieldDecorator decorator = new TitleDecorator((request, permissions) -> response);
    Assertions.assertThat(decorator.decorate(RequestedField.parse("streams"), "123", TestSearchUser.builder().build()))
            .isEqualTo("123");
}
public void startProcessingOn(BackgroundJobServer backgroundJobServer) {
    // A job already in PROCESSING may not be claimed by a second server.
    final boolean alreadyProcessing = getState() == StateName.PROCESSING;
    if (alreadyProcessing) {
        throw new ConcurrentJobModificationException(this);
    }
    addJobState(new ProcessingState(backgroundJobServer));
}
// Claiming a job for processing twice must fail: the second claim signals a
// concurrent modification by another background job server.
@Test
void jobCannotGoToProcessingTwice() {
    Job job = anEnqueuedJob().build();
    job.startProcessingOn(backgroundJobServer);
    assertThatThrownBy(() -> job.startProcessingOn(backgroundJobServer)).isInstanceOf(ConcurrentJobModificationException.class);
}
// In the Finished state a global failure is intentionally ignored: the job is
// already done. The trailing 'cause' is rendered as the exception by SLF4J
// (last Throwable argument); the failureLabels future is deliberately untouched.
@Override
public void handleGlobalFailure(
        Throwable cause, CompletableFuture<Map<String, String>> failureLabels) {
    logger.debug(
            "Ignore global failure because we already finished the job {}.",
            archivedExecutionGraph.getJobID(),
            cause);
}
// A global failure reported after the job finished must not change the
// archived execution graph's job status.
@Test
void testGlobalFailureIgnored() {
    MockFinishedContext ctx = new MockFinishedContext();
    createFinishedState(ctx)
            .handleGlobalFailure(
                    new RuntimeException(), FailureEnricherUtils.EMPTY_FAILURE_LABELS);
    assertThat(ctx.getArchivedExecutionGraph().getState()).isEqualTo(testJobStatus);
}
// Converts a plugin's JSON validation response into a ValidationResult.
// Expected shape: {"errors": {"<key>": "<message>", ...}}. A null/empty body
// yields an empty (successful) result. Keys whose message is not a non-null
// String are collected and thrown as one RuntimeException — which the outer
// catch then re-wraps, so callers always see the
// "Error occurred while converting..." message (the companion test pins this).
@Override
public ValidationResult toValidationResult(String responseBody) {
    ValidationResult validationResult = new ValidationResult();
    ArrayList<String> exceptions = new ArrayList<>();
    try {
        Map result = (Map) GSON.fromJson(responseBody, Object.class);
        // Gson parses null/"" to null: treat as an empty, successful result.
        if (result == null) return validationResult;
        final Map<String, Object> errors = (Map<String, Object>) result.get("errors");
        if (errors != null) {
            for (Map.Entry<String, Object> entry : errors.entrySet()) {
                if (!(entry.getValue() instanceof String)) {
                    exceptions.add(String.format("Key: '%s' - The Json for Validation Request must contain a not-null error message of type String", entry.getKey()));
                } else {
                    validationResult.addError(new ValidationError(entry.getKey(), entry.getValue().toString()));
                }
            }
        }
        if (!exceptions.isEmpty()) {
            throw new RuntimeException(StringUtils.join(exceptions, ", "));
        }
        return validationResult;
    } catch (Exception e) {
        LOGGER.error("Error occurred while converting the Json to Validation Result. Error: {}. The Json received was '{}'.", e.getMessage(), responseBody);
        throw new RuntimeException(String.format("Error occurred while converting the Json to Validation Result. Error: %s.", e.getMessage()));
    }
}
// Empty/absent error maps succeed; any non-String or null error message must
// surface as a wrapped RuntimeException listing every offending key in order.
@Test
public void shouldThrowExceptionForWrongJsonWhileConvertingJsonResponseToValidation() {
    assertTrue(new JsonBasedTaskExtensionHandler_V1().toValidationResult("{\"errors\":{}}").isSuccessful());
    assertTrue(new JsonBasedTaskExtensionHandler_V1().toValidationResult("{}").isSuccessful());
    assertTrue(new JsonBasedTaskExtensionHandler_V1().toValidationResult("").isSuccessful());
    assertTrue(new JsonBasedTaskExtensionHandler_V1().toValidationResult(null).isSuccessful());

    // One bad key (non-String value).
    String jsonResponse2 = "{\"errors\":{\"key1\":\"err1\",\"key2\":true}}";
    try {
        new JsonBasedTaskExtensionHandler_V1().toValidationResult(jsonResponse2);
        fail("should throw exception");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Error occurred while converting the Json to Validation Result. Error: Key: 'key2' - The Json for Validation Request must contain a not-null error message of type String."));
    }

    // One bad key (null value).
    String jsonResponse3 = "{\"errors\":{\"key1\":null}}";
    try {
        new JsonBasedTaskExtensionHandler_V1().toValidationResult(jsonResponse3);
        fail("should throw exception");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Error occurred while converting the Json to Validation Result. Error: Key: 'key1' - The Json for Validation Request must contain a not-null error message of type String."));
    }

    // Multiple bad keys are joined with ", " in encounter order.
    String jsonResponse4 = "{\"errors\":{\"key1\":true,\"key2\":\"err2\",\"key3\":null}}";
    try {
        new JsonBasedTaskExtensionHandler_V1().toValidationResult(jsonResponse4);
        fail("should throw exception");
    } catch (Exception e) {
        assertThat(e.getMessage(), is("Error occurred while converting the Json to Validation Result. Error: Key: 'key1' - The Json for Validation Request must contain a not-null error message of type String, Key: 'key3' - The Json for Validation Request must contain a not-null error message of type String."));
    }
}
// Builds a directory listing from raw FTP reply lines.
// In lenient mode, leading entries are filtered to work around servers whose
// STAT reply lists the queried directory itself (issues #2410 / #2434). At
// least one entry must parse successfully or FTPInvalidListException is thrown.
@Override
public AttributedList<Path> read(final Path directory, final List<String> replies) throws FTPInvalidListException {
    final AttributedList<Path> children = new AttributedList<Path>();
    // At least one entry successfully parsed
    boolean success = false;
    // Call hook for those implementors which need to perform some action upon the list after it has been created
    // from the server stream, but before any clients see the list
    parser.preParse(replies);
    for(String line : replies) {
        final FTPFile f = parser.parseFTPEntry(line);
        if(null == f) {
            continue;
        }
        final String name = f.getName();
        if(!success) {
            if(lenient) {
                // Workaround for #2410. STAT only returns ls of directory itself
                // Workaround for #2434. STAT of symbolic link directory only lists the directory itself.
                if(directory.getName().equals(name)) {
                    log.warn(String.format("Skip %s matching parent directory name", f.getName()));
                    continue;
                }
                if(name.contains(String.valueOf(Path.DELIMITER))) {
                    if(!name.startsWith(directory.getAbsolute() + Path.DELIMITER)) {
                        // Workaround for #2434.
                        log.warn(String.format("Skip %s with delimiter in name", name));
                        continue;
                    }
                }
            }
        }
        success = true;
        // Skip self/parent pseudo-entries.
        if(name.equals(".") || name.equals("..")) {
            if(log.isDebugEnabled()) {
                log.debug(String.format("Skip %s", f.getName()));
            }
            continue;
        }
        final Path parsed = new Path(directory, PathNormalizer.name(name), f.getType() == FTPFile.DIRECTORY_TYPE ?
                EnumSet.of(Path.Type.directory) : EnumSet.of(Path.Type.file));
        switch(f.getType()) {
            case FTPFile.SYMBOLIC_LINK_TYPE:
                parsed.setType(EnumSet.of(Path.Type.file, Path.Type.symboliclink));
                // Symbolic link target may be an absolute or relative path
                final String target = f.getLink();
                if(StringUtils.isBlank(target)) {
                    // No target reported: demote the entry to a plain file.
                    log.warn(String.format("Missing symbolic link target for %s", parsed));
                    final EnumSet<Path.Type> type = parsed.getType();
                    type.remove(Path.Type.symboliclink);
                }
                else if(StringUtils.startsWith(target, String.valueOf(Path.DELIMITER))) {
                    parsed.setSymlinkTarget(new Path(PathNormalizer.normalize(target), EnumSet.of(Path.Type.file)));
                }
                else if(StringUtils.equals("..", target)) {
                    parsed.setSymlinkTarget(directory);
                }
                else if(StringUtils.equals(".", target)) {
                    parsed.setSymlinkTarget(parsed);
                }
                else {
                    parsed.setSymlinkTarget(new Path(directory, target, EnumSet.of(Path.Type.file)));
                }
                break;
        }
        if(parsed.isFile()) {
            parsed.attributes().setSize(f.getSize());
        }
        parsed.attributes().setOwner(f.getUser());
        parsed.attributes().setGroup(f.getGroup());
        // Translate the user/group/world permission bits into Permission.Action sets.
        Permission.Action u = Permission.Action.none;
        if(f.hasPermission(FTPFile.USER_ACCESS, FTPFile.READ_PERMISSION)) {
            u = u.or(Permission.Action.read);
        }
        if(f.hasPermission(FTPFile.USER_ACCESS, FTPFile.WRITE_PERMISSION)) {
            u = u.or(Permission.Action.write);
        }
        if(f.hasPermission(FTPFile.USER_ACCESS, FTPFile.EXECUTE_PERMISSION)) {
            u = u.or(Permission.Action.execute);
        }
        Permission.Action g = Permission.Action.none;
        if(f.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.READ_PERMISSION)) {
            g = g.or(Permission.Action.read);
        }
        if(f.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.WRITE_PERMISSION)) {
            g = g.or(Permission.Action.write);
        }
        if(f.hasPermission(FTPFile.GROUP_ACCESS, FTPFile.EXECUTE_PERMISSION)) {
            g = g.or(Permission.Action.execute);
        }
        Permission.Action o = Permission.Action.none;
        if(f.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.READ_PERMISSION)) {
            o = o.or(Permission.Action.read);
        }
        if(f.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.WRITE_PERMISSION)) {
            o = o.or(Permission.Action.write);
        }
        if(f.hasPermission(FTPFile.WORLD_ACCESS, FTPFile.EXECUTE_PERMISSION)) {
            o = o.or(Permission.Action.execute);
        }
        final Permission permission = new Permission(u, g, o);
        if(f instanceof FTPExtendedFile) {
            permission.setSetuid(((FTPExtendedFile) f).isSetuid());
            permission.setSetgid(((FTPExtendedFile) f).isSetgid());
            permission.setSticky(((FTPExtendedFile) f).isSticky());
        }
        if(!Permission.EMPTY.equals(permission)) {
            parsed.attributes().setPermission(permission);
        }
        final Calendar timestamp = f.getTimestamp();
        if(timestamp != null) {
            parsed.attributes().setModificationDate(timestamp.getTimeInMillis());
        }
        children.add(parsed);
    }
    if(!success) {
        // Nothing parsed at all — report an invalid listing (carrying what we collected).
        throw new FTPInvalidListException(children);
    }
    return children;
}
@Test public void testDirectoryWithinSameName() throws Exception { // #8577 final Path directory = new Path("/aaa_bbb/untitled folder", EnumSet.of(Path.Type.directory)); final String[] lines = new String[]{ "drwx------ 0 null null 0 Feb 4 21:40 untitled folder", }; final AttributedList<Path> list = new FTPListResponseReader(new FTPParserSelector().getParser("UNIX")) .read(directory, Arrays.asList(lines)); assertEquals(1, list.size()); assertEquals("/aaa_bbb/untitled folder/untitled folder", list.get(0).getAbsolute()); }
/**
 * Builds the temporary path under which a file is staged while being
 * asynchronously persisted to the UFS.
 *
 * <p>Shape: {@code <tempDir>/<fileName>.alluxio.<millis>.<uuid>.tmp} — the
 * timestamp plus random UUID make concurrent persists of the same file unique.
 *
 * @param ufsConfiguration configuration supplying the persistence temp directory
 * @param path the source path; only its file name component is used
 * @return the temporary persistence path
 */
public static String getPersistentTmpPath(AlluxioConfiguration ufsConfiguration, String path) {
    final String fileName = FilenameUtils.getName(path);
    final String tempDir = ufsConfiguration
        .getString(PropertyKey.UNDERFS_PERSISTENCE_ASYNC_TEMP_DIR);
    return tempDir + AlluxioURI.SEPARATOR + fileName + ".alluxio."
        + System.currentTimeMillis() + "." + UUID.randomUUID() + ".tmp";
}
@Test public void getPersistentTmpPath() { // Get temporary path Pattern pattern = Pattern.compile( "\\.alluxio_ufs_persistence\\/test\\.parquet\\.alluxio\\.\\d+\\.\\S+\\.tmp"); AlluxioConfiguration alluxioConfiguration = UnderFileSystemConfiguration.emptyConfig(); String tempPersistencePath = PathUtils.getPersistentTmpPath(alluxioConfiguration, "s3://test/test.parquet"); assertEquals(pattern.matcher(tempPersistencePath).matches(), true); pattern = Pattern.compile( "\\.alluxio_ufs_persistence\\/test\\.parquet\\.alluxio\\.\\d+\\.\\S+\\.tmp"); tempPersistencePath = PathUtils .getPersistentTmpPath(alluxioConfiguration, "hdfs://localhost:9010/test/test.parquet"); assertEquals(pattern.matcher(tempPersistencePath).matches(), true); // Get temporary path with root path pattern = Pattern.compile( "\\.alluxio_ufs_persistence\\/test\\.parquet\\.alluxio\\.\\d+\\.\\S+\\.tmp"); tempPersistencePath = PathUtils.getPersistentTmpPath(alluxioConfiguration, "s3://test.parquet"); assertEquals(pattern.matcher(tempPersistencePath).matches(), true); pattern = Pattern.compile( "\\.alluxio_ufs_persistence\\/test\\.parquet\\.alluxio\\.\\d+\\.\\S+\\.tmp"); tempPersistencePath = PathUtils .getPersistentTmpPath(alluxioConfiguration, "hdfs://localhost:9010/test.parquet"); assertEquals(pattern.matcher(tempPersistencePath).matches(), true); }
// Headers-only overload: delegates to writeHeaders0 with no priority
// information (hasPriority=false, streamDependency=0, weight=0, exclusive=false).
@Override
public ChannelFuture writeHeaders(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int padding,
        boolean endStream, ChannelPromise promise) {
    return writeHeaders0(ctx, streamId, headers, false, 0, (short) 0, false, padding, endStream, promise);
}
// A second HEADERS frame (trailers) that does not end the stream must fail
// fast: the returned future is completed unsuccessfully and only the first
// writeHeaders call reaches the underlying frame writer.
@Test
public void trailersDoNotEndStreamThrows() {
    writeAllFlowControlledFrames();
    final int streamId = 6;
    ChannelPromise promise = newPromise();
    encoder.writeHeaders(ctx, streamId, EmptyHttp2Headers.INSTANCE, 0, false, promise);

    ChannelPromise promise2 = newPromise();
    ChannelFuture future = encoder.writeHeaders(ctx, streamId, EmptyHttp2Headers.INSTANCE, 0, false, promise2);
    assertTrue(future.isDone());
    assertFalse(future.isSuccess());

    verify(writer, times(1)).writeHeaders(eq(ctx), eq(streamId), eq(EmptyHttp2Headers.INSTANCE),
            eq(0), eq(false), eq(promise));
}
// Validates that a recipient mail address is present; null/empty addresses are
// rejected with MAIL_SEND_MAIL_NOT_EXISTS. Returns the address unchanged so
// callers can chain the validated value.
@VisibleForTesting
String validateMail(String mail) {
    if (StrUtil.isEmpty(mail)) {
        throw exception(MAIL_SEND_MAIL_NOT_EXISTS);
    }
    return mail;
}
@Test
public void testValidateMail_notExists() {
    // Prepare parameters: none needed.
    // Mock methods: none needed.
    // Invoke and assert the expected service exception for a missing address.
    assertServiceException(() -> mailSendService.validateMail(null), MAIL_SEND_MAIL_NOT_EXISTS);
}
public static void main(String[] args) throws Exception {
    // In-memory catalogue served by the resource under demonstration.
    var catalogue = Map.of(
        1, new Video(1, "Avatar", 178, "epic science fiction film", "James Cameron", "English"),
        2, new Video(2, "Godzilla Resurgence", 120, "Action & drama movie|", "Hideaki Anno", "Japanese"),
        3, new Video(3, "Interstellar", 169, "Adventure & Sci-Fi", "Christopher Nolan", "English"));
    var resource = new VideoResource(new FieldJsonMapper(), catalogue);

    // Full response: every field of the requested video.
    LOGGER.info("Retrieving full response from server:-");
    LOGGER.info("Get all video information:");
    var fullDetails = resource.getDetails(1);
    LOGGER.info(fullDetails);
    LOGGER.info("----------------------------------------------------------");

    // Partial responses: only the fields the client asked for.
    LOGGER.info("Retrieving partial response from server:-");
    LOGGER.info("Get video @id, @title, @director:");
    var idTitleDirector = resource.getDetails(3, "id", "title", "director");
    LOGGER.info(idTitleDirector);
    LOGGER.info("Get video @id, @length:");
    var idLength = resource.getDetails(3, "id", "length");
    LOGGER.info(idLength);
}
// Smoke test: the demo application must run end-to-end without throwing.
@Test
void shouldExecuteApplicationWithoutException() {
    Assertions.assertDoesNotThrow(() -> App.main(new String[]{}));
}
// Releases all native RocksDB resources owned by this container: first the
// tracked handles (quietly), then the shared cache/write-buffer resources if
// any, and finally any relocated DB log files.
@Override
public void close() throws Exception {
    handlesToClose.forEach(IOUtils::closeQuietly);
    handlesToClose.clear();

    if (sharedResources != null) {
        sharedResources.close();
    }
    cleanRelocatedDbLogs();
}
// Closing the container must dispose the shared cache and write-buffer-manager
// native handles (isOwningHandle() flips to false once disposed).
@Test
public void testSharedResourcesAfterClose() throws Exception {
    OpaqueMemoryResource<RocksDBSharedResources> sharedResources = getSharedResources();
    RocksDBResourceContainer container =
            new RocksDBResourceContainer(PredefinedOptions.DEFAULT, null, sharedResources);
    container.close();
    RocksDBSharedResources rocksDBSharedResources = sharedResources.getResourceHandle();
    assertThat(rocksDBSharedResources.getCache().isOwningHandle(), is(false));
    assertThat(rocksDBSharedResources.getWriteBufferManager().isOwningHandle(), is(false));
}
// Parses every object contained in this PDF object stream, keyed by object key.
// The stream-index check is only applied when several entries share one object
// number (see inline comments); the underlying source is always closed, even
// on failure, and the document reference is dropped.
public Map<COSObjectKey, COSBase> parseAllObjects() throws IOException {
    Map<COSObjectKey, COSBase> allObjects = new HashMap<>();
    try {
        Map<Integer, Long> objectNumbers = privateReadObjectOffsets();
        // count the number of object numbers eliminating double entries
        long numberOfObjNumbers = objectNumbers.values().stream().distinct().count();
        // the usage of the index should be restricted to cases where more than one
        // object use the same object number.
        // there are malformed pdfs in the wild which would lead to false results if
        // pdfbox always relies on the index if available. In most cases the object number
        // is sufficient to choose the correct object
        boolean indexNeeded = objectNumbers.size() > numberOfObjNumbers;
        long currentPosition = source.getPosition();
        // Position the source at the start of the first object if not there yet.
        if (firstObject > 0 && currentPosition < firstObject) {
            source.skip(firstObject - (int) currentPosition);
        }
        int index = 0;
        for (Entry<Integer, Long> entry : objectNumbers.entrySet()) {
            COSObjectKey objectKey = getObjectKey(entry.getValue(), 0);
            // skip object if the index doesn't match
            if (indexNeeded && objectKey.getStreamIndex() > -1 && objectKey.getStreamIndex() != index) {
                index++;
                continue;
            }
            int finalPosition = firstObject + entry.getKey();
            currentPosition = source.getPosition();
            if (finalPosition > 0 && currentPosition < finalPosition) {
                // jump to the offset of the object to be parsed
                source.skip(finalPosition - (int) currentPosition);
            }
            COSBase streamObject = parseDirObject();
            if (streamObject != null) {
                streamObject.setDirect(false);
            }
            allObjects.put(objectKey, streamObject);
            index++;
        }
    } finally {
        source.close();
        // Drop the document reference; this parser is single-use.
        document = null;
    }
    return allObjects;
}
// An object stream declaring N=2 with First=8 ("6 0 4 5" = objNum/offset pairs,
// then the objects "true false") must yield both parsed boolean objects.
@Test
void testParseAllObjects() throws IOException {
    COSStream stream = new COSStream();
    stream.setItem(COSName.N, COSInteger.TWO);
    stream.setItem(COSName.FIRST, COSInteger.get(8));
    OutputStream outputStream = stream.createOutputStream();
    outputStream.write("6 0 4 5 true false".getBytes());
    outputStream.close();
    PDFObjectStreamParser objectStreamParser = new PDFObjectStreamParser(stream, null);
    Map<COSObjectKey, COSBase> objectNumbers = objectStreamParser.parseAllObjects();
    assertEquals(2, objectNumbers.size());
    assertEquals(COSBoolean.TRUE, objectNumbers.get(new COSObjectKey(6, 0)));
    assertEquals(COSBoolean.FALSE, objectNumbers.get(new COSObjectKey(4, 0)));
}
// Factory for a definition backed purely by remotely-supplied plugin info; all
// local/optional fields are left absent. Rejects null input eagerly.
public static PluginDefinition forRemotePlugin(PluginInfo remotePluginInfo) {
    return new AutoValue_PluginDefinition(
        checkNotNull(remotePluginInfo), Optional.empty(), Optional.empty(), false, Optional.empty());
}
// Null plugin info must be rejected eagerly with a NullPointerException.
@Test
public void forRemotePlugin_whenPassedNull_throwsException() {
    assertThrows(NullPointerException.class, () -> PluginDefinition.forRemotePlugin(null));
}
// Deactivates (and, when requested, anonymizes) a user. Requires a logged-in
// system administrator and forbids deactivating one's own account.
@Override
public void deactivate(String id, Boolean anonymize) {
    userSession.checkLoggedIn().checkIsSystemAdministrator();
    checkRequest(!id.equals(userSession.getUuid()), "Self-deactivation is not possible");
    userService.deactivate(id, anonymize);
}
// DELETE with anonymize=true must return 204 and forward the anonymization
// flag to the user service.
@Test
public void deactivate_whenAnonymizeTrue_shouldDeactivateUserWithAnonymization() throws Exception {
    userSession.logIn().setSystemAdministrator();
    mockMvc.perform(delete(USER_ENDPOINT + "/userToDelete").param("anonymize", "true"))
            .andExpect(status().isNoContent());
    verify(userService).deactivate("userToDelete", true);
}
// NOTE(review): the condition reads inverted at first glance — names that ARE
// "excluded" are the only ones resolved via the parent loader, while all other
// lookups return null. The companion test explicitly characterises this as the
// existing behaviour, so it is preserved here; confirm intent before changing.
@Override
public URL getResource(String name) {
    if (isExcluded(name)) {
        return super.getResource(name);
    }
    return null;
}
@Test
public void canExcludeLoadingResourcesFromJar() {
    // Not sure of the intention of this; this characterises existing behaviour
    // An excluded name is resolved through the parent loader (found)...
    assertThat(new NestedJarClassLoader(testJar, "helloworld").getResource("helloworld.jar"))
        .isNotNull();
    // ...while a non-excluded name is hidden entirely.
    assertThat(new NestedJarClassLoader(testJar).getResource("helloworld.jar"))
        .isNull();
}
/**
 * Two OpenOptions are equal when offset, length, recover-failed-open and
 * position-short flags all match. A null or foreign-typed argument is unequal.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (!(o instanceof OpenOptions)) {
        return false;
    }
    OpenOptions other = (OpenOptions) o;
    return Objects.equal(mOffset, other.mOffset)
        && Objects.equal(mLength, other.mLength)
        && Objects.equal(mRecoverFailedOpen, other.mRecoverFailedOpen)
        && Objects.equal(mPositionShort, other.mPositionShort);
}
// Exercises the full equals/hashCode contract of OpenOptions via the shared
// reflection-based test helper.
@Test
public void equalsTest() throws Exception {
    CommonUtils.testEquals(OpenOptions.class);
}
/**
 * Loads the migration config file and delegates to the main command logic.
 *
 * @return 0 on success, 1 when the config file is missing or cannot be parsed
 */
@Override
protected int command() {
    if (!validateConfigFilePresent()) {
        return 1;
    }
    final MigrationConfig config;
    try {
        config = MigrationConfig.load(getConfigFile());
    } catch (KsqlException | MigrationException e) {
        // Config problems are user errors: log and return a failure code rather
        // than propagating the exception.
        LOGGER.error(e.getMessage());
        return 1;
    }
    return command(config, MigrationsUtil::getKsqlClient);
}
// An unparseable server version must not block initialization: the command still
// succeeds and issues both metadata statements.
@Test
public void shouldInitializeEvenIfCantParseServerVersion() {
    // Given:
    when(serverInfo.getServerVersion()).thenReturn("not_a_valid_version");

    // When:
    final int status = command.command(config, cfg -> client);

    // Then:
    assertThat(status, is(0));
    verify(client).executeStatement(EXPECTED_CS_STATEMENT);
    verify(client).executeStatement(EXPECTED_CTAS_STATEMENT);
}
/**
 * Loads flat dotted properties (e.g. "k8s.key1") into this nested map structure.
 * <p>
 * Intermediate path segments become nested maps. When a scalar value and a sub-key
 * share a prefix (e.g. both "k8s" and "k8s.key1" are set), the scalar is preserved
 * under the reserved "_" key of the nested map.
 * <p>
 * Fix: replaced raw {@code HashMap}/{@code Entry}/{@code Map} usages with proper
 * generics, eliminating unchecked warnings; behavior is unchanged.
 *
 * @param properties flat properties whose keys use "." as a path separator
 */
@SuppressWarnings("unchecked")
public void loadProperties(Properties properties) {
    for (Entry<Object, Object> entry : properties.entrySet()) {
        String key = (String) entry.getKey();
        Object value = entry.getValue();
        String[] keySplit = key.split("[.]");
        Map<String, Object> target = this;
        // Walk (creating as needed) the nested maps for every segment but the last.
        for (int i = 0; i < keySplit.length - 1; i++) {
            if (!target.containsKey(keySplit[i])) {
                Map<String, Object> child = new HashMap<>();
                target.put(keySplit[i], child);
                target = child;
            } else {
                Object existing = target.get(keySplit[i]);
                if (!(existing instanceof Map)) {
                    // A scalar already occupies this segment; keep it under "_"
                    // and replace it with a nested map.
                    Map<String, Object> replacement = new HashMap<>();
                    replacement.put("_", existing);
                    target.put(keySplit[i], replacement);
                }
                target = (Map<String, Object>) target.get(keySplit[i]);
            }
        }
        String leaf = keySplit[keySplit.length - 1];
        Object current = target.get(leaf);
        if (current instanceof Map) {
            // A nested map already exists for this key; store the scalar under "_".
            ((Map<String, Object>) current).put("_", value);
        } else {
            target.put(leaf, value);
        }
    }
}
@Test void testLoadPropertyOverrideString() { // given K8sSpecTemplate template = new K8sSpecTemplate(); Properties p = new Properties(); p.put("k8s", "v1"); p.put("k8s.key1", "v2"); // when template.loadProperties(p); // then assertEquals("v1", ((Map) template.get("k8s")).get("_")); assertEquals("v2", ((Map) template.get("k8s")).get("key1")); }
/**
 * Handles one step of the server side of the SCRAM exchange.
 * <p>
 * In state RECEIVE_CLIENT_FIRST_MESSAGE the client-first message is parsed, the
 * credential (password-based or delegation-token-based) is looked up through the
 * callback handler, and the server-first message is returned. In state
 * RECEIVE_CLIENT_FINAL_MESSAGE the client proof is verified and the server-final
 * message is returned. Any authentication failure clears credentials and moves the
 * server to FAILED.
 *
 * @param response raw SCRAM message bytes from the client
 * @return the next server message of the exchange
 * @throws SaslException on credential/protocol failures
 * @throws SaslAuthenticationException on authorization-id mismatch
 */
@Override
public byte[] evaluateResponse(byte[] response) throws SaslException, SaslAuthenticationException {
    try {
        switch (state) {
            case RECEIVE_CLIENT_FIRST_MESSAGE:
                this.clientFirstMessage = new ClientFirstMessage(response);
                this.scramExtensions = clientFirstMessage.extensions();
                // Unknown extensions are tolerated, not rejected.
                if (!SUPPORTED_EXTENSIONS.containsAll(scramExtensions.map().keySet())) {
                    log.debug("Unsupported extensions will be ignored, supported {}, provided {}",
                            SUPPORTED_EXTENSIONS, scramExtensions.map().keySet());
                }
                String serverNonce = formatter.secureRandomString();
                try {
                    String saslName = clientFirstMessage.saslName();
                    String username = ScramFormatter.username(saslName);
                    NameCallback nameCallback = new NameCallback("username", username);
                    ScramCredentialCallback credentialCallback;
                    if (scramExtensions.tokenAuthenticated()) {
                        // Delegation-token path: the "username" is a token id whose
                        // owner becomes the authorization id.
                        DelegationTokenCredentialCallback tokenCallback = new DelegationTokenCredentialCallback();
                        credentialCallback = tokenCallback;
                        callbackHandler.handle(new Callback[]{nameCallback, tokenCallback});
                        if (tokenCallback.tokenOwner() == null)
                            throw new SaslException("Token Authentication failed: Invalid tokenId : " + username);
                        this.authorizationId = tokenCallback.tokenOwner();
                        this.tokenExpiryTimestamp = tokenCallback.tokenExpiryTimestamp();
                    } else {
                        // Regular credential path: the username itself is authorized.
                        credentialCallback = new ScramCredentialCallback();
                        callbackHandler.handle(new Callback[]{nameCallback, credentialCallback});
                        this.authorizationId = username;
                        this.tokenExpiryTimestamp = null;
                    }
                    this.scramCredential = credentialCallback.scramCredential();
                    if (scramCredential == null)
                        throw new SaslException("Authentication failed: Invalid user credentials");
                    // An explicit authorization id, if present, must equal the username.
                    String authorizationIdFromClient = clientFirstMessage.authorizationId();
                    if (!authorizationIdFromClient.isEmpty() && !authorizationIdFromClient.equals(username))
                        throw new SaslAuthenticationException("Authentication failed: Client requested an authorization id that is different from username");
                    if (scramCredential.iterations() < mechanism.minIterations())
                        throw new SaslException("Iterations " + scramCredential.iterations() + " is less than the minimum " + mechanism.minIterations() + " for " + mechanism);
                    this.serverFirstMessage = new ServerFirstMessage(clientFirstMessage.nonce(), serverNonce, scramCredential.salt(), scramCredential.iterations());
                    setState(State.RECEIVE_CLIENT_FINAL_MESSAGE);
                    return serverFirstMessage.toBytes();
                } catch (SaslException | AuthenticationException e) {
                    throw e;
                } catch (Throwable e) {
                    // Callback handlers may throw arbitrary errors; wrap them.
                    throw new SaslException("Authentication failed: Credentials could not be obtained", e);
                }
            case RECEIVE_CLIENT_FINAL_MESSAGE:
                try {
                    ClientFinalMessage clientFinalMessage = new ClientFinalMessage(response);
                    verifyClientProof(clientFinalMessage);
                    byte[] serverKey = scramCredential.serverKey();
                    byte[] serverSignature = formatter.serverSignature(serverKey, clientFirstMessage, serverFirstMessage, clientFinalMessage);
                    ServerFinalMessage serverFinalMessage = new ServerFinalMessage(null, serverSignature);
                    // Credentials are no longer needed once the exchange completes.
                    clearCredentials();
                    setState(State.COMPLETE);
                    return serverFinalMessage.toBytes();
                } catch (InvalidKeyException e) {
                    throw new SaslException("Authentication failed: Invalid client final message", e);
                }
            default:
                throw new IllegalSaslStateException("Unexpected challenge in Sasl server state " + state);
        }
    } catch (SaslException | AuthenticationException e) {
        // On any auth failure, wipe credentials and mark the exchange failed.
        clearCredentials();
        setState(State.FAILED);
        throw e;
    }
}
// A client-first message without an authorization id must be accepted and produce
// a non-empty server-first challenge.
@Test
public void noAuthorizationIdSpecified() throws Exception {
    byte[] challenge = saslServer.evaluateResponse(clientFirstMessage(USER_A, null));
    assertTrue(challenge.length > 0, "Next challenge is empty");
}
/**
 * Decides whether the given HTTP status code is acceptable for a client response.
 * <p>
 * If an explicit set of valid statuses is configured and the code resolves to a
 * known status, membership in that set decides. Otherwise a resolvable status must
 * not be an error, and an unresolvable numeric code must be below 400.
 */
public boolean optionallyValidateClientResponseStatusCode(int statusCode) throws Exception {
    HttpStatus httpStatus = HttpStatus.resolve(statusCode);
    if (this.statusCodesValid.isPresent() && httpStatus != null) {
        return this.statusCodesValid.get().contains(httpStatus);
    }
    if (httpStatus != null) {
        return !httpStatus.isError();
    }
    return statusCode < 400;
}
// A representative sample of 4xx/5xx codes must all be rejected.
@Test
public void testErrorStatus() throws Exception {
    int[] errorCodes = {400, 404, 408, 500, 505, 502};
    for (int errorCode : errorCodes) {
        assertFalse(http2ServiceRequest.optionallyValidateClientResponseStatusCode(errorCode));
    }
}
/**
 * Removes a linked device from the account.
 * <p>
 * A non-primary device may remove only itself; the primary device may remove any
 * other device. Removing the primary device is always forbidden.
 */
@DELETE
@Produces(MediaType.APPLICATION_JSON)
@Path("/{device_id}")
@ChangesLinkedDevices
public void removeDevice(@Mutable @Auth AuthenticatedDevice auth, @PathParam("device_id") byte deviceId) {
    // Non-primary callers may only unlink themselves.
    if (auth.getAuthenticatedDevice().getId() != Device.PRIMARY_ID
        && auth.getAuthenticatedDevice().getId() != deviceId) {
        throw new WebApplicationException(Response.Status.UNAUTHORIZED);
    }
    // The primary device can never be unlinked, even by itself.
    if (deviceId == Device.PRIMARY_ID) {
        throw new ForbiddenException();
    }
    // Block until the removal has completed.
    accounts.removeDevice(auth.getAccount(), deviceId).join();
}
// Attempting to unlink the primary device must be rejected with 403 and must not
// reach the accounts manager.
@Test
void unlinkPrimaryDevice() {
    // this is a static mock, so it might have previous invocations
    clearInvocations(AuthHelper.VALID_ACCOUNT);
    try (final Response response = resources
        .getJerseyTest()
        .target("/v1/devices/" + Device.PRIMARY_ID)
        .request()
        .header("Authorization", AuthHelper.getAuthHeader(AuthHelper.VALID_UUID, AuthHelper.VALID_PASSWORD))
        .header(HttpHeaders.USER_AGENT, "Signal-Android/5.42.8675309 Android/30")
        .delete()) {

      assertThat(response.getStatus()).isEqualTo(403);

      verify(accountsManager, never()).removeDevice(any(), anyByte());
    }
}
/**
 * Ensures the projection contains the partitioning expressions; throws a
 * keys-not-included error otherwise. Partitioning by null needs no key.
 */
@Override
void validateKeyPresent(final SourceName sinkName, final Projection projection) {
    if (PartitionByParamsFactory.isPartitionByNull(partitionBys)) {
        return;
    }
    if (containsExpressions(projection, partitionBys)) {
        return;
    }
    throwKeysNotIncludedError(sinkName, "partitioning expression", originalPartitionBys);
}
// A projection that omits the partitioning expression must produce a descriptive error.
@Test
public void shouldThrowIfProjectionMissingPartitionBy() {
    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> repartitionNode.validateKeyPresent(SOURCE_NAME, projection)
    );

    // Then:
    assertThat(e.getMessage(), containsString("The query used to build `S1` "
        + "must include the partitioning expression T.ID in its projection (eg, SELECT T.ID..."));
}
/**
 * Returns the plugin role with the given name, or {@code null} if none matches.
 */
public PluginRoleConfig getPluginRole(CaseInsensitiveString roleName) {
    return rolesConfig.getPluginRoleConfigs().stream()
            .filter(pluginRoleConfig -> pluginRoleConfig.getName().equals(roleName))
            .findFirst()
            .orElse(null);
}
// Looking up an unknown role name must yield null, not an exception.
@Test
public void getPluginRole_shouldReturnNullInAbsenceOfPluginRoleForTheGivenName() throws Exception {
    SecurityConfig securityConfig = new SecurityConfig();
    assertNull(securityConfig.getPluginRole(new CaseInsensitiveString("foo")));
}
/**
 * Returns the total number of users, delegating to the underlying provider.
 */
public int getUserCount() {
    return provider.getUserCount();
}
// With the fixture's two configured users, the manager must report a count of 2.
@Test
public void verifyUserCountIsTwo() throws Exception {
    final int result = userManager.getUserCount();
    assertThat(result, is(2));
}
/**
 * Returns the set of known property keys.
 * <p>
 * NOTE(review): the internal set is returned directly, not a copy — callers must
 * treat it as read-only.
 */
public Set<String> keySet() {
    return keys;
}
// The wrapper's key set must mirror the backing Properties' key set exactly.
@Test
public void testKeySet_whenPropertiesAvailable() {
    Properties props = new Properties();
    props.setProperty("key1", "value1");
    props.setProperty("key2", "value2");
    HazelcastProperties properties = new HazelcastProperties(props);

    assertEquals(props.keySet(), properties.keySet());
}
/**
 * Derives the {@link RoadAccess} value for a way from barrier-node tags, way-level
 * restriction tags and an optional country rule, then encodes it on the edge.
 */
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay readerWay, IntsRef relationFlags) {
    RoadAccess accessValue = YES;
    List<Map<String, Object>> nodeTags = readerWay.getTag("node_tags", Collections.emptyList());
    // a barrier edge has the restriction in both nodes and the tags are the same
    // NOTE(review): assumes node_tags is non-empty whenever gh:barrier_edge is set —
    // confirm the upstream reader guarantees this.
    if (readerWay.hasTag("gh:barrier_edge"))
        for (String restriction : restrictions) {
            Object value = nodeTags.get(0).get(restriction);
            if (value != null)
                accessValue = getRoadAccess((String) value, accessValue);
        }
    // Way-level restriction tags are folded into the current access value.
    for (String restriction : restrictions) {
        accessValue = getRoadAccess(readerWay.getTag(restriction), accessValue);
    }
    // A country rule, when present, gets the final word.
    CountryRule countryRule = readerWay.getTag("country_rule", null);
    if (countryRule != null)
        accessValue = countryRule.getAccess(readerWay, TransportationMode.CAR, accessValue);
    roadAccessEnc.setEnum(false, edgeId, edgeIntAccess, accessValue);
}
// A country rule overrides the default access; without it the tag-derived value wins.
@Test
void countryRule() {
    IntsRef relFlags = new IntsRef(2);
    ReaderWay way = new ReaderWay(27L);
    way.setTag("highway", "track");
    way.setTag("country_rule", new CountryRule() {
        @Override
        public RoadAccess getAccess(ReaderWay readerWay, TransportationMode transportationMode, RoadAccess currentRoadAccess) {
            return RoadAccess.DESTINATION;
        }
    });
    EdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1);
    int edgeId = 0;
    parser.handleWayTags(edgeId, edgeIntAccess, way, relFlags);
    assertEquals(RoadAccess.DESTINATION, roadAccessEnc.getEnum(false, edgeId, edgeIntAccess));

    // if there is no country rule we get the default value
    edgeIntAccess = new ArrayEdgeIntAccess(1);
    way.removeTag("country_rule");
    parser.handleWayTags(edgeId, edgeIntAccess, way, relFlags);
    assertEquals(RoadAccess.YES, roadAccessEnc.getEnum(false, edgeId, edgeIntAccess));

    // combined motor_vehicle values resolve to AGRICULTURAL regardless of order
    way.setTag("motor_vehicle", "agricultural;forestry");
    parser.handleWayTags(edgeId, edgeIntAccess, way, relFlags);
    assertEquals(RoadAccess.AGRICULTURAL, roadAccessEnc.getEnum(false, edgeId, edgeIntAccess));
    way.setTag("motor_vehicle", "forestry;agricultural");
    parser.handleWayTags(edgeId, edgeIntAccess, way, relFlags);
    assertEquals(RoadAccess.AGRICULTURAL, roadAccessEnc.getEnum(false, edgeId, edgeIntAccess));
}
/**
 * Returns an immutable snapshot of all currently live queries.
 */
@Override
public List<QueryMetadata> getAllLiveQueries() {
    return ImmutableList.copyOf(allLiveQueries.values());
}
// Stream-pull queries are transient request-scoped work and must not appear in the
// live-query listing, while persistent and transient queries do.
@Test
public void shouldNotIncludeStreamPullInLiveQueries() {
    // Given:
    final Set<QueryMetadata> queries = ImmutableSet.of(
        givenCreate(registry, "q1", "source1", Optional.of("sink"), CREATE_AS),
        givenCreate(registry, "q2", "source2", Optional.empty(), CREATE_SOURCE),
        givenCreateTransient(registry, "transient1")
    );
    givenStreamPull(registry, "streamPull1");

    // When:
    final Set<QueryMetadata> listed =
        ImmutableSet.<QueryMetadata>builder().addAll(registry.getAllLiveQueries()).build();

    // Then:
    assertThat(listed, equalTo(queries));
}
/**
 * Converts the given object to a {@link Date} via its timestamp representation.
 * <p>
 * Fix: removed the redundant null-check branch — {@code Timestamp} is a
 * {@code java.util.Date} subclass, so the (possibly null) timestamp can be
 * returned directly with identical behavior.
 *
 * @param object the value to convert
 * @return the timestamp as a Date, or {@code null} if the value converts to null
 * @throws KettleValueException if the value cannot be converted to a timestamp
 */
@Override
public Date getDate( Object object ) throws KettleValueException {
    return getTimestamp( object );
}
// A null input must convert to a null Date, not throw.
@Test
public void testConvertTimestampToDate_Null() throws KettleValueException {
    ValueMetaTimestamp valueMetaTimestamp = new ValueMetaTimestamp();
    assertNull( valueMetaTimestamp.getDate( null ) );
}
/**
 * Runs the pipeline interpreter over the given messages using the most recent
 * pipeline state, timing the whole pass. Per-rule metrics are recorded only when
 * enabled in that state.
 */
@Override
public Messages process(Messages messages) {
    // The timer context auto-closes, recording the elapsed time of this pass.
    try (Timer.Context ignored = executionTime.time()) {
        final State latestState = stateUpdater.getLatestState();
        if (latestState.enableRuleMetrics()) {
            return process(messages, new RuleMetricsListener(metricRegistry), latestState);
        }
        return process(messages, new NoopInterpreterListener(), latestState);
    }
}
// "match either" semantics: a stage proceeds if at least one of its rules matches,
// so stage 1 still runs and the add_foobar rule sets the field.
@Test
public void testMatchEitherContinuesIfOneRuleMatched() {
    final RuleService ruleService = mock(MongoDbRuleService.class);
    when(ruleService.loadAll()).thenReturn(ImmutableList.of(RULE_TRUE, RULE_FALSE, RULE_ADD_FOOBAR));

    final PipelineService pipelineService = mock(MongoDbPipelineService.class);
    when(pipelineService.loadAll()).thenReturn(Collections.singleton(
        PipelineDao.create("p1", "title", "description",
            "pipeline \"pipeline\"\n" +
                "stage 0 match either\n" +
                " rule \"true\";\n" +
                " rule \"false\";\n" +
                "stage 1 match either\n" +
                " rule \"add_foobar\";\n" +
                "end\n",
            Tools.nowUTC(),
            null)
    ));

    final Map<String, Function<?>> functions = ImmutableMap.of(SetField.NAME, new SetField());
    final PipelineInterpreter interpreter = createPipelineInterpreter(ruleService, pipelineService, functions);

    final Messages processed = interpreter.process(messageInDefaultStream("message", "test"));

    final List<Message> messages = ImmutableList.copyOf(processed);
    assertThat(messages).hasSize(1);

    final Message actualMessage = messages.get(0);
    assertThat(actualMessage.getFieldAs(String.class, "foobar")).isEqualTo("covfefe");
}
/**
 * Returns the number of registered counters.
 */
public long size() {
    return counters.size();
}
// NOTE(review): despite the method name, this asserts that requesting a counter
// with an existing name returns the SAME instance and does not grow the set —
// consider renaming to match the assertions.
@Test
public void testAddWithAlreadyPresentNameReturnsFalse() {
    Counter<?, ?> c1 = counterSet.longSum(name1);
    Counter<?, ?> c1Dup = counterSet.longSum(name1);
    assertSame(c1, c1Dup);
    assertThat(counterSet.size(), equalTo(1L));
}
/**
 * Builds the JVM option list for the container localizer from admin and user
 * configuration, optionally appending extra JDK17+ flags to the user options.
 * Admin options precede user options; empty tokens are dropped.
 */
public static List<String> getJavaOpts(Configuration conf) {
    String adminOpts = conf.get(
        YarnConfiguration.NM_CONTAINER_LOCALIZER_ADMIN_JAVA_OPTS_KEY,
        YarnConfiguration.NM_CONTAINER_LOCALIZER_ADMIN_JAVA_OPTS_DEFAULT);
    String userOpts = conf.get(
        YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_KEY,
        YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_DEFAULT);
    boolean addJdk17PlusOptions = conf.getBoolean(
        YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_ADD_EXPORTS_KEY,
        YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_ADD_EXPORTS_DEFAULT);
    if (Shell.isJavaVersionAtLeast(17) && addJdk17PlusOptions) {
        userOpts = userOpts.trim() + " " + ADDITIONAL_JDK17_PLUS_OPTIONS;
    }
    return Stream.concat(
            Arrays.stream(adminOpts.split("\\s+")),
            Arrays.stream(userOpts.split("\\s+")))
        .filter(opt -> !opt.isEmpty())
        .collect(Collectors.toList());
}
// With no admin options configured, only the user-supplied options must survive,
// in order and without empty tokens.
@Test
public void testUserOptionsWhenAdminOptionsAreNotDefined() throws Exception {
    ContainerLocalizerWrapper wrapper = new ContainerLocalizerWrapper();
    ContainerLocalizer localizer = wrapper.setupContainerLocalizerForTest();
    Configuration conf = new Configuration();
    conf.setStrings(YarnConfiguration.NM_CONTAINER_LOCALIZER_JAVA_OPTS_KEY,
        "userOption1 userOption2");

    List<String> javaOpts = localizer.getJavaOpts(conf);
    Assert.assertEquals(2, javaOpts.size());
    Assert.assertTrue(javaOpts.get(0).equals("userOption1"));
    Assert.assertTrue(javaOpts.get(1).equals("userOption2"));
}
/**
 * Transcodes the input parquet file to the output path using the requested
 * compression codec, preserving the schema and key/value metadata.
 *
 * @return 0 on success
 * @throws IOException on read/write failures
 */
@Override
@SuppressWarnings("unchecked")
public int run() throws IOException {
    Preconditions.checkArgument(
        input != null && output != null, "Both input and output parquet file paths are required.");
    Preconditions.checkArgument(codec != null, "The codec cannot be null");

    Path inPath = new Path(input);
    Path outPath = new Path(output);
    CompressionCodecName codecName = Codecs.parquetCodec(codec);

    // Read the footer first to obtain the schema and creation metadata.
    ParquetMetadata metaData = ParquetFileReader.readFooter(getConf(), inPath, NO_FILTER);
    MessageType schema = metaData.getFileMetaData().getSchema();
    ParquetFileWriter writer =
        new ParquetFileWriter(getConf(), schema, outPath, ParquetFileWriter.Mode.CREATE);
    writer.start();

    // NOTE(review): writer.end(...) runs in finally even when processing throws,
    // which may mask the original exception — confirm this is intended.
    try (TransParquetFileReader reader = new TransParquetFileReader(
        HadoopInputFile.fromPath(inPath, getConf()),
        HadoopReadOptions.builder(getConf()).build())) {
      compressionConverter.processBlocks(
          reader, writer, metaData, schema, metaData.getFileMetaData().getCreatedBy(), codecName);
    } finally {
      writer.end(metaData.getFileMetaData().getKeyValueMetaData());
    }
    return 0;
}
// End-to-end: converting a fixture file to zstd must succeed and produce output.
@Test
public void testTransCompressionCommand_zstd() throws IOException {
    TransCompressionCommand command = new TransCompressionCommand(createLogger());
    command.input = parquetFile().getAbsolutePath();
    File output = new File(getTempFolder(), getClass().getSimpleName() + ".converted-2.zstd.parquet");
    command.output = output.getAbsolutePath();
    command.codec = "zstd";
    command.setConf(new Configuration());
    Assert.assertEquals(0, command.run());
    Assert.assertTrue(output.exists());
}
/**
 * Returns the named system property, mapping the {@code NULL_PROPERTY_VALUE}
 * sentinel to {@code null}.
 *
 * @param propertyName name of the system property to read
 * @return the property value, or {@code null} if unset or set to the sentinel
 */
public static String getProperty(final String propertyName) {
    final String value = System.getProperty(propertyName);
    if (NULL_PROPERTY_VALUE.equals(value)) {
        return null;
    }
    return value;
}
// The "@null" sentinel must override even an explicit default value.
@Test
void shouldGetNullPropertyWithDefault() {
    final String key = "org.agrona.test.case";
    final String value = "@null";
    System.setProperty(key, value);
    try {
        assertNull(SystemUtil.getProperty(key, "default"));
    } finally {
        // Always clean up the global system property.
        System.clearProperty(key);
    }
}
/**
 * Builds the JSON request body for an agent status report.
 * <p>
 * The job identifier is omitted entirely when null. Property insertion order is
 * preserved in the serialized output, so keep "job_identifier" first.
 */
String getAgentStatusReportRequestBody(JobIdentifier identifier, String elasticAgentId) {
    JsonObject jsonObject = new JsonObject();
    if (identifier != null) {
        jsonObject.add("job_identifier", jobIdentifierJson(identifier));
    }
    jsonObject.addProperty("elastic_agent_id", elasticAgentId);
    return FORCED_EXPOSE_GSON.toJson(jsonObject);
}
// With a job identifier and a null agent id, the body must contain only the
// job_identifier object (null elastic_agent_id is not serialized).
@Test
public void shouldJSONizeElasticAgentStatusReportRequestBodyWhenJobIdentifierIsProvided() throws Exception {
    String actual = new ElasticAgentExtensionConverterV4().getAgentStatusReportRequestBody(jobIdentifier, null);
    String expected = """
            {
              "job_identifier": {
                "pipeline_name": "test-pipeline",
                "pipeline_counter": 1,
                "pipeline_label": "Test Pipeline",
                "stage_name": "test-stage",
                "stage_counter": "1",
                "job_name": "test-job",
                "job_id": 100
              }
            }""";
    assertThatJson(expected).isEqualTo(actual);
}
@Override public int hashCode() { if (value == null) { return 31; } // Using recommended hashing algorithm from Effective Java for longs and doubles if (isIntegral(this)) { long value = getAsNumber().longValue(); return (int) (value ^ (value >>> 32)); } if (value instanceof Number) { long value = Double.doubleToLongBits(getAsNumber().doubleValue()); return (int) (value ^ (value >>> 32)); } return value.hashCode(); }
// A byte and a BigInteger with the same integral value must be equal and hash equal.
@Test
public void testByteEqualsBigInteger() {
    JsonPrimitive p1 = new JsonPrimitive((byte) 10);
    JsonPrimitive p2 = new JsonPrimitive(new BigInteger("10"));
    assertThat(p1).isEqualTo(p2);
    assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
/**
 * Scans the string for RAW(...) / RAW{...} segments and returns the start/end index
 * pair of each. Thin delegation to {@code URIScanner.scanRaw}.
 */
public static List<Pair<Integer>> scanRaw(String str) {
    return URIScanner.scanRaw(str);
}
// Fix: use the diamond operator instead of raw Pair construction (raw types
// suppress generic type checking); assertions and covered cases are unchanged.
// Covers RAW(...), RAW{...}, closing-delimiter characters inside the other
// delimiter style, and multiple RAW segments in one URI.
@Test
public void testScanRaw() {
    List<Pair<Integer>> pairs1 = URISupport.scanRaw("password=RAW(++?5w0rd)&serviceName=somechat");
    assertEquals(1, pairs1.size());
    assertEquals(new Pair<>(9, 21), pairs1.get(0));

    List<Pair<Integer>> pairs2 = URISupport.scanRaw("password=RAW{++?5w0rd}&serviceName=somechat");
    assertEquals(1, pairs2.size());
    assertEquals(new Pair<>(9, 21), pairs2.get(0));

    List<Pair<Integer>> pairs3 = URISupport.scanRaw("password=RAW{++?)&0rd}&serviceName=somechat");
    assertEquals(1, pairs3.size());
    assertEquals(new Pair<>(9, 21), pairs3.get(0));

    List<Pair<Integer>> pairs4 = URISupport.scanRaw("password1=RAW(++?}&0rd)&password2=RAW{++?)&0rd}&serviceName=somechat");
    assertEquals(2, pairs4.size());
    assertEquals(new Pair<>(10, 22), pairs4.get(0));
    assertEquals(new Pair<>(34, 46), pairs4.get(1));
}
/**
 * gRPC entry point for expansion requests.
 * <p>
 * On first use, waits for all aggregated expansion services to become ready
 * (wrapping a timeout as a RuntimeException, which gRPC surfaces to the caller).
 * Expansion failures are reported in-band via the response's error field rather
 * than as a gRPC error, so callers can aggregate errors across services.
 */
@Override
public void expand(
    ExpansionApi.ExpansionRequest request,
    StreamObserver<ExpansionApi.ExpansionResponse> responseObserver) {
  if (!checkedAllServices) {
    try {
      waitForAllServicesToBeReady();
    } catch (TimeoutException e) {
      throw new RuntimeException(e);
    }
    // Only check readiness once per service lifetime.
    checkedAllServices = true;
  }
  try {
    responseObserver.onNext(processExpand(request));
    responseObserver.onCompleted();
  } catch (RuntimeException exn) {
    // Report the failure in the response body; the stream still completes normally.
    responseObserver.onNext(
        ExpansionApi.ExpansionResponse.newBuilder()
            .setError(Throwables.getStackTraceAsString(exn))
            .build());
    responseObserver.onCompleted();
  }
}
// When every delegate service reports an expansion error, the aggregated response
// must contain all of the individual error messages.
@Test
public void testExpandFail() {
    ExpansionServiceClient expansionServiceClient = Mockito.mock(ExpansionServiceClient.class);
    Mockito.when(clientFactory.getExpansionServiceClient(Mockito.any()))
        .thenReturn(expansionServiceClient);
    Mockito.when(expansionServiceClient.expand(Mockito.any()))
        .thenReturn(ExpansionResponse.newBuilder().setError("expansion error 1").build())
        .thenReturn(ExpansionResponse.newBuilder().setError("expansion error 2").build());

    ExpansionRequest request = ExpansionRequest.newBuilder().build();

    StreamObserver<ExpansionResponse> responseObserver = Mockito.mock(StreamObserver.class);
    expansionService.expand(request, responseObserver);

    // Both delegate services must have been consulted.
    Mockito.verify(expansionServiceClient, Mockito.times(2)).expand(request);

    ArgumentCaptor<ExpansionResponse> expansionResponseCapture =
        ArgumentCaptor.forClass(ExpansionResponse.class);
    Mockito.verify(responseObserver).onNext(expansionResponseCapture.capture());
    // Error response should contain errors from both expansion services.
    assertTrue(expansionResponseCapture.getValue().getError().contains("expansion error 1"));
    assertTrue(expansionResponseCapture.getValue().getError().contains("expansion error 2"));
}
/**
 * Returns the shared time formatter instance.
 */
public static DateTimeFormatter getTimeFormatter() {
    return TIME;
}
// The time formatter must parse a plain HH:mm:ss string into an ISO time accessor.
@Test
void assertGetTimeFormatter() {
    assertThat(DateTimeFormatterFactory.getTimeFormatter().parse("00:00:00").toString(),
        is("{},ISO resolved to 00:00"));
}
/**
 * Parses raw configurator rules into URLs.
 * <p>
 * Accepts either the legacy JSON-array form (a list of override URLs) or the
 * structured config form, whose items are expanded per scope (application vs the
 * default service scope).
 *
 * @param rawConfig raw configuration text
 * @return configurator URLs derived from the raw config
 */
public static List<URL> parseConfigurators(String rawConfig) {
    // compatible url JsonArray, such as [ "override://xxx", "override://xxx" ]
    List<URL> compatibleUrls = parseJsonArray(rawConfig);
    if (CollectionUtils.isNotEmpty(compatibleUrls)) {
        return compatibleUrls;
    }

    List<URL> urls = new ArrayList<>();
    ConfiguratorConfig configuratorConfig = parseObject(rawConfig);

    String scope = configuratorConfig.getScope();
    List<ConfigItem> items = configuratorConfig.getConfigs();

    if (ConfiguratorConfig.SCOPE_APPLICATION.equals(scope)) {
        items.forEach(item -> urls.addAll(appItemToUrls(item, configuratorConfig)));
    } else {
        // service scope by default.
        items.forEach(item -> urls.addAll(serviceItemToUrls(item, configuratorConfig)));
    }
    return urls;
}
// A service-scope configurator without rules must be rejected with IllegalStateException.
// Fix: removed the redundant Assertions.fail() after parseConfigurators — if the call
// returns normally, assertThrows itself fails with a clearer "expected exception was
// not thrown" message, whereas the fail() produced a confusing "unexpected exception
// type" report.
@Test
void parseConfiguratorsServiceNoRuleTest() {
    Assertions.assertThrows(IllegalStateException.class, () -> {
        try (InputStream yamlStream = this.getClass().getResourceAsStream("/ServiceNoRule.yml")) {
            ConfigParser.parseConfigurators(streamToString(yamlStream));
        }
    });
}
/**
 * Intentionally empty: this is the top-level command; behavior lives in subcommands.
 */
@Override
public void run() {
    // top-level command, do nothing
}
// save-snapshot with an unknown job name/id must fail with a descriptive message.
@Test
public void test_saveSnapshot_invalidNameOrId() {
    // When
    // Then
    exception.expectMessage("No job with name or id 'invalid' was found");
    run("save-snapshot", "invalid", "my-snapshot");
}
/**
 * Checks whether the JCache (javax.cache) API classes are available through the
 * given class loader, delegating to the checker-based overload.
 */
public static boolean isJCacheAvailable(ClassLoader classLoader) {
    return isJCacheAvailable(
            className -> ClassLoaderUtil.isClassAvailable(classLoader, className));
}
// A checker that never finds any class must make JCache detection fail.
@Test
public void testIsJCacheAvailable_notFound() {
    JCacheDetector.ClassAvailabilityChecker alwaysMissing = className -> false;
    assertFalse(isJCacheAvailable(alwaysMissing));
}
/**
 * Creates a data stream with the given timestamp field and mappings.
 * <p>
 * Call order matters: the index template must be updated before the stream is
 * created, and the ISM policy and replica count are applied afterwards.
 */
@Override
public void createDataStream(String dataStreamName, String timestampField,
                             Map<String, Map<String, String>> mappings, Policy ismPolicy) {
    updateDataStreamTemplate(dataStreamName, timestampField, mappings);
    dataStreamAdapter.createDataStream(dataStreamName);
    dataStreamAdapter.applyIsmPolicy(dataStreamName, ismPolicy);
    dataStreamAdapter.setNumberOfReplicas(dataStreamName, replicas);
}
@SuppressWarnings("unchecked") @Test public void templateCreatesTimestampMapping() { final Map<String, Map<String, String>> mappings = new HashMap<>(); String ts = "ts"; dataStreamService.createDataStream("teststream", ts, mappings, mock(Policy.class)); ArgumentCaptor<Template> templateCaptor = ArgumentCaptor.forClass(Template.class); verify(dataStreamAdapter).ensureDataStreamTemplate(anyString(), templateCaptor.capture(), anyString()); Map<String, Object> fieldMappings = (Map<String, Object>) templateCaptor.getValue().mappings().get("properties"); Map<String, String> timestampMapping = (Map<String, String>) fieldMappings.get(ts); assertThat(timestampMapping).isNotNull(); assertThat(timestampMapping.get("type")).isEqualTo("date"); assertThat(timestampMapping.get("format")).isEqualTo("yyyy-MM-dd HH:mm:ss.SSS||strict_date_optional_time||epoch_millis"); }
/**
 * Records a metric when an outgoing message's destination UUID does not match any
 * of the account's identifiers. Thin delegation to
 * {@code measureAccountDestinationUuidMismatches}.
 */
public void measureAccountOutgoingMessageUuidMismatches(final Account account,
    final OutgoingMessageEntity outgoingMessage) {
    measureAccountDestinationUuidMismatches(account, outgoingMessage.destinationUuid());
}
// Messages addressed to the account's own ACI or PNI must not increment the
// mismatch counter; a foreign UUID must.
@Test
void measureAccountOutgoingMessageUuidMismatches() {
    final OutgoingMessageEntity outgoingMessageToAci =
        createOutgoingMessageEntity(new AciServiceIdentifier(aci));
    messageMetrics.measureAccountOutgoingMessageUuidMismatches(account, outgoingMessageToAci);

    Optional<Counter> counter = findCounter(simpleMeterRegistry);
    assertTrue(counter.isEmpty());

    final OutgoingMessageEntity outgoingMessageToPni =
        createOutgoingMessageEntity(new PniServiceIdentifier(pni));
    messageMetrics.measureAccountOutgoingMessageUuidMismatches(account, outgoingMessageToPni);

    counter = findCounter(simpleMeterRegistry);
    assertTrue(counter.isEmpty());

    final OutgoingMessageEntity outgoingMessageToOtherUuid =
        createOutgoingMessageEntity(new AciServiceIdentifier(otherUuid));
    messageMetrics.measureAccountOutgoingMessageUuidMismatches(account, outgoingMessageToOtherUuid);

    counter = findCounter(simpleMeterRegistry);
    assertEquals(1.0, counter.map(Counter::count).orElse(0.0));
}
/**
 * Hash code consistent with equals: derived from the instance type and the
 * database name as filtered by {@code filterDatabaseName}, so keys that equals()
 * treats as equivalent hash identically.
 */
@Override
public int hashCode() {
    return Objects.hash(instanceType, filterDatabaseName(this));
}
// PROXY-mode keys must compare equal and hash equal regardless of database name.
@Test
void assertHashCodeEqualsForProxyMode() {
    PipelineContextKey contextKey1 = new PipelineContextKey(null, InstanceType.PROXY);
    PipelineContextKey contextKey2 = new PipelineContextKey("sharding_db", InstanceType.PROXY);
    assertThat(contextKey1.hashCode(), is(contextKey2.hashCode()));
    assertThat(contextKey1, is(contextKey2));
}
/**
 * Gauges describing the JVM runtime: its name, a combined vendor/VM description,
 * and the uptime in milliseconds.
 *
 * @return an unmodifiable map of metric name to gauge
 */
@Override
public Map<MetricName, Metric> getMetrics() {
    final Map<MetricName, Metric> metrics = new HashMap<>();
    metrics.put(MetricName.build("name"), (Gauge<String>) runtime::getName);
    Gauge<String> vendorGauge = () -> String.format(Locale.US, "%s %s %s (%s)",
            runtime.getVmVendor(),
            runtime.getVmName(),
            runtime.getVmVersion(),
            runtime.getSpecVersion());
    metrics.put(MetricName.build("vendor"), vendorGauge);
    metrics.put(MetricName.build("uptime"), (Gauge<Long>) runtime::getUptime);
    return Collections.unmodifiableMap(metrics);
}
// The "uptime" gauge must report the stubbed runtime uptime value.
@Test
public void hasAGaugeForTheJVMUptime() throws Exception {
    final Gauge gauge = (Gauge) gauges.getMetrics().get(MetricName.build("uptime"));
    assertThat(gauge.getValue())
        .isEqualTo(100L);
}
/**
 * Returns a new input-stream view over this binary stream's content.
 */
@Override
public InputStream getInputStream() {
    return new RedissonInputStream();
}
// Reading from a stream with no stored content must immediately signal EOF (-1).
@Test
public void testEmptyRead() throws IOException {
    RBinaryStream stream = redisson.getBinaryStream("test");
    assertThat(stream.getInputStream().read()).isEqualTo(-1);
}
/**
 * Authenticates a user via the MySQL clear-password mechanism.
 * <p>
 * The client sends the password followed by a trailing terminator byte, which is
 * stripped before comparison. An empty stored password accepts any client input.
 * <p>
 * NOTE(review): {@code new String(password)} uses the platform default charset —
 * confirm the expected connection charset. Also assumes {@code authResponse} is
 * non-empty; a zero-length response would fail with a negative array size.
 * A constant-time comparison would avoid timing side channels — consider
 * {@code MessageDigest.isEqual}.
 */
@Override
public boolean authenticate(final ShardingSphereUser user, final Object[] authInfo) {
    byte[] authResponse = (byte[]) authInfo[0];
    // Strip the trailing terminator byte from the client response.
    byte[] password = new byte[authResponse.length - 1];
    System.arraycopy(authResponse, 0, password, 0, authResponse.length - 1);
    return Strings.isNullOrEmpty(user.getPassword()) || user.getPassword().equals(new String(password));
}
// A wrong clear-text password must be rejected when the user has a stored password.
@Test
void assertAuthenticateFailed() {
    ShardingSphereUser user = new ShardingSphereUser("foo", "password", "%");
    byte[] password = "wrong".getBytes();
    assertFalse(new MySQLClearPasswordAuthenticator().authenticate(user, new Object[]{password}));
}
/**
 * Initializes this S3-backed file system from the given URI and Hadoop
 * configuration, falling back to {@code HiveS3Config} defaults for every setting.
 * <p>
 * Validates mutually exclusive options (instance credentials vs IAM role;
 * region pinning vs explicit endpoint) before building the AWS client.
 *
 * @throws IOException if the superclass initialization fails
 */
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
    requireNonNull(uri, "uri is null");
    requireNonNull(conf, "conf is null");
    super.initialize(uri, conf);
    setConf(conf);

    // Normalize the URI to scheme+authority and derive the working directory.
    this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
    this.workingDirectory = new Path(PATH_SEPARATOR).makeQualified(this.uri, new Path(PATH_SEPARATOR));

    HiveS3Config defaults = new HiveS3Config();
    this.stagingDirectory = new File(conf.get(S3_STAGING_DIRECTORY, defaults.getS3StagingDirectory().toString()));
    // maxAttempts = retries + the initial attempt.
    this.maxAttempts = conf.getInt(S3_MAX_CLIENT_RETRIES, defaults.getS3MaxClientRetries()) + 1;
    this.maxBackoffTime = Duration.valueOf(conf.get(S3_MAX_BACKOFF_TIME, defaults.getS3MaxBackoffTime().toString()));
    this.maxRetryTime = Duration.valueOf(conf.get(S3_MAX_RETRY_TIME, defaults.getS3MaxRetryTime().toString()));
    int maxErrorRetries = conf.getInt(S3_MAX_ERROR_RETRIES, defaults.getS3MaxErrorRetries());
    boolean sslEnabled = conf.getBoolean(S3_SSL_ENABLED, defaults.isS3SslEnabled());
    Duration connectTimeout = Duration.valueOf(conf.get(S3_CONNECT_TIMEOUT, defaults.getS3ConnectTimeout().toString()));
    Duration socketTimeout = Duration.valueOf(conf.get(S3_SOCKET_TIMEOUT, defaults.getS3SocketTimeout().toString()));
    int maxConnections = conf.getInt(S3_MAX_CONNECTIONS, defaults.getS3MaxConnections());
    this.multiPartUploadMinFileSize = conf.getLong(S3_MULTIPART_MIN_FILE_SIZE, defaults.getS3MultipartMinFileSize().toBytes());
    this.multiPartUploadMinPartSize = conf.getLong(S3_MULTIPART_MIN_PART_SIZE, defaults.getS3MultipartMinPartSize().toBytes());
    this.isPathStyleAccess = conf.getBoolean(S3_PATH_STYLE_ACCESS, defaults.isS3PathStyleAccess());
    this.useInstanceCredentials = conf.getBoolean(S3_USE_INSTANCE_CREDENTIALS, defaults.isS3UseInstanceCredentials());
    this.pinS3ClientToCurrentRegion = conf.getBoolean(S3_PIN_CLIENT_TO_CURRENT_REGION, defaults.isPinS3ClientToCurrentRegion());
    this.s3IamRole = conf.get(S3_IAM_ROLE, defaults.getS3IamRole());
    this.s3IamRoleSessionName = conf.get(S3_IAM_ROLE_SESSION_NAME, defaults.getS3IamRoleSessionName());
    // Credential source and region pinning are mutually exclusive with their alternatives.
    verify(!(useInstanceCredentials && conf.get(S3_IAM_ROLE) != null),
            "Invalid configuration: either use instance credentials or specify an iam role");
    verify((pinS3ClientToCurrentRegion && conf.get(S3_ENDPOINT) == null) || !pinS3ClientToCurrentRegion,
            "Invalid configuration: either endpoint can be set or S3 client can be pinned to the current region");
    // Server-side encryption, ACL and storage-class settings.
    this.sseEnabled = conf.getBoolean(S3_SSE_ENABLED, defaults.isS3SseEnabled());
    this.sseType = PrestoS3SseType.valueOf(conf.get(S3_SSE_TYPE, defaults.getS3SseType().name()));
    this.sseKmsKeyId = conf.get(S3_SSE_KMS_KEY_ID, defaults.getS3SseKmsKeyId());
    this.s3AclType = PrestoS3AclType.valueOf(conf.get(S3_ACL_TYPE, defaults.getS3AclType().name()));
    String userAgentPrefix = conf.get(S3_USER_AGENT_PREFIX, defaults.getS3UserAgentPrefix());
    this.skipGlacierObjects = conf.getBoolean(S3_SKIP_GLACIER_OBJECTS, defaults.isSkipGlacierObjects());
    this.s3StorageClass = conf.getEnum(S3_STORAGE_CLASS, defaults.getS3StorageClass());

    // Build the low-level AWS client configuration from the values gathered above.
    ClientConfiguration configuration = new ClientConfiguration()
            .withMaxErrorRetry(maxErrorRetries)
            .withProtocol(sslEnabled ? Protocol.HTTPS : Protocol.HTTP)
            .withConnectionTimeout(toIntExact(connectTimeout.toMillis()))
            .withSocketTimeout(toIntExact(socketTimeout.toMillis()))
            .withMaxConnections(maxConnections)
            .withUserAgentPrefix(userAgentPrefix)
            .withUserAgentSuffix(S3_USER_AGENT_SUFFIX);

    this.credentialsProvider = createAwsCredentialsProvider(uri, conf);
    this.s3 = createAmazonS3Client(conf, configuration);
}
@Test public void testDefaultCredentials() throws Exception { Configuration config = new Configuration(); config.setBoolean(S3_USE_INSTANCE_CREDENTIALS, false); try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { fs.initialize(new URI("s3n://test-bucket/"), config); assertInstanceOf(getAwsCredentialsProvider(fs), DefaultAWSCredentialsProviderChain.class); } }
@Override public PathAttributes toAttributes(final DavResource resource) { final PathAttributes attributes = super.toAttributes(resource); final Map<QName, String> properties = resource.getCustomPropsNS(); if(null != properties && properties.containsKey(MicrosoftIISDAVTimestampFeature.LAST_MODIFIED_WIN32_CUSTOM_NAMESPACE)) { final String value = properties.get(MicrosoftIISDAVTimestampFeature.LAST_MODIFIED_WIN32_CUSTOM_NAMESPACE); if(StringUtils.isNotBlank(value)) { try { attributes.setModificationDate(rfc1123.parse(value).getTime()); } catch(InvalidDateException e) { log.warn(String.format("Failure parsing property %s with value %s", MicrosoftIISDAVTimestampFeature.LAST_MODIFIED_WIN32_CUSTOM_NAMESPACE, value)); if(resource.getModified() != null) { attributes.setModificationDate(resource.getModified().getTime()); } } } else { if(log.isDebugEnabled()) { log.debug(String.format("Missing value for property %s", MicrosoftIISDAVTimestampFeature.LAST_MODIFIED_WIN32_CUSTOM_NAMESPACE)); } if(resource.getModified() != null) { attributes.setModificationDate(resource.getModified().getTime()); } } } return attributes; }
@Test public void testCustomModified_PropertyAvailable() throws Exception { final MicrosoftIISDAVAttributesFinderFeature f = new MicrosoftIISDAVAttributesFinderFeature(null); final DavResource mock = mock(DavResource.class); Map<QName, String> map = new HashMap<>(); final String ts = "Mon, 29 Oct 2018 21:14:06 GMT"; map.put(MicrosoftIISDAVTimestampFeature.LAST_MODIFIED_WIN32_CUSTOM_NAMESPACE, ts); when(mock.getModified()).thenReturn(new DateTime("2018-11-01T15:31:57Z").toDate()); when(mock.getCustomPropsNS()).thenReturn(map); final PathAttributes attrs = f.toAttributes(mock); assertEquals(new RFC1123DateFormatter().parse(ts).getTime(), attrs.getModificationDate()); }
@Override public String toString() { StringBuilder sb = new StringBuilder("PartitionRuntimeState [" + stamp + "]{" + System.lineSeparator()); for (PartitionReplica replica : allReplicas) { sb.append(replica).append(System.lineSeparator()); } sb.append(", completedMigrations=").append(completedMigrations); sb.append('}'); return sb.toString(); }
@Test public void toString_whenDeserialized() throws UnknownHostException { PartitionRuntimeState state = createPartitionState(0, replica("127.0.0.1", 5701), replica("127.0.0.2", 5702) ); state = serializeAndDeserialize(state); assertContains(state.toString(), "127.0.0.1"); assertContains(state.toString(), "127.0.0.2"); }
public static Permission getPermission(String name, String serviceName, String... actions) { PermissionFactory permissionFactory = PERMISSION_FACTORY_MAP.get(serviceName); if (permissionFactory == null) { throw new IllegalArgumentException("No permissions found for service: " + serviceName); } return permissionFactory.create(name, actions); }
@Test public void getPermission_AtomicReference() { Permission permission = ActionConstants.getPermission("foo", AtomicRefServiceUtil.SERVICE_NAME); assertNotNull(permission); assertTrue(permission instanceof AtomicReferencePermission); }
public static String getSystemPropertiesToLog(Configuration conf) { String key = conf.get(MRJobConfig.MAPREDUCE_JVM_SYSTEM_PROPERTIES_TO_LOG, MRJobConfig.DEFAULT_MAPREDUCE_JVM_SYSTEM_PROPERTIES_TO_LOG); if (key != null) { key = key.trim(); // trim leading and trailing whitespace from the config if (!key.isEmpty()) { String[] props = key.split(","); if (props.length > 0) { StringBuilder sb = new StringBuilder(); sb.append("\n/************************************************************\n"); sb.append("[system properties]\n"); for (String prop: props) { prop = prop.trim(); // trim leading and trailing whitespace if (!prop.isEmpty()) { sb.append(prop).append(": ").append(System.getProperty(prop)).append('\n'); } } sb.append("************************************************************/"); return sb.toString(); } } } return null; }
@Test public void testLogSystemProperties() throws Exception { Configuration conf = new Configuration(); // test no logging conf.set(MRJobConfig.MAPREDUCE_JVM_SYSTEM_PROPERTIES_TO_LOG, " "); String value = MRApps.getSystemPropertiesToLog(conf); assertNull(value); // test logging of selected keys String classpath = "java.class.path"; String os = "os.name"; String version = "java.version"; conf.set(MRJobConfig.MAPREDUCE_JVM_SYSTEM_PROPERTIES_TO_LOG, classpath + ", " + os); value = MRApps.getSystemPropertiesToLog(conf); assertNotNull(value); assertTrue(value.contains(classpath)); assertTrue(value.contains(os)); assertFalse(value.contains(version)); }
public Collection<JID> getAdmins() { return administrators; }
@Test public void testRemoveFullJid() throws Exception { // Setup test fixture. final String groupName = "unit-test-group-k"; final Group group = groupManager.createGroup(groupName); final JID fullJid = new JID("unit-test-user-k", "example.org", "unit-test-resource-k"); final JID bareJid = fullJid.asBareJID(); group.getAdmins().add(bareJid); // Execute system under test. final boolean result = group.getAdmins().remove(fullJid); // Verify results. assertTrue(result); assertFalse(group.getAdmins().contains(fullJid)); assertFalse(group.getAdmins().contains(bareJid)); }
public static Optional<KiePMMLModel> getFromCommonDataAndTransformationDictionaryAndModelWithSources(final CompilationDTO compilationDTO) { logger.trace("getFromCommonDataAndTransformationDictionaryAndModelWithSources {}", compilationDTO); final Function<ModelImplementationProvider<Model, KiePMMLModel>, KiePMMLModel> modelFunction = implementation -> implementation.getKiePMMLModelWithSources(compilationDTO); return getFromCommonDataAndTransformationDictionaryAndModelWithSourcesCommon(compilationDTO.getFields(), compilationDTO.getModel(), modelFunction); }
@Test void getFromDataDictionaryAndModelWithSourcesWithoutProvider() throws Exception { String fileName = ONE_MINING_TARGET_SOURCE.substring(0, ONE_MINING_TARGET_SOURCE.lastIndexOf('.')); pmml = KiePMMLUtil.load(getFileInputStream(ONE_MINING_TARGET_SOURCE), ONE_MINING_TARGET_SOURCE); final CommonCompilationDTO compilationDTO = CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, pmml.getModels().get(0), new PMMLCompilationContextMock(), fileName); final Optional<KiePMMLModel> retrieved = getFromCommonDataAndTransformationDictionaryAndModelWithSources(compilationDTO); assertThat(retrieved).isNotNull(); assertThat(retrieved).isNotPresent(); }
public static Schema convertToSchema(LogicalType schema) { return convertToSchema(schema, true); }
@Test void testInvalidRawTypeAvroSchemaConversion() { RowType rowType = (RowType) ResolvedSchema.of( Column.physical("a", DataTypes.STRING()), Column.physical( "b", DataTypes.RAW(Void.class, VoidSerializer.INSTANCE))) .toSourceRowDataType() .getLogicalType(); assertThatThrownBy(() -> AvroSchemaConverter.convertToSchema(rowType)) .isInstanceOf(UnsupportedOperationException.class) .hasMessageStartingWith("Unsupported to derive Schema for type: RAW"); }
private void announceBackgroundJobServer() { final BackgroundJobServerStatus serverStatus = backgroundJobServer.getServerStatus(); storageProvider.announceBackgroundJobServer(serverStatus); determineIfCurrentBackgroundJobServerIsMaster(); lastSignalAlive = serverStatus.getLastHeartbeat(); }
@Test void masterDoesZookeepingAndKeepsHisMasterStatus() { backgroundJobServer.start(); storageProvider.announceBackgroundJobServer(anotherServer()); await() .pollInterval(ONE_SECOND) //.conditionEvaluationListener(condition -> System.out.printf("%s (elapsed time %dms, remaining time %dms)\n", condition.getDescription(), condition.getElapsedTimeInMS(), condition.getRemainingTimeInMS())) .atLeast(1, TimeUnit.SECONDS) .atMost(8, TimeUnit.SECONDS) .untilAsserted(() -> assertThat(storageProvider.getBackgroundJobServers()).hasSize(1)); assertThat(backgroundJobServer.isMaster()).isTrue(); verify(storageProvider, atLeastOnce()).removeTimedOutBackgroundJobServers(any()); verify(storageProvider, atMost(2)).removeTimedOutBackgroundJobServers(any()); }
@Override public String getPrivPassword() { return privPassword; }
@Test public void testGetPrivPassword() { assertEquals(privPassword, defaultSnmpv3Device.getPrivPassword()); }
@Override protected void write(final PostgreSQLPacketPayload payload) { payload.writeInt2(data.size()); for (Object each : data) { if (each instanceof BinaryCell) { writeBinaryValue(payload, (BinaryCell) each); } else { writeTextValue(payload, each); } } }
@Test void assertWriteBinaryInt4() { final int value = 12345678; PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singleton(new BinaryCell(PostgreSQLColumnType.INT4, value))); actual.write(payload); verify(payload).writeInt2(1); verify(payload).writeInt4(4); verify(payload).writeInt4(value); }
public Map<String, String> build() { Map<String, String> builder = new HashMap<>(); configureFileSystem(builder); configureNetwork(builder); configureCluster(builder); configureSecurity(builder); configureOthers(builder); LOGGER.info("Elasticsearch listening on [HTTP: {}:{}, TCP: {}:{}]", builder.get(ES_HTTP_HOST_KEY), builder.get(ES_HTTP_PORT_KEY), builder.get(ES_TRANSPORT_HOST_KEY), builder.get(ES_TRANSPORT_PORT_KEY)); return builder; }
@Test public void cluster_is_enabled_with_defined_initialTimeout() throws Exception { Props props = minProps(CLUSTER_ENABLED); props.set(SEARCH_INITIAL_STATE_TIMEOUT.getKey(), "10s"); Map<String, String> settings = new EsSettings(props, new EsInstallation(props), system).build(); assertThat(settings).containsEntry("discovery.initial_state_timeout", "10s"); }
@Override public MigratablePipeline findPipelineToMigrate(LoadImbalance imbalance) { Set<? extends MigratablePipeline> candidates = imbalance.getPipelinesOwnedBy(imbalance.srcOwner); int pipelineCount = candidates.size(); int selected = random.nextInt(pipelineCount); Iterator<? extends MigratablePipeline> iterator = candidates.iterator(); for (int i = 0; i < selected; i++) { iterator.next(); } return iterator.next(); }
@Test public void findPipelineToMigrate_shouldWorkEvenWithASinglePipelineAvailable() { MigratablePipeline pipeline = mock(MigratablePipeline.class); ownerPipelines.put(imbalance.srcOwner, Set.of(pipeline)); MigratablePipeline pipelineToMigrate = strategy.findPipelineToMigrate(imbalance); assertEquals(pipeline, pipelineToMigrate); }
public RuntimeOptionsBuilder parse(Map<String, String> properties) { return parse(properties::get); }
@Test void should_parse_plugin_publish_disabled_and_publish_token() { properties.put(Constants.PLUGIN_PUBLISH_ENABLED_PROPERTY_NAME, "false"); properties.put(Constants.PLUGIN_PUBLISH_TOKEN_PROPERTY_NAME, "some/value"); RuntimeOptions options = cucumberPropertiesParser .parse(properties) .enablePublishPlugin() .build(); assertThat(options.plugins(), empty()); }
@Override public String getTaskType() { return PLUGGABLE_TASK_PREFIX + "_" + getPluginConfiguration().getId().replaceAll("[^a-zA-Z0-9_]", "_"); }
@Test public void taskTypeShouldBeSanitizedToHaveNoSpecialCharacters() throws Exception { assertThat(new PluggableTask(new PluginConfiguration("abc.def", "1"), new Configuration()).getTaskType(), is("pluggable_task_abc_def")); assertThat(new PluggableTask(new PluginConfiguration("abc_def", "1"), new Configuration()).getTaskType(), is("pluggable_task_abc_def")); assertThat(new PluggableTask(new PluginConfiguration("abcdef", "1"), new Configuration()).getTaskType(), is("pluggable_task_abcdef")); assertThat(new PluggableTask(new PluginConfiguration("abc#def", "1"), new Configuration()).getTaskType(), is("pluggable_task_abc_def")); assertThat(new PluggableTask(new PluginConfiguration("abc#__def", "1"), new Configuration()).getTaskType(), is("pluggable_task_abc___def")); assertThat(new PluggableTask(new PluginConfiguration("Abc#dEF", "1"), new Configuration()).getTaskType(), is("pluggable_task_Abc_dEF")); assertThat(new PluggableTask(new PluginConfiguration("1234567890#ABCDEF", "1"), new Configuration()).getTaskType(), is("pluggable_task_1234567890_ABCDEF")); }
static MapKeyLoader.Role assignRole(boolean isPartitionOwner, boolean isMapNamePartition, boolean isMapNamePartitionFirstReplica) { if (isMapNamePartition) { if (isPartitionOwner) { // map-name partition owner is the SENDER return MapKeyLoader.Role.SENDER; } else { if (isMapNamePartitionFirstReplica) { // first replica of the map-name partition is the SENDER_BACKUP return MapKeyLoader.Role.SENDER_BACKUP; } else { // other replicas of the map-name partition do not have a role return MapKeyLoader.Role.NONE; } } } else { // ordinary partition owners are RECEIVERs, otherwise no role return isPartitionOwner ? MapKeyLoader.Role.RECEIVER : MapKeyLoader.Role.NONE; } }
@Test public void assignRole_RECEIVER_insignificantFlagTrue() { boolean isPartitionOwner = true; boolean isMapNamePartition = false; boolean insignificant = true; Role role = MapKeyLoaderUtil.assignRole(isPartitionOwner, isMapNamePartition, insignificant); assertEquals(RECEIVER, role); }
@Override public void close() throws IOException { if (mClosed.getAndSet(true)) { return; } mLocalOutputStream.close(); try { GSObject obj = new GSObject(mKey); obj.setBucketName(mBucketName); obj.setDataInputFile(mFile); obj.setContentLength(mFile.length()); obj.setContentType(Mimetypes.MIMETYPE_BINARY_OCTET_STREAM); if (mHash != null) { obj.setMd5Hash(mHash.digest()); } else { LOG.warn("MD5 was not computed for: {}", mKey); } mContentHash = mClient.putObject(mBucketName, obj).getMd5HashAsBase64(); } catch (ServiceException e) { LOG.error("Failed to upload {}.", mKey); throw new IOException(e); } finally { // Delete the temporary file on the local machine if the GCS client completed the // upload or if the upload failed. if (!mFile.delete()) { LOG.error("Failed to delete temporary file @ {}", mFile.getPath()); } } }
@Test @PrepareForTest(GCSOutputStream.class) public void testConstructor() throws Exception { PowerMockito.whenNew(File.class).withArguments(Mockito.anyString()).thenReturn(mFile); String errorMessage = "protocol doesn't support output"; PowerMockito.whenNew(FileOutputStream.class).withArguments(mFile) .thenThrow(new IOException(errorMessage)); mThrown.expect(IOException.class); mThrown.expectMessage(errorMessage); new GCSOutputStream("testBucketName", "testKey", mClient, sConf.getList(PropertyKey.TMP_DIRS)).close(); }
public static Object construct(String className) throws JMeterException { Object instance = null; try { instance = ClassUtils.getClass(className).getDeclaredConstructor().newInstance(); } catch (IllegalArgumentException | ReflectiveOperationException | SecurityException e) { throw new JMeterException(e); } return instance; }
@Test public void testConstructInt() throws JMeterException { Integer dummy = (Integer) ClassTools.construct("java.lang.Integer", 23); assertNotNull(dummy); assertEquals(Integer.valueOf(23), dummy); }
@Override public void validate(String methodName, Class<?>[] parameterTypes, Object[] arguments) throws Exception { List<Class<?>> groups = new ArrayList<>(); Class<?> methodClass = methodClass(methodName); if (methodClass != null) { groups.add(methodClass); } Method method = clazz.getMethod(methodName, parameterTypes); Class<?>[] methodClasses; if (method.isAnnotationPresent(MethodValidated.class)) { methodClasses = method.getAnnotation(MethodValidated.class).value(); groups.addAll(Arrays.asList(methodClasses)); } // add into default group groups.add(0, Default.class); groups.add(1, clazz); // convert list to array Class<?>[] classGroups = groups.toArray(new Class[0]); Set<ConstraintViolation<?>> violations = new HashSet<>(); Object parameterBean = getMethodParameterBean(clazz, method, arguments); if (parameterBean != null) { violations.addAll(validator.validate(parameterBean, classGroups)); } for (Object arg : arguments) { validate(violations, arg, classGroups); } if (!violations.isEmpty()) { logger.info("Failed to validate service: " + clazz.getName() + ", method: " + methodName + ", cause: " + violations); throw new ConstraintViolationException( "Failed to validate service: " + clazz.getName() + ", method: " + methodName + ", cause: " + violations, violations); } }
@Test void testItWithCollectionArg() throws Exception { URL url = URL.valueOf("test://test:11/org.apache.dubbo.validation.support.jvalidation.mock.JValidatorTestTarget"); JValidator jValidator = new JValidator(url); jValidator.validate( "someMethod4", new Class<?>[] {List.class}, new Object[] {Collections.singletonList("parameter")}); }
@Override public DirectoryTimestamp getDirectoryTimestamp() { return DirectoryTimestamp.explicit; }
@Test public void testFeatures() { assertEquals(Protocol.Case.sensitive, new MantaProtocol().getCaseSensitivity()); assertEquals(Protocol.DirectoryTimestamp.explicit, new MantaProtocol().getDirectoryTimestamp()); }
void restoreBatch(final Collection<ConsumerRecord<byte[], byte[]>> records) { // compute the observed stream time at the end of the restore batch, in order to speed up // restore by not bothering to read from/write to segments which will have expired by the // time the restoration process is complete. long endOfBatchStreamTime = observedStreamTime; for (final ConsumerRecord<byte[], byte[]> record : records) { endOfBatchStreamTime = Math.max(endOfBatchStreamTime, record.timestamp()); } final VersionedStoreClient<?> restoreClient = restoreWriteBuffer.getClient(); // note: there is increased risk for hitting an out-of-memory during this restore loop, // compared to for non-versioned key-value stores, because this versioned store // implementation stores multiple records (for the same key) together in a single RocksDB // "segment" entry -- restoring a single changelog entry could require loading multiple // records into memory. how high this memory amplification will be is very much dependent // on the specific workload and the value of the "segment interval" parameter. synchronized (position) { for (final ConsumerRecord<byte[], byte[]> record : records) { if (record.timestamp() < observedStreamTime - gracePeriod) { // record is older than grace period and was therefore never written to the store continue; } // advance observed stream time as usual, for use in deciding whether records have // exceeded the store's grace period and should be dropped. observedStreamTime = Math.max(observedStreamTime, record.timestamp()); ChangelogRecordDeserializationHelper.applyChecksAndUpdatePosition( record, consistencyEnabled, position ); // put records to write buffer doPut( restoreClient, endOfBatchStreamTime, new Bytes(record.key()), record.value(), record.timestamp() ); } try { restoreWriteBuffer.flush(); } catch (final RocksDBException e) { throw new ProcessorStateException("Error restoring batch to store " + name, e); } } }
@Test public void shouldRestore() { final List<DataRecord> records = new ArrayList<>(); records.add(new DataRecord("k", "vp20", SEGMENT_INTERVAL + 20)); records.add(new DataRecord("k", "vp10", SEGMENT_INTERVAL + 10)); records.add(new DataRecord("k", "vn10", SEGMENT_INTERVAL - 10)); records.add(new DataRecord("k", "vn2", SEGMENT_INTERVAL - 2)); records.add(new DataRecord("k", "vn1", SEGMENT_INTERVAL - 1)); records.add(new DataRecord("k", "vp1", SEGMENT_INTERVAL + 1)); store.restoreBatch(getChangelogRecords(records)); verifyGetValueFromStore("k", "vp20", SEGMENT_INTERVAL + 20); verifyTimestampedGetValueFromStore("k", SEGMENT_INTERVAL + 30, "vp20", SEGMENT_INTERVAL + 20, PUT_RETURN_CODE_VALID_TO_UNDEFINED); verifyTimestampedGetValueFromStore("k", SEGMENT_INTERVAL + 15, "vp10", SEGMENT_INTERVAL + 10, SEGMENT_INTERVAL + 20); verifyTimestampedGetValueFromStore("k", SEGMENT_INTERVAL + 5, "vp1", SEGMENT_INTERVAL + 1, SEGMENT_INTERVAL + 10); verifyTimestampedGetValueFromStore("k", SEGMENT_INTERVAL, "vn1", SEGMENT_INTERVAL - 1, SEGMENT_INTERVAL + 1); verifyTimestampedGetValueFromStore("k", SEGMENT_INTERVAL - 1, "vn1", SEGMENT_INTERVAL - 1, SEGMENT_INTERVAL + 1); verifyTimestampedGetValueFromStore("k", SEGMENT_INTERVAL - 2, "vn2", SEGMENT_INTERVAL - 2, SEGMENT_INTERVAL - 1); verifyTimestampedGetValueFromStore("k", SEGMENT_INTERVAL - 5, "vn10", SEGMENT_INTERVAL - 10, SEGMENT_INTERVAL - 2); }
public static FieldScope fromSetFields(Message message) { return fromSetFields( message, AnyUtils.defaultTypeRegistry(), AnyUtils.defaultExtensionRegistry()); }
@Test public void testFromSetFields_skipNulls() { Message message1 = parse("o_int: 1 r_string: \"foo\" r_string: \"bar\""); Message eqMessage1 = parse("o_int: 1 r_string: \"foo\" r_string: \"bar\""); Message eqIgnoredMessage1 = parse("o_int: 2 r_string: \"foo\" r_string: \"bar\""); Message message2 = parse("o_int: 3 r_string: \"baz\" r_string: \"qux\""); Message eqMessage2 = parse("o_int: 3 r_string: \"baz\" r_string: \"qux\""); Message eqIgnoredMessage2 = parse("o_int: 4 r_string: \"baz\" r_string: \"qux\""); List<Message> messages = Lists.newArrayList(); Message nullMessage = null; messages.add(parse("o_int: -1")); messages.add(nullMessage); messages.add(parse("r_string: \"NaN\"")); expectThat(listOf(message1, message2)) .withPartialScope(FieldScopes.fromSetFields(messages)) .containsExactly(eqMessage1, eqMessage2); expectThat(listOf(message1, message2)) .withPartialScope( FieldScopes.fromSetFields(parse("o_int: -1"), nullMessage, parse("r_string: \"NaN\""))) .containsExactly(eqMessage1, eqMessage2); expectFailureWhenTesting() .that(listOf(message1, message2)) .withPartialScope(FieldScopes.fromSetFields(messages)) .containsExactly(eqIgnoredMessage1, eqIgnoredMessage2); expectThatFailure() .factValue("testing whether") .contains( "is equivalent according to " + "assertThat(proto)" + ".withPartialScope(" + "FieldScopes.fromSetFields([" + "{o_int: -1\n}, null, {r_string: \"NaN\"\n}]))" + ".isEqualTo(target)"); expectFailureWhenTesting() .that(listOf(message1, message2)) .withPartialScope( FieldScopes.fromSetFields(parse("o_int: -1"), nullMessage, parse("r_string: \"NaN\""))) .containsExactly(eqIgnoredMessage1, eqIgnoredMessage2); expectThatFailure() .factValue("testing whether") .contains( "is equivalent according to " + "assertThat(proto)" + ".withPartialScope(" + "FieldScopes.fromSetFields([" + "{o_int: -1\n}, null, {r_string: \"NaN\"\n}]))" + ".isEqualTo(target)"); }
private long replayStartPosition(final RecordingLog.Entry lastTerm) { return replayStartPosition(lastTerm, snapshotsRetrieved, ctx.initialReplayStart(), backupArchive); }
@Test void shouldReturnReplayStartPositionIfAlreadyExisting() { final long expectedStartPosition = 892374; final long recordingId = 234; final RecordingLog.Entry lastTerm = new RecordingLog.Entry( recordingId, 0, 0, expectedStartPosition, 0, 0, 0, null, true, 0); when(mockAeronArchive.getStopPosition(anyLong())).thenReturn(expectedStartPosition); final long replayStartPosition = replayStartPosition( lastTerm, emptyList(), ReplayStart.BEGINNING, mockAeronArchive); assertEquals(expectedStartPosition, replayStartPosition); }
@Override public PageResult<JobDO> getJobPage(JobPageReqVO pageReqVO) { return jobMapper.selectPage(pageReqVO); }
@Test public void testGetJobPage() { // mock 数据 JobDO dbJob = randomPojo(JobDO.class, o -> { o.setName("定时任务测试"); o.setHandlerName("handlerName 单元测试"); o.setStatus(JobStatusEnum.INIT.getStatus()); }); jobMapper.insert(dbJob); // 测试 name 不匹配 jobMapper.insert(cloneIgnoreId(dbJob, o -> o.setName("土豆"))); // 测试 status 不匹配 jobMapper.insert(cloneIgnoreId(dbJob, o -> o.setStatus(JobStatusEnum.NORMAL.getStatus()))); // 测试 handlerName 不匹配 jobMapper.insert(cloneIgnoreId(dbJob, o -> o.setHandlerName(randomString()))); // 准备参数 JobPageReqVO reqVo = new JobPageReqVO(); reqVo.setName("定时"); reqVo.setStatus(JobStatusEnum.INIT.getStatus()); reqVo.setHandlerName("单元"); // 调用 PageResult<JobDO> pageResult = jobService.getJobPage(reqVo); // 断言 assertEquals(1, pageResult.getTotal()); assertEquals(1, pageResult.getList().size()); assertPojoEquals(dbJob, pageResult.getList().get(0)); }
public String getClientReturnId(String sessionId) { Optional<OpenIdSession> session = openIdRepository.findById(sessionId); if (session.isEmpty()) return null; OpenIdSession openIdSession = session.get(); var returnUrl = openIdSession.getRedirectUri() + "?state=" + openIdSession.getState(); if (!"success".equals(openIdSession.getAuthenticationState())) { return returnUrl + "&error=CANCELLED"; } return returnUrl + "&code=" + openIdSession.getCode(); }
@Test void getClientReturnIdTest() { OpenIdSession openIdSession = new OpenIdSession(); openIdSession.setSessionId("sessionId"); openIdSession.setRedirectUri("testRedirectUrl"); openIdSession.setState("testState"); openIdSession.setCode("testCode"); openIdSession.setAuthenticationState("success"); when(httpServletRequest.getSession()).thenReturn(httpSession); when(openIdRepository.findById(anyString())).thenReturn(Optional.of(openIdSession)); String response = openIdService.getClientReturnId("sessionId"); assertEquals("testRedirectUrl?state=testState&code=testCode", response); }
@Override public List<PrivilegedOperation> bootstrap(Configuration conf) throws ResourceHandlerException { super.bootstrap(conf); swappiness = conf .getInt(YarnConfiguration.NM_MEMORY_RESOURCE_CGROUPS_SWAPPINESS, YarnConfiguration.DEFAULT_NM_MEMORY_RESOURCE_CGROUPS_SWAPPINESS); if (swappiness < 0 || swappiness > 100) { throw new ResourceHandlerException( "Illegal value '" + swappiness + "' for " + YarnConfiguration.NM_MEMORY_RESOURCE_CGROUPS_SWAPPINESS + ". Value must be between 0 and 100."); } return null; }
@Test public void testPreStart() throws Exception { Configuration conf = new Configuration(); conf.setBoolean(YarnConfiguration.NM_PMEM_CHECK_ENABLED, false); conf.setBoolean(YarnConfiguration.NM_VMEM_CHECK_ENABLED, false); cGroupsMemoryResourceHandler.bootstrap(conf); String id = "container_01_01"; String path = "test-path/" + id; ContainerId mockContainerId = mock(ContainerId.class); when(mockContainerId.toString()).thenReturn(id); Container mockContainer = mock(Container.class); when(mockContainer.getContainerId()).thenReturn(mockContainerId); when(mockCGroupsHandler .getPathForCGroupTasks(CGroupsHandler.CGroupController.MEMORY, id)) .thenReturn(path); int memory = 1024; when(mockContainer.getResource()) .thenReturn(Resource.newInstance(memory, 1)); List<PrivilegedOperation> ret = cGroupsMemoryResourceHandler.preStart(mockContainer); verify(mockCGroupsHandler, times(1)) .createCGroup(CGroupsHandler.CGroupController.MEMORY, id); verify(mockCGroupsHandler, times(1)) .updateCGroupParam(CGroupsHandler.CGroupController.MEMORY, id, CGroupsHandler.CGROUP_PARAM_MEMORY_HARD_LIMIT_BYTES, String.valueOf(memory) + "M"); verify(mockCGroupsHandler, times(1)) .updateCGroupParam(CGroupsHandler.CGroupController.MEMORY, id, CGroupsHandler.CGROUP_PARAM_MEMORY_SOFT_LIMIT_BYTES, String.valueOf((int) (memory * 0.9)) + "M"); verify(mockCGroupsHandler, times(1)) .updateCGroupParam(CGroupsHandler.CGroupController.MEMORY, id, CGroupsHandler.CGROUP_PARAM_MEMORY_SWAPPINESS, String.valueOf(0)); Assert.assertNotNull(ret); Assert.assertEquals(1, ret.size()); PrivilegedOperation op = ret.get(0); Assert.assertEquals(PrivilegedOperation.OperationType.ADD_PID_TO_CGROUP, op.getOperationType()); List<String> args = op.getArguments(); Assert.assertEquals(1, args.size()); Assert.assertEquals(PrivilegedOperation.CGROUP_ARG_PREFIX + path, args.get(0)); }
public static Map<String, String> objectToMap(Object object) { if (object == null) { return null; } Map<String, String> map = new HashMap<>(16); Field[] fields = object.getClass().getDeclaredFields(); try { for (Field field : fields) { boolean accessible = field.isAccessible(); field.setAccessible(true); if (field.getType() == Date.class) { Date date = (Date) field.get(object); if (date != null) { map.put(field.getName(), String.valueOf(date.getTime())); } } else { map.put(field.getName(), field.get(object) == null ? "" : field.get(object).toString()); } field.setAccessible(accessible); } } catch (IllegalAccessException e) { throw new NotSupportYetException( "object " + object.getClass().toString() + " to map failed:" + e.getMessage()); } return map; }
@Test public void testObjectToMap() { BranchDO branchDO = new BranchDO("xid123123", 123L, 1, 2.2, new Date()); Map<String, String> map = BeanUtils.objectToMap(branchDO); Assertions.assertEquals(branchDO.getXid(), map.get("xid")); Assertions.assertEquals(branchDO.getTransactionId(), Long.valueOf(map.get("transactionId"))); Assertions.assertEquals(branchDO.getStatus(), Integer.valueOf(map.get("status"))); Assertions.assertEquals(branchDO.getTest(), Double.valueOf(map.get("test"))); Assertions.assertEquals(branchDO.getGmtCreate().getTime(),Long.valueOf(map.get("gmtCreate"))); Assertions.assertNull(BeanUtils.objectToMap(null)); // date is null / field is null branchDO = new BranchDO("xid123123", null, 1, 2.2, null); map = BeanUtils.objectToMap(branchDO); Assertions.assertNull(map.get("gmtCreate")); Assertions.assertEquals("", map.get("transactionId")); }
@Override public ConfigDef config() { return CONFIG_DEF; }
@Test public void testPatternIsValidRegexInConfig() { Map<String, String> props = new HashMap<>(); props.put("pattern", "["); ConfigException e = assertThrows(ConfigException.class, () -> config(props)); assertTrue(e.getMessage().contains("Invalid regex")); }
@Override public Address translate(Address address) throws Exception { if (address == null) { return null; } Address publicAddress = response.getPrivateToPublicAddresses().get(address); if (publicAddress != null) { return publicAddress; } response = discovery.discoverNodes(); return response.getPrivateToPublicAddresses().get(address); }
@Test public void testTranslate() throws Exception { setUp(); ViridianAddressProvider provider = new ViridianAddressProvider(createDiscovery()); provider.loadAddresses(createListenerRunner()); assertEquals(PUBLIC_MEMBER_ADDRESS, provider.translate(PRIVATE_MEMBER_ADDRESS)); assertNull(provider.translate(PRIVATE_TPC_ADDRESS)); assertNull(provider.translate(NON_EXISTENT_PRIVATE_ADDRESS)); }
@Override protected CompletableFuture<LogListInfo> handleRequest( @Nonnull HandlerRequest<EmptyRequestBody> request, @Nonnull ResourceManagerGateway gateway) throws RestHandlerException { final ResourceID taskManagerId = request.getPathParameter(TaskManagerIdPathParameter.class); final ResourceManagerGateway resourceManagerGateway = getResourceManagerGateway(resourceManagerGatewayRetriever); final CompletableFuture<Collection<LogInfo>> logsWithLengthFuture = resourceManagerGateway.requestTaskManagerLogList(taskManagerId, timeout); return logsWithLengthFuture .thenApply(LogListInfo::new) .exceptionally( (throwable) -> { final Throwable strippedThrowable = ExceptionUtils.stripCompletionException(throwable); if (strippedThrowable instanceof UnknownTaskExecutorException) { throw new CompletionException( new RestHandlerException( "Could not find TaskExecutor " + taskManagerId, HttpResponseStatus.NOT_FOUND, strippedThrowable)); } else { throw new CompletionException(throwable); } }); }
@Test void testGetTaskManagerLogsList() throws Exception { List<LogInfo> logsList = Arrays.asList( new LogInfo("taskmanager.log", 1024L, 1632844800000L), new LogInfo("taskmanager.out", 1024L, 1632844800000L), new LogInfo("taskmanager-2.out", 1024L, 1632844800000L)); resourceManagerGateway.setRequestTaskManagerLogListFunction( EXPECTED_TASK_MANAGER_ID -> CompletableFuture.completedFuture(logsList)); LogListInfo logListInfo = taskManagerLogListHandler .handleRequest(handlerRequest, resourceManagerGateway) .get(); assertThat(logListInfo.getLogInfos()).containsExactlyInAnyOrderElementsOf(logsList); }
@PostMapping("/plugin/selectorAndRules") public Mono<String> selectorAndRules(@RequestBody final SelectorRulesData selectorRulesData) { SelectorData selectorData = SelectorData.builder() .pluginName(selectorRulesData.getPluginName()) .handle(selectorRulesData.getSelectorHandler()) .matchMode(selectorRulesData.getMatchMode()) .conditionList(selectorRulesData.getConditionDataList()) .type(SelectorTypeEnum.CUSTOM_FLOW.getCode()) .sort(Optional.ofNullable(selectorRulesData.getSort()).orElse(10)) .build(); SelectorData result = buildDefaultSelectorData(selectorData); subscriber.onSelectorSubscribe(result); saveDiscoveryUpstreamData(result); List<RuleLocalData> ruleDataList = selectorRulesData.getRuleDataList(); for (RuleLocalData data : ruleDataList) { RuleData ruleData = RuleData.builder() .selectorId(result.getId()) .pluginName(result.getPluginName()) .name(data.getRuleName()) .matchMode(data.getMatchMode()) .handle(data.getRuleHandler()) .conditionDataList(data.getConditionDataList()) .build(); subscriber.onRuleSubscribe(buildDefaultRuleData(ruleData)); } return Mono.just(Constants.SUCCESS); }
@Test
public void testSelectorAndRules() throws Exception {
    // Build a selector payload with one attached rule.
    final LocalPluginController.SelectorRulesData selectorRulesData =
            new LocalPluginController.SelectorRulesData();
    selectorRulesData.setPluginName("pluginName");
    selectorRulesData.setSelectorName("selectorName");
    selectorRulesData.setSelectorHandler("[]");
    selectorRulesData.setMatchMode(0);
    LocalPluginController.RuleLocalData ruleLocalData = new LocalPluginController.RuleLocalData();
    ruleLocalData.setRuleName("ruleName");
    ruleLocalData.setRuleHandler("{}");
    ruleLocalData.setMatchMode(0);
    ruleLocalData.setConditionDataList(Collections.emptyList());
    selectorRulesData.setRuleDataList(Collections.singletonList(ruleLocalData));
    selectorRulesData.setConditionDataList(Collections.emptyList());
    // POST the payload and expect HTTP 200.
    this.mockMvc
            .perform(MockMvcRequestBuilders.post("/shenyu/plugin/selectorAndRules")
                    .content(GsonUtils.getGson().toJson(selectorRulesData))
                    .contentType(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk())
            .andReturn();
    // The selector must have been published into the local cache under its plugin name.
    Assertions.assertNotNull(baseDataCache.obtainSelectorData(selectorRulesData.getPluginName()));
    // Fixed: JUnit 5 takes the expected value first; the original call had the
    // arguments reversed (and only re-read the value it had just set).
    Assertions.assertEquals("selectorName", selectorRulesData.getSelectorName());
}
public <V> Match<V> map(Function<T, V> mapper) {
    // A wildcard match stays a wildcard regardless of the mapping.
    if (matchAny) {
        return any();
    }
    // Null-based matches carry no value to transform; preserve the polarity.
    if (value == null) {
        if (negation) {
            return ifNotNull();
        }
        return ifNull();
    }
    // Value-based matches compare against the mapped value, honoring negation.
    final V mapped = mapper.apply(value);
    return negation ? ifNotValue(mapped) : ifValue(mapped);
}
@Test
public void testMap() {
    // Mapping a null-match keeps it a null-match: there is no value to transform.
    Match<String> nullMatch = Match.ifNull();
    assertEquals(nullMatch.map(value -> "bar"), Match.ifNull());
    // Mapping a value-match transforms the value it compares against.
    Match<String> mapped = Match.ifValue("foo").map(value -> "bar");
    assertTrue(mapped.matches("bar"));
}
@Override
public String getName() {
    // Identify this reporter by its fully-qualified class name.
    final String reporterName = SuccessCircuitBreakerReporter.class.getName();
    return reporterName;
}
@Test
public void testGetName() {
    // The reporter must identify itself by its fully-qualified class name.
    final String expectedName = SuccessCircuitBreakerReporter.class.getName();
    assertThat(successCircuitBreakerReporter.getName()).isEqualTo(expectedName);
}
/**
 * Encodes a publication-removal event into the log buffer: standard log header,
 * then sessionId and streamId as little-endian ints, then the channel string.
 */
static void encodePublicationRemoval(
    final UnsafeBuffer encodingBuffer,
    final int offset,
    final int captureLength,
    final int length,
    final String channel,
    final int sessionId,
    final int streamId)
{
    // Common event header first; returns the number of header bytes written.
    int encodedLength = encodeLogHeader(encodingBuffer, offset, captureLength, length);

    // Fixed-size fields: sessionId then streamId.
    encodingBuffer.putInt(offset + encodedLength, sessionId, LITTLE_ENDIAN);
    encodedLength += SIZE_OF_INT;

    encodingBuffer.putInt(offset + encodedLength, streamId, LITTLE_ENDIAN);
    encodedLength += SIZE_OF_INT;

    // The channel gets whatever capture space remains after the two ints;
    // encodeTrailingString truncates over-long channels (see the paired test).
    encodeTrailingString(
        encodingBuffer, offset + encodedLength, captureLength - SIZE_OF_INT * 2, channel);
}
@Test
void encodePublicationRemovalShouldTruncateChannelIfItExceedsMaxMessageLength() {
    final int offset = 121;
    // A channel filling the maximum event length guarantees truncation.
    final char[] data = new char[MAX_EVENT_LENGTH];
    fill(data, 'z');
    final String channel = new String(data);
    // Total length = channel + sessionId + streamId + string-length prefix.
    final int length = data.length + 3 * SIZE_OF_INT;
    final int captureLength = captureLength(length);

    encodePublicationRemoval(buffer, offset, captureLength, length, channel, 1, -1);

    // Header: capture length, then full length, then a non-zero timestamp.
    assertEquals(captureLength, buffer.getInt(offset, LITTLE_ENDIAN));
    assertEquals(length, buffer.getInt(offset + SIZE_OF_INT, LITTLE_ENDIAN));
    assertNotEquals(0, buffer.getLong(offset + SIZE_OF_INT * 2, LITTLE_ENDIAN));
    // Fixed fields: sessionId = 1, streamId = -1.
    assertEquals(1, buffer.getInt(offset + LOG_HEADER_LENGTH, LITTLE_ENDIAN));
    assertEquals(-1, buffer.getInt(offset + LOG_HEADER_LENGTH + SIZE_OF_INT, LITTLE_ENDIAN));
    // The channel must have been cut to fit and suffixed with "...".
    assertEquals(channel.substring(0, captureLength - 3 * SIZE_OF_INT - 3) + "...",
        buffer.getStringAscii(offset + LOG_HEADER_LENGTH + SIZE_OF_INT * 2, LITTLE_ENDIAN));
}
/**
 * Inserts the given rows into {@code tableName}. Returns false when there is
 * nothing to write, true after all rows were inserted.
 *
 * @throws JDBCResourceManagerException if the connection cannot be opened or
 *     any individual insert fails.
 */
@Override
@SuppressWarnings("nullness")
public boolean write(String tableName, List<Map<String, Object>> rows)
    throws JDBCResourceManagerException {
  // Nothing to insert; report that no write happened.
  if (rows.isEmpty()) {
    return false;
  }

  LOG.info("Attempting to write {} rows to {}.{}.", rows.size(), databaseName, tableName);

  // try-with-resources closes the Statement and Connection on every path —
  // the original leaked the Statement when executeUpdate threw.
  try (Connection con = driver.getConnection(getUri(), username, password);
      Statement stmt = con.createStatement()) {
    for (Map<String, Object> row : rows) {
      List<String> columns = new ArrayList<>(row.keySet());
      StringBuilder sql =
          new StringBuilder("INSERT INTO ")
              .append(tableName)
              .append("(")
              .append(String.join(",", columns))
              .append(") VALUES (");

      List<String> valueList = new ArrayList<>();
      for (String colName : columns) {
        Object value = row.get(colName);
        if (value == null) {
          valueList.add(null);
        } else if (NumberUtils.isCreatable(value.toString())
            || "true".equalsIgnoreCase(value.toString())
            || "false".equalsIgnoreCase(value.toString())
            || value.toString().startsWith("ARRAY[")) {
          // Numbers, booleans and array literals are emitted unquoted.
          valueList.add(String.valueOf(value));
        } else {
          // NOTE(review): SQL is built by string concatenation, not bound
          // parameters — acceptable only for this test resource manager with
          // trusted input; do not reuse with untrusted data.
          valueList.add("'" + value + "'");
        }
      }
      sql.append(String.join(",", valueList)).append(")");

      try {
        LOG.info("Running SQL statement: " + sql);
        stmt.executeUpdate(sql.toString());
      } catch (SQLException e) {
        throw new JDBCResourceManagerException(
            "Failed to insert values into table with SQL statement: " + sql, e);
      }
    }
  } catch (SQLException e) {
    throw new JDBCResourceManagerException(
        String.format("Exception occurred when trying to write records to %s.", tableName), e);
  }

  LOG.info("Successfully wrote {} rows to {}.{}.", rows.size(), databaseName, tableName);
  return true;
}
@Test
public void testWriteShouldThrowErrorWhenDriverFailsToEstablishConnection() throws SQLException {
    when(container.getHost()).thenReturn(HOST);
    when(container.getMappedPort(JDBC_PORT)).thenReturn(MAPPED_PORT);
    // Simulate the JDBC driver failing to open a connection.
    doThrow(SQLException.class).when(driver).getConnection(any(), any(), any());
    // write(...) must wrap the SQLException in a JDBCResourceManagerException.
    assertThrows(
        JDBCResourceManagerException.class,
        () -> testManager.write(TABLE_NAME, ImmutableList.of(ImmutableMap.of("key", "test"))));
}
@Override
public void removeNetworkPolicy(String uid) {
    // Reject null/empty uids up front.
    checkArgument(!Strings.isNullOrEmpty(uid), ERR_NULL_NETWORK_POLICY_UID);

    // Synchronize so the in-use check and the removal happen atomically.
    synchronized (this) {
        if (isNetworkPolicyInUse(uid)) {
            final String error = String.format(MSG_NETWORK_POLICY, uid, ERR_IN_USE);
            throw new IllegalStateException(error);
        }
        NetworkPolicy networkPolicy = k8sNetworkPolicyStore.removeNetworkPolicy(uid);

        // The store may return null if the policy was already gone; only log
        // when something was actually removed.
        if (networkPolicy != null) {
            log.info(String.format(MSG_NETWORK_POLICY,
                    networkPolicy.getMetadata().getName(), MSG_REMOVED));
        }
    }
}
@Test(expected = IllegalArgumentException.class)
public void testRemoveNetworkPolicyWithNull() {
    // A null uid must be rejected by the checkArgument precondition.
    target.removeNetworkPolicy(null);
}
/**
 * Validates a list of "Res=Perm" resource entries, throwing AclException on the
 * first malformed entry or unrecognized permission. Null or empty input is a no-op.
 */
public static void checkResourcePerms(List<String> resources) {
    // No entries means nothing to validate.
    if (resources == null || resources.isEmpty()) {
        return;
    }
    for (String resource : resources) {
        String[] items = StringUtils.split(resource, "=");
        // Each entry must be exactly "Res=Perm".
        if (items.length != 2) {
            throw new AclException(String.format("Parse Resource format error for %s.\n"
                + "The expected resource format is 'Res=Perm'. For example: topicA=SUB", resource));
        }
        String perm = items[1].trim();
        // A DENY parse result from a token that is not literally "DENY" marks
        // the permission string as unrecognized.
        boolean explicitDeny = AclConstants.DENY.equals(perm);
        if (!explicitDeny && Permission.parsePermFromString(perm) == Permission.DENY) {
            throw new AclException(String.format("Parse resource permission error for %s.\n"
                + "The expected permissions are 'SUB' or 'PUB' or 'SUB|PUB' or 'PUB|SUB'.", resource));
        }
    }
}
@Test
public void checkResourcePermsNormalTest() {
    // Null and empty lists are treated as "nothing to validate".
    Permission.checkResourcePerms(null);
    Permission.checkResourcePerms(new ArrayList<>());
    // Well-formed "resource=perm" entries must pass without throwing.
    Permission.checkResourcePerms(Arrays.asList("topicA=PUB"));
    Permission.checkResourcePerms(Arrays.asList("topicA=PUB", "topicB=SUB", "topicC=PUB|SUB"));
}
@Override
public void metricChange(final KafkaMetric metric) {
    // Only topic-level throughput metrics are tracked; everything else is ignored.
    final boolean throughputMetric =
        THROUGHPUT_METRIC_NAMES.contains(metric.metricName().name());
    final boolean topicLevel =
        StreamsMetricsImpl.TOPIC_LEVEL_GROUP.equals(metric.metricName().group());
    if (throughputMetric && topicLevel) {
        addMetric(
            metric,
            getQueryId(metric),
            getTopic(metric)
        );
    }
}
@Test
public void shouldThrowWhenFailingToParseQueryId() {
    // When: the thread-id tag does not contain a parseable query id, so the
    // listener must surface a KsqlException instead of recording the metric.
    assertThrows(
        KsqlException.class,
        () -> listener.metricChange(mockMetric(
            BYTES_CONSUMED_TOTAL,
            2D,
            ImmutableMap.of(
                "thread-id", "_confluent_blahblah_query-blahblah",
                "task-id", TASK_ID_1,
                "processor-node-id", PROCESSOR_NODE_ID,
                "topic", TOPIC_NAME))
        )
    );
}
@Override
public @NotNull Iterator<E> iterator() {
    // Adapt the internal entry iterator so callers see only the keys.
    return new HashSetIterator<E>() {
        @Override
        public E next() {
            return nextEntry().key;
        }
    };
}
@Test
public void iterator() {
    final HashSet<Integer> tested = new HashSet<>();
    final Set<Integer> mirror = new java.util.HashSet<>();
    final int count = 10000;
    // Populate both the set under test and a JDK mirror with the same keys.
    for (int value = 0; value < count; value++) {
        tested.add(value);
        mirror.add(value);
    }
    // Every key yielded by the iterator must be distinct and present in the mirror.
    for (Integer key : tested) {
        Assert.assertTrue(mirror.remove(key));
    }
    // The iterator must have visited every entry exactly once.
    Assert.assertEquals(0, mirror.size());
}
/**
 * Formats the given date with the class-level formatter in the system default
 * time zone; null input yields null.
 */
public static String dateTime(Date date) {
    return date == null
            ? null
            : formatter.format(date.toInstant().atZone(ZoneId.systemDefault()));
}
@Test
public void testDateTime() {
    // A known local date-time must render as "yyyy-MM-dd HH:mm:ss".
    Date testDate = Date.from(java.time.LocalDateTime.of(2022, 12, 25, 10, 20, 30)
            .atZone(ZoneId.systemDefault()).toInstant());
    String expected = "2022-12-25 10:20:30";
    String actual = DateUtils.dateTime(testDate);
    assertEquals("Test when date is not null", expected, actual);
}
@Override
public void checkBeforeUpdate(final AlterEncryptRuleStatement sqlStatement) {
    // Validate in order: the targeted rules must exist, then their column
    // names, then the encryptors they reference.
    checkToBeAlteredRules(sqlStatement);
    checkColumnNames(sqlStatement);
    checkToBeAlteredEncryptors(sqlStatement);
}
@Test
void assertCheckSQLStatementWithoutToBeAlteredRules() {
    // An empty rule configuration means the rule being altered does not exist,
    // so the check must fail with MissingRequiredRuleException.
    EncryptRule rule = mock(EncryptRule.class);
    when(rule.getConfiguration()).thenReturn(
            new EncryptRuleConfiguration(Collections.emptyList(), Collections.emptyMap()));
    executor.setRule(rule);
    assertThrows(MissingRequiredRuleException.class,
            () -> executor.checkBeforeUpdate(createSQLStatement("MD5")));
}
@Override
public boolean remove(long timestamp) {
    // Blocking facade over the async removal; get(...) waits for the future.
    return get(removeAsync(timestamp));
}
@Test
public void testRemove() {
    RTimeSeries<String, Object> t = redisson.getTimeSeries("test");
    t.add(1, "10");
    t.add(2, "10");
    t.add(3, "30");
    t.add(4, "40");

    // Range removal [2,3] drops two entries, leaving timestamps 1 and 4.
    assertThat(t.removeRange(2, 3)).isEqualTo(2);
    assertThat(t.size()).isEqualTo(2);
    assertThat(t.range(1, 4)).containsExactly("10", "40");
    assertThat(t.rangeReversed(1, 4)).containsExactly("40", "10");

    // Single removal: existing timestamp returns true, missing one false.
    assertThat(t.remove(4)).isTrue();
    assertThat(t.remove(5)).isFalse();
    assertThat(t.size()).isEqualTo(1);
}
/**
 * Checks the S/MIME signature state of the (sub-)sample selected by the test
 * element and returns an AssertionResult describing success or failure.
 */
public static AssertionResult getResult(SMIMEAssertionTestElement testElement, SampleResult response, String name) {
    // Fail fast if the BouncyCastle provider is missing.
    checkForBouncycastle();
    AssertionResult res = new AssertionResult(name);
    try {
        MimeMessage msg;
        final int msgPos = testElement.getSpecificMessagePositionAsInt();
        if (msgPos < 0){ // means counting from end
            SampleResult[] subResults = response.getSubResults();
            final int pos = subResults.length + msgPos;
            log.debug("Getting message number: {} of {}", pos, subResults.length);
            msg = getMessageFromResponse(response,pos);
        }
        else {
            log.debug("Getting message number: {}", msgPos);
            msg = getMessageFromResponse(response, msgPos);
        }

        SMIMESignedParser signedParser = null;
        if(log.isDebugEnabled()) {
            log.debug("Content-type: {}", msg.getContentType());
        }
        // Detached (multipart/signed) and enveloped (pkcs7-mime) signatures
        // require different SMIMESignedParser constructors.
        if (msg.isMimeType("multipart/signed")) { // $NON-NLS-1$
            MimeMultipart multipart = (MimeMultipart) msg.getContent();
            signedParser = new SMIMESignedParser(new BcDigestCalculatorProvider(), multipart);
        } else if (msg.isMimeType("application/pkcs7-mime") // $NON-NLS-1$
                || msg.isMimeType("application/x-pkcs7-mime")) { // $NON-NLS-1$
            signedParser = new SMIMESignedParser(new BcDigestCalculatorProvider(), msg);
        }

        if (null != signedParser) {
            log.debug("Found signature");
            if (testElement.isNotSigned()) {
                // The test expected an unsigned message, but a signature was found.
                res.setFailure(true);
                res.setFailureMessage("Mime message is signed");
            } else if (testElement.isVerifySignature() || !testElement.isSignerNoCheck()) {
                // Delegate full verification (and signer checks) to verifySignature.
                res = verifySignature(testElement, signedParser, name);
            }
        } else {
            log.debug("Did not find signature");
            if (!testElement.isNotSigned()) {
                // The test expected a signed message, but none was found.
                res.setFailure(true);
                res.setFailureMessage("Mime message is not signed");
            }
        }
    } catch (MessagingException e) {
        String msg = "Cannot parse mime msg: " + e.getMessage();
        log.warn(msg, e);
        res.setFailure(true);
        res.setFailureMessage(msg);
    } catch (CMSException e) {
        res.setFailure(true);
        res.setFailureMessage("Error reading the signature: " + e.getMessage());
    } catch (SMIMEException e) {
        res.setFailure(true);
        res.setFailureMessage("Cannot extract signed body part from signature: " + e.getMessage());
    } catch (IOException e) { // should never happen
        log.error("Cannot read mime message content: {}", e.getMessage(), e);
        res.setError(true);
        res.setFailureMessage(e.getMessage());
    }
    return res;
}
@Test
public void testSignature() {
    // Verifying the signature of the prepared parent sample must succeed
    // without an error or a failure result.
    SMIMEAssertionTestElement testElement = new SMIMEAssertionTestElement();
    testElement.setVerifySignature(true);
    AssertionResult result = SMIMEAssertion.getResult(testElement, parent, "Test");
    assertFalse(result.isError(), "Result should not be an error");
    assertFalse(result.isFailure(), "Result should not fail: " + result.getFailureMessage());
}
public long indexOf(double x, double y) { if (!rectangle.contains(x, y)) { // Put things outside the box at the end // This will also handle infinities and NaNs return Long.MAX_VALUE; } int xInt = (int) (xScale * (x - rectangle.getXMin())); int yInt = (int) (yScale * (y - rectangle.getYMin())); return discreteIndexOf(xInt, yInt); }
@Test
public void testDegenerateRectangle() {
    HilbertIndex hilbert = new HilbertIndex(new Rectangle(0, 0, 0, 0));
    // Fixed: assertEquals takes the expected value first; the original calls
    // had the arguments reversed, which garbles failure messages.
    // The single in-box point maps to index 0.
    assertEquals(0, hilbert.indexOf(0., 0.));
    // Any point outside the (degenerate) box maps to the sentinel index.
    assertEquals(Long.MAX_VALUE, hilbert.indexOf(2., 2.));
}
public String convert(ILoggingEvent le) {
    final List<Marker> markers = le.getMarkers();
    // Events carrying markers render via List.toString; otherwise emit the
    // empty-string constant.
    if (markers != null && !markers.isEmpty()) {
        return markers.toString();
    }
    return EMPTY;
}
@Test
public void testWithSeveralChildMarker() {
    // A parent marker with three children renders as the List.toString of the
    // single-element marker list: "[parent [ child1, child2, child3 ]]".
    Marker marker = markerFactory.getMarker("testParent");
    marker.add(markerFactory.getMarker("child1"));
    marker.add(markerFactory.getMarker("child2"));
    marker.add(markerFactory.getMarker("child3"));
    String result = converter.convert(createLoggingEvent(marker));
    assertEquals("[testParent [ child1, child2, child3 ]]", result);
}
/**
 * Creates a cache key for the given config key and definition checksum.
 * A null checksum is normalized to the empty string.
 */
public ConfigCacheKey(ConfigKey<?> key, String defMd5) {
    this.key = key;
    this.defMd5 = (defMd5 != null) ? defMd5 : "";
}
@Test
public void testConfigCacheKey() {
    final String defMd5 = "md5";
    final String defMd5_2 = "md5_2";
    // Keys differing only in checksum (k3) or namespace (k4) must not be equal.
    ConfigCacheKey k1 = new ConfigCacheKey("foo", "id", "ns", defMd5);
    ConfigCacheKey k2 = new ConfigCacheKey("foo", "id", "ns", defMd5);
    ConfigCacheKey k3 = new ConfigCacheKey("foo", "id", "ns", defMd5_2);
    ConfigCacheKey k4 = new ConfigCacheKey("foo", "id", "ns_1", defMd5);
    ConfigCacheKey k5 = new ConfigCacheKey("foo", "id", "ns_1", null); // test with null defMd5
    // The ConfigKey-based constructor must produce a key equal to the
    // string-based one for the same coordinates.
    final ConfigKey<?> configKey = new ConfigKey<>("foo", "id", "ns");
    ConfigCacheKey k1_2 = new ConfigCacheKey(configKey, defMd5);
    assertEquals(k1, k1);
    assertEquals(k1, k1_2);
    assertEquals(k1, k2);
    assertNotEquals(k3, k2);
    assertNotEquals(k4, k1);
    assertEquals(k2.hashCode(), k1.hashCode());
    assertEquals(defMd5, k1.getDefMd5());
    assertEquals(configKey + "," + defMd5, k1.toString());
    assertNotEquals(k1.hashCode(), k5.hashCode());
}
@Override
public boolean isSecured(ApplicationId appId) {
    // Stub implementation: security checks are not implemented in this store,
    // so every application is reported as not secured.
    return false;
}
@Test
public void testIsSecured() {
    // The store's isSecured implementation unconditionally returns false, so
    // the assertion must expect false; the original asserted true and could
    // never pass against the visible implementation.
    assertFalse(store.isSecured(appId));
}
@Override
public SmileResponse<T> handle(Request request, Response response) {
    final byte[] responseBytes = readResponseBytes(response);
    final String contentType = response.getHeader(CONTENT_TYPE);
    // Only decode when the Content-Type header declares Smile; otherwise hand
    // back the raw bytes without a decoded value.
    final boolean isSmile =
            (contentType != null) && MediaType.parse(contentType).is(MEDIA_TYPE_SMILE);
    if (!isSmile) {
        return new SmileResponse<>(response.getStatusCode(), response.getHeaders(), responseBytes);
    }
    return new SmileResponse<>(
            response.getStatusCode(), response.getHeaders(), smileCodec, responseBytes);
}
@Test
public void testValidSmile() {
    // A Smile-typed response must decode into the original value.
    User user = new User("Joe", 25);
    byte[] smileBytes = codec.toBytes(user);
    SmileResponse<User> response = handler.handle(null, mockResponse(OK, MEDIA_TYPE_SMILE, smileBytes));
    assertTrue(response.hasValue());
    assertEquals(response.getSmileBytes(), smileBytes);
    assertEquals(response.getValue().getName(), user.getName());
    assertEquals(response.getValue().getAge(), user.getAge());
    // Byte accessors must return fresh defensive copies on every call.
    assertNotSame(response.getSmileBytes(), response.getSmileBytes());
    assertNotSame(response.getResponseBytes(), response.getResponseBytes());
    assertEquals(response.getResponseBytes(), response.getSmileBytes());
}