focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Lists every master node currently monitored by Sentinel.
 *
 * @return the masters reported by the SENTINEL MASTERS command, converted
 *         from raw field maps into {@code RedisServer} instances
 */
@Override
public Collection<RedisServer> masters() {
    // Synchronous Sentinel round-trip; each master arrives as a map of reply fields.
    final List<Map<String, String>> rawMasters =
            connection.sync(StringCodec.INSTANCE, RedisCommands.SENTINEL_MASTERS);
    return toRedisServersList(rawMasters);
}
@Test
public void testMasters() {
    // This fixture configures Sentinel with exactly one monitored master.
    final Collection<RedisServer> result = connection.masters();
    assertThat(result).hasSize(1);
}
/**
 * Decodes an integer of the requested {@code formatType} starting at {@code offset}
 * from the backing byte array.
 *
 * @param formatType one of the FORMAT_* constants selecting width, signedness and endianness
 * @param offset     index of the first byte to read
 * @return the decoded value, or {@code null} when the read would overrun the data
 *         or the format is unknown
 */
@Nullable
public Integer getIntValue(@IntFormat final int formatType, @IntRange(from = 0) final int offset) {
    // Bounds check: reject reads that would run past the end of the backing array.
    if ((offset + getTypeLen(formatType)) > size()) return null;
    return switch (formatType) {
        // Unsigned: LE variants read the low-order byte first, BE the high-order byte first;
        // 24-bit values are padded with a zero high byte.
        case FORMAT_UINT8 -> unsignedByteToInt(mValue[offset]);
        case FORMAT_UINT16_LE -> unsignedBytesToInt(mValue[offset], mValue[offset + 1]);
        case FORMAT_UINT16_BE -> unsignedBytesToInt(mValue[offset + 1], mValue[offset]);
        case FORMAT_UINT24_LE -> unsignedBytesToInt(mValue[offset], mValue[offset + 1], mValue[offset + 2], (byte) 0);
        case FORMAT_UINT24_BE -> unsignedBytesToInt(mValue[offset + 2], mValue[offset + 1], mValue[offset], (byte) 0);
        case FORMAT_UINT32_LE -> unsignedBytesToInt(mValue[offset], mValue[offset + 1], mValue[offset + 2], mValue[offset + 3]);
        case FORMAT_UINT32_BE -> unsignedBytesToInt(mValue[offset + 3], mValue[offset + 2], mValue[offset + 1], mValue[offset]);
        // Signed: assemble the unsigned value, then sign-extend from the given bit width.
        case FORMAT_SINT8 -> unsignedToSigned(unsignedByteToInt(mValue[offset]), 8);
        case FORMAT_SINT16_LE -> unsignedToSigned(unsignedBytesToInt(mValue[offset], mValue[offset + 1]), 16);
        case FORMAT_SINT16_BE -> unsignedToSigned(unsignedBytesToInt(mValue[offset + 1], mValue[offset]), 16);
        case FORMAT_SINT24_LE -> unsignedToSigned(unsignedBytesToInt(mValue[offset], mValue[offset + 1], mValue[offset + 2], (byte) 0), 24);
        // BUGFIX: assemble SINT24_BE exactly like UINT24_BE (zero pad byte LAST). The
        // original passed (byte) 0 as the FIRST argument, which shifted the most
        // significant data byte into bits 24-31 and broke the 24-bit sign extension.
        case FORMAT_SINT24_BE -> unsignedToSigned(unsignedBytesToInt(mValue[offset + 2], mValue[offset + 1], mValue[offset], (byte) 0), 24);
        case FORMAT_SINT32_LE -> unsignedToSigned(unsignedBytesToInt(mValue[offset], mValue[offset + 1], mValue[offset + 2], mValue[offset + 3]), 32);
        case FORMAT_SINT32_BE -> unsignedToSigned(unsignedBytesToInt(mValue[offset + 3], mValue[offset + 2], mValue[offset + 1], mValue[offset]), 32);
        default -> null;
    };
}
@Test
public void getValue_SINT32() {
    // Bytes in little-endian order: 0x00, 0xFD, 0xFD, 0xFE assemble to 0xFEFDFD00.
    // NOTE(review): the method name says SINT32 but the test reads FORMAT_UINT32_LE;
    // for 32-bit values both share the same int bit pattern, but the name/format
    // mismatch should be confirmed against the intended coverage.
    final Data data = new Data(new byte[] { (byte) 0x00, (byte) 0xFD, (byte) 0xFD, (byte) 0xFE });
    // Auto-unboxes the Integer result; the bounds check guarantees non-null here.
    final int value = data.getIntValue(Data.FORMAT_UINT32_LE, 0);
    assertEquals(0xfefdfd00, value);
}
/**
 * Applies the given {@link OpenAPISpecFilter} to an OpenAPI document and returns a
 * filtered copy: top-level metadata is carried over, paths/webhooks are filtered per
 * item and per operation, tags that survive on no operation are dropped, and
 * components are filtered (schemas) or copied through.
 *
 * @return the filtered clone, or {@code null} when the filter rejects the whole document
 */
public OpenAPI filter(OpenAPI openAPI, OpenAPISpecFilter filter, Map<String, List<String>> params, Map<String, String> cookies, Map<String, List<String>> headers) {
    OpenAPI filteredOpenAPI = filterOpenAPI(filter, openAPI, params, cookies, headers);
    if (filteredOpenAPI == null) {
        return filteredOpenAPI;
    }
    OpenAPI clone = new OpenAPI();
    clone.info(filteredOpenAPI.getInfo());
    clone.openapi(filteredOpenAPI.getOpenapi());
    clone.jsonSchemaDialect(filteredOpenAPI.getJsonSchemaDialect());
    clone.setSpecVersion(filteredOpenAPI.getSpecVersion());
    clone.setExtensions(filteredOpenAPI.getExtensions());
    clone.setExternalDocs(filteredOpenAPI.getExternalDocs());
    clone.setSecurity(filteredOpenAPI.getSecurity());
    clone.setServers(filteredOpenAPI.getServers());
    // BUGFIX: copy tags from the same object the null check reads (filteredOpenAPI),
    // not from the raw input. Reading openAPI here could NPE when only the filtered
    // document carries tags, and could resurrect tags the filter removed.
    clone.tags(filteredOpenAPI.getTags() == null ? null : new ArrayList<>(filteredOpenAPI.getTags()));
    // allowedTags: tags still referenced by surviving operations;
    // filteredTags: tags seen on operations that were filtered out.
    final Set<String> allowedTags = new HashSet<>();
    final Set<String> filteredTags = new HashSet<>();
    Paths clonedPaths = new Paths();
    if (filteredOpenAPI.getPaths() != null) {
        for (String resourcePath : filteredOpenAPI.getPaths().keySet()) {
            PathItem pathItem = filteredOpenAPI.getPaths().get(resourcePath);
            PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers);
            PathItem clonedPathItem = cloneFilteredPathItem(filter, filteredPathItem, resourcePath, params, cookies, headers, allowedTags, filteredTags);
            // Keep the path only when at least one operation survived filtering.
            if (clonedPathItem != null && !clonedPathItem.readOperations().isEmpty()) {
                clonedPaths.addPathItem(resourcePath, clonedPathItem);
            }
        }
        clone.paths(clonedPaths);
    }
    // A tag is removed only if it appears exclusively on filtered-out operations.
    filteredTags.removeAll(allowedTags);
    final List<Tag> tags = clone.getTags();
    if (tags != null && !filteredTags.isEmpty()) {
        tags.removeIf(tag -> filteredTags.contains(tag.getName()));
        if (clone.getTags().isEmpty()) {
            clone.setTags(null);
        }
    }
    if (filteredOpenAPI.getWebhooks() != null) {
        for (String resourcePath : filteredOpenAPI.getWebhooks().keySet()) {
            // BUGFIX: webhook items must be looked up in getWebhooks(); the original read
            // getPaths().get(resourcePath) here, yielding null (and dropping every webhook,
            // or NPE when paths is null) whenever a webhook name is not also a path key.
            PathItem pathItem = filteredOpenAPI.getWebhooks().get(resourcePath);
            PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers);
            PathItem clonedPathItem = cloneFilteredPathItem(filter, filteredPathItem, resourcePath, params, cookies, headers, allowedTags, filteredTags);
            if (clonedPathItem != null && !clonedPathItem.readOperations().isEmpty()) {
                clone.addWebhooks(resourcePath, clonedPathItem);
            }
        }
    }
    if (filteredOpenAPI.getComponents() != null) {
        // Schemas go through the component filter; everything else is copied by reference.
        clone.components(new Components());
        clone.getComponents().setSchemas(filterComponentsSchema(filter, filteredOpenAPI.getComponents().getSchemas(), params, cookies, headers));
        clone.getComponents().setSecuritySchemes(filteredOpenAPI.getComponents().getSecuritySchemes());
        clone.getComponents().setCallbacks(filteredOpenAPI.getComponents().getCallbacks());
        clone.getComponents().setExamples(filteredOpenAPI.getComponents().getExamples());
        clone.getComponents().setExtensions(filteredOpenAPI.getComponents().getExtensions());
        clone.getComponents().setHeaders(filteredOpenAPI.getComponents().getHeaders());
        clone.getComponents().setLinks(filteredOpenAPI.getComponents().getLinks());
        clone.getComponents().setParameters(filteredOpenAPI.getComponents().getParameters());
        clone.getComponents().setRequestBodies(filteredOpenAPI.getComponents().getRequestBodies());
        clone.getComponents().setResponses(filteredOpenAPI.getComponents().getResponses());
        clone.getComponents().setPathItems(filteredOpenAPI.getComponents().getPathItems());
    }
    if (filter.isRemovingUnreferencedDefinitions()) {
        clone = removeBrokenReferenceDefinitions(clone);
    }
    return clone;
}
@Test(description = "it should filter with null definitions")
public void filterWithNullDefinitions() throws IOException {
    // Null out the schema map to ensure the filter tolerates absent definitions.
    final OpenAPI openAPI = getOpenAPI(RESOURCE_PATH);
    openAPI.getComponents().setSchemas(null);
    final InternalModelPropertiesRemoverFilter removerFilter = new InternalModelPropertiesRemoverFilter();
    final OpenAPI result = new SpecFilter().filter(openAPI, removerFilter, null, null, null);
    assertNotNull(result);
}
/**
 * Registers an added file component for the current analysis.
 * Validation order matters: null/shape checks first, then type, then the
 * first-analysis guard; only then is the component recorded.
 */
@Override
public void register(Component component) {
    // Rejects null (and otherwise malformed) components.
    checkComponent(component);
    checkArgument(component.getType() == Component.Type.FILE, "component must be a file");
    // Added files only make sense on PRs or follow-up branch analyses.
    checkState(analysisMetadataHolder.isPullRequest() || !analysisMetadataHolder.isFirstAnalysis(), "No file can be registered on first branch analysis");
    addedComponents.add(component);
}
@Test
public void register_fails_with_NPE_if_component_is_null() {
    // checkComponent(null) is expected to raise an NPE with this exact message.
    assertThatThrownBy(() -> underTest.register(null))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("component can't be null");
}
/**
 * Builds MkdirsOptions seeded from the configured permission umask.
 *
 * @param conf configuration providing the umask property
 * @return options initialized with the directory-creation umask
 */
public static MkdirsOptions defaults(AlluxioConfiguration conf) {
    final String umask = conf.getString(PropertyKey.SECURITY_AUTHORIZATION_PERMISSION_UMASK);
    return new MkdirsOptions(umask);
}
@Test public void defaults() throws IOException { MkdirsOptions options = MkdirsOptions.defaults(mConfiguration); // Verify the default createParent is true. assertTrue(options.getCreateParent()); // Verify that the owner and group are not set. assertNull(options.getOwner()); assertNull(options.getGroup()); String umask = mConfiguration.getString(PropertyKey.SECURITY_AUTHORIZATION_PERMISSION_UMASK); assertEquals(ModeUtils.applyDirectoryUMask(Mode.defaults(), umask), options.getMode()); }
/**
 * Serialization is intentionally unsupported for this type; it is never meant
 * to be written to the wire.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public void writeData(ObjectDataOutput out) throws IOException {
    throw new UnsupportedOperationException();
}
@Test(expected = UnsupportedOperationException.class)
public void testWriteData() throws Exception {
    // writeData must throw before touching the output, so null is a safe argument.
    localCacheWideEventData.writeData(null);
}
/**
 * Returns the metadata captured for this result set.
 *
 * @throws SQLException if the result set has already been closed
 */
@Override
public ResultSetMetaData getMetaData() throws SQLException {
    // Must fail fast on a closed result set before exposing metadata.
    checkClosed();
    return resultSetMetaData;
}
@Test
void assertGetMetaData() throws SQLException {
    // The getter must return the exact metadata instance the result set was built with.
    assertThat(databaseMetaDataResultSet.getMetaData(), is(resultSetMetaData));
}
/**
 * Sets whether the registry is dynamic.
 *
 * @param dynamic the dynamic flag (nullable Boolean, stored as-is)
 * @return this builder, for chaining
 */
public RegistryBuilder isDynamic(Boolean dynamic) {
    this.dynamic = dynamic;
    return getThis();
}
@Test
void isDynamic() {
    // Setting the flag on the builder must propagate to the built config.
    final RegistryBuilder registryBuilder = new RegistryBuilder();
    registryBuilder.isDynamic(true);
    Assertions.assertTrue(registryBuilder.build().isDynamic());
}
/**
 * Streams every account in the table via a DynamoDB parallel segmented scan.
 * Each segment is scanned consistently on the given scheduler and the segment
 * streams are merged back into a single sequential Flux (item order across
 * segments is unspecified).
 *
 * @param segments  total number of scan segments; must be >= 1
 * @param scheduler scheduler the per-segment scans run on
 * @throws IllegalArgumentException if {@code segments} is less than 1
 */
Flux<Account> getAll(final int segments, final Scheduler scheduler) {
    if (segments < 1) {
        throw new IllegalArgumentException("Total number of segments must be positive");
    }
    return Flux.range(0, segments)
        .parallel()
        .runOn(scheduler)
        // One consistent-read scan per segment; pages are flattened into items.
        .flatMap(segment -> asyncClient.scanPaginator(ScanRequest.builder()
            .tableName(accountsTableName)
            .consistentRead(true)
            .segment(segment)
            .totalSegments(segments)
            .build())
            .items()
            .map(Accounts::fromItem))
        .sequential();
}
@Test
void testGetAll() {
    // Seed the table with 100 accounts with distinct phone numbers.
    final List<Account> seededAccounts = new ArrayList<>();
    for (int i = 1; i <= 100; i++) {
        final Account seeded = generateAccount("+1" + String.format("%03d", i), UUID.randomUUID(), UUID.randomUUID());
        seededAccounts.add(seeded);
        createAccount(seeded);
    }
    // A two-segment parallel scan must return every seeded account exactly once
    // (compared as UUID sets because segment ordering is unspecified).
    final List<Account> scanned = accounts.getAll(2, Schedulers.parallel()).collectList().block();
    assertNotNull(scanned);
    assertEquals(seededAccounts.stream().map(Account::getUuid).collect(Collectors.toSet()),
        scanned.stream().map(Account::getUuid).collect(Collectors.toSet()));
}
/**
 * Wraps the tasks produced by {@code taskFunction} in a retry loop governed by
 * the given policy. The attempt number (starting at 0) is passed to the function
 * on each retry.
 *
 * @param name         base name used for the task and its trace entries
 * @param policy       retry policy controlling attempts and backoff
 * @param taskFunction produces the task to run for a given attempt number
 * @return an async task that drives the retry loop
 */
public static <U> Task<U> withRetryPolicy(String name, RetryPolicy policy, Function1<Integer, Task<U>> taskFunction) {
    final RetriableTask<U> retriable = new RetriableTask<>(name, policy, taskFunction);
    // Defer execution to the retriable runner and tag the wrapper so trace
    // consumers can recognize it as a retry task.
    final Task<U> wrapper = Task.async(name + " retriableTask", retriable::run);
    wrapper.getShallowTraceBuilder().setTaskType(TaskType.WITH_RETRY.getName());
    return wrapper;
}
@Test
public void testSimpleRetryPolicy() {
    // Every attempt fails; with 3 attempts (0, 1, 2) the final error must carry
    // the message from the last attempt.
    Task<Void> task = withRetryPolicy("testSimpleRetryPolicy", RetryPolicy.attempts(3, 0),
        attempt -> Task.failure(new RuntimeException("current attempt: " + attempt)));
    runAndWaitException(task, RuntimeException.class);
    assertTrue(task.isDone());
    assertEquals(task.getError().getMessage(), "current attempt: 2");
}
/**
 * Looks up a single content pack by model id and revision.
 *
 * @return the matching content pack, or empty when no document matches
 */
public Optional<ContentPack> findByIdAndRevision(ModelId id, int revision) {
    final DBQuery.Query byIdAndRevision =
            DBQuery.is(Identified.FIELD_META_ID, id).is(Revisioned.FIELD_META_REVISION, revision);
    // findOne returns null on no match; wrap it so callers get an Optional.
    return Optional.ofNullable(dbCollection.findOne(byIdAndRevision));
}
@Test
@MongoDBFixtures("ContentPackPersistenceServiceTest.json")
public void findByIdAndRevisionWithInvalidRevision() {
    // The fixture has this id but not revision 42, so the lookup must come back empty.
    final Optional<ContentPack> contentPack = contentPackPersistenceService.findByIdAndRevision(ModelId.of("dcd74ede-6832-4ef7-9f69-deadbeef0000"), 42);
    assertThat(contentPack).isEmpty();
}
public static Optional<NoticeContents> parseNoticeFile(Path noticeFile) throws IOException { // 1st line contains module name final List<String> noticeContents = Files.readAllLines(noticeFile); return parseNoticeFile(noticeContents); }
@Test
void testParseNoticeFileBundlesPath() {
    // A "bundles" line with an explicit classifier must parse into a full dependency.
    final String moduleName = "some-module";
    final Dependency expectedDependency = Dependency.create("groupId", "artifactId", "version", "classifier");
    final List<String> lines = Arrays.asList(
            moduleName,
            "",
            "Something bundles \"groupId:artifactId:classifier:version\"");
    assertThat(NoticeParser.parseNoticeFile(lines))
        .hasValueSatisfying(parsed -> {
            assertThat(parsed.getNoticeModuleName()).isEqualTo(moduleName);
            assertThat(parsed.getDeclaredDependencies()).containsExactlyInAnyOrder(expectedDependency);
        });
}
/**
 * Sets whether the method has a return value.
 *
 * @param isReturn the return flag (nullable Boolean, stored as-is)
 * @return this builder, for chaining
 */
public MethodBuilder isReturn(Boolean isReturn) {
    this.isReturn = isReturn;
    return getThis();
}
@Test
void isReturn() {
    // Setting the flag on the builder must propagate to the built config.
    final MethodBuilder methodBuilder = MethodBuilder.newBuilder();
    methodBuilder.isReturn(true);
    Assertions.assertTrue(methodBuilder.build().isReturn());
}
/**
 * Computes a checksum over all logical fields of this log entry: entry type,
 * log id, the peer/learner lists, and — when present and non-empty — the data
 * buffer's CRC64.
 */
@Override
public long checksum() {
    long acc = checksum(this.type.getNumber(), this.id.checksum());
    acc = checksum(this.peers, acc);
    acc = checksum(this.oldPeers, acc);
    acc = checksum(this.learners, acc);
    acc = checksum(this.oldLearners, acc);
    // Only fold in the payload when there are readable bytes remaining.
    if (this.data != null && this.data.hasRemaining()) {
        acc = checksum(acc, CrcUtil.crc64(this.data));
    }
    return acc;
}
@Test
public void testChecksum() {
    ByteBuffer buf = ByteBuffer.wrap("hello".getBytes());
    LogEntry entry = new LogEntry(EnumOutter.EntryType.ENTRY_TYPE_NO_OP);
    entry.setId(new LogId(100, 3));
    entry.setData(buf);
    entry.setPeers(Arrays.asList(new PeerId("localhost", 99, 1), new PeerId("localhost", 100, 2)));
    // Checksum is non-zero and deterministic for the same entry state.
    long c = entry.checksum();
    assertTrue(c != 0);
    assertEquals(c, entry.checksum());
    // Without a stored checksum the entry cannot be flagged corrupted.
    assertFalse(entry.isCorrupted());
    assertFalse(entry.hasChecksum());
    entry.setChecksum(c);
    assertTrue(entry.hasChecksum());
    assertFalse(entry.isCorrupted());
    // modify index, detect corrupted.
    entry.getId().setIndex(1);
    assertNotEquals(c, entry.checksum());
    assertTrue(entry.isCorrupted());
    // fix index
    entry.getId().setIndex(100);
    assertFalse(entry.isCorrupted());
    // modify data, detect corrupted
    entry.setData(ByteBuffer.wrap("hEllo".getBytes()));
    assertNotEquals(c, entry.checksum());
    assertTrue(entry.isCorrupted());
}
/**
 * Convenience overload: resolves the replay start position for the given last
 * term using the retrieved snapshots, the context's configured replay start,
 * and the backup archive.
 */
private long replayStartPosition(final RecordingLog.Entry lastTerm) {
    return replayStartPosition(lastTerm, snapshotsRetrieved, ctx.initialReplayStart(), backupArchive);
}
@Test
void shouldReturnNullPositionIfLastTermIsNullAndSnapshotsIsEmpty() {
    // With no last term and no snapshots there is nothing to replay from,
    // so the resolved position must be NULL_POSITION.
    assertEquals(NULL_POSITION, replayStartPosition(null, emptyList(), ReplayStart.BEGINNING, mockAeronArchive));
}
/**
 * Executes a queued command: deserializes the command and its id, then runs it
 * in EXECUTE mode at the command's offset, with query-termination handling.
 * Fails fast if this executor has not been configured yet.
 */
void handleStatement(final QueuedCommand queuedCommand) {
    throwIfNotConfigured();
    handleStatementWithTerminatedQueries(
        queuedCommand.getAndDeserializeCommand(commandDeserializer),
        queuedCommand.getAndDeserializeCommandId(),
        queuedCommand.getStatus(),
        Mode.EXECUTE,
        queuedCommand.getOffset(),
        false // not restoring from the command topic
    );
}
@Test
public void shouldSetCorrectFinalStatusOnCompletedPlannedDDLCommand() {
    // Given: the engine reports a successful DDL execution result.
    when(mockEngine.execute(any(), any(ConfiguredKsqlPlan.class), any(Boolean.class)))
        .thenReturn(ExecuteResult.of("result"));
    // When: the planned command is handled at offset 0.
    handleStatement(
        statementExecutorWithMocks,
        plannedCommand,
        COMMAND_ID,
        Optional.of(status),
        0L
    );
    // Then: the command status is finalized as SUCCESS with the engine's message.
    verify(status).setFinalStatus(new CommandStatus(Status.SUCCESS, "result"));
}
/**
 * Writes a {@code java.sql.Time} in the MySQL binary TIME wire format: a length
 * byte of 0 (all-zero time), 8 (hour/minute/second only) or 12 (time plus
 * fractional seconds), followed by the corresponding fields.
 * NOTE(review): the conversion goes through the system default zone — confirm
 * this matches how the Time values were produced.
 */
@Override
public void write(final MySQLPacketPayload payload, final Object value) {
    final LocalDateTime time =
            LocalDateTime.ofInstant(Instant.ofEpochMilli(((Time) value).getTime()), ZoneId.systemDefault());
    final int hour = time.getHour();
    final int minute = time.getMinute();
    final int second = time.getSecond();
    final int nano = time.getNano();
    final boolean timeAbsent = 0 == hour && 0 == minute && 0 == second;
    final boolean nanosAbsent = 0 == nano;
    if (timeAbsent && nanosAbsent) {
        // An all-zero time is encoded as a single zero length byte.
        payload.writeInt1(0);
    } else if (nanosAbsent) {
        payload.writeInt1(8);
        writeTime(payload, hour, minute, second);
    } else {
        payload.writeInt1(12);
        writeTime(payload, hour, minute, second);
        writeNanos(payload, nano);
    }
}
@Test
void assertWriteWithZeroByte() {
    // Midnight with no fractional seconds must be encoded as a single zero length byte.
    MySQLTimeBinaryProtocolValue actual = new MySQLTimeBinaryProtocolValue();
    actual.write(payload, Time.valueOf("00:00:00"));
    verify(payload).writeInt1(0);
}
/**
 * Updates a host by id.
 * Note: {@code clusterId} is bound from the route for URL consistency but is
 * not used by the update itself; the host is addressed solely by {@code id}.
 */
@Operation(summary = "update", description = "Update a host")
@PutMapping("/{id}")
public ResponseEntity<HostVO> update(
        @PathVariable Long clusterId,
        @PathVariable Long id,
        @RequestBody @Validated HostReq hostReq) {
    // Convert the validated request payload into the service-layer DTO.
    HostDTO hostDTO = HostConverter.INSTANCE.fromReq2DTO(hostReq);
    return ResponseEntity.success(hostService.update(id, hostDTO));
}
@Test
void updateReturnsNullForInvalidHostId() {
    // When the service cannot find the host it returns null; the controller
    // must still wrap that in a successful envelope with null data.
    final Long anyClusterId = 1L;
    final Long unknownHostId = 999L;
    final HostReq request = new HostReq();
    when(hostService.update(anyLong(), any(HostDTO.class))).thenReturn(null);
    final ResponseEntity<HostVO> response = hostController.update(anyClusterId, unknownHostId, request);
    assertTrue(response.isSuccess());
    assertNull(response.getData());
}
/**
 * Updates the vtap network description in the store.
 *
 * @param description the new vtap network description; must not be null
 * @return the updated network as reported by the store
 * @throws NullPointerException if {@code description} is null
 */
@Override
public OpenstackVtapNetwork updateVtapNetwork(OpenstackVtapNetwork description) {
    checkNotNull(description, VTAP_DESC_NULL, "vtapNetwork");
    // A single well-known key is used: there is only one vtap network.
    return store.updateVtapNetwork(VTAP_NETWORK_KEY, description);
}
@Test(expected = NullPointerException.class)
public void testUpdateNullVtapNetwork() {
    // The null guard must reject a null description before touching the store.
    target.updateVtapNetwork(null);
}
/**
 * Imports a version-1.2 configuration document from the given JSON reader.
 * The top level must be an object; each known member name is dispatched to its
 * reader, unknown names are offered to registered extensions. After the stream
 * is consumed, deferred object references are fixed up and the id-mapping
 * tables are cleared.
 */
@Override
public void importData(JsonReader reader) throws IOException {
    logger.info("Reading configuration for 1.2");
    // this *HAS* to start as an object
    reader.beginObject();
    while (reader.hasNext()) {
        JsonToken tok = reader.peek();
        switch (tok) {
            case NAME:
                String name = reader.nextName();
                // find out which member it is
                if (name.equals(CLIENTS)) {
                    readClients(reader);
                } else if (name.equals(GRANTS)) {
                    readGrants(reader);
                } else if (name.equals(WHITELISTEDSITES)) {
                    readWhitelistedSites(reader);
                } else if (name.equals(BLACKLISTEDSITES)) {
                    readBlacklistedSites(reader);
                } else if (name.equals(AUTHENTICATIONHOLDERS)) {
                    readAuthenticationHolders(reader);
                } else if (name.equals(ACCESSTOKENS)) {
                    readAccessTokens(reader);
                } else if (name.equals(REFRESHTOKENS)) {
                    readRefreshTokens(reader);
                } else if (name.equals(SYSTEMSCOPES)) {
                    readSystemScopes(reader);
                } else {
                    // Offer unrecognized members to the first extension that supports 1.2.
                    for (MITREidDataServiceExtension extension : extensions) {
                        if (extension.supportsVersion(THIS_VERSION)) {
                            extension.importExtensionData(name, reader);
                            break;
                        }
                    }
                    // unknown token, skip it
                    // NOTE(review): this skipValue() runs even when an extension handled the
                    // member above; if importExtensionData consumes the value, this would skip
                    // the NEXT token in the stream — verify against the extension contract.
                    reader.skipValue();
                }
                break;
            case END_OBJECT:
                // the object ended, we're done here
                reader.endObject();
                continue;
            default:
                logger.debug("Found unexpected entry");
                reader.skipValue();
                continue;
        }
    }
    // Resolve deferred cross-references now that all entities are loaded.
    fixObjectReferences();
    for (MITREidDataServiceExtension extension : extensions) {
        if (extension.supportsVersion(THIS_VERSION)) {
            extension.fixExtensionObjectReferences(maps);
            break;
        }
    }
    maps.clearAll();
}
/**
 * Verifies that refresh tokens imported from 1.2 JSON are re-linked to the
 * newly saved authentication holders: the fake repositories assign fresh ids
 * (holders from 356, tokens from 343), and the imported tokens must reference
 * the remapped holder ids, not the original ids from the JSON.
 */
@Test
public void testFixRefreshTokenAuthHolderReferencesOnImport() throws IOException, ParseException {
    // Build two refresh tokens, each with its own mocked client and auth holder.
    String expiration1 = "2014-09-10T22:49:44.090+00:00";
    Date expirationDate1 = formatter.parse(expiration1, Locale.ENGLISH);
    ClientDetailsEntity mockedClient1 = mock(ClientDetailsEntity.class);
    when(mockedClient1.getClientId()).thenReturn("mocked_client_1");
    OAuth2Request req1 = new OAuth2Request(new HashMap<String, String>(), "client1", new ArrayList<GrantedAuthority>(), true, new HashSet<String>(), new HashSet<String>(), "http://foo.com", new HashSet<String>(), null);
    Authentication mockAuth1 = mock(Authentication.class, withSettings().serializable());
    OAuth2Authentication auth1 = new OAuth2Authentication(req1, mockAuth1);
    AuthenticationHolderEntity holder1 = new AuthenticationHolderEntity();
    holder1.setId(1L);
    holder1.setAuthentication(auth1);
    OAuth2RefreshTokenEntity token1 = new OAuth2RefreshTokenEntity();
    token1.setId(1L);
    token1.setClient(mockedClient1);
    token1.setExpiration(expirationDate1);
    token1.setJwt(JWTParser.parse("eyJhbGciOiJub25lIn0.eyJqdGkiOiJmOTg4OWQyOS0xMTk1LTQ4ODEtODgwZC1lZjVlYzAwY2Y4NDIifQ."));
    token1.setAuthenticationHolder(holder1);
    String expiration2 = "2015-01-07T18:31:50.079+00:00";
    Date expirationDate2 = formatter.parse(expiration2, Locale.ENGLISH);
    ClientDetailsEntity mockedClient2 = mock(ClientDetailsEntity.class);
    when(mockedClient2.getClientId()).thenReturn("mocked_client_2");
    OAuth2Request req2 = new OAuth2Request(new HashMap<String, String>(), "client2", new ArrayList<GrantedAuthority>(), true, new HashSet<String>(), new HashSet<String>(), "http://bar.com", new HashSet<String>(), null);
    Authentication mockAuth2 = mock(Authentication.class, withSettings().serializable());
    OAuth2Authentication auth2 = new OAuth2Authentication(req2, mockAuth2);
    AuthenticationHolderEntity holder2 = new AuthenticationHolderEntity();
    holder2.setId(2L);
    holder2.setAuthentication(auth2);
    OAuth2RefreshTokenEntity token2 = new OAuth2RefreshTokenEntity();
    token2.setId(2L);
    token2.setClient(mockedClient2);
    token2.setExpiration(expirationDate2);
    token2.setJwt(JWTParser.parse("eyJhbGciOiJub25lIn0.eyJqdGkiOiJlYmEyYjc3My0xNjAzLTRmNDAtOWQ3MS1hMGIxZDg1OWE2MDAifQ."));
    token2.setAuthenticationHolder(holder2);
    // Import payload: two auth holders (ids 1, 2) and two refresh tokens referencing them.
    String configJson = "{" +
        "\"" + MITREidDataService.SYSTEMSCOPES + "\": [], " +
        "\"" + MITREidDataService.ACCESSTOKENS + "\": [], " +
        "\"" + MITREidDataService.CLIENTS + "\": [], " +
        "\"" + MITREidDataService.GRANTS + "\": [], " +
        "\"" + MITREidDataService.WHITELISTEDSITES + "\": [], " +
        "\"" + MITREidDataService.BLACKLISTEDSITES + "\": [], " +
        "\"" + MITREidDataService.AUTHENTICATIONHOLDERS + "\": [" +
        "{\"id\":1,\"authentication\":{\"authorizationRequest\":{\"clientId\":\"client1\",\"redirectUri\":\"http://foo.com\"}," +
        "\"userAuthentication\":null}}," +
        "{\"id\":2,\"authentication\":{\"authorizationRequest\":{\"clientId\":\"client2\",\"redirectUri\":\"http://bar.com\"}," +
        "\"userAuthentication\":null}}" +
        " ]," +
        "\"" + MITREidDataService.REFRESHTOKENS + "\": [" +
        "{\"id\":1,\"clientId\":\"mocked_client_1\",\"expiration\":\"2014-09-10T22:49:44.090+00:00\"," +
        "\"authenticationHolderId\":1,\"value\":\"eyJhbGciOiJub25lIn0.eyJqdGkiOiJmOTg4OWQyOS0xMTk1LTQ4ODEtODgwZC1lZjVlYzAwY2Y4NDIifQ.\"}," +
        "{\"id\":2,\"clientId\":\"mocked_client_2\",\"expiration\":\"2015-01-07T18:31:50.079+00:00\"," +
        "\"authenticationHolderId\":2,\"value\":\"eyJhbGciOiJub25lIn0.eyJqdGkiOiJlYmEyYjc3My0xNjAzLTRmNDAtOWQ3MS1hMGIxZDg1OWE2MDAifQ.\"}" +
        " ]" +
        "}";
    logger.debug(configJson);
    JsonReader reader = new JsonReader(new StringReader(configJson));
    // Fake in-memory repositories: saves assign fresh ids to entities arriving without one.
    final Map<Long, OAuth2RefreshTokenEntity> fakeRefreshTokenTable = new HashMap<>();
    final Map<Long, AuthenticationHolderEntity> fakeAuthHolderTable = new HashMap<>();
    when(tokenRepository.saveRefreshToken(isA(OAuth2RefreshTokenEntity.class))).thenAnswer(new Answer<OAuth2RefreshTokenEntity>() {
        Long id = 343L;
        @Override
        public OAuth2RefreshTokenEntity answer(InvocationOnMock invocation) throws Throwable {
            OAuth2RefreshTokenEntity _token = (OAuth2RefreshTokenEntity) invocation.getArguments()[0];
            if(_token.getId() == null) {
                _token.setId(id++);
            }
            fakeRefreshTokenTable.put(_token.getId(), _token);
            return _token;
        }
    });
    when(tokenRepository.getRefreshTokenById(anyLong())).thenAnswer(new Answer<OAuth2RefreshTokenEntity>() {
        @Override
        public OAuth2RefreshTokenEntity answer(InvocationOnMock invocation) throws Throwable {
            Long _id = (Long) invocation.getArguments()[0];
            return fakeRefreshTokenTable.get(_id);
        }
    });
    when(clientRepository.getClientByClientId(anyString())).thenAnswer(new Answer<ClientDetailsEntity>() {
        @Override
        public ClientDetailsEntity answer(InvocationOnMock invocation) throws Throwable {
            String _clientId = (String) invocation.getArguments()[0];
            ClientDetailsEntity _client = mock(ClientDetailsEntity.class);
            when(_client.getClientId()).thenReturn(_clientId);
            return _client;
        }
    });
    when(authHolderRepository.save(isA(AuthenticationHolderEntity.class))).thenAnswer(new Answer<AuthenticationHolderEntity>() {
        Long id = 356L;
        @Override
        public AuthenticationHolderEntity answer(InvocationOnMock invocation) throws Throwable {
            AuthenticationHolderEntity _holder = (AuthenticationHolderEntity) invocation.getArguments()[0];
            if(_holder.getId() == null) {
                _holder.setId(id++);
            }
            fakeAuthHolderTable.put(_holder.getId(), _holder);
            return _holder;
        }
    });
    when(authHolderRepository.getById(anyLong())).thenAnswer(new Answer<AuthenticationHolderEntity>() {
        @Override
        public AuthenticationHolderEntity answer(InvocationOnMock invocation) throws Throwable {
            Long _id = (Long) invocation.getArguments()[0];
            return fakeAuthHolderTable.get(_id);
        }
    });
    dataService.importData(reader);
    // The two imported tokens must reference the re-saved holders (ids 356 and 357).
    List<OAuth2RefreshTokenEntity> savedRefreshTokens = new ArrayList(fakeRefreshTokenTable.values()); //capturedRefreshTokens.getAllValues();
    Collections.sort(savedRefreshTokens, new refreshTokenIdComparator());
    assertThat(savedRefreshTokens.get(0).getAuthenticationHolder().getId(), equalTo(356L));
    assertThat(savedRefreshTokens.get(1).getAuthenticationHolder().getId(), equalTo(357L));
}
/**
 * Returns the current master: the active child with the lowest sequence,
 * or {@code null} when no active children exist.
 */
@Override
public T master() {
    final List<ChildData<T>> candidates = getActiveChildren();
    candidates.sort(sequenceComparator);
    // Lowest sequence wins the mastership.
    return candidates.isEmpty() ? null : candidates.get(0).getNode();
}
@Test
public void testMaster() throws Exception {
    // Three children with ascending sequence numbers; the lowest (001) must win.
    putChildData(group, PATH + "/001", "container1");
    putChildData(group, PATH + "/002", "container2");
    putChildData(group, PATH + "/003", "container3");
    NodeState master = group.master();
    assertThat(master, notNullValue());
    assertThat(master.getContainer(), equalTo("container1"));
}
/**
 * Maps uncaught exceptions from REST calls to HTTP responses.
 * The instanceof chain is order-sensitive: InvalidTypeIdException is a subtype
 * of JsonMappingException and must be matched first so it maps to 501 rather
 * than 400. Anything unrecognized falls through to 500.
 */
@Override
public Response toResponse(Throwable e) {
    // Full stack trace only at debug level; info level logs just the message.
    if (log.isDebugEnabled()) {
        log.debug("Uncaught exception in REST call: ", e);
    } else if (log.isInfoEnabled()) {
        log.info("Uncaught exception in REST call: {}", e.getMessage());
    }
    if (e instanceof NotFoundException) {
        return buildResponse(Response.Status.NOT_FOUND, e);
    } else if (e instanceof InvalidRequestException) {
        return buildResponse(Response.Status.BAD_REQUEST, e);
    } else if (e instanceof InvalidTypeIdException) {
        // Unknown connector/converter type id: not implemented.
        return buildResponse(Response.Status.NOT_IMPLEMENTED, e);
    } else if (e instanceof JsonMappingException) {
        return buildResponse(Response.Status.BAD_REQUEST, e);
    } else if (e instanceof ClassNotFoundException) {
        return buildResponse(Response.Status.NOT_IMPLEMENTED, e);
    } else if (e instanceof SerializationException) {
        return buildResponse(Response.Status.BAD_REQUEST, e);
    } else if (e instanceof RequestConflictException) {
        return buildResponse(Response.Status.CONFLICT, e);
    } else {
        return buildResponse(Response.Status.INTERNAL_SERVER_ERROR, e);
    }
}
@Test
public void testToResponseInvalidRequestException() {
    // An InvalidRequestException must map to HTTP 400.
    final RestExceptionMapper exceptionMapper = new RestExceptionMapper();
    final Response response = exceptionMapper.toResponse(new InvalidRequestException("invalid request"));
    assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
}
/**
 * Parses a WorkerIdentity from its protobuf representation by delegating to
 * the version-aware parsers.
 *
 * @throws ProtoParsingException if the proto is malformed or its version is unsupported
 */
public static WorkerIdentity fromProto(alluxio.grpc.WorkerIdentity proto) throws ProtoParsingException {
    return Parsers.fromProto(proto);
}
@Test
public void parserInvalidVersion() throws Exception {
    // A negative version is unsupported and must be rejected with the dedicated exception.
    alluxio.grpc.WorkerIdentity proto = alluxio.grpc.WorkerIdentity.newBuilder()
        .setVersion(-1)
        .setIdentifier(ByteString.copyFrom(Longs.toByteArray(1L)))
        .build();
    assertThrows(InvalidVersionParsingException.class, () -> WorkerIdentity.Parsers.fromProto(proto));
}
/**
 * Verifies the database charset on fresh installs only.
 */
@Override
public void handle(Connection connection, DatabaseCharsetChecker.State state) throws SQLException {
    // Charset is a global setting on Oracle, it can't be set on a specified schema with a
    // different value. To not block users who already have a SonarQube schema, charset
    // is verified only on fresh installs but not on upgrades. Let's hope they won't face
    // any errors related to charset if they didn't follow the UTF8 requirement when creating
    // the schema in previous SonarQube versions.
    if (state == DatabaseCharsetChecker.State.FRESH_INSTALL) {
        LoggerFactory.getLogger(getClass()).info("Verify that database charset is UTF8");
        expectUtf8(connection);
    }
}
@Test
public void does_nothing_if_regular_startup() throws Exception {
    // STARTUP (an upgrade/regular start) must skip the charset verification entirely.
    underTest.handle(connection, DatabaseCharsetChecker.State.STARTUP);
    verifyNoInteractions(sqlExecutor);
}
/**
 * Returns the analysis phase this analyzer runs in: information collection.
 */
@Override
public AnalysisPhase getAnalysisPhase() {
    return AnalysisPhase.INFORMATION_COLLECTION;
}
@Test
public void testGetAnalysisPhase() {
    // The analyzer must declare itself as an information-collection phase analyzer.
    final PerlCpanfileAnalyzer analyzer = new PerlCpanfileAnalyzer();
    assertEquals(AnalysisPhase.INFORMATION_COLLECTION, analyzer.getAnalysisPhase());
}
/**
 * Attaches a fire-and-forget side effect to the parent task.
 *
 * @deprecated use {@code parent.withSideEffect(...)} directly; this is a thin
 *             delegating wrapper kept for backward compatibility.
 */
@Deprecated
public static <T> Task<T> withSideEffect(final Task<T> parent, final Task<?> sideEffect) {
    // Ignores the parent's result value when selecting the side-effect task.
    return parent.withSideEffect(t -> sideEffect);
}
@Test
public void testSideEffectFullCompletion() throws InterruptedException {
    // ensure that the individual side effect task will be run
    Task<String> taskOne = new BaseTask<String>() {
        @Override
        protected Promise<? extends String> run(Context context) throws Exception {
            return Promises.value("one");
        }
    };
    Task<String> taskTwo = new BaseTask<String>() {
        @Override
        protected Promise<? extends String> run(Context context) throws Exception {
            return Promises.value("two");
        }
    };
    Task<String> withSideEffect = taskOne.withSideEffect(x -> taskTwo);
    runAndWait("TestTasks.testSideEffectFullCompletion", withSideEffect);
    // The side-effect task runs detached; wait for it before asserting completion.
    taskTwo.await();
    assertTrue(withSideEffect.isDone());
    assertTrue(taskTwo.isDone());
}
/**
 * Renders an encrypt rule configuration as a CREATE ENCRYPT RULE DistSQL
 * statement, or an empty string when no table rules are configured.
 */
@Override
public String convert(final EncryptRuleConfiguration ruleConfig) {
    if (ruleConfig.getTables().isEmpty()) {
        return "";
    }
    final StringBuilder distSQL = new StringBuilder(EncryptDistSQLConstants.CREATE_ENCRYPT);
    final Iterator<EncryptTableRuleConfiguration> tableRules = ruleConfig.getTables().iterator();
    while (tableRules.hasNext()) {
        final EncryptTableRuleConfiguration each = tableRules.next();
        distSQL.append(String.format(EncryptDistSQLConstants.ENCRYPT, each.getName(), getEncryptColumns(each.getColumns(), ruleConfig.getEncryptors())));
        // Successive table rules are separated by a comma and a newline.
        if (tableRules.hasNext()) {
            distSQL.append(DistSQLConstants.COMMA).append(System.lineSeparator());
        }
    }
    return distSQL.append(DistSQLConstants.SEMI).toString();
}
@Test
void assertConvertWithEmptyTables() {
    // With no table rules configured, the converter must emit an empty string.
    final EncryptRuleConfiguration ruleConfig = mock(EncryptRuleConfiguration.class);
    when(ruleConfig.getTables()).thenReturn(Collections.emptyList());
    final EncryptRuleConfigurationToDistSQLConverter converter = new EncryptRuleConfigurationToDistSQLConverter();
    assertThat(converter.convert(ruleConfig), is(""));
}
/**
 * Jersey entry point for all HTTP GET operations of the HttpFS server.
 *
 * <p>Dispatches on the {@code op} query parameter to the matching {@code FSOperations}
 * command, executes it as the authenticated (proxy) user via {@code fsExecute}, and
 * renders the result as JSON (or an octet stream for OPEN). In write-only access mode
 * every operation except GETFILESTATUS and LISTSTATUS is rejected with 403 FORBIDDEN.
 *
 * @param path file-system path from the request URI (made absolute below)
 * @param uriInfo request URI context, used to build redirect locations
 * @param op the requested HttpFS operation
 * @param params parsed operation parameters
 * @param request raw servlet request; remote address is recorded in the MDC
 * @return the JAX-RS response for the operation
 * @throws IOException on file-system errors or an unsupported operation value
 * @throws FileSystemAccessException if the underlying file system cannot be accessed
 */
@GET
@Path("{path:.*}")
@Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8})
public Response get(@PathParam("path") String path, @Context UriInfo uriInfo,
    @QueryParam(OperationParam.NAME) OperationParam op,
    @Context Parameters params, @Context HttpServletRequest request)
    throws IOException, FileSystemAccessException {
  // Restrict access to only GETFILESTATUS and LISTSTATUS in write-only mode
  if((op.value() != HttpFSFileSystem.Operation.GETFILESTATUS) &&
      (op.value() != HttpFSFileSystem.Operation.LISTSTATUS) &&
      accessMode == AccessMode.WRITEONLY) {
    return Response.status(Response.Status.FORBIDDEN).build();
  }
  UserGroupInformation user = HttpUserGroupInformation.get();
  Response response;
  path = makeAbsolute(path);
  MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
  MDC.put("hostname", request.getRemoteAddr());
  switch (op.value()) {
  case OPEN: {
    Boolean noRedirect = params.get(
        NoRedirectParam.NAME, NoRedirectParam.class);
    if (noRedirect) {
      URI redirectURL = createOpenRedirectionURL(uriInfo);
      final String js = JsonUtil.toJsonString("Location", redirectURL);
      response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    } else {
      //Invoking the command directly using an unmanaged FileSystem that is
      // released by the FileSystemReleaseFilter
      final FSOperations.FSOpen command = new FSOperations.FSOpen(path);
      final FileSystem fs = createFileSystem(user);
      InputStream is = null;
      UserGroupInformation ugi = UserGroupInformation
          .createProxyUser(user.getShortUserName(),
              UserGroupInformation.getLoginUser());
      try {
        is = ugi.doAs(new PrivilegedExceptionAction<InputStream>() {
          @Override
          public InputStream run() throws Exception {
            return command.execute(fs);
          }
        });
      } catch (InterruptedException ie) {
        LOG.warn("Open interrupted.", ie);
        Thread.currentThread().interrupt();
      }
      Long offset = params.get(OffsetParam.NAME, OffsetParam.class);
      Long len = params.get(LenParam.NAME, LenParam.class);
      AUDIT_LOG.info("[{}] offset [{}] len [{}]",
          new Object[] { path, offset, len });
      // NOTE(review): if doAs was interrupted, "is" is still null here and the
      // entity wraps a null stream — confirm this is handled downstream.
      InputStreamEntity entity = new InputStreamEntity(is, offset, len);
      response = Response.ok(entity).type(MediaType.APPLICATION_OCTET_STREAM)
          .build();
    }
    break;
  }
  case GETFILESTATUS: {
    FSOperations.FSFileStatus command = new FSOperations.FSFileStatus(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case LISTSTATUS: {
    String filter = params.get(FilterParam.NAME, FilterParam.class);
    FSOperations.FSListStatus command =
        new FSOperations.FSListStatus(path, filter);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}] filter [{}]", path, (filter != null) ? filter : "-");
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETHOMEDIRECTORY: {
    // Home directory is only meaningful on the root path.
    enforceRootPath(op.value(), path);
    FSOperations.FSHomeDir command = new FSOperations.FSHomeDir();
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("Home Directory for [{}]", user);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case INSTRUMENTATION: {
    enforceRootPath(op.value(), path);
    // Instrumentation snapshots are restricted to members of the admin group.
    Groups groups = HttpFSServerWebApp.get().get(Groups.class);
    Set<String> userGroups = groups.getGroupsSet(user.getShortUserName());
    if (!userGroups.contains(HttpFSServerWebApp.get().getAdminGroup())) {
      throw new AccessControlException(
          "User not in HttpFSServer admin group");
    }
    Instrumentation instrumentation =
        HttpFSServerWebApp.get().get(Instrumentation.class);
    Map snapshot = instrumentation.getSnapshot();
    response = Response.ok(snapshot).build();
    break;
  }
  case GETCONTENTSUMMARY: {
    FSOperations.FSContentSummary command =
        new FSOperations.FSContentSummary(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("Content summary for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETQUOTAUSAGE: {
    FSOperations.FSQuotaUsage command = new FSOperations.FSQuotaUsage(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("Quota Usage for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETFILECHECKSUM: {
    FSOperations.FSFileChecksum command =
        new FSOperations.FSFileChecksum(path);
    Boolean noRedirect = params.get(
        NoRedirectParam.NAME, NoRedirectParam.class);
    AUDIT_LOG.info("[{}]", path);
    if (noRedirect) {
      URI redirectURL = createOpenRedirectionURL(uriInfo);
      final String js = JsonUtil.toJsonString("Location", redirectURL);
      response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    } else {
      Map json = fsExecute(user, command);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    }
    break;
  }
  case GETFILEBLOCKLOCATIONS: {
    long offset = 0;
    long len = Long.MAX_VALUE;
    Long offsetParam = params.get(OffsetParam.NAME, OffsetParam.class);
    Long lenParam = params.get(LenParam.NAME, LenParam.class);
    AUDIT_LOG.info("[{}] offset [{}] len [{}]", path, offsetParam, lenParam);
    // Non-positive or missing offset/len fall back to the defaults above.
    if (offsetParam != null && offsetParam > 0) {
      offset = offsetParam;
    }
    if (lenParam != null && lenParam > 0) {
      len = lenParam;
    }
    FSOperations.FSFileBlockLocations command =
        new FSOperations.FSFileBlockLocations(path, offset, len);
    @SuppressWarnings("rawtypes") Map locations = fsExecute(user, command);
    final String json = JsonUtil.toJsonString("BlockLocations", locations);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETACLSTATUS: {
    FSOperations.FSAclStatus command = new FSOperations.FSAclStatus(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("ACL status for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETXATTRS: {
    List<String> xattrNames =
        params.getValues(XAttrNameParam.NAME, XAttrNameParam.class);
    XAttrCodec encoding =
        params.get(XAttrEncodingParam.NAME, XAttrEncodingParam.class);
    FSOperations.FSGetXAttrs command =
        new FSOperations.FSGetXAttrs(path, xattrNames, encoding);
    @SuppressWarnings("rawtypes") Map json = fsExecute(user, command);
    AUDIT_LOG.info("XAttrs for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case LISTXATTRS: {
    FSOperations.FSListXAttrs command = new FSOperations.FSListXAttrs(path);
    @SuppressWarnings("rawtypes") Map json = fsExecute(user, command);
    AUDIT_LOG.info("XAttr names for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case LISTSTATUS_BATCH: {
    String startAfter = params.get(
        HttpFSParametersProvider.StartAfterParam.NAME,
        HttpFSParametersProvider.StartAfterParam.class);
    byte[] token = HttpFSUtils.EMPTY_BYTES;
    if (startAfter != null) {
      token = startAfter.getBytes(StandardCharsets.UTF_8);
    }
    FSOperations.FSListStatusBatch command =
        new FSOperations.FSListStatusBatch(path, token);
    @SuppressWarnings("rawtypes") Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}] token [{}]", path, token);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETTRASHROOT: {
    FSOperations.FSTrashRoot command = new FSOperations.FSTrashRoot(path);
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETALLSTORAGEPOLICY: {
    FSOperations.FSGetAllStoragePolicies command =
        new FSOperations.FSGetAllStoragePolicies();
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSTORAGEPOLICY: {
    FSOperations.FSGetStoragePolicy command =
        new FSOperations.FSGetStoragePolicy(path);
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTDIFF: {
    String oldSnapshotName = params.get(OldSnapshotNameParam.NAME,
        OldSnapshotNameParam.class);
    String snapshotName = params.get(SnapshotNameParam.NAME,
        SnapshotNameParam.class);
    FSOperations.FSGetSnapshotDiff command =
        new FSOperations.FSGetSnapshotDiff(path, oldSnapshotName,
            snapshotName);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTDIFFLISTING: {
    String oldSnapshotName = params.get(OldSnapshotNameParam.NAME,
        OldSnapshotNameParam.class);
    String snapshotName = params.get(SnapshotNameParam.NAME,
        SnapshotNameParam.class);
    String snapshotDiffStartPath = params
        .get(HttpFSParametersProvider.SnapshotDiffStartPathParam.NAME,
            HttpFSParametersProvider.SnapshotDiffStartPathParam.class);
    Integer snapshotDiffIndex =
        params.get(HttpFSParametersProvider.SnapshotDiffIndexParam.NAME,
            HttpFSParametersProvider.SnapshotDiffIndexParam.class);
    FSOperations.FSGetSnapshotDiffListing command =
        new FSOperations.FSGetSnapshotDiffListing(path, oldSnapshotName,
            snapshotName, snapshotDiffStartPath, snapshotDiffIndex);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTTABLEDIRECTORYLIST: {
    FSOperations.FSGetSnapshottableDirListing command =
        new FSOperations.FSGetSnapshottableDirListing();
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTLIST: {
    FSOperations.FSGetSnapshotListing command =
        new FSOperations.FSGetSnapshotListing(path);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSERVERDEFAULTS: {
    FSOperations.FSGetServerDefaults command =
        new FSOperations.FSGetServerDefaults();
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case CHECKACCESS: {
    String mode = params.get(FsActionParam.NAME, FsActionParam.class);
    FsActionParam fsparam = new FsActionParam(mode);
    FSOperations.FSAccess command = new FSOperations.FSAccess(path,
        FsAction.getFsAction(fsparam.value()));
    fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok().build();
    break;
  }
  case GETECPOLICY: {
    FSOperations.FSGetErasureCodingPolicy command =
        new FSOperations.FSGetErasureCodingPolicy(path);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETECPOLICIES: {
    FSOperations.FSGetErasureCodingPolicies command =
        new FSOperations.FSGetErasureCodingPolicies();
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETECCODECS: {
    FSOperations.FSGetErasureCodingCodecs command =
        new FSOperations.FSGetErasureCodingCodecs();
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GET_BLOCK_LOCATIONS: {
    long offset = 0;
    long len = Long.MAX_VALUE;
    Long offsetParam = params.get(OffsetParam.NAME, OffsetParam.class);
    Long lenParam = params.get(LenParam.NAME, LenParam.class);
    AUDIT_LOG.info("[{}] offset [{}] len [{}]", path, offsetParam, lenParam);
    if (offsetParam != null && offsetParam > 0) {
      offset = offsetParam;
    }
    if (lenParam != null && lenParam > 0) {
      len = lenParam;
    }
    FSOperations.FSFileBlockLocationsLegacy command =
        new FSOperations.FSFileBlockLocationsLegacy(path, offset, len);
    @SuppressWarnings("rawtypes") Map locations = fsExecute(user, command);
    final String json = JsonUtil.toJsonString("LocatedBlocks", locations);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETFILELINKSTATUS: {
    FSOperations.FSFileLinkStatus command =
        new FSOperations.FSFileLinkStatus(path);
    @SuppressWarnings("rawtypes") Map js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSTATUS: {
    FSOperations.FSStatus command = new FSOperations.FSStatus(path);
    @SuppressWarnings("rawtypes") Map js = fsExecute(user, command);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETTRASHROOTS: {
    Boolean allUsers = params.get(AllUsersParam.NAME, AllUsersParam.class);
    FSOperations.FSGetTrashRoots command =
        new FSOperations.FSGetTrashRoots(allUsers);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("allUsers [{}]", allUsers);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  default: {
    throw new IOException(
        MessageFormat.format("Invalid HTTP GET operation [{0}]", op.value()));
  }
  }
  return response;
}
// End-to-end check: issuing SATISFYSTORAGEPOLICY through the HttpFS server sets the
// satisfy-storage-policy xattr on a directory containing a COLD-policy file.
@Test @TestDir @TestJetty @TestHdfs public void testStoragePolicySatisfier() throws Exception { createHttpFSServer(false, false); final String dir = "/parent"; Path path1 = new Path(dir); String file = "/parent/file"; Path filePath = new Path(file); DistributedFileSystem dfs = (DistributedFileSystem) FileSystem .get(path1.toUri(), TestHdfsHelper.getHdfsConf()); dfs.mkdirs(path1); dfs.create(filePath).close(); dfs.setStoragePolicy(filePath, HdfsConstants.COLD_STORAGE_POLICY_NAME); BlockStoragePolicy storagePolicy = (BlockStoragePolicy) dfs.getStoragePolicy(filePath); assertEquals(HdfsConstants.COLD_STORAGE_POLICY_NAME, storagePolicy.getName()); HttpURLConnection conn = putCmdWithReturn(dir, "SATISFYSTORAGEPOLICY", ""); Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode()); Map<String, byte[]> xAttrs = dfs.getXAttrs(path1); assertTrue( xAttrs.containsKey(HdfsServerConstants.XATTR_SATISFY_STORAGE_POLICY)); }
/**
 * Parses a JSON string into a Slime tree; convenience overload that encodes the
 * string as UTF-8 bytes and delegates to the byte-array variant (which throws on
 * malformed input).
 */
public static Slime jsonToSlimeOrThrow(String json) { return jsonToSlimeOrThrow(json.getBytes(StandardCharsets.UTF_8)); }
// Malformed JSON must surface as a RuntimeException with a parser error message.
@Test public void test_invalid_json() { try { SlimeUtils.jsonToSlimeOrThrow("foo"); fail(); } catch (RuntimeException e) { assertEquals("Unexpected character 'o'", e.getMessage()); } }
/**
 * Parses a delimiter-separated body into an immutable key/value map.
 *
 * <p>Lines starting with the configured ignore character are skipped, as are lines
 * that do not have enough columns for the configured key (and, unless key-only,
 * value) column. Keys may be lower-cased (case-insensitive mode) and surrounding
 * quote characters are stripped from both keys and values.
 */
public Map<String, String> parse(String body) {
    final ImmutableMap.Builder<String, String> entries = ImmutableMap.builder();
    for (final String row : body.split(lineSeparator)) {
        if (row.startsWith(this.ignorechar)) {
            continue;
        }
        final String[] columns = row.split(this.splitPattern);
        // Highest column index this line must provide; in key-only mode the value
        // column is irrelevant.
        final int highestColumn = Math.max(keyColumn, keyOnly ? 0 : valueColumn);
        if (columns.length <= highestColumn) {
            continue;
        }
        String key = this.caseInsensitive
                ? columns[keyColumn].toLowerCase(Locale.ENGLISH)
                : columns[keyColumn];
        key = key.trim();
        String value = this.keyOnly ? "" : columns[valueColumn].trim();
        if (!Strings.isNullOrEmpty(quoteChar)) {
            // Strip a leading or trailing quote character.
            final String quotePattern = "^" + quoteChar + "|" + quoteChar + "$";
            key = key.replaceAll(quotePattern, "");
            value = value.replaceAll(quotePattern, "");
        }
        entries.put(key, value);
    }
    return entries.build();
}
// A key-only parser whose key column index exceeds the available columns must
// produce an empty (but non-null) result.
@Test public void parseKeyOnlyFileWithNonexistingKeyColumn() throws Exception { final String input = "# Sample file for testing\n" + "1;foo\n" + "2;bar\n" + "3;baz"; final DSVParser dsvParser = new DSVParser("#", "\n", ";", "", true, false, 2, Optional.empty()); final Map<String, String> result = dsvParser.parse(input); assertThat(result) .isNotNull() .isEmpty(); }
/**
 * Formats the Java source read from {@code input} and writes the formatted result
 * to {@code output}. The whole input is read into memory before formatting.
 *
 * @throws FormatterException if the source cannot be formatted
 * @throws IOException on read or write failure
 */
public void formatSource(CharSource input, CharSink output)
    throws FormatterException, IOException {
  // TODO(cushon): proper support for streaming input/output. Input may
  // not be feasible (parsing) but output should be easier.
  output.write(formatSource(input.read()));
}
// A constructor whose name does not match the class still formats without error.
@Test public void badConstructor() throws FormatterException { String input = "class X { Y() {} }"; String output = new Formatter().formatSource(input); String expect = "class X {\n  Y() {}\n}\n"; assertThat(output).isEqualTo(expect); }
/**
 * Builds the single result row for SHOW SQL_FEDERATION RULE, exposing the enabled
 * flags and the execution plan cache settings (empty string when no cache is set).
 */
@Override
public Collection<LocalDataQueryResultRow> getRows(final ShowSQLFederationRuleStatement sqlStatement, final ContextManager contextManager) {
    SQLFederationRuleConfiguration config = rule.getConfiguration();
    String planCache = null == config.getExecutionPlanCache() ? "" : config.getExecutionPlanCache().toString();
    LocalDataQueryResultRow row = new LocalDataQueryResultRow(config.isSqlFederationEnabled(), config.isAllQueryUseSQLFederation(), planCache);
    return Collections.singleton(row);
}
// The executor must emit exactly one row with the enabled flags and cache settings.
@Test void assertGetRows() throws SQLException { engine.executeQuery(); Collection<LocalDataQueryResultRow> actual = engine.getRows(); assertThat(actual.size(), is(1)); Iterator<LocalDataQueryResultRow> iterator = actual.iterator(); LocalDataQueryResultRow row = iterator.next(); assertThat(row.getCell(1), is("true")); assertThat(row.getCell(2), is("true")); assertThat(row.getCell(3), is("initialCapacity: 2000, maximumSize: 65535")); }
/**
 * Renders an AST back to SQL text via the visitor-based Formatter, with any trailing
 * newlines stripped from the result.
 */
public static String formatSql(final AstNode root) { final StringBuilder builder = new StringBuilder(); new Formatter(builder).process(root, 0); return StringUtils.stripEnd(builder.toString(), "\n"); }
// UNDEFINE statements round-trip through the formatter without the trailing semicolon.
@Test public void shouldFormatUndefineStatement() { final String statementString = "UNDEFINE _topic;"; final Statement statement = parseSingle(statementString); final String result = SqlFormatter.formatSql(statement); assertThat(result, is("UNDEFINE _topic")); }
/**
 * Returns an iterable view of this float-array subject that compares elements using
 * exact (bitwise-style) equality rather than a tolerance.
 */
public FloatArrayAsIterable usingExactEquality() { return new FloatArrayAsIterable(EXACT_EQUALITY_CORRESPONDENCE, iterableSubject()); }
// Exact equality treats NaN as equal to NaN, so contains(NaN) succeeds.
@Test public void usingExactEquality_contains_successWithNaN() { assertThat(array(1.0f, NaN, 3.0f)).usingExactEquality().contains(NaN); }
/**
 * Static factory that builds a {@link CronPattern} from a cron expression string.
 */
public static CronPattern of(String pattern) { return new CronPattern(pattern); }
// A negative value in the hour field counts back from the maximum: -4 matches hour 19.
@Test
public void patternNegativeTest() {
  // -4 counts backwards from the field maximum; in the hour field, -4 means 23 - 4 = 19
  CronPattern pattern = CronPattern.of("* 0 -4 * * ?");
  assertMatch(pattern, "2017-02-09 19:00:00");
  assertMatch(pattern, "2017-02-19 19:00:33");
}
/**
 * Registers a command handler under the given name.
 *
 * <p>Blank names and null handlers are silently ignored; a duplicate name is
 * rejected with a warning and the existing handler is kept.
 */
public void registerCommand(String commandName, CommandHandler handler) {
    // Reject invalid registrations outright.
    if (handler == null || StringUtil.isEmpty(commandName)) {
        return;
    }
    if (!handlerMap.containsKey(commandName)) {
        handlerMap.put(commandName, handler);
    } else {
        CommandCenterLog.warn("[NettyHttpCommandCenter] Register failed (duplicate command): " + commandName);
    }
}
// Exercises registerCommand's guard clauses: null/empty names, null handlers and
// duplicate registrations are ignored; only valid, unique commands are added.
@Test
public void testRegisterCommand() {
  String commandName;
  CommandHandler handler;
  // If commandName is null, no handler added in handlerMap
  commandName = null;
  handler = new VersionCommandHandler();
  httpServer.registerCommand(commandName, handler);
  assertEquals(0, HttpServer.handlerMap.size());
  // If commandName is "", no handler added in handlerMap
  commandName = "";
  handler = new VersionCommandHandler();
  httpServer.registerCommand(commandName, handler);
  assertEquals(0, HttpServer.handlerMap.size());
  // If handler is null, no handler added in handlerMap
  commandName = "version";
  handler = null;
  httpServer.registerCommand(commandName, handler);
  assertEquals(0, HttpServer.handlerMap.size());
  // Add one handler, commandName:version, handler:VersionCommandHandler
  commandName = "version";
  handler = new VersionCommandHandler();
  httpServer.registerCommand(commandName, handler);
  assertEquals(1, HttpServer.handlerMap.size());
  // Add the same name Handler, no handler added in handlerMap
  commandName = "version";
  handler = new VersionCommandHandler();
  httpServer.registerCommand(commandName, handler);
  assertEquals(1, HttpServer.handlerMap.size());
  // Add another handler, commandName:basicInfo, handler:BasicInfoCommandHandler
  commandName = "basicInfo";
  handler = new BasicInfoCommandHandler();
  httpServer.registerCommand(commandName, handler);
  assertEquals(2, HttpServer.handlerMap.size());
}
/**
 * Fetches table-level statistics (row count plus per-column stats) for a Hive table.
 *
 * <p>Returns empty stats when the metastore reports no row count (-1). When the
 * metastore has no column statistics and reuse of Spark stats is enabled, falls back
 * to column stats embedded in the table's "spark.sql.statistics.colStats.*"
 * parameters (best-effort: failures are logged and ignored).
 */
public HivePartitionStats getTableStatistics(String dbName, String tblName) {
  org.apache.hadoop.hive.metastore.api.Table table = client.getTable(dbName, tblName);
  HiveCommonStats commonStats = toHiveCommonStats(table.getParameters());
  long totalRowNums = commonStats.getRowNums();
  if (totalRowNums == -1) {
    return HivePartitionStats.empty();
  }
  List<String> dataColumns = table.getSd().getCols().stream()
      .map(FieldSchema::getName)
      .collect(toImmutableList());
  List<ColumnStatisticsObj> statisticsObjs = client.getTableColumnStats(dbName, tblName, dataColumns);
  if (statisticsObjs.isEmpty() && Config.enable_reuse_spark_column_statistics) {
    // Try to use spark unpartitioned table column stats
    try {
      if (table.getParameters().keySet().stream().anyMatch(k -> k.startsWith("spark.sql.statistics.colStats."))) {
        statisticsObjs = HiveMetastoreApiConverter.getColStatsFromSparkParams(table);
      }
    } catch (Exception e) {
      LOG.warn("Failed to get column stats from table [{}.{}]", dbName, tblName);
    }
  }
  Map<String, HiveColumnStats> columnStatistics =
      HiveMetastoreApiConverter.toSinglePartitionColumnStats(statisticsObjs, totalRowNums);
  return new HivePartitionStats(commonStats, columnStatistics);
}
// Verifies common and per-column stats returned for a mocked metastore table.
@Test public void testGetTableStatistics() { HiveMetaClient client = new MockedHiveMetaClient(); HiveMetastore metastore = new HiveMetastore(client, "hive_catalog", MetastoreType.HMS); HivePartitionStats statistics = metastore.getTableStatistics("db1", "table1"); HiveCommonStats commonStats = statistics.getCommonStats(); Assert.assertEquals(50, commonStats.getRowNums()); Assert.assertEquals(100, commonStats.getTotalFileBytes()); HiveColumnStats columnStatistics = statistics.getColumnStats().get("col1"); Assert.assertEquals(0, columnStatistics.getTotalSizeBytes()); Assert.assertEquals(1, columnStatistics.getNumNulls()); Assert.assertEquals(2, columnStatistics.getNdv()); }
/**
 * Delegates a graceful shutdown to the wrapped executor group and returns its
 * termination future.
 */
@Override public Future<?> shutdownGracefully() { return group.shutdownGracefully(); }
// Wrapping an incompatible executor group must fail fast with IllegalArgumentException.
@Test public void testInvalidGroup() { final EventExecutorGroup group = new DefaultEventExecutorGroup(1); try { assertThrows(IllegalArgumentException.class, new Executable() { @Override public void execute() { new NonStickyEventExecutorGroup(group); } }); } finally { group.shutdownGracefully(); } }
/**
 * Returns the message field of this record as a String.
 */
public String getMessage() { return getFieldAs(String.class, FIELD_MESSAGE); }
// getMessage returns the stored message field verbatim.
@Test public void testGetMessage() throws Exception { assertEquals("foo", message.getMessage()); }
/**
 * Determines the ecosystem of a CVE by scoring its English descriptions against the
 * known ecosystem keyword table and returning the best match (or null when none).
 */
public String getEcosystem(DefCveItem cve) {
    final int[] ecosystemMap = new int[ECOSYSTEMS.length];
    // Score only the English-language descriptions, in document order.
    cve.getCve().getDescriptions().forEach(description -> {
        if (description.getLang().equals("en")) {
            search(description.getValue(), ecosystemMap);
        }
    });
    return getResult(ecosystemMap);
}
// A ".jsp" appearing only inside a URL must not count toward ecosystem scoring.
@Test public void testJspLinksDoNotCountScoring() throws IOException { DescriptionEcosystemMapper mapper = new DescriptionEcosystemMapper(); String value = "Read more at https://domain/help.jsp."; assertNull(mapper.getEcosystem(asCve(value))); }
/**
 * Closes with no timeout (waits indefinitely) and without the error-state flag;
 * convenience overload of {@code close(timeout, leaveGroup)}.
 */
public void close() { close(Long.MAX_VALUE, false); }
// A second close() must be a no-op (reporter close count unchanged).
@Test public void testCloseIsIdempotent() { prepareStreams(); prepareStreamThread(streamThreadOne, 1); prepareStreamThread(streamThreadTwo, 2); try (final KafkaStreams streams = new KafkaStreams(getBuilderWithSource().build(), props, supplier, time)) { streams.close(); final int closeCount = MockMetricsReporter.CLOSE_COUNT.get(); streams.close(); assertEquals(closeCount, MockMetricsReporter.CLOSE_COUNT.get(), "subsequent close() calls should do nothing"); } }
/**
 * Always throws: a recovered input channel replays persisted data and must never
 * request partitions from a producer.
 */
@Override final void requestSubpartitions() { throw new UnsupportedOperationException( "RecoveredInputChannel should never request partition."); }
// Requesting subpartitions on a recovered channel must throw UnsupportedOperationException.
@Test void testRequestPartitionsImpossible() { assertThatThrownBy(() -> buildChannel().requestSubpartitions()) .isInstanceOf(UnsupportedOperationException.class); }
/**
 * Returns the coordinator that initiated this transaction.
 */
public TxnCoordinator getCoordinator() { return txnCoordinator; }
// Round-trips a TransactionState through Gson and checks the coordinator IP survives.
@Test public void testSerDe() { UUID uuid = UUID.randomUUID(); TransactionState transactionState = new TransactionState(1000L, Lists.newArrayList(20000L, 20001L), 3000, "label123", new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()), LoadJobSourceType.BACKEND_STREAMING, new TxnCoordinator(TxnSourceType.BE, "127.0.0.1"), 50000L, 60 * 1000L); String json = GsonUtils.GSON.toJson(transactionState); TransactionState readTransactionState = GsonUtils.GSON.fromJson(json, TransactionState.class); Assert.assertEquals(transactionState.getCoordinator().ip, readTransactionState.getCoordinator().ip); }
public static int getVersionNumber(String version) { if (version == null) { return -1; } String[] vs = version.split("\\."); int sum = 0; for (int i = 0; i < vs.length; i++) { try { sum = sum * 10 + Integer.parseInt(vs[i]); } catch (Exception e) { // ignore } } return sum; }
// Covers null, empty, numeric and partially non-numeric version strings.
@Test void testGetVersionNumber() { assertEquals(-1, Protocol.getVersionNumber(null)); assertEquals(0, Protocol.getVersionNumber("")); assertEquals(120, Protocol.getVersionNumber("1.2.0")); assertEquals(10, Protocol.getVersionNumber("1.A.0")); }
/**
 * Always declines: checkpoints cannot start on this channel, so any barrier is
 * rejected with CHECKPOINT_DECLINED_TASK_NOT_READY.
 */
@Override public void checkpointStarted(CheckpointBarrier barrier) throws CheckpointException { throw new CheckpointException(CHECKPOINT_DECLINED_TASK_NOT_READY); }
// Starting a checkpoint on this channel must raise CheckpointException.
@Test void testCheckpointStartImpossible() { assertThatThrownBy( () -> buildChannel() .checkpointStarted( new CheckpointBarrier( 0L, 0L, unaligned( CheckpointType.CHECKPOINT, getDefault())))) .isInstanceOf(CheckpointException.class); }
/**
 * Applies the procedure to each element with its index, executing in parallel on
 * the shared fork-join pool; convenience overload of the pool-accepting variant.
 */
public static <T> void forEachWithIndex(Iterable<T> iterable, ObjectIntProcedure<? super T> procedure) { FJIterate.forEachWithIndex(iterable, procedure, FJIterate.FORK_JOIN_POOL); }
// Parallel forEachWithIndex must place each element at its own index in the array.
@Test public void testForEachWithIndexToArrayUsingArrayList() { Integer[] array = new Integer[200]; MutableList<Integer> list = FastList.newList(Interval.oneTo(200)); assertTrue(ArrayIterate.allSatisfy(array, Predicates.isNull())); FJIterate.forEachWithIndex(list, (each, index) -> array[index] = each, 10, 10); assertArrayEquals(array, list.toArray(new Integer[]{})); }
/**
 * Returns the explicitly configured server list, or the endpoint-discovered
 * servers when no explicit list was configured.
 */
@Override public List<String> getServerList() { return serverList.isEmpty() ? serversFromEndpoint : serverList; }
// A comma-separated SERVER_ADDR property yields the servers in declaration order.
@Test void testConstructWithAddr() { Properties properties = new Properties(); properties.put(PropertyKeyConst.SERVER_ADDR, "127.0.0.1:8848,127.0.0.1:8849"); serverListManager = new ServerListManager(properties); final List<String> serverList = serverListManager.getServerList(); assertEquals(2, serverList.size()); assertEquals("127.0.0.1:8848", serverList.get(0)); assertEquals("127.0.0.1:8849", serverList.get(1)); }
/**
 * Always throws: this view is unmodifiable, so removal is not supported.
 */
@Override public Object remove(String key) { throw new UnsupportedOperationException(); }
// remove() on the unmodifiable view must throw UnsupportedOperationException.
@Test public void testRemove() { assertThrowsUnsupportedOperation( () -> unmodifiables.remove("some.key")); }
/**
 * Writes a single byte at the current position, allocating a new chunk when the
 * current one is full and extending the logical length when writing past the end.
 *
 * @param b the byte to write (only the low 8 bits are used)
 * @throws IOException if this buffer has already been closed
 */
@Override
public void write(int b) throws IOException {
  checkClosed();
  // Current chunk exhausted: move to (or allocate) the next chunk.
  if (chunkSize - currentBufferPointer <= 0) {
    expandBuffer();
  }
  currentBuffer.put((byte) b);
  currentBufferPointer++;
  pointer++;
  // Writing past the previous end grows the logical size.
  if (pointer > size) {
    size = pointer;
  }
}
// Bytes written through the read/write buffer must be readable back after seek(0).
@Test void testRandomAccessRead() throws IOException { try (RandomAccess randomAccessReadWrite = new RandomAccessReadWriteBuffer()) { randomAccessReadWrite.write(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 }); assertEquals(11, randomAccessReadWrite.length()); randomAccessReadWrite.seek(0); assertEquals(11, randomAccessReadWrite.length()); byte[] bytesRead = new byte[11]; assertEquals(11, randomAccessReadWrite.read(bytesRead)); assertEquals(1, bytesRead[0]); assertEquals(7, bytesRead[6]); assertEquals(8, bytesRead[7]); assertEquals(11, bytesRead[10]); } catch (Throwable throwable) { fail("Unexpected exception " + throwable.getMessage()); } }
/**
 * Plans splits after refreshing the table: the first call (no last position) does
 * the initial discovery, subsequent calls discover incrementally from the last
 * enumerated position.
 */
@Override
public ContinuousEnumerationResult planSplits(IcebergEnumeratorPosition lastPosition) {
    table.refresh();
    return null == lastPosition
            ? discoverInitialSplits()
            : discoverIncrementalSplits(lastPosition);
}
// For an empty table with TABLE_SCAN_THEN_INCREMENTAL, both the initial and the
// incremental discoveries must yield no splits and empty positions, and the planner
// must keep cycling cleanly once snapshots appear.
@Test
public void testTableScanThenIncrementalWithEmptyTable() throws Exception {
  ScanContext scanContext =
      ScanContext.builder()
          .startingStrategy(StreamingStartingStrategy.TABLE_SCAN_THEN_INCREMENTAL)
          .build();
  ContinuousSplitPlannerImpl splitPlanner =
      new ContinuousSplitPlannerImpl(TABLE_RESOURCE.tableLoader().clone(), scanContext, null);
  ContinuousEnumerationResult emptyTableInitialDiscoveryResult = splitPlanner.planSplits(null);
  assertThat(emptyTableInitialDiscoveryResult.splits()).isEmpty();
  assertThat(emptyTableInitialDiscoveryResult.fromPosition()).isNull();
  assertThat(emptyTableInitialDiscoveryResult.toPosition().isEmpty()).isTrue();
  assertThat(emptyTableInitialDiscoveryResult.toPosition().snapshotTimestampMs()).isNull();
  ContinuousEnumerationResult emptyTableSecondDiscoveryResult =
      splitPlanner.planSplits(emptyTableInitialDiscoveryResult.toPosition());
  assertThat(emptyTableSecondDiscoveryResult.splits()).isEmpty();
  assertThat(emptyTableSecondDiscoveryResult.fromPosition().isEmpty()).isTrue();
  assertThat(emptyTableSecondDiscoveryResult.fromPosition().snapshotTimestampMs()).isNull();
  assertThat(emptyTableSecondDiscoveryResult.toPosition().isEmpty()).isTrue();
  assertThat(emptyTableSecondDiscoveryResult.toPosition().snapshotTimestampMs()).isNull();
  // next 3 snapshots
  IcebergEnumeratorPosition lastPosition = emptyTableSecondDiscoveryResult.toPosition();
  for (int i = 0; i < 3; ++i) {
    lastPosition = verifyOneCycle(splitPlanner, lastPosition).lastPosition;
  }
}
/**
 * Looks up an access token, reading through the Redis cache: Redis first, then
 * MySQL; a MySQL hit that is not yet expired is written back into Redis.
 *
 * @param accessToken the access token string
 * @return the token record, or null when it does not exist anywhere
 */
@Override
public OAuth2AccessTokenDO getAccessToken(String accessToken) {
    // Prefer the Redis cache
    OAuth2AccessTokenDO accessTokenDO = oauth2AccessTokenRedisDAO.get(accessToken);
    if (accessTokenDO != null) {
        return accessTokenDO;
    }
    // Cache miss: fall back to MySQL
    accessTokenDO = oauth2AccessTokenMapper.selectByAccessToken(accessToken);
    // If it exists in MySQL (and is not expired), write it back to Redis
    if (accessTokenDO != null && !DateUtils.isExpired(accessTokenDO.getExpiresTime())) {
        oauth2AccessTokenRedisDAO.set(accessTokenDO);
    }
    return accessTokenDO;
}
// A token that exists only in MySQL is still returned and matches the persisted row
// (ignoring audit fields).
@Test
public void testCheckAccessToken_success() {
    // mock data (an access token persisted in MySQL)
    OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class)
            .setExpiresTime(LocalDateTime.now().plusDays(1));
    oauth2AccessTokenMapper.insert(accessTokenDO);
    // prepare the argument
    String accessToken = accessTokenDO.getAccessToken();
    // invoke
    OAuth2AccessTokenDO result = oauth2TokenService.getAccessToken(accessToken);
    // assert
    assertPojoEquals(accessTokenDO, result, "createTime", "updateTime", "deleted", "creator", "updater");
}
/**
 * Returns the proxy configuration section of the global configuration.
 */
public static ProxyConfig getProxyConfig() { return configuration.getProxyConfig(); }
// The proxy configuration is always available from the configuration manager.
@Test public void testGetProxyConfig() { assertThat(ConfigurationManager.getProxyConfig()).isNotNull(); }
/**
 * Inserts {@code value} while keeping the backing Redis list sorted.
 *
 * <p>Performs a client-side binary search for the insertion point (a negative search
 * result encodes it as {@code -(index + 1)}), then runs a Lua script that LINSERTs
 * before the element currently at that index, or RPUSHes when the index is at/past
 * the tail. Returns {@code false} when the value is already present.
 *
 * <p>NOTE(review): {@code lock} appears to be a process-local lock, so concurrent
 * adds from other clients/JVMs may interleave with the search — confirm intended.
 */
@Override
public boolean add(V value) {
  lock.lock();
  try {
    checkComparator();
    BinarySearchResult<V> res = binarySearch(value, codec);
    if (res.getIndex() < 0) {
      // Negative result encodes the insertion point as -(index + 1).
      int index = -(res.getIndex() + 1);
      ByteBuf encodedValue = encode(value);
      commandExecutor.get(commandExecutor.evalWriteNoRetryAsync(list.getRawName(), codec, RedisCommands.EVAL_VOID,
          "local len = redis.call('llen', KEYS[1]);"
              + "if tonumber(ARGV[1]) < len then "
              + "local pivot = redis.call('lindex', KEYS[1], ARGV[1]);"
              + "redis.call('linsert', KEYS[1], 'before', pivot, ARGV[2]);"
              + "return;"
              + "end;"
              + "redis.call('rpush', KEYS[1], ARGV[2]);",
          Arrays.<Object>asList(list.getRawName()), index, encodedValue));
      return true;
    } else {
      return false;
    }
  } finally {
    lock.unlock();
  }
}
// Iterating the sorted set must visit exactly the inserted 1000 distinct values.
@Test public void testIteratorSequence() { Set<Integer> set = redisson.getSortedSet("set"); for (int i = 0; i < 1000; i++) { set.add(Integer.valueOf(i)); } Set<Integer> setCopy = new HashSet<Integer>(); for (int i = 0; i < 1000; i++) { setCopy.add(Integer.valueOf(i)); } checkIterator(set, setCopy); }
/**
 * Compiles the parsed node tree into its fully substituted string form.
 *
 * @throws ScanException if variable substitution fails during compilation
 */
public String transform() throws ScanException { StringBuilder stringBuilder = new StringBuilder(); compileNode(node, stringBuilder, new Stack<Node>()); return stringBuilder.toString(); }
// Regression for LOGBACK-729: nested variable references are resolved correctly.
@Test public void LOGBACK729() throws ScanException { String input = "${${k0}.jdbc.url}"; Node node = makeNode(input); NodeToStringTransformer nodeToStringTransformer = new NodeToStringTransformer(node, propertyContainer0); Assertions.assertEquals("http://..", nodeToStringTransformer.transform()); }
/**
 * Static factory returning the default profiler implementation bound to the given
 * logger.
 */
public static Profiler create(Logger logger) { return new DefaultProfiler(logger); }
// The factory must return the DefaultProfiler implementation.
@Test public void create() { Profiler profiler = Profiler.create(LoggerFactory.getLogger("foo")); assertThat(profiler).isInstanceOf(DefaultProfiler.class); }
@Override public void open() { super.open(); for (String propertyKey : properties.stringPropertyNames()) { LOGGER.debug("propertyKey: {}", propertyKey); String[] keyValue = propertyKey.split("\\.", 2); if (2 == keyValue.length) { LOGGER.debug("key: {}, value: {}", keyValue[0], keyValue[1]); Properties prefixProperties; if (basePropertiesMap.containsKey(keyValue[0])) { prefixProperties = basePropertiesMap.get(keyValue[0]); } else { prefixProperties = new Properties(); basePropertiesMap.put(keyValue[0].trim(), prefixProperties); } prefixProperties.put(keyValue[1].trim(), getProperty(propertyKey)); } } Set<String> removeKeySet = new HashSet<>(); for (String key : basePropertiesMap.keySet()) { if (!COMMON_KEY.equals(key)) { Properties properties = basePropertiesMap.get(key); if (!properties.containsKey(DRIVER_KEY) || !properties.containsKey(URL_KEY)) { LOGGER.error("{} will be ignored. {}.{} and {}.{} is mandatory.", key, DRIVER_KEY, key, key, URL_KEY); removeKeySet.add(key); } } } for (String key : removeKeySet) { basePropertiesMap.remove(key); } LOGGER.debug("JDBC PropertiesMap: {}", basePropertiesMap); setMaxLineResults(); setMaxRows(); //TODO(zjffdu) Set different sql splitter for different sql dialects. this.sqlSplitter = new SqlSplitter(); }
// With common.max_count=1, a SELECT returning more rows must be truncated to a
// single row and followed by an HTML "Output is truncated" notice.
@Test
void testSelectQueryMaxResult() throws IOException, InterpreterException {
    Properties properties = new Properties();
    properties.setProperty("common.max_count", "1");
    properties.setProperty("common.max_retry", "3");
    properties.setProperty("default.driver", "org.h2.Driver");
    properties.setProperty("default.url", getJdbcConnection());
    properties.setProperty("default.user", "");
    properties.setProperty("default.password", "");
    JDBCInterpreter t = new JDBCInterpreter(properties);
    t.open();
    String sqlQuery = "select * from test_table";
    InterpreterResult interpreterResult = t.interpret(sqlQuery, context);
    assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
    assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
    assertEquals("ID\tNAME\na\ta_name\n", resultMessages.get(0).getData());
    assertEquals(InterpreterResult.Type.HTML, resultMessages.get(1).getType());
    assertTrue(resultMessages.get(1).getData().contains("Output is truncated"));
}
/** SQL scalar function exposing Euler's number as a DOUBLE. */
@Description("Euler's number")
@ScalarFunction
@SqlType(StandardTypes.DOUBLE)
public static double e() {
    return Math.E;
}
// e() must evaluate to Math.E.
@Test
public void testE() {
    assertFunction("e()", DOUBLE, Math.E);
}
/**
 * Converts an Avro {@link GenericRecord} into a Beam {@link Row} following the
 * given Beam schema. Fields absent from the Avro record are converted from null.
 *
 * @throws IllegalArgumentException wrapping any per-field conversion failure,
 *     with the offending field in the message
 */
public static Row toBeamRow(GenericRecord record, Schema schema, ConversionOptions options) {
    List<Object> valuesInOrder =
        schema.getFields().stream()
            .map(
                field -> {
                    try {
                        org.apache.avro.Schema.Field avroField = record.getSchema().getField(field.getName());
                        Object value = avroField != null ? record.get(avroField.pos()) : null;
                        return convertAvroFormat(field.getType(), value, options);
                    } catch (Exception cause) {
                        throw new IllegalArgumentException(
                            "Error converting field " + field + ": " + cause.getMessage(), cause);
                    }
                })
            .collect(toList());
    return Row.withSchema(schema).addValues(valuesInOrder).build();
}
// Round-trips an Avro record containing an array of nested records into the
// equivalent Beam Row with an array-of-rows field.
@Test
public void testToBeamRow_avro_array_row() {
    Row flatRowExpected =
        Row.withSchema(AVRO_FLAT_TYPE).addValues(123L, 123.456, "test", false).build();
    Row expected =
        Row.withSchema(AVRO_ARRAY_TYPE).addValues((Object) Arrays.asList(flatRowExpected)).build();
    GenericData.Record record = new GenericData.Record(AvroUtils.toAvroSchema(AVRO_ARRAY_TYPE));
    GenericData.Record flat = new GenericData.Record(AvroUtils.toAvroSchema(AVRO_FLAT_TYPE));
    flat.put("id", 123L);
    flat.put("value", 123.456);
    flat.put("name", "test");
    flat.put("valid", false);
    record.put("rows", Arrays.asList(flat));
    Row beamRow =
        BigQueryUtils.toBeamRow(
            record, AVRO_ARRAY_TYPE, BigQueryUtils.ConversionOptions.builder().build());
    assertEquals(expected, beamRow);
}
/**
 * Loads the persisted status string for the given workflow instance run and
 * parses it into a {@link WorkflowInstance.Status}. Parse failures are logged
 * with the full instance coordinates via withMetricLogError.
 *
 * @return the parsed status, or null when no status is stored
 */
public WorkflowInstance.Status getWorkflowInstanceStatus(
    String workflowId, long workflowInstanceId, long workflowRunId) {
    String status = getWorkflowInstanceRawStatus(workflowId, workflowInstanceId, workflowRunId);
    return withMetricLogError(
        () -> {
            if (status == null) {
                return null;
            }
            return WorkflowInstance.Status.create(status);
        },
        "getWorkflowInstanceStatus",
        "Failed to parse the workflow instance status [{}] for [{}][{}][{}]",
        status,
        workflowId,
        workflowInstanceId,
        workflowRunId);
}
// Status starts as CREATED and transitions to FAILED after the queued instance
// is successfully terminated.
@Test
public void testGetWorkflowInstanceStatus() {
    WorkflowInstance.Status status =
        instanceDao.getWorkflowInstanceStatus(
            wfi.getWorkflowId(), wfi.getWorkflowInstanceId(), wfi.getWorkflowRunId());
    assertEquals(WorkflowInstance.Status.CREATED, status);
    boolean res = instanceDao.tryTerminateQueuedInstance(wfi, WorkflowInstance.Status.FAILED, "test-reason");
    assertTrue(res);
    status =
        instanceDao.getWorkflowInstanceStatus(
            wfi.getWorkflowId(), wfi.getWorkflowInstanceId(), wfi.getWorkflowRunId());
    assertEquals(WorkflowInstance.Status.FAILED, status);
}
/**
 * Builds the Drools AST for a scorecard model by delegating to
 * {@link KiePMMLScorecardModelASTFactory}.
 */
public static KiePMMLDroolsAST getKiePMMLDroolsAST(final List<Field<?>> fields,
                                                   final Scorecard model,
                                                   final Map<String, KiePMMLOriginalTypeGeneratedType> fieldTypeMap,
                                                   final List<KiePMMLDroolsType> types) {
    logger.trace("getKiePMMLDroolsAST {}", model);
    return KiePMMLScorecardModelASTFactory.getKiePMMLDroolsAST(fields, model, fieldTypeMap, types);
}
// Building the Drools AST for the scorecard model must succeed (non-null result)
// even with an empty types list.
@Test
void getKiePMMLDroolsAST() {
    final DataDictionary dataDictionary = pmml.getDataDictionary();
    final Map<String, KiePMMLOriginalTypeGeneratedType> fieldTypeMap =
        getFieldTypeMap(dataDictionary, pmml.getTransformationDictionary(), scorecardModel.getLocalTransformations());
    KiePMMLDroolsAST retrieved =
        KiePMMLScorecardModelFactory.getKiePMMLDroolsAST(getFieldsFromDataDictionary(dataDictionary),
                                                         scorecardModel, fieldTypeMap, Collections.emptyList());
    assertThat(retrieved).isNotNull();
}
/** Asserts that the subject is strictly less than {@code other}. */
public final void isLessThan(int other) {
    // Delegate to the double-view of this subject.
    asDouble.isLessThan(other);
}
// 2.0f is not strictly less than 1, so the assertion must fail.
@Test
public void isLessThan_int_strictly() {
    expectFailureWhenTestingThat(2.0f).isLessThan(1);
}
/**
 * Ensures {@code context.extra()} contains exactly one extra instance created by
 * this factory, claimed for this context's trace/span ids. State from an
 * unclaimed duplicate is merged into the claimed instance; unrelated extra
 * elements are carried over unchanged.
 *
 * @return the input context when nothing needs to change, otherwise a copy with
 *     the normalized extra list
 */
public final TraceContext decorate(TraceContext context) {
    long traceId = context.traceId(), spanId = context.spanId();
    E claimed = null;
    int existingIndex = -1, extraLength = context.extra().size();
    // First pass: try to claim one of our instances; remember at most one other
    // of ours (more than one duplicate is a bug and aborts the decoration).
    for (int i = 0; i < extraLength; i++) {
        Object next = context.extra().get(i);
        if (next instanceof Extra) {
            Extra nextExtra = (Extra) next;
            // Don't interfere with other instances or subtypes
            if (nextExtra.factory != this) continue;
            if (claimed == null && nextExtra.tryToClaim(traceId, spanId)) {
                claimed = (E) nextExtra;
                continue;
            }
            if (existingIndex == -1) {
                existingIndex = i;
            } else {
                Platform.get().log("BUG: something added redundant extra instances %s", context, null);
                return context;
            }
        }
    }
    // Easiest when there is neither existing state to assign, nor need to change context.extra()
    if (claimed != null && existingIndex == -1) {
        return context;
    }
    // If context.extra() didn't have an unclaimed extra instance, create one for this context.
    if (claimed == null) {
        claimed = create();
        if (claimed == null) {
            Platform.get().log("BUG: create() returned null", null);
            return context;
        }
        claimed.tryToClaim(traceId, spanId);
    }
    // Second pass: rebuild the extra list with the claimed instance first,
    // merging duplicate state and keeping all unrelated elements.
    TraceContext.Builder builder = context.toBuilder().clearExtra().addExtra(claimed);
    for (int i = 0; i < extraLength; i++) {
        Object next = context.extra().get(i);
        if (i == existingIndex) {
            E existing = (E) next;
            // If the claimed extra instance was new or had no changes, simply assign existing to it
            if (claimed.state == initialState) {
                claimed.state = existing.state;
            } else if (existing.state != initialState) {
                claimed.mergeStateKeepingOursOnConflict(existing);
            }
        } else if (!next.equals(claimed)) {
            builder.addExtra(next);
        }
    }
    return builder.build();
}
// decorate() must add exactly one new, claimed extra container regardless of how
// many unrelated extra elements the context already carries.
@Test
void decorate_makesNewExtra() {
    List<TraceContext> contexts = asList(
        context.toBuilder().build(),
        context.toBuilder().addExtra(1L).build(),
        context.toBuilder().addExtra(1L).addExtra(2L).build()
    );
    for (TraceContext context : contexts) {
        // adds a new extra container and claims it against the current context.
        TraceContext ensured = factory.decorate(context);
        assertThat(ensured.extra())
            .hasSize(context.extra().size() + 1)
            .containsAll(context.extra());
        assertExtraClaimed(ensured);
    }
}
/**
 * Writes the HTML representation of the error counter: the table of collected
 * errors, or the localized "no errors" message when none were recorded.
 *
 * @throws IOException on write failure
 */
@Override
void toHtml() throws IOException {
    final List<CounterError> errors = counter.getErrors();
    if (!errors.isEmpty()) {
        writeErrors(errors);
        return;
    }
    // i18n key, resolved by the writer
    writeln("#Aucune_erreur#");
}
// Renders the error-counter report in three states: empty, saturated with
// MAX_ERRORS_COUNT errors, and with an error bound to a mocked HTTP request/user.
@Test
public void testCounterError() throws IOException {
    final Counter errorCounter = new Counter(Counter.ERROR_COUNTER_NAME, null);
    final StringWriter writer = new StringWriter();
    final HtmlCounterErrorReport report = new HtmlCounterErrorReport(errorCounter, writer);
    report.toHtml();
    assertNotEmptyAndClear(writer);
    while (errorCounter.getErrorsCount() < Counter.MAX_ERRORS_COUNT) {
        errorCounter.addErrors(Collections.singletonList(new CounterError("erreur", null)));
    }
    report.toHtml();
    assertNotEmptyAndClear(writer);
    final HttpServletRequest httpRequest = createNiceMock(HttpServletRequest.class);
    expect(httpRequest.getAttribute(CounterError.REQUEST_KEY)).andReturn("/test GET");
    expect(httpRequest.getRemoteUser()).andReturn("me");
    replay(httpRequest);
    CounterError.bindRequest(httpRequest);
    errorCounter.addErrors(Collections.singletonList(new CounterError("with request", null)));
    CounterError.unbindRequest();
    verify(httpRequest);
    report.toHtml();
    assertNotEmptyAndClear(writer);
}
/** Registers the Kafka logging plugin bean. */
@Bean
public ShenyuPlugin loggingKafkaPlugin() {
    return new LoggingKafkaPlugin();
}
// With shenyu.logging.kafka.enabled=true the auto-configuration must expose both
// the plugin-data handler bean and the LOGGING_KAFKA plugin bean.
@Test
public void testLoggingKafkaPlugin() {
    applicationContextRunner
        .withPropertyValues(
            "debug=true",
            "shenyu.logging.kafka.enabled=true"
        )
        .run(context -> {
            PluginDataHandler pluginDataHandler = context.getBean("loggingKafkaPluginDataHandler", PluginDataHandler.class);
            assertNotNull(pluginDataHandler);
            ShenyuPlugin plugin = context.getBean("loggingKafkaPlugin", ShenyuPlugin.class);
            assertNotNull(plugin);
            assertThat(plugin.named()).isEqualTo(PluginEnum.LOGGING_KAFKA.getName());
        });
}
/**
 * Snapshots the subscribers still flagged as needing their redo operation.
 * Iteration happens under the {@code subscribes} lock so the map cannot be
 * modified concurrently while it is scanned.
 *
 * @return the subscribers whose redo is pending (possibly empty)
 */
public Set<SubscriberRedoData> findSubscriberRedoData() {
    final Set<SubscriberRedoData> pending = new HashSet<>();
    synchronized (subscribes) {
        subscribes.values().stream()
                .filter(SubscriberRedoData::isNeedRedo)
                .forEach(pending::add);
    }
    return pending;
}
// Redo data is pending after caching, cleared once the subscriber registers,
// and pending again after deregistration.
@Test
void testFindSubscriberRedoData() {
    redoService.cacheSubscriberForRedo(SERVICE, GROUP, CLUSTER);
    assertFalse(redoService.findSubscriberRedoData().isEmpty());
    redoService.subscriberRegistered(SERVICE, GROUP, CLUSTER);
    assertTrue(redoService.findSubscriberRedoData().isEmpty());
    redoService.subscriberDeregister(SERVICE, GROUP, CLUSTER);
    assertFalse(redoService.findSubscriberRedoData().isEmpty());
}
/**
 * Authenticates against the third-party platform with the given authorization
 * code and persists (insert or update) the corresponding social user record.
 *
 * @return the stored social user, never null
 */
@NotNull
public SocialUserDO authSocialUser(Integer socialType, Integer userType, String code, String state) {
    // Check the DB first because an authorization code is single-use: when social
    // login later needs to bind a local user, the same code must be usable twice.
    SocialUserDO socialUser = socialUserMapper.selectByTypeAndCodeAnState(socialType, code, state);
    if (socialUser != null) {
        return socialUser;
    }
    // Fetch the user from the third-party platform.
    AuthUser authUser = socialClientService.getAuthUser(socialType, userType, code, state);
    Assert.notNull(authUser, "三方用户不能为空");
    // Save to the DB (insert when no record exists for this openid yet).
    socialUser = socialUserMapper.selectByTypeAndOpenid(socialType, authUser.getUuid());
    if (socialUser == null) {
        socialUser = new SocialUserDO();
    }
    socialUser.setType(socialType).setCode(code).setState(state) // persist code + state so the record can be found again later
            .setOpenid(authUser.getUuid()).setToken(authUser.getToken().getAccessToken()).setRawTokenInfo((toJsonString(authUser.getToken())))
            .setNickname(authUser.getNickname()).setAvatar(authUser.getAvatar()).setRawUserInfo(toJsonString(authUser.getRawUserInfo()));
    if (socialUser.getId() == null) {
        socialUserMapper.insert(socialUser);
    } else {
        socialUserMapper.updateById(socialUser);
    }
    return socialUser;
}
// Inserting a brand-new social user: the third-party auth result must be stored
// together with the authorization code and state.
@Test
public void testAuthSocialUser_insert() {
    // Prepare parameters.
    Integer socialType = SocialTypeEnum.GITEE.getType();
    Integer userType = randomEle(SocialTypeEnum.values()).getType();
    String code = "tudou";
    String state = "yuanma";
    // Mock the third-party client call.
    AuthUser authUser = randomPojo(AuthUser.class);
    when(socialClientService.getAuthUser(eq(socialType), eq(userType), eq(code), eq(state))).thenReturn(authUser);
    // Invoke.
    SocialUserDO result = socialUserService.authSocialUser(socialType, userType, code, state);
    // Assert.
    assertBindSocialUser(socialType, result, authUser);
    assertEquals(code, result.getCode());
    assertEquals(state, result.getState());
}
/**
 * Returns true when {@code s} is a well-formed, unpadded base64url string
 * (alphabet A-Z, a-z, 0-9, '-', '_') whose length is congruent to 2 or 3
 * modulo 4 — i.e. any number of 4-character groups followed by a mandatory
 * 2- or 3-character tail.
 */
public static boolean isEcCoordBase64Valid(String s) {
    final String fullGroups = "(?:[A-Za-z0-9-_]{4})*";
    final String tail = "(?:[A-Za-z0-9-_]{2}|[A-Za-z0-9-_]{3})";
    return s.matches(fullGroups + tail);
}
// A base64url-formatted EC coordinate (after +/ substitution and padding strip)
// validates; its plain decimal string form does not.
@Test
public void testIsEcCoordBase64Valid() {
    BigInteger example = new BigInteger(
        "3229926951468396372745881109827389213802338353528900010374647556550771243437");
    String message = Base64.getEncoder().encodeToString(example.toByteArray());
    // Format to standard
    message = message.replaceAll("[+]", "-")
        .replaceAll("[/]", "_")
        .substring(0, message.length() - 1);
    assertTrue(isEcCoordBase64Valid(message));
    assertFalse(isEcCoordBase64Valid(example.toString()));
}
/**
 * REST endpoint returning the profile of the currently logged-in user
 * (taken from the session attribute).
 */
@Operation(summary = "getUserInfo", description = "GET_USER_INFO_NOTES")
@GetMapping(value = "/get-user-info")
@ResponseStatus(HttpStatus.OK)
@ApiException(GET_USER_INFO_ERROR)
public Result getUserInfo(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
    Map<String, Object> result = usersService.getUserInfo(loginUser);
    return returnDataList(result);
}
// GET /users/get-user-info with a valid session id must return a SUCCESS result.
@Test
public void testGetUserInfo() throws Exception {
    MvcResult mvcResult = mockMvc.perform(get("/users/get-user-info")
            .header(SESSION_ID, sessionId))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON))
        .andReturn();
    Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
    Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
    logger.info(mvcResult.getResponse().getContentAsString());
}
/**
 * Runs the size-based cleaner over every per-worker log directory, deleting the
 * oldest files in each directory while it exceeds {@code size} bytes.
 * IOExceptions from the cleaner are rethrown as unchecked so they can escape
 * the stream pipeline.
 *
 * @param size per-directory size budget in bytes
 * @return one {@link DeletionMeta} per worker directory, in iteration order
 */
@VisibleForTesting
List<DeletionMeta> perWorkerDirCleanup(long size) {
    return workerLogs.getAllWorkerDirs().stream()
        .map(dir -> {
            try {
                return directoryCleaner.deleteOldestWhileTooLarge(Collections.singletonList(dir), size, true, null);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        })
        .collect(toList());
}
// Three worker directories each hold 10 x 200-byte files; with a 1200-byte
// per-directory budget the cleaner must delete 4 files from every directory.
@Test
public void testPerWorkerDirectoryCleanup() throws IOException {
    long nowMillis = Time.currentTimeMillis();
    try (TmpPath testDir = new TmpPath()) {
        Files.createDirectories(testDir.getFile().toPath());
        Path rootDir = createDir(testDir.getFile().toPath(), "workers-artifacts");
        Path topo1Dir = createDir(rootDir, "topo1");
        Path topo2Dir = createDir(rootDir, "topo2");
        Path port1Dir = createDir(topo1Dir, "port1");
        Path port2Dir = createDir(topo1Dir, "port2");
        Path port3Dir = createDir(topo2Dir, "port3");
        IntStream.range(0, 10)
            .forEach(idx -> createFile(port1Dir, "A" + idx, nowMillis + 100L * idx, 200));
        IntStream.range(0, 10)
            .forEach(idx -> createFile(port2Dir, "B" + idx, nowMillis + 100L * idx, 200));
        IntStream.range(0, 10)
            .forEach(idx -> createFile(port3Dir, "C" + idx, nowMillis + 100L * idx, 200));
        Map<String, Object> conf = Utils.readStormConfig();
        StormMetricsRegistry metricRegistry = new StormMetricsRegistry();
        WorkerLogs workerLogs = new WorkerLogs(conf, rootDir, metricRegistry);
        LogCleaner logCleaner = new LogCleaner(conf, workerLogs, new DirectoryCleaner(metricRegistry), rootDir, metricRegistry);
        List<Integer> deletedFiles = logCleaner.perWorkerDirCleanup(1200)
            .stream()
            .map(deletionMeta -> deletionMeta.deletedFiles)
            .collect(toList());
        assertEquals(Integer.valueOf(4), deletedFiles.get(0));
        assertEquals(Integer.valueOf(4), deletedFiles.get(1));
        assertEquals(Integer.valueOf(4), deletedFiles.get(deletedFiles.size() - 1));
    }
}
/**
 * Dispatches a discovery response to the handler matching its resource type.
 * The default branch throws AssertionError so new enum values cannot be
 * silently ignored.
 */
@VisibleForTesting
void handleResponse(DiscoveryResponseData response) {
    ResourceType resourceType = response.getResourceType();
    switch (resourceType) {
        case NODE:
            handleD2NodeResponse(response);
            break;
        case D2_URI_MAP:
            handleD2URIMapResponse(response);
            break;
        case D2_URI:
            handleD2URICollectionResponse(response);
            break;
        default:
            throw new AssertionError("Missing case in enum switch: " + resourceType);
    }
}
// End-to-end handling of D2_URI (collection) responses: initial create, update
// over an invalid prior state, combined create+delete, and final deletion.
@Test
public void testHandleD2URICollectionResponseWithData() {
    DiscoveryResponseData createUri1 = new DiscoveryResponseData(D2_URI, Collections.singletonList(
        Resource.newBuilder()
            .setVersion(VERSION1)
            .setName(URI_URN1)
            .setResource(Any.pack(D2URI_1))
            .build()
    ), null, NONCE, null);
    XdsClientImplFixture fixture = new XdsClientImplFixture();
    // subscriber original data is null
    fixture._clusterSubscriber.setData(null);
    fixture._xdsClientImpl.handleResponse(createUri1);
    fixture.verifyAckSent(1);
    verify(fixture._resourceWatcher).onChanged(eq(D2_URI_MAP_UPDATE_WITH_DATA1));
    verifyZeroInteractions(fixture._serverMetricsProvider);
    D2URIMapUpdate actualData = (D2URIMapUpdate) fixture._clusterSubscriber.getData();
    // subscriber data should be updated to D2_URI_MAP_UPDATE_WITH_DATA1
    Assert.assertEquals(Objects.requireNonNull(actualData).getURIMap(), D2_URI_MAP_UPDATE_WITH_DATA1.getURIMap());
    // subscriber original data is invalid, xds server latency won't be tracked
    fixture._clusterSubscriber.setData(new D2URIMapUpdate(null));
    fixture._xdsClientImpl.handleResponse(createUri1);
    fixture.verifyAckSent(2);
    verify(fixture._resourceWatcher, times(2)).onChanged(eq(D2_URI_MAP_UPDATE_WITH_DATA1));
    verifyZeroInteractions(fixture._serverMetricsProvider);
    DiscoveryResponseData createUri2Delete1 = new DiscoveryResponseData(D2_URI, Collections.singletonList(
        Resource.newBuilder()
            .setVersion(VERSION1)
            .setName(URI_URN2)
            .setResource(Any.pack(D2URI_2))
            .build()
    ), Collections.singletonList(URI_URN1), NONCE, null);
    fixture._xdsClientImpl.handleResponse(createUri2Delete1);
    actualData = (D2URIMapUpdate) fixture._clusterSubscriber.getData();
    // subscriber data should be updated to D2_URI_MAP_UPDATE_WITH_DATA2
    D2URIMapUpdate expectedUpdate = new D2URIMapUpdate(Collections.singletonMap(URI2, D2URI_2));
    verify(fixture._resourceWatcher).onChanged(eq(expectedUpdate));
    // track latency only for updated/new uri (not for deletion)
    verify(fixture._serverMetricsProvider).trackLatency(anyLong());
    Assert.assertEquals(actualData.getURIMap(), expectedUpdate.getURIMap());
    fixture.verifyAckSent(3);
    // Finally sanity check that the client correctly handles the deletion of the final URI in the collection
    DiscoveryResponseData deleteUri2 = new DiscoveryResponseData(D2_URI, null, Collections.singletonList(URI_URN2), NONCE, null);
    fixture._xdsClientImpl.handleResponse(deleteUri2);
    actualData = (D2URIMapUpdate) fixture._clusterSubscriber.getData();
    // subscriber data should be updated to empty map
    expectedUpdate = new D2URIMapUpdate(Collections.emptyMap());
    verify(fixture._resourceWatcher).onChanged(eq(expectedUpdate));
    verifyNoMoreInteractions(fixture._serverMetricsProvider);
    Assert.assertEquals(actualData.getURIMap(), expectedUpdate.getURIMap());
    fixture.verifyAckSent(4);
}
/**
 * Lazily initializes and returns the singleton control manager. The method is
 * synchronized so the null-check and initialization are atomic.
 */
public static synchronized AbstractAbilityControlManager getInstance() {
    if (null == abstractAbilityControlManager) {
        initAbilityControlManager();
    }
    return abstractAbilityControlManager;
}
// Requesting the holder instance with an incompatible manager subtype must
// throw ClassCastException.
@Test
void testGetInstanceByWrongType() {
    assertThrows(ClassCastException.class, () -> {
        assertNotNull(NacosAbilityManagerHolder.getInstance(LowerMockAbilityManager.class));
    });
}
public List<String> collectErrorsFromAllNodes() { List<String> errors = new ArrayList<>(); for (T node : mNodeResults.values()) { // add all the errors for this node, with the node appended to prefix for (String err : node.getErrors()) { errors.add(String.format("%s :%s", node.getBaseParameters().mId, err)); } } return errors; }
// A summary with no node results must produce an empty error list.
@Test
public void collectErrorFromAllNodesWithEmptyResults() {
    // test summary with empty nodes
    TestMultipleNodeSummary summary = new TestMultipleNodeSummary();
    List<String> emptyList = summary.collectErrorsFromAllNodes();
    assertTrue(emptyList.isEmpty());
}
/**
 * Logs a fatal exception before the CLI terminates: optionally the full stack
 * trace, then always a single ANSI-red summary line with the exception class
 * and message.
 */
static void logTerminatingException(
    ConsoleLogger consoleLogger, Exception exception, boolean logStackTrace) {
  if (logStackTrace) {
    StringWriter writer = new StringWriter();
    exception.printStackTrace(new PrintWriter(writer));
    consoleLogger.log(LogEvent.Level.ERROR, writer.toString());
  }
  // \u001B[31;1m ... \u001B[0m renders the summary in bold red.
  String summary =
      "\u001B[31;1m" + exception.getClass().getName() + ": " + exception.getMessage() + "\u001B[0m";
  consoleLogger.log(LogEvent.Level.ERROR, summary);
}
// With logStackTrace=false only the single red summary line may be logged.
@Test
public void testLogTerminatingException() {
    JibCli.logTerminatingException(logger, new IOException("test error message"), false);
    verify(logger)
        .log(LogEvent.Level.ERROR, "\u001B[31;1mjava.io.IOException: test error message\u001B[0m");
    verifyNoMoreInteractions(logger);
}
/**
 * Applies the optional {@code Job} annotation found on the job's target method
 * to the job being created: display name, retry count and labels.
 */
@Override
public void onCreating(AbstractJob job) {
    JobDetails jobDetails = job.getJobDetails();
    Optional<Job> jobAnnotation = getJobAnnotation(jobDetails);
    setJobName(job, jobAnnotation);
    setAmountOfRetries(job, jobAnnotation);
    setLabels(job, jobAnnotation);
}
// The @Job display-name template must resolve the method parameters even when
// the method also takes the implicit JobContext argument.
@Test
void testDisplayNameFilterAlsoWorksWithJobContext() {
    Job job = anEnqueuedJob()
        .withoutName()
        .withJobDetails(jobDetails()
            .withClassName(TestService.class)
            .withMethodName("doWorkWithAnnotationAndJobContext")
            .withJobParameter(5)
            .withJobParameter("John Doe")
            .withJobParameter(JobParameter.JobContext))
        .build();
    defaultJobFilter.onCreating(job);
    assertThat(job.getJobName()).isEqualTo("Doing some hard work for user John Doe with id 5");
}
/** Records one occurrence on the wrapped Dropwizard meter. */
@Override
public void markEvent() {
    meter.mark();
}
// markEvent() must delegate to the wrapped Dropwizard meter's mark().
@Test
void testMarkEvent() {
    com.codahale.metrics.Meter dropwizardMeter = mock(com.codahale.metrics.Meter.class);
    DropwizardMeterWrapper wrapper = new DropwizardMeterWrapper(dropwizardMeter);
    wrapper.markEvent();
    verify(dropwizardMeter).mark();
}
/** No-op: this listener has no end-of-task work to perform. */
@Override
public void onEnd(CeTask ceTask) {
    // nothing to do
}
// onEnd must not interact with the task at all.
@Test
public void onEnd_has_no_effect() {
    CeTask ceTask = mock(CeTask.class);
    underTest.onEnd(ceTask);
    verifyNoInteractions(ceTask);
}
/**
 * FEEL replace(input, pattern, replacement): three-argument overload that
 * delegates to the four-argument version with no regex flags.
 */
public FEELFnResult<Object> invoke(@ParameterName("input") String input,
                                   @ParameterName("pattern") String pattern,
                                   @ParameterName( "replacement" ) String replacement ) {
    return invoke(input, pattern, replacement, null);
}
// The "i" flag makes the replace pattern match case-insensitively.
@Test
void invokeWithFlagCaseInsensitive() {
    FunctionTestUtil.assertResult(replaceFunction.invoke("foobar", "^fOO", "ttt", "i"), "tttbar");
}
/**
 * Maps an {@link IntermediateDataSetID} onto the tiered-storage topic id that
 * wraps the same underlying bytes.
 */
public static TieredStorageTopicId convertId(IntermediateDataSetID intermediateDataSetID) {
    final byte[] idBytes = intermediateDataSetID.getBytes();
    return new TieredStorageTopicId(idBytes);
}
// A ResultPartitionID must survive a round trip through the tiered-storage
// id mapping.
@Test
void testConvertResultPartitionId() {
    ResultPartitionID resultPartitionID = new ResultPartitionID();
    TieredStoragePartitionId tieredStoragePartitionId =
        TieredStorageIdMappingUtils.convertId(resultPartitionID);
    ResultPartitionID convertedResultPartitionID =
        TieredStorageIdMappingUtils.convertId(tieredStoragePartitionId);
    assertThat(resultPartitionID).isEqualTo(convertedResultPartitionID);
}
/**
 * Sanitizes CR/LF/TAB characters in the log message (log-forging mitigation)
 * unless the event is marked CRLF-safe via its first marker or a safe logger.
 * The replacement underscore is optionally wrapped in the ANSI element named
 * by the converter's first option.
 */
@Override
protected String transform(ILoggingEvent event, String in) {
    AnsiElement element = ELEMENTS.get(getFirstOption());
    List<Marker> markers = event.getMarkerList();
    // Leave the message untouched when the first marker contains CRLF_SAFE_MARKER
    // or the logger itself is considered safe.
    if ((markers != null && !markers.isEmpty() && markers.get(0).contains(CRLF_SAFE_MARKER)) || isLoggerSafe(event)) {
        return in;
    }
    String replacement = element == null ? "_" : toAnsiString("_", element);
    return in.replaceAll("[\n\r\t]", replacement);
}
// An event carrying the CRLF_SAFE marker must be returned unmodified.
@Test
void transformShouldReturnInputStringWhenMarkersContainCRLFSafeMarker() {
    ILoggingEvent event = mock(ILoggingEvent.class);
    Marker marker = MarkerFactory.getMarker("CRLF_SAFE");
    List<Marker> markers = Collections.singletonList(marker);
    when(event.getMarkerList()).thenReturn(markers);
    String input = "Test input string";
    CRLFLogConverter converter = new CRLFLogConverter();
    String result = converter.transform(event, input);
    assertEquals(input, result);
}
/**
 * Returns all index names and their aliases for this index set's wildcard,
 * excluding restored-archive indices (only deflector-managed indices remain).
 */
@Override
public Map<String, Set<String>> getAllIndexAliases() {
    final Map<String, Set<String>> indexNamesAndAliases = indices.getIndexNamesAndAliases(getIndexWildcard());
    // filter out the restored archives from the result set
    return indexNamesAndAliases.entrySet().stream()
            .filter(e -> isGraylogDeflectorIndex(e.getKey()))
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
// Restored-archive indices must be filtered out of the alias listing; only
// deflector-managed indices remain.
@Test
public void getAllGraylogDeflectorIndices() {
    final Map<String, Set<String>> indexNameAliases = ImmutableMap.of(
        "graylog_1", Collections.emptySet(),
        "graylog_2", Collections.emptySet(),
        "graylog_3", Collections.emptySet(),
        "graylog_4_restored_archive", Collections.emptySet(),
        "graylog_5", Collections.singleton("graylog_deflector"));
    when(indices.getIndexNamesAndAliases(anyString())).thenReturn(indexNameAliases);
    final MongoIndexSet mongoIndexSet = createIndexSet(config);
    final Map<String, Set<String>> deflectorIndices = mongoIndexSet.getAllIndexAliases();
    assertThat(deflectorIndices).containsOnlyKeys("graylog_1", "graylog_2", "graylog_3", "graylog_5");
}
/**
 * Sets the leniency level used while reading JSON.
 *
 * @throws NullPointerException if {@code strictness} is null
 */
public final void setStrictness(Strictness strictness) {
    Objects.requireNonNull(strictness);
    this.strictness = strictness;
}
// STRICT mode must reject mis-capitalized boolean literals (FALSE, FaLse) with
// a message pointing at setStrictness(LENIENT).
@Test
public void testCapitalizedFalseFailWhenStrict() {
    JsonReader reader = new JsonReader(reader("FALSE"));
    reader.setStrictness(Strictness.STRICT);
    IOException expected = assertThrows(IOException.class, reader::nextBoolean);
    assertThat(expected)
        .hasMessageThat()
        .startsWith(
            "Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON"
                + " at line 1 column 1 path $\n");
    reader = new JsonReader(reader("FaLse"));
    reader.setStrictness(Strictness.STRICT);
    expected = assertThrows(IOException.class, reader::nextBoolean);
    assertThat(expected)
        .hasMessageThat()
        .startsWith(
            "Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON"
                + " at line 1 column 1 path $\n");
}
/** Returns the pinyin first letter of a single character via the configured engine. */
public static char getFirstLetter(char c) {
    return getEngine().getFirstLetter(c);
}
// Each character's pinyin first letter is extracted and joined with ", ".
@Test
public void getFirstLetterTest(){
    final String result = PinyinUtil.getFirstLetter("H是第一个", ", ");
    assertEquals("h, s, d, y, g", result);
}
/**
 * Creates the command executor matching a MySQL front-end command packet type.
 * Command types without a dedicated executor fall back to
 * {@link MySQLUnsupportedCommandExecutor}.
 *
 * @throws SQLException if executor construction fails
 */
@SuppressWarnings("DataFlowIssue")
public static CommandExecutor newInstance(final MySQLCommandPacketType commandPacketType, final CommandPacket commandPacket,
                                          final ConnectionSession connectionSession) throws SQLException {
    // Log the SQL text only for packets that actually carry SQL.
    if (commandPacket instanceof SQLReceivedPacket) {
        log.debug("Execute packet type: {}, sql: {}", commandPacketType, ((SQLReceivedPacket) commandPacket).getSQL());
    } else {
        log.debug("Execute packet type: {}", commandPacketType);
    }
    switch (commandPacketType) {
        case COM_QUIT:
            return new MySQLComQuitExecutor();
        case COM_INIT_DB:
            return new MySQLComInitDbExecutor((MySQLComInitDbPacket) commandPacket, connectionSession);
        case COM_FIELD_LIST:
            return new MySQLComFieldListPacketExecutor((MySQLComFieldListPacket) commandPacket, connectionSession);
        case COM_QUERY:
            return new MySQLComQueryPacketExecutor((MySQLComQueryPacket) commandPacket, connectionSession);
        case COM_PING:
            return new MySQLComPingExecutor(connectionSession);
        case COM_STMT_PREPARE:
            return new MySQLComStmtPrepareExecutor((MySQLComStmtPreparePacket) commandPacket, connectionSession);
        case COM_STMT_EXECUTE:
            return new MySQLComStmtExecuteExecutor((MySQLComStmtExecutePacket) commandPacket, connectionSession);
        case COM_STMT_SEND_LONG_DATA:
            return new MySQLComStmtSendLongDataExecutor((MySQLComStmtSendLongDataPacket) commandPacket, connectionSession);
        case COM_STMT_RESET:
            return new MySQLComStmtResetExecutor((MySQLComStmtResetPacket) commandPacket, connectionSession);
        case COM_STMT_CLOSE:
            return new MySQLComStmtCloseExecutor((MySQLComStmtClosePacket) commandPacket, connectionSession);
        case COM_SET_OPTION:
            return new MySQLComSetOptionExecutor((MySQLComSetOptionPacket) commandPacket, connectionSession);
        case COM_RESET_CONNECTION:
            return new MySQLComResetConnectionExecutor(connectionSession);
        default:
            return new MySQLUnsupportedCommandExecutor(commandPacketType);
    }
}
// COM_STMT_PREPARE packets must map to MySQLComStmtPrepareExecutor.
@Test
void assertNewInstanceWithComStmtPrepare() throws SQLException {
    assertThat(MySQLCommandExecutorFactory.newInstance(
        MySQLCommandPacketType.COM_STMT_PREPARE, mock(MySQLComStmtPreparePacket.class), connectionSession),
        instanceOf(MySQLComStmtPrepareExecutor.class));
}
/**
 * Wraps a JMS {@link ConnectionFactory} with tracing. Factories that also
 * implement {@link XAConnectionFactory} are routed through the XA wrapper.
 */
public ConnectionFactory connectionFactory(ConnectionFactory connectionFactory) {
    // It is common to implement both interfaces
    if (connectionFactory instanceof XAConnectionFactory) {
        return (ConnectionFactory) xaConnectionFactory((XAConnectionFactory) connectionFactory);
    }
    return TracingConnectionFactory.create(connectionFactory, this);
}
// Wrapping an already-traced ConnectionFactory must return the same instance
// rather than double-wrapping it.
@Test
void connectionFactory_doesntDoubleWrap() {
    ConnectionFactory wrapped = jmsTracing.connectionFactory(mock(ConnectionFactory.class));
    assertThat(jmsTracing.connectionFactory(wrapped))
        .isSameAs(wrapped);
}
/**
 * Websocket error callback: drops the failed session from the registry and
 * logs the error with the client address.
 */
@OnError
public void onError(final Session session, final Throwable error) {
    clearSession(session);
    LOG.error("websocket collection on client[{}] error: ", getClientIp(session), error);
}
// An error must remove the session from the collector's registry.
@Test
public void testOnError() {
    websocketCollector.onOpen(session);
    assertEquals(1L, getSessionSetSize());
    doNothing().when(loggerSpy).error(anyString(), anyString(), isA(Throwable.class));
    Throwable throwable = new Throwable();
    websocketCollector.onError(session, throwable);
    assertEquals(0L, getSessionSetSize());
    assertNull(getSession());
}
/**
 * Collects the ids of every step in the workflow, including steps nested inside
 * container steps (recursion handled by the private overload).
 */
@JsonIgnore
public List<String> getAllStepIds() {
    List<String> allStepIds = new ArrayList<>(steps.size());
    getAllStepIds(steps, allStepIds);
    return allStepIds;
}
// A foreach step containing STEP_LIST_SIZE_LIMIT-1 nested steps must yield
// exactly STEP_LIST_SIZE_LIMIT ids (foreach id first, then the nested ids).
@Test
public void testGetAllStepIdsInNestedForeach() {
    TypedStep step = new TypedStep();
    step.setId("foo");
    ForeachStep foreachStep = new ForeachStep();
    foreachStep.setId("foreach-step");
    foreachStep.setSteps(Collections.nCopies(Constants.STEP_LIST_SIZE_LIMIT - 1, step));
    Workflow workflow = Workflow.builder().steps(Collections.singletonList(foreachStep)).build();
    assertEquals(Constants.STEP_LIST_SIZE_LIMIT, workflow.getAllStepIds().size());
    List<String> expected = new ArrayList<>();
    expected.add("foreach-step");
    expected.addAll(Collections.nCopies(Constants.STEP_LIST_SIZE_LIMIT - 1, "foo"));
    assertEquals(expected, workflow.getAllStepIds());
}
/**
 * Executes the statement inside a Seata AT transaction (autocommit=false):
 * captures a before image, runs the statement, captures the after image and
 * records the undo log. On {@link TableMetaException} the cached table meta is
 * scheduled for refresh and the exception is rethrown.
 *
 * @param args statement arguments forwarded to the callback
 * @return the statement result
 * @throws Exception any failure from image capture or statement execution
 */
protected T executeAutoCommitFalse(Object[] args) throws Exception {
    try {
        TableRecords beforeImage = beforeImage();
        T result = statementCallback.execute(statementProxy.getTargetStatement(), args);
        TableRecords afterImage = afterImage(beforeImage);
        prepareUndoLog(beforeImage, afterImage);
        return result;
    } catch (TableMetaException e) {
        LOGGER.error("table meta will be refreshed later, due to TableMetaException, table:{}, column:{}", e.getTableName(), e.getColumnName());
        statementProxy.getConnectionProxy().getDataSourceProxy().tableMetaRefreshEvent();
        throw e;
    }
}
// Multi-column primary keys are only supported on MySQL; executing an Oracle
// insert with a two-column PK should be rejected. (Currently @Disabled.)
@Test
@Disabled
public void testOnlySupportMysqlWhenUseMultiPk() throws Exception {
    Mockito.when(connectionProxy.getContext())
        .thenReturn(new ConnectionContext());
    PreparedStatementProxy statementProxy = Mockito.mock(PreparedStatementProxy.class);
    Mockito.when(statementProxy.getConnectionProxy())
        .thenReturn(connectionProxy);
    StatementCallback statementCallback = Mockito.mock(StatementCallback.class);
    SQLInsertRecognizer sqlInsertRecognizer = Mockito.mock(SQLInsertRecognizer.class);
    TableMeta tableMeta = Mockito.mock(TableMeta.class);
    executor = Mockito.spy(new OracleInsertExecutor(statementProxy, statementCallback, sqlInsertRecognizer));
    Mockito.when(executor.getDbType()).thenReturn(JdbcConstants.ORACLE);
    Mockito.doReturn(tableMeta).when(executor).getTableMeta();
    Mockito.when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList("id","userCode"));
    executor.executeAutoCommitFalse(null);
}
/**
 * Autoscaling maintenance pass: attempts autoscaling for every active
 * application cluster. Returns 0.0 when the node repository is not working,
 * 1.0 in test environments (nothing attempted), otherwise the success-factor
 * deviation for the attempted autoscale operations.
 */
@Override
protected double maintain() {
    if ( ! nodeRepository().nodes().isWorking()) return 0.0;
    if (nodeRepository().zone().environment().isTest()) return 1.0;
    int attempts = 0;
    int failures = 0;
    // Labeled loop so a shutdown request aborts both levels at once.
    outer:
    for (var applicationNodes : activeNodesByApplication().entrySet()) {
        for (var clusterNodes : nodesByCluster(applicationNodes.getValue()).entrySet()) {
            if (shuttingDown()) break outer;
            attempts++;
            if ( ! autoscale(applicationNodes.getKey(), clusterNodes.getKey())) failures++;
        }
    }
    return asSuccessFactorDeviation(attempts, failures);
}
/**
 * Verifies that the autoscaler ignores load measurements taken during warmup windows:
 * measurements with an outdated config generation, measurements immediately after a
 * generation change, and measurements immediately after a node restart must not
 * trigger a new scaling event; only measurements recorded after the warmup duration
 * has elapsed may do so.
 */
@Test
public void test_autoscaling_ignores_measurements_during_warmup() {
    ApplicationId app1 = AutoscalingMaintainerTester.makeApplicationId("app1");
    ClusterSpec cluster1 = AutoscalingMaintainerTester.containerClusterSpec();
    NodeResources resources = new NodeResources(4, 4, 10, 1);
    ClusterResources min = new ClusterResources(2, 1, resources);
    ClusterResources max = new ClusterResources(20, 1, resources);
    var capacity = Capacity.from(min, max);
    var tester = new AutoscalingMaintainerTester(new MockDeployer.ApplicationContext(app1, cluster1, capacity));

    // Add a scaling event
    tester.deploy(app1, cluster1, capacity);
    tester.addMeasurements(1.0f, 0.3f, 0.3f, 0, 4, app1, cluster1.id());
    tester.maintainer().maintain();
    assertEquals("Scale up: " + tester.cluster(app1, cluster1).target().status(),
                 1,
                 tester.cluster(app1, cluster1).lastScalingEvent().get().generation());

    // measurements with outdated generation are ignored -> no autoscaling
    var duration = tester.addMeasurements(3.0f, 0.3f, 0.3f, 0, 2, app1, cluster1.id());
    tester.maintainer().maintain();
    assertEquals("Measurements with outdated generation are ignored -> no autoscaling",
                 1,
                 tester.cluster(app1, cluster1).lastScalingEvent().get().generation());
    // Rewind the clock so the next batch lands inside the warmup window.
    tester.clock().advance(duration.negated());

    duration = tester.addMeasurements(3.0f, 0.3f, 0.3f, 1, 2, app1, cluster1.id());
    tester.maintainer().maintain();
    assertEquals("Measurements right after generation change are ignored -> no autoscaling",
                 1,
                 tester.cluster(app1, cluster1).lastScalingEvent().get().generation());
    tester.clock().advance(duration.negated());

    // Add a restart event
    tester.clock().advance(ClusterModel.warmupDuration.plus(Duration.ofMinutes(1)));
    tester.nodeRepository().nodes().list().owner(app1).asList()
          .forEach(node -> recordRestart(node, tester.nodeRepository()));
    duration = tester.addMeasurements(3.0f, 0.3f, 0.3f, 1, 2, app1, cluster1.id());
    tester.maintainer().maintain();
    assertEquals("Measurements right after restart are ignored -> no autoscaling",
                 1,
                 tester.cluster(app1, cluster1).lastScalingEvent().get().generation());
    tester.clock().advance(duration.negated());

    // After the warmup duration has passed, measurements are valid again -> scale up.
    tester.clock().advance(ClusterModel.warmupDuration.plus(Duration.ofMinutes(1)));
    tester.addMeasurements(3.0f, 0.3f, 0.3f, 1, 2, app1, cluster1.id());
    tester.maintainer().maintain();
    assertEquals("We have valid measurements -> scale up",
                 2,
                 tester.cluster(app1, cluster1).lastScalingEvent().get().generation());
}
/**
 * Returns a page of image resource infos for the current tenant.
 *
 * Sub-type defaults to {@code IMAGE} when {@code imageSubType} is absent.
 * System administrators always see only their own (system) scope; tenant admins
 * additionally see system images when {@code includeSystemImages} is true.
 *
 * @throws ThingsboardException on authorization/lookup failures
 */
@PreAuthorize("hasAnyAuthority('SYS_ADMIN', 'TENANT_ADMIN')")
@GetMapping("/api/images")
public PageData<TbResourceInfo> getImages(@Parameter(description = PAGE_SIZE_DESCRIPTION, required = true)
                                          @RequestParam int pageSize,
                                          @Parameter(description = PAGE_NUMBER_DESCRIPTION, required = true)
                                          @RequestParam int page,
                                          @Parameter(description = RESOURCE_IMAGE_SUB_TYPE_DESCRIPTION, schema = @Schema(allowableValues = {"IMAGE", "SCADA_SYMBOL"}))
                                          @RequestParam(required = false) String imageSubType,
                                          @Parameter(description = RESOURCE_INCLUDE_SYSTEM_IMAGES_DESCRIPTION)
                                          @RequestParam(required = false) boolean includeSystemImages,
                                          @Parameter(description = RESOURCE_TEXT_SEARCH_DESCRIPTION)
                                          @RequestParam(required = false) String textSearch,
                                          @Parameter(description = SORT_PROPERTY_DESCRIPTION, schema = @Schema(allowableValues = {"createdTime", "title", "resourceType", "tenantId"}))
                                          @RequestParam(required = false) String sortProperty,
                                          @Parameter(description = SORT_ORDER_DESCRIPTION, schema = @Schema(allowableValues = {"ASC", "DESC"}))
                                          @RequestParam(required = false) String sortOrder) throws ThingsboardException {
    // PE: generic permission
    PageLink pageLink = createPageLink(pageSize, page, textSearch, sortProperty, sortOrder);
    TenantId tenantId = getTenantId();
    ResourceSubType subType = ResourceSubType.IMAGE;
    if (StringUtils.isNotEmpty(imageSubType)) {
        // NOTE(review): valueOf throws IllegalArgumentException for unknown values — presumably
        // translated upstream; confirm a friendly 400 is produced for bad imageSubType.
        subType = ResourceSubType.valueOf(imageSubType);
    }
    if (getCurrentUser().getAuthority() == Authority.SYS_ADMIN || !includeSystemImages) {
        // Sysadmin's "tenant" is the system scope, so this branch also serves system images to them.
        return checkNotNull(imageService.getImagesByTenantId(tenantId, subType, pageLink));
    } else {
        // Tenant admin asked to merge in system-scoped images.
        return checkNotNull(imageService.getAllImagesByTenantId(tenantId, subType, pageLink));
    }
}
/**
 * End-to-end check of the image listing API: uploads system-scoped and tenant-scoped
 * images (both plain images and SCADA symbols), then verifies that sub-type filtering,
 * the includeSystemImages flag, and text search each return exactly the expected set.
 */
@Test
public void testGetImages() throws Exception {
    // Upload two system-scoped resources as sysadmin.
    loginSysAdmin();
    String systemImageName = "my_system_png_image.png";
    TbResourceInfo systemImage = uploadImage(HttpMethod.POST, "/api/image", systemImageName, "image/png", PNG_IMAGE);
    String systemScadaSymbolName = "my_system_scada_symbol_image.svg";
    TbResourceInfo systemScadaSymbol = uploadImage(HttpMethod.POST, "/api/image", ResourceSubType.SCADA_SYMBOL.name(), systemScadaSymbolName, "image/svg+xml", SVG_IMAGE);

    // Upload two tenant-scoped resources as tenant admin.
    loginTenantAdmin();
    String tenantImageName = "my_jpeg_image.jpg";
    TbResourceInfo tenantImage = uploadImage(HttpMethod.POST, "/api/image", tenantImageName, "image/jpeg", JPEG_IMAGE);
    String tenantScadaSymbolName = "my_scada_symbol_image.svg";
    TbResourceInfo tenantScadaSymbol = uploadImage(HttpMethod.POST, "/api/image", ResourceSubType.SCADA_SYMBOL.name(), tenantScadaSymbolName, "image/svg+xml", SVG_IMAGE);

    // Without includeSystemImages, only tenant-owned resources are visible.
    List<TbResourceInfo> tenantImages = getImages(null, false, 10);
    assertThat(tenantImages).containsOnly(tenantImage);

    List<TbResourceInfo> tenantScadaSymbols = getImages(null, ResourceSubType.SCADA_SYMBOL.name(), false, 10);
    assertThat(tenantScadaSymbols).containsOnly(tenantScadaSymbol);

    // With includeSystemImages, system-scoped resources are merged in.
    List<TbResourceInfo> allImages = getImages(null, true, 10);
    assertThat(allImages).containsOnly(tenantImage, systemImage);

    List<TbResourceInfo> allScadaSymbols = getImages(null, ResourceSubType.SCADA_SYMBOL.name(), true, 10);
    assertThat(allScadaSymbols).containsOnly(tenantScadaSymbol, systemScadaSymbol);

    // Text search narrows results by name fragment.
    assertThat(getImages("png", true, 10))
            .containsOnly(systemImage);
    assertThat(getImages("jpg", true, 10))
            .containsOnly(tenantImage);
    assertThat(getImages("my_system_scada_symbol", ResourceSubType.SCADA_SYMBOL.name(), true, 10))
            .containsOnly(systemScadaSymbol);
    assertThat(getImages("my_scada_symbol", ResourceSubType.SCADA_SYMBOL.name(), true, 10))
            .containsOnly(tenantScadaSymbol);
}
/**
 * Sets the factory used to create connection {@code Subject} instances.
 *
 * @param connectionSubjectFactory the factory to use; must not be {@code null}
 * @throws IllegalArgumentException if {@code connectionSubjectFactory} is {@code null}
 *         (IllegalArgumentException, not NPE, is part of this setter's contract)
 */
public void setConnectionSubjectFactory(ConnectionSubjectFactory connectionSubjectFactory) {
    if (connectionSubjectFactory == null) {
        throw new IllegalArgumentException("ConnectionSubjectFactory argument cannot be null.");
    }
    this.connectionSubjectFactory = connectionSubjectFactory;
}
/** Passing null to the setter must be rejected with IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class)
public void setNullSubjectConnectionFactory() {
    filter.setConnectionSubjectFactory(null);
}
/**
 * Validates the source and target paths carried by the given DistCp context.
 *
 * @param distCpContext the context whose paths are validated
 * @throws InvalidInputException if the source/target combination is not acceptable
 * @throws IOException on filesystem access failures during validation
 */
protected abstract void validatePaths(DistCpContext distCpContext)
    throws IOException, InvalidInputException;
/**
 * Validates path checking for multiple sources copied to a single target:
 * multiple sources to a directory is fine; multiple sources to a file, and a
 * directory source to a file target, must both be rejected as invalid input.
 */
@Test(timeout = 10000)
public void testMultipleSrcToFile() {
    FileSystem fs = null;
    try {
        fs = FileSystem.get(getConf());
        List<Path> srcPaths = new ArrayList<Path>();
        srcPaths.add(new Path("/tmp/in/1"));
        srcPaths.add(new Path("/tmp/in/2"));
        final Path target = new Path("/tmp/out/1");
        TestDistCpUtils.createFile(fs, "/tmp/in/1");
        TestDistCpUtils.createFile(fs, "/tmp/in/2");
        fs.mkdirs(target);
        final DistCpOptions options = new DistCpOptions.Builder(srcPaths, target)
            .build();
        // Target is a directory: multiple sources are allowed.
        validatePaths(new DistCpContext(options));
        TestDistCpUtils.delete(fs, "/tmp");
        //No errors

        // Target is now a file: multiple sources must be rejected.
        fs.create(target).close();
        try {
            validatePaths(new DistCpContext(options));
            Assert.fail("Invalid inputs accepted");
        } catch (InvalidInputException ignore) {
            // expected: multiple sources cannot target a single file
        }
        TestDistCpUtils.delete(fs, "/tmp");

        // Single directory source to a file target must also be rejected.
        srcPaths.clear();
        srcPaths.add(new Path("/tmp/in/1"));
        fs.mkdirs(new Path("/tmp/in/1"));
        fs.create(target).close();
        try {
            validatePaths(new DistCpContext(options));
            Assert.fail("Invalid inputs accepted");
        } catch (InvalidInputException ignore) {
            // expected: directory source cannot target a file
        }
        TestDistCpUtils.delete(fs, "/tmp");
    } catch (IOException e) {
        LOG.error("Exception encountered ", e);
        Assert.fail("Test input validation failed");
    } finally {
        // Best-effort cleanup regardless of outcome.
        TestDistCpUtils.delete(fs, "/tmp");
    }
}
/**
 * Always rejects writes: this buffer is read-only, so any mutation attempt fails.
 *
 * @throws ReadOnlyBufferException always
 */
@Override
public ByteBuf setMedium(int index, int value) {
    throw new ReadOnlyBufferException();
}
/**
 * setMedium on a read-only buffer must throw ReadOnlyBufferException.
 * The buffer is released in a finally block to avoid leaking it on assertion failure.
 */
@Test
public void testSetMedium() {
    final ByteBuf buf = newBuffer(wrappedBuffer(new byte[8]));
    try {
        // Lambda replaces the verbose anonymous Executable; JUnit 5's assertThrows
        // accepts any Executable lambda.
        assertThrows(ReadOnlyBufferException.class, () -> buf.setMedium(0, 1));
    } finally {
        buf.release();
    }
}
/**
 * Parses an {@code AmountRequest} from its query-string form, e.g. {@code "order=...&limit=..."}.
 *
 * @param amountRequestAsString the serialized request; may be null or empty
 * @return the parsed request, or {@code null} when the input is null/empty
 */
public static AmountRequest fromString(String amountRequestAsString) {
    if (isNullOrEmpty(amountRequestAsString)) return null;
    // NOTE(review): if "limit=" is absent, lenientSubstringBetween presumably returns
    // null/empty and Integer.parseInt will throw NumberFormatException — confirm callers
    // always include a limit parameter, or guard here.
    return new AmountRequest(
            lenientSubstringBetween(amountRequestAsString, "order=", "&"),
            Integer.parseInt(lenientSubstringBetween(amountRequestAsString, "limit=", "&"))
    );
}
/** fromString must return null (not throw) for an empty input string. */
@Test
void testOffsetBasedPageRequestWithEmptyString() {
    OffsetBasedPageRequest offsetBasedPageRequest = OffsetBasedPageRequest.fromString("");
    assertThat(offsetBasedPageRequest).isNull();
}
/**
 * Deserializes a JSON string into the concrete {@link AbstractHealthChecker} subtype
 * (Jackson resolves the subtype, presumably via the "type" discriminator field —
 * confirm against the class's annotations).
 *
 * @param jsonString JSON representation of a health checker
 * @return the deserialized checker instance
 * @throws NacosDeserializationException wrapping any underlying IO/parse failure
 */
public static AbstractHealthChecker deserialize(String jsonString) {
    try {
        return MAPPER.readValue(jsonString, AbstractHealthChecker.class);
    } catch (IOException e) {
        throw new NacosDeserializationException(AbstractHealthChecker.class, e);
    }
}
/** A custom registered "TEST" type must deserialize to its extension class. */
@Test
void testDeserializeExtend() {
    String tcpString = "{\"type\":\"TEST\",\"testValue\":null}";
    AbstractHealthChecker actual = HealthCheckerFactory.deserialize(tcpString);
    assertEquals(TestChecker.class, actual.getClass());
}
/**
 * Derives the result type of SUM over a column of the given argument type.
 *
 * Non-basic types pass through unchanged. For basic types, the SQL type name is
 * mapped via {@code deriveSumType(SqlTypeName)}; precision is widened to the type
 * system's maximum, and nullability of the argument is preserved.
 */
@Override
public RelDataType deriveSumType(RelDataTypeFactory typeFactory, RelDataType argumentType) {
    if (argumentType instanceof BasicSqlType) {
        SqlTypeName type = deriveSumType(argumentType.getSqlTypeName());
        if (type == BIGINT) {
            // special-case for BIGINT - we use BIGINT(64) instead of the default BIGINT(63) because
            // BIGINT + BIGINT can overflow.
            return HazelcastIntegerType.create(Long.SIZE, argumentType.isNullable());
        }
        if (type.allowsPrec() && argumentType.getPrecision() != RelDataType.PRECISION_NOT_SPECIFIED) {
            // Widen to max precision so summation does not overflow the declared precision.
            int precision = typeFactory.getTypeSystem().getMaxPrecision(type);
            if (type.allowsScale()) {
                // Keep the argument's scale (e.g. DECIMAL(p, s) -> DECIMAL(maxP, s)).
                return typeFactory.createTypeWithNullability(
                        typeFactory.createSqlType(type, precision, argumentType.getScale()),
                        argumentType.isNullable()
                );
            } else {
                return typeFactory.createTypeWithNullability(
                        typeFactory.createSqlType(type, precision),
                        argumentType.isNullable()
                );
            }
        } else {
            // Type has no precision (or none was specified): create the plain type.
            return typeFactory.createTypeWithNullability(
                    typeFactory.createSqlType(type),
                    argumentType.isNullable()
            );
        }
    }
    // Non-basic (object/collection/etc.) types: SUM type equals the argument type.
    return argumentType;
}
/**
 * Exhaustive mapping check for SUM result types: small integers widen to BIGINT(64),
 * BIGINT/DECIMAL widen to DECIMAL, REAL/DOUBLE to DOUBLE, and all remaining types
 * pass through unchanged.
 */
@Test
public void deriveSumTypeTest() {
    // BIGINT(64) rather than the default BIGINT(63): BIGINT + BIGINT may overflow.
    final HazelcastIntegerType bigint_64 = HazelcastIntegerType.create(64, false);

    // Non-numeric basic types pass through unchanged.
    assertEquals(type(VARCHAR), HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(VARCHAR)));
    assertEquals(type(BOOLEAN), HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(BOOLEAN)));

    // All sub-64-bit integers widen to BIGINT(64).
    assertEquals(bigint_64, HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(TINYINT)));
    assertEquals(bigint_64, HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(SMALLINT)));
    assertEquals(bigint_64, HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(INTEGER)));

    // BIGINT and DECIMAL both sum into DECIMAL.
    assertEquals(type(DECIMAL), HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(BIGINT)));
    assertEquals(type(DECIMAL), HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(DECIMAL)));

    // Floating point sums into DOUBLE.
    assertEquals(type(DOUBLE), HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(REAL)));
    assertEquals(type(DOUBLE), HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(DOUBLE)));

    // Temporal and other types pass through unchanged.
    assertEquals(type(TIME), HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(TIME)));
    assertEquals(type(DATE), HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(DATE)));
    assertEquals(type(TIMESTAMP), HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(TIMESTAMP)));
    assertEquals(
            type(TIMESTAMP_WITH_LOCAL_TIME_ZONE),
            HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(TIMESTAMP_WITH_LOCAL_TIME_ZONE))
    );
    assertEquals(type(OTHER), HazelcastTypeSystem.INSTANCE.deriveSumType(TYPE_FACTORY, type(OTHER)));
}
/**
 * Parses the time unit suffix of a duration value, e.g. "10s" -> SECONDS.
 * The unit is the last character of the value, matched case-insensitively.
 *
 * @param key   the configuration key (used only in error messages)
 * @param value the raw value whose last character names the unit
 * @return the parsed {@link TimeUnit}
 * @throws IllegalArgumentException if the value is null/empty or does not end
 *         with one of d, h, m, s (any case)
 */
static TimeUnit parseTimeUnit(String key, @Nullable String value) {
    requireArgument((value != null) && !value.isEmpty(), "value of key %s omitted", key);
    @SuppressWarnings("NullAway")
    char unitChar = Character.toLowerCase(value.charAt(value.length() - 1));
    return switch (unitChar) {
        case 'd' -> TimeUnit.DAYS;
        case 'h' -> TimeUnit.HOURS;
        case 'm' -> TimeUnit.MINUTES;
        case 's' -> TimeUnit.SECONDS;
        default -> throw new IllegalArgumentException(String.format(US,
            "key %s invalid format; was %s, must end with one of [dDhHmMsS]", key, value));
    };
}
/** A value with no recognized unit suffix must be rejected. */
@Test
public void parseTimeUnit_exception() {
    assertThrows(IllegalArgumentException.class, () ->
        CaffeineSpec.parseTimeUnit("key", "value"));
}
/**
 * Builds the exception message, optionally appending print-friendly dumps of the
 * HL7 message and acknowledgement payloads.
 *
 * Payloads may contain PHI (protected health information), so they are appended
 * only when {@code logPhi} is enabled; otherwise the parent message is returned as-is.
 */
@Override
public String getMessage() {
    if (!logPhi) {
        // PHI logging disabled: never expose payload bytes in the message.
        return super.getMessage();
    }
    String answer;
    if (hasHl7MessageBytes() || hasHl7AcknowledgementBytes()) {
        String parentMessage = super.getMessage();
        // Pre-size roughly: parent message plus raw payload lengths.
        StringBuilder messageBuilder = new StringBuilder(
                parentMessage.length()
                             + (hasHl7MessageBytes() ? hl7MessageBytes.length : 0)
                             + (hasHl7AcknowledgementBytes() ? hl7AcknowledgementBytes.length : 0));
        messageBuilder.append(parentMessage);
        if (hasHl7MessageBytes()) {
            messageBuilder.append("\n\t{hl7Message [")
                    .append(hl7MessageBytes.length)
                    .append("] = ");
            // Renders control characters (CR, VT, FS) in a readable form.
            hl7Util.appendBytesAsPrintFriendlyString(messageBuilder, hl7MessageBytes, 0, hl7MessageBytes.length);
            messageBuilder.append('}');
        }
        if (hasHl7AcknowledgementBytes()) {
            messageBuilder.append("\n\t{hl7Acknowledgement [")
                    .append(hl7AcknowledgementBytes.length)
                    .append("] = ");
            hl7Util.appendBytesAsPrintFriendlyString(messageBuilder, hl7AcknowledgementBytes, 0, hl7AcknowledgementBytes.length);
            messageBuilder.append('}');
        }
        answer = messageBuilder.toString();
    } else {
        // No payloads captured: plain parent message.
        answer = super.getMessage();
    }
    return answer;
}
/** An empty HL7 message payload is treated as absent; only the acknowledgement is appended. */
@Test
public void testEmptyHl7Message() {
    instance = new MllpException(EXCEPTION_MESSAGE, EMPTY_BYTE_ARRAY, HL7_ACKNOWLEDGEMENT_BYTES, LOG_PHI_TRUE);
    assertEquals(expectedMessage(null, HL7_ACKNOWLEDGEMENT), instance.getMessage());
}
/**
 * Sets the delay, in milliseconds, between retries of distro data loading.
 *
 * @param loadDataRetryDelayMillis retry delay in milliseconds
 */
public void setLoadDataRetryDelayMillis(long loadDataRetryDelayMillis) {
    this.loadDataRetryDelayMillis = loadDataRetryDelayMillis;
}
/** Setter round-trip: the stored value is readable via the matching getter. */
@Test
void testSetLoadDataRetryDelayMillis() {
    distroConfig.setLoadDataRetryDelayMillis(loadDataRetryDelayMillis);
    assertEquals(loadDataRetryDelayMillis, distroConfig.getLoadDataRetryDelayMillis());
}
@Override public double getStdDev() { // two-pass algorithm for variance, avoids numeric overflow if (values.length <= 1) { return 0; } final double mean = getMean(); double variance = 0; for (int i = 0; i < values.length; i++) { final double diff = values[i] - mean; variance += normWeights[i] * diff * diff; } return Math.sqrt(variance); }
/** A snapshot with a single sample has no spread, so its std-dev must be exactly zero. */
@Test
public void calculatesAStdDevOfZeroForASingletonSnapshot() {
    final Snapshot singleItemSnapshot = new WeightedSnapshot(
            weightedArray(new long[]{1}, new double[]{1.0}));
    assertThat(singleItemSnapshot.getStdDev())
            .isZero();
}
/**
 * Removes the element most recently returned by {@code next()}, delegating to
 * the iterator of the underlying iterable currently being traversed.
 *
 * @throws IllegalStateException if {@code next()} has not been called yet
 */
@Override
public void remove() {
    if (currentIterator != null) {
        currentIterator.remove();
        return;
    }
    throw new IllegalStateException("next() has not yet been called");
}
/**
 * Calling remove() on a fresh iterator (before any next()) must fail with
 * IllegalStateException, even when the composite is empty.
 */
@Test
public void testRemoveWithEmpty() {
    Assertions.assertThrows(IllegalStateException.class, () -> {
        List<Integer> empty = new ArrayList<>();
        CompositeIterable<Integer> composite = new CompositeIterable<>(empty);
        composite.iterator().remove();
    });
}